diff --git a/packages/google-cloud-bigquery-connection/.gitignore b/packages/google-cloud-bigquery-connection/.gitignore
index b87e1ed580d9..b9daa52f118d 100644
--- a/packages/google-cloud-bigquery-connection/.gitignore
+++ b/packages/google-cloud-bigquery-connection/.gitignore
@@ -46,6 +46,7 @@ pip-log.txt
# Built documentation
docs/_build
bigquery/docs/generated
+docs.metadata
# Virtual environment
env/
@@ -57,4 +58,4 @@ system_tests/local_test_setup
# Make sure a generated file isn't accidentally committed.
pylintrc
-pylintrc.test
\ No newline at end of file
+pylintrc.test
diff --git a/packages/google-cloud-bigquery-connection/.kokoro/build.sh b/packages/google-cloud-bigquery-connection/.kokoro/build.sh
index c54af54c200e..de1d6a1aee73 100755
--- a/packages/google-cloud-bigquery-connection/.kokoro/build.sh
+++ b/packages/google-cloud-bigquery-connection/.kokoro/build.sh
@@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation
python3.6 -m pip install --upgrade --quiet nox
python3.6 -m nox --version
-python3.6 -m nox
+# If NOX_SESSION is set, it only runs the specified session,
+# otherwise run all the sessions.
+if [[ -n "${NOX_SESSION:-}" ]]; then
+ python3.6 -m nox -s "${NOX_SESSION:-}"
+else
+ python3.6 -m nox
+fi
diff --git a/packages/google-cloud-bigquery-connection/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-bigquery-connection/.kokoro/docker/docs/Dockerfile
new file mode 100644
index 000000000000..412b0b56a921
--- /dev/null
+++ b/packages/google-cloud-bigquery-connection/.kokoro/docker/docs/Dockerfile
@@ -0,0 +1,98 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ubuntu:20.04
+
+ENV DEBIAN_FRONTEND noninteractive
+
+# Ensure local Python is preferred over distribution Python.
+ENV PATH /usr/local/bin:$PATH
+
+# Install dependencies.
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ apt-transport-https \
+ build-essential \
+ ca-certificates \
+ curl \
+ dirmngr \
+ git \
+ gpg-agent \
+ graphviz \
+ libbz2-dev \
+ libdb5.3-dev \
+ libexpat1-dev \
+ libffi-dev \
+ liblzma-dev \
+ libreadline-dev \
+ libsnappy-dev \
+ libssl-dev \
+ libsqlite3-dev \
+ portaudio19-dev \
+ redis-server \
+ software-properties-common \
+ ssh \
+ sudo \
+ tcl \
+ tcl-dev \
+ tk \
+ tk-dev \
+ uuid-dev \
+ wget \
+ zlib1g-dev \
+ && add-apt-repository universe \
+ && apt-get update \
+ && apt-get -y install jq \
+ && apt-get clean autoclean \
+ && apt-get autoremove -y \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -f /var/cache/apt/archives/*.deb
+
+
+COPY fetch_gpg_keys.sh /tmp
+# Install the desired versions of Python.
+RUN set -ex \
+ && export GNUPGHOME="$(mktemp -d)" \
+ && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \
+ && /tmp/fetch_gpg_keys.sh \
+ && for PYTHON_VERSION in 3.7.8 3.8.5; do \
+ wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \
+ && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \
+ && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \
+ && rm -r python-${PYTHON_VERSION}.tar.xz.asc \
+ && mkdir -p /usr/src/python-${PYTHON_VERSION} \
+ && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \
+ && rm python-${PYTHON_VERSION}.tar.xz \
+ && cd /usr/src/python-${PYTHON_VERSION} \
+ && ./configure \
+ --enable-shared \
+ # This works only on Python 2.7 and throws a warning on every other
+ # version, but seems otherwise harmless.
+ --enable-unicode=ucs4 \
+ --with-system-ffi \
+ --without-ensurepip \
+ && make -j$(nproc) \
+ && make install \
+ && ldconfig \
+ ; done \
+ && rm -rf "${GNUPGHOME}" \
+ && rm -rf /usr/src/python* \
+ && rm -rf ~/.cache/
+
+RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
+ && python3.7 /tmp/get-pip.py \
+ && python3.8 /tmp/get-pip.py \
+ && rm /tmp/get-pip.py
+
+CMD ["python3.7"]
diff --git a/packages/google-cloud-bigquery-connection/.kokoro/docker/docs/fetch_gpg_keys.sh b/packages/google-cloud-bigquery-connection/.kokoro/docker/docs/fetch_gpg_keys.sh
new file mode 100755
index 000000000000..d653dd868e4b
--- /dev/null
+++ b/packages/google-cloud-bigquery-connection/.kokoro/docker/docs/fetch_gpg_keys.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A script to fetch gpg keys with retry.
+# Avoid jinja parsing the file.
+#
+
+function retry {
+ if [[ "${#}" -le 1 ]]; then
+ echo "Usage: ${0} retry_count commands.."
+ exit 1
+ fi
+ local retries=${1}
+ local command="${@:2}"
+ until [[ "${retries}" -le 0 ]]; do
+ $command && return 0
+ if [[ $? -ne 0 ]]; then
+ echo "command failed, retrying"
+ ((retries--))
+ fi
+ done
+ return 1
+}
+
+# 3.6.9, 3.7.5 (Ned Deily)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+ 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D
+
+# 3.8.0 (Łukasz Langa)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+ E3FF2839C048B25C084DEBE9B26995E310250568
+
+#
diff --git a/packages/google-cloud-bigquery-connection/.kokoro/docs/common.cfg b/packages/google-cloud-bigquery-connection/.kokoro/docs/common.cfg
index 5e56b4a835bd..6fd1f17d2c04 100644
--- a/packages/google-cloud-bigquery-connection/.kokoro/docs/common.cfg
+++ b/packages/google-cloud-bigquery-connection/.kokoro/docs/common.cfg
@@ -11,12 +11,12 @@ action {
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-bigquery-connection/.kokoro/trampoline.sh"
+build_file: "python-bigquery-connection/.kokoro/trampoline_v2.sh"
# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
}
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
@@ -28,6 +28,23 @@ env_vars: {
value: "docs-staging"
}
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ value: "docs-staging-v2-staging"
+}
+
+# It will upload the docker image after successful builds.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "true"
+}
+
+# It will always build the docker image.
+env_vars: {
+ key: "TRAMPOLINE_DOCKERFILE"
+ value: ".kokoro/docker/docs/Dockerfile"
+}
+
# Fetch the token needed for reporting release status to GitHub
before_action {
fetch_keystore {
diff --git a/packages/google-cloud-bigquery-connection/.kokoro/docs/docs-presubmit.cfg b/packages/google-cloud-bigquery-connection/.kokoro/docs/docs-presubmit.cfg
new file mode 100644
index 000000000000..1118107829b7
--- /dev/null
+++ b/packages/google-cloud-bigquery-connection/.kokoro/docs/docs-presubmit.cfg
@@ -0,0 +1,17 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+# We only upload the image in the main `docs` build.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "false"
+}
diff --git a/packages/google-cloud-bigquery-connection/.kokoro/publish-docs.sh b/packages/google-cloud-bigquery-connection/.kokoro/publish-docs.sh
index 9727c31854a0..8acb14e802b0 100755
--- a/packages/google-cloud-bigquery-connection/.kokoro/publish-docs.sh
+++ b/packages/google-cloud-bigquery-connection/.kokoro/publish-docs.sh
@@ -18,26 +18,16 @@ set -eo pipefail
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
-cd github/python-bigquery-connection
-
-# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+export PATH="${HOME}/.local/bin:${PATH}"
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --user --upgrade --quiet nox
+python3 -m nox --version
# build docs
nox -s docs
-python3 -m pip install gcp-docuploader
-
-# install a json parser
-sudo apt-get update
-sudo apt-get -y install software-properties-common
-sudo add-apt-repository universe
-sudo apt-get update
-sudo apt-get -y install jq
+python3 -m pip install --user gcp-docuploader
# create metadata
python3 -m docuploader create-metadata \
@@ -52,4 +42,23 @@ python3 -m docuploader create-metadata \
cat docs.metadata
# upload docs
-python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging
+python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
+
+
+# docfx yaml files
+nox -s docfx
+
+# create metadata.
+python3 -m docuploader create-metadata \
+ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+ --version=$(python3 setup.py --version) \
+ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+ --distribution-name=$(python3 setup.py --name) \
+ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/packages/google-cloud-bigquery-connection/.kokoro/trampoline_v2.sh b/packages/google-cloud-bigquery-connection/.kokoro/trampoline_v2.sh
new file mode 100755
index 000000000000..719bcd5ba84d
--- /dev/null
+++ b/packages/google-cloud-bigquery-connection/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download few files from gcs to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
+#
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
+#
+# Then run the script.
+# .kokoro/trampoline_v2.sh
+#
+# These environment variables are required:
+# TRAMPOLINE_IMAGE: The docker image to use.
+# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile.
+#
+# You can optionally change these environment variables:
+# TRAMPOLINE_IMAGE_UPLOAD:
+# (true|false): Whether to upload the Docker image after the
+# successful builds.
+# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
+# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
+# Defaults to /workspace.
+# Potentially there are some repo specific envvars in .trampolinerc in
+# the project root.
+
+
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+ readonly IO_COLOR_RED="$(tput setaf 1)"
+ readonly IO_COLOR_GREEN="$(tput setaf 2)"
+ readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+ readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+ readonly IO_COLOR_RED=""
+ readonly IO_COLOR_GREEN=""
+ readonly IO_COLOR_YELLOW=""
+ readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+ [ $(LC_ALL=C type -t $1)"" == "function" ]
+}
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+ local color="$1"
+ shift
+ local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+ echo "================================================================"
+ echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+ echo "================================================================"
+}
+
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+ log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+ log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+ log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+ log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+ rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+ # TRAMPOLINE_V2 variables.
+ # Tells scripts whether they are running as part of CI or not.
+ "RUNNING_IN_CI"
+ # Indicates which CI system we're in.
+ "TRAMPOLINE_CI"
+ # Indicates the version of the script.
+ "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI systems we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passing down to the
+# container for telling which CI system we're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+ # descriptive env var for indicating it's on CI.
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="kokoro"
+ if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+ if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+ log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+ exit 1
+ fi
+ # This service account will be activated later.
+ TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+ else
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ gcloud auth list
+ fi
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+ fi
+ pass_down_envvars+=(
+ # KOKORO dynamic variables.
+ "KOKORO_BUILD_NUMBER"
+ "KOKORO_BUILD_ID"
+ "KOKORO_JOB_NAME"
+ "KOKORO_GIT_COMMIT"
+ "KOKORO_GITHUB_COMMIT"
+ "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
+ "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
+ # For Build Cop Bot
+ "KOKORO_GITHUB_COMMIT_URL"
+ "KOKORO_GITHUB_PULL_REQUEST_URL"
+ )
+elif [[ "${TRAVIS:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="travis"
+ pass_down_envvars+=(
+ "TRAVIS_BRANCH"
+ "TRAVIS_BUILD_ID"
+ "TRAVIS_BUILD_NUMBER"
+ "TRAVIS_BUILD_WEB_URL"
+ "TRAVIS_COMMIT"
+ "TRAVIS_COMMIT_MESSAGE"
+ "TRAVIS_COMMIT_RANGE"
+ "TRAVIS_JOB_NAME"
+ "TRAVIS_JOB_NUMBER"
+ "TRAVIS_JOB_WEB_URL"
+ "TRAVIS_PULL_REQUEST"
+ "TRAVIS_PULL_REQUEST_BRANCH"
+ "TRAVIS_PULL_REQUEST_SHA"
+ "TRAVIS_PULL_REQUEST_SLUG"
+ "TRAVIS_REPO_SLUG"
+ "TRAVIS_SECURE_ENV_VARS"
+ "TRAVIS_TAG"
+ )
+elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="github-workflow"
+ pass_down_envvars+=(
+ "GITHUB_WORKFLOW"
+ "GITHUB_RUN_ID"
+ "GITHUB_RUN_NUMBER"
+ "GITHUB_ACTION"
+ "GITHUB_ACTIONS"
+ "GITHUB_ACTOR"
+ "GITHUB_REPOSITORY"
+ "GITHUB_EVENT_NAME"
+ "GITHUB_EVENT_PATH"
+ "GITHUB_SHA"
+ "GITHUB_REF"
+ "GITHUB_HEAD_REF"
+ "GITHUB_BASE_REF"
+ )
+elif [[ "${CIRCLECI:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="circleci"
+ pass_down_envvars+=(
+ "CIRCLE_BRANCH"
+ "CIRCLE_BUILD_NUM"
+ "CIRCLE_BUILD_URL"
+ "CIRCLE_COMPARE_URL"
+ "CIRCLE_JOB"
+ "CIRCLE_NODE_INDEX"
+ "CIRCLE_NODE_TOTAL"
+ "CIRCLE_PREVIOUS_BUILD_NUM"
+ "CIRCLE_PROJECT_REPONAME"
+ "CIRCLE_PROJECT_USERNAME"
+ "CIRCLE_REPOSITORY_URL"
+ "CIRCLE_SHA1"
+ "CIRCLE_STAGE"
+ "CIRCLE_USERNAME"
+ "CIRCLE_WORKFLOW_ID"
+ "CIRCLE_WORKFLOW_JOB_ID"
+ "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
+ "CIRCLE_WORKFLOW_WORKSPACE_ID"
+ )
+fi
+
+# Configure the service account for pulling the docker image.
+function repo_root() {
+ local dir="$1"
+ while [[ ! -d "${dir}/.git" ]]; do
+ dir="$(dirname "$dir")"
+ done
+ echo "${dir}"
+}
+
+# Detect the project root. In CI builds, we assume the script is in
+# the git tree and traverse from there, otherwise, traverse from `pwd`
+# to find `.git` directory.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ PROGRAM_PATH="$(realpath "$0")"
+ PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")"
+ PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")"
+else
+ PROJECT_ROOT="$(repo_root $(pwd))"
+fi
+
+log_yellow "Changing to the project root: ${PROJECT_ROOT}."
+cd "${PROJECT_ROOT}"
+
+# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need
+# to use this environment variable in `PROJECT_ROOT`.
+if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then
+
+ mkdir -p "${tmpdir}/gcloud"
+ gcloud_config_dir="${tmpdir}/gcloud"
+
+ log_yellow "Using isolated gcloud config: ${gcloud_config_dir}."
+ export CLOUDSDK_CONFIG="${gcloud_config_dir}"
+
+ log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication."
+ gcloud auth activate-service-account \
+ --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}"
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+fi
+
+required_envvars=(
+ # The basic trampoline configurations.
+ "TRAMPOLINE_IMAGE"
+ "TRAMPOLINE_BUILD_FILE"
+)
+
+if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then
+ source "${PROJECT_ROOT}/.trampolinerc"
+fi
+
+log_yellow "Checking environment variables."
+for e in "${required_envvars[@]}"
+do
+ if [[ -z "${!e:-}" ]]; then
+ log "Missing ${e} env var. Aborting."
+ exit 1
+ fi
+done
+
+# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1
+# script: e.g. "github/repo-name/.kokoro/run_tests.sh"
+TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}"
+log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}"
+
+# ignore error on docker operations and test execution
+set +e
+
+log_yellow "Preparing Docker image."
+# We only download the docker image in CI builds.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ # Download the docker image specified by `TRAMPOLINE_IMAGE`
+
+ # We may want to add --max-concurrent-downloads flag.
+
+ log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ if docker pull "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="true"
+ else
+ log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="false"
+ fi
+else
+ # For local run, check if we have the image.
+ if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then
+ has_image="true"
+ else
+ has_image="false"
+ fi
+fi
+
+
+# The default user for a Docker container has uid 0 (root). To avoid
+# creating root-owned files in the build directory we tell docker to
+# use the current user ID.
+user_uid="$(id -u)"
+user_gid="$(id -g)"
+user_name="$(id -un)"
+
+# To allow docker in docker, we add the user to the docker group in
+# the host os.
+docker_gid=$(cut -d: -f3 < <(getent group docker))
+
+update_cache="false"
+if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then
+ # Build the Docker image from the source.
+ context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}")
+ docker_build_flags=(
+ "-f" "${TRAMPOLINE_DOCKERFILE}"
+ "-t" "${TRAMPOLINE_IMAGE}"
+ "--build-arg" "UID=${user_uid}"
+ "--build-arg" "USERNAME=${user_name}"
+ )
+ if [[ "${has_image}" == "true" ]]; then
+ docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}")
+ fi
+
+ log_yellow "Start building the docker image."
+ if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then
+ echo "docker build" "${docker_build_flags[@]}" "${context_dir}"
+ fi
+
+ # ON CI systems, we want to suppress docker build logs, only
+ # output the logs when it fails.
+ if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ if docker build "${docker_build_flags[@]}" "${context_dir}" \
+ > "${tmpdir}/docker_build.log" 2>&1; then
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ cat "${tmpdir}/docker_build.log"
+ fi
+
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ log_yellow "Dumping the build logs:"
+ cat "${tmpdir}/docker_build.log"
+ exit 1
+ fi
+ else
+ if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ exit 1
+ fi
+ fi
+else
+ if [[ "${has_image}" != "true" ]]; then
+ log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+ exit 1
+ fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+ # Remove the container after it exits.
+ "--rm"
+
+ # Use the host network.
+ "--network=host"
+
+ # Run in privileged mode. We are not using docker for sandboxing or
+ # isolation, just for packaging our dev tools.
+ "--privileged"
+
+ # Run the docker script with the user id. Because the docker image gets to
+ # write in ${PWD} you typically want this to be your user id.
+ # To allow docker in docker, we need to use docker gid on the host.
+ "--user" "${user_uid}:${docker_gid}"
+
+ # Pass down the USER.
+ "--env" "USER=${user_name}"
+
+ # Mount the project directory inside the Docker container.
+ "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+ "--workdir" "${TRAMPOLINE_WORKSPACE}"
+ "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+ # Mount the temporary home directory.
+ "--volume" "${tmphome}:/h"
+ "--env" "HOME=/h"
+
+ # Allow docker in docker.
+ "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+ # Mount the /tmp so that docker in docker can mount the files
+ # there correctly.
+ "--volume" "/tmp:/tmp"
+ # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+ # TODO(tmatsuo): This part is not portable.
+ "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+ "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+ "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+ "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+ "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+ docker_flags+=("-it")
+fi
+
+# Passing down env vars
+for e in "${pass_down_envvars[@]}"
+do
+ if [[ -n "${!e:-}" ]]; then
+ docker_flags+=("--env" "${e}=${!e}")
+ fi
+done
+
+# If arguments are given, all arguments will become the commands run
+# in the container, otherwise run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+ log_yellow "Running the given commands '" "${@:1}" "' in the container."
+ readonly commands=("${@:1}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+ log_yellow "Running the tests in a Docker container."
+ docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+ log_green "Build finished with ${test_retval}"
+else
+ log_red "Build finished with ${test_retval}"
+fi
+
+# Only upload it when the test passes.
+if [[ "${update_cache}" == "true" ]] && \
+ [[ $test_retval == 0 ]] && \
+ [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+ log_yellow "Uploading the Docker image."
+ if docker push "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished uploading the Docker image."
+ else
+ log_red "Failed uploading the Docker image."
+ fi
+ # Call trampoline_after_upload_hook if it's defined.
+ if function_exists trampoline_after_upload_hook; then
+ trampoline_after_upload_hook
+ fi
+
+fi
+
+exit "${test_retval}"
diff --git a/packages/google-cloud-bigquery-connection/.trampolinerc b/packages/google-cloud-bigquery-connection/.trampolinerc
new file mode 100644
index 000000000000..995ee29111e1
--- /dev/null
+++ b/packages/google-cloud-bigquery-connection/.trampolinerc
@@ -0,0 +1,51 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Template for .trampolinerc
+
+# Add required env vars here.
+required_envvars+=(
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+)
+
+# Add env vars which are passed down into the container here.
+pass_down_envvars+=(
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+)
+
+# Prevent unintentional override on the default image.
+if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
+ [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
+ exit 1
+fi
+
+# Define the default value if it makes sense.
+if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
+ TRAMPOLINE_IMAGE_UPLOAD=""
+fi
+
+if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ TRAMPOLINE_IMAGE=""
+fi
+
+if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
+ TRAMPOLINE_DOCKERFILE=""
+fi
+
+if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
+ TRAMPOLINE_BUILD_FILE=""
+fi
diff --git a/packages/google-cloud-bigquery-connection/docs/_templates/layout.html b/packages/google-cloud-bigquery-connection/docs/_templates/layout.html
index 228529efe2d2..6316a537f72b 100644
--- a/packages/google-cloud-bigquery-connection/docs/_templates/layout.html
+++ b/packages/google-cloud-bigquery-connection/docs/_templates/layout.html
@@ -21,8 +21,8 @@
- On January 1, 2020 this library will no longer support Python 2 on the latest released version.
- Previously released library versions will continue to be available. For more information please
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+ Library versions released prior to that date will continue to be available. For more information please
visit
Python 2 support on Google Cloud.
{% block body %} {% endblock %}
diff --git a/packages/google-cloud-bigquery-connection/docs/conf.py b/packages/google-cloud-bigquery-connection/docs/conf.py
index cc4888fcdb0a..9bfebf1b3bb0 100644
--- a/packages/google-cloud-bigquery-connection/docs/conf.py
+++ b/packages/google-cloud-bigquery-connection/docs/conf.py
@@ -20,6 +20,10 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
+# For plugins that can not read conf.py.
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
+sys.path.insert(0, os.path.abspath("."))
+
__version__ = ""
# -- General configuration ------------------------------------------------
@@ -43,7 +47,7 @@
# autodoc/autosummary flags
autoclass_content = "both"
-autodoc_default_flags = ["members"]
+autodoc_default_options = {"members": True}
autosummary_generate = True
@@ -90,7 +94,12 @@
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
+exclude_patterns = [
+ "_build",
+ "samples/AUTHORING_GUIDE.md",
+ "samples/CONTRIBUTING.md",
+ "samples/snippets/README.rst",
+]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -337,7 +346,7 @@
intersphinx_mapping = {
"python": ("http://python.readthedocs.org/en/latest/", None),
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
- "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None),
+ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.io/grpc/python/", None),
}
diff --git a/packages/google-cloud-bigquery-connection/docs/connection_v1/services.rst b/packages/google-cloud-bigquery-connection/docs/connection_v1/services.rst
index f2f80d0b5bbc..4891cc88d818 100644
--- a/packages/google-cloud-bigquery-connection/docs/connection_v1/services.rst
+++ b/packages/google-cloud-bigquery-connection/docs/connection_v1/services.rst
@@ -1,6 +1,6 @@
-Client for Google Cloud Bigquery Connection API
-===============================================
+Services for Google Cloud Bigquery Connection v1 API
+====================================================
-.. automodule:: google.cloud.bigquery.connection_v1
+.. automodule:: google.cloud.bigquery.connection_v1.services.connection_service
:members:
:inherited-members:
diff --git a/packages/google-cloud-bigquery-connection/docs/connection_v1/types.rst b/packages/google-cloud-bigquery-connection/docs/connection_v1/types.rst
index 76b7d791dd5a..3a687b079bdc 100644
--- a/packages/google-cloud-bigquery-connection/docs/connection_v1/types.rst
+++ b/packages/google-cloud-bigquery-connection/docs/connection_v1/types.rst
@@ -1,5 +1,5 @@
-Types for Google Cloud Bigquery Connection API
-==============================================
+Types for Google Cloud Bigquery Connection v1 API
+=================================================
.. automodule:: google.cloud.bigquery.connection_v1.types
:members:
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection/__init__.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection/__init__.py
index 688096ffeaa5..6743e02ed208 100644
--- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection/__init__.py
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection/__init__.py
@@ -15,7 +15,9 @@
# limitations under the License.
#
-
+from google.cloud.bigquery.connection_v1.services.connection_service.async_client import (
+ ConnectionServiceAsyncClient,
+)
from google.cloud.bigquery.connection_v1.services.connection_service.client import (
ConnectionServiceClient,
)
@@ -33,6 +35,7 @@
"CloudSqlCredential",
"CloudSqlProperties",
"Connection",
+ "ConnectionServiceAsyncClient",
"ConnectionServiceClient",
"CreateConnectionRequest",
"DeleteConnectionRequest",
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/__init__.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/__init__.py
index d32621b158a5..82e9291e6f1f 100644
--- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/__init__.py
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/__init__.py
@@ -15,7 +15,6 @@
# limitations under the License.
#
-
from .services.connection_service import ConnectionServiceClient
from .types.connection import CloudSqlCredential
from .types.connection import CloudSqlProperties
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/__init__.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/__init__.py
index 922fbb06091d..c7becdb00417 100644
--- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/__init__.py
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/__init__.py
@@ -16,5 +16,9 @@
#
from .client import ConnectionServiceClient
+from .async_client import ConnectionServiceAsyncClient
-__all__ = ("ConnectionServiceClient",)
+__all__ = (
+ "ConnectionServiceClient",
+ "ConnectionServiceAsyncClient",
+)
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/async_client.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/async_client.py
new file mode 100644
index 000000000000..bb083c813fdc
--- /dev/null
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/async_client.py
@@ -0,0 +1,919 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+import functools
+import re
+from typing import Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.bigquery.connection_v1.services.connection_service import pagers
+from google.cloud.bigquery.connection_v1.types import connection
+from google.cloud.bigquery.connection_v1.types import connection as gcbc_connection
+from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
+from google.iam.v1 import policy_pb2 as policy # type: ignore
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+
+from .transports.base import ConnectionServiceTransport
+from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport
+from .client import ConnectionServiceClient
+
+
+class ConnectionServiceAsyncClient:
+ """Manages external data source connections and credentials."""
+
+ _client: ConnectionServiceClient
+
+ DEFAULT_ENDPOINT = ConnectionServiceClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = ConnectionServiceClient.DEFAULT_MTLS_ENDPOINT
+
+ connection_path = staticmethod(ConnectionServiceClient.connection_path)
+
+ from_service_account_file = ConnectionServiceClient.from_service_account_file
+ from_service_account_json = from_service_account_file
+
+ get_transport_class = functools.partial(
+ type(ConnectionServiceClient).get_transport_class, type(ConnectionServiceClient)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: credentials.Credentials = None,
+ transport: Union[str, ConnectionServiceTransport] = "grpc_asyncio",
+ client_options: ClientOptions = None,
+ ) -> None:
+ """Instantiate the connection service client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.ConnectionServiceTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint, this is the default value for
+ the environment variable) and "auto" (auto switch to the default
+ mTLS endpoint if client SSL credentials is present). However,
+ the ``api_endpoint`` property takes precedence if provided.
+ (2) The ``client_cert_source`` property is used to provide client
+ SSL credentials for mutual TLS transport. If not provided, the
+ default SSL credentials will be used if present.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+
+ self._client = ConnectionServiceClient(
+ credentials=credentials, transport=transport, client_options=client_options,
+ )
+
+ async def create_connection(
+ self,
+ request: gcbc_connection.CreateConnectionRequest = None,
+ *,
+ parent: str = None,
+ connection: gcbc_connection.Connection = None,
+ connection_id: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> gcbc_connection.Connection:
+ r"""Creates a new connection.
+
+ Args:
+ request (:class:`~.gcbc_connection.CreateConnectionRequest`):
+ The request object. The request for
+ [ConnectionService.CreateConnection][google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection].
+ parent (:class:`str`):
+ Required. Parent resource name. Must be in the format
+ ``projects/{project_id}/locations/{location_id}``
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ connection (:class:`~.gcbc_connection.Connection`):
+ Required. Connection to create.
+ This corresponds to the ``connection`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ connection_id (:class:`str`):
+ Optional. Connection id that should
+ be assigned to the created connection.
+ This corresponds to the ``connection_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.gcbc_connection.Connection:
+ Configuration parameters to establish
+ connection with an external data source,
+ except the credential attributes.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([parent, connection, connection_id]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = gcbc_connection.CreateConnectionRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if connection is not None:
+ request.connection = connection
+ if connection_id is not None:
+ request.connection_id = connection_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.create_connection,
+ default_timeout=60.0,
+ client_info=_client_info,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def get_connection(
+ self,
+ request: connection.GetConnectionRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> connection.Connection:
+ r"""Returns specified connection.
+
+ Args:
+ request (:class:`~.connection.GetConnectionRequest`):
+ The request object. The request for
+ [ConnectionService.GetConnection][google.cloud.bigquery.connection.v1.ConnectionService.GetConnection].
+ name (:class:`str`):
+ Required. Name of the requested connection, for example:
+ ``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.connection.Connection:
+ Configuration parameters to establish
+ connection with an external data source,
+ except the credential attributes.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([name]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = connection.GetConnectionRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_connection,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=_client_info,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def list_connections(
+ self,
+ request: connection.ListConnectionsRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListConnectionsAsyncPager:
+ r"""Returns a list of connections in the given project.
+
+ Args:
+ request (:class:`~.connection.ListConnectionsRequest`):
+ The request object. The request for
+ [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections].
+ parent (:class:`str`):
+ Required. Parent resource name. Must be in the form:
+ ``projects/{project_id}/locations/{location_id}``
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.ListConnectionsAsyncPager:
+ The response for
+ [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([parent]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = connection.ListConnectionsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_connections,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=_client_info,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListConnectionsAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def update_connection(
+ self,
+ request: gcbc_connection.UpdateConnectionRequest = None,
+ *,
+ name: str = None,
+ connection: gcbc_connection.Connection = None,
+ update_mask: field_mask.FieldMask = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> gcbc_connection.Connection:
+ r"""Updates the specified connection. For security
+ reasons, also resets credential if connection properties
+ are in the update field mask.
+
+ Args:
+ request (:class:`~.gcbc_connection.UpdateConnectionRequest`):
+ The request object. The request for
+ [ConnectionService.UpdateConnection][google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection].
+ name (:class:`str`):
+ Required. Name of the connection to update, for example:
+ ``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ connection (:class:`~.gcbc_connection.Connection`):
+ Required. Connection containing the
+ updated fields.
+ This corresponds to the ``connection`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (:class:`~.field_mask.FieldMask`):
+ Required. Update mask for the
+ connection fields to be updated.
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.gcbc_connection.Connection:
+ Configuration parameters to establish
+ connection with an external data source,
+ except the credential attributes.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([name, connection, update_mask]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = gcbc_connection.UpdateConnectionRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if connection is not None:
+ request.connection = connection
+ if update_mask is not None:
+ request.update_mask = update_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.update_connection,
+ default_timeout=60.0,
+ client_info=_client_info,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def delete_connection(
+ self,
+ request: connection.DeleteConnectionRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes connection and associated credential.
+
+ Args:
+ request (:class:`~.connection.DeleteConnectionRequest`):
+ The request object. The request for
+ [ConnectionService.DeleteConnectionRequest][].
+ name (:class:`str`):
+ Required. Name of the deleted connection, for example:
+ ``projects/{project_id}/locations/{location_id}/connections/{connection_id}``
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([name]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = connection.DeleteConnectionRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_connection,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=_client_info,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
+
+ async def get_iam_policy(
+ self,
+ request: iam_policy.GetIamPolicyRequest = None,
+ *,
+ resource: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> policy.Policy:
+ r"""Gets the access control policy for a resource.
+ Returns an empty policy if the resource exists and does
+ not have a policy set.
+
+ Args:
+ request (:class:`~.iam_policy.GetIamPolicyRequest`):
+ The request object. Request message for `GetIamPolicy`
+ method.
+ resource (:class:`str`):
+ REQUIRED: The resource for which the
+ policy is being requested. See the
+ operation documentation for the
+ appropriate value for this field.
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.policy.Policy:
+ Defines an Identity and Access Management (IAM) policy.
+ It is used to specify access control policies for Cloud
+ Platform resources.
+
+ A ``Policy`` is a collection of ``bindings``. A
+ ``binding`` binds one or more ``members`` to a single
+ ``role``. Members can be user accounts, service
+ accounts, Google groups, and domains (such as G Suite).
+ A ``role`` is a named list of permissions (defined by
+ IAM or configured by users). A ``binding`` can
+ optionally specify a ``condition``, which is a logic
+ expression that further constrains the role binding
+ based on attributes about the request and/or target
+ resource.
+
+ **JSON Example**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+
+ **YAML Example**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([resource]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy.GetIamPolicyRequest(**request)
+
+ elif not request:
+ request = iam_policy.GetIamPolicyRequest()
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if resource is not None:
+ request.resource = resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_iam_policy,
+ default_timeout=60.0,
+ client_info=_client_info,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def set_iam_policy(
+ self,
+ request: iam_policy.SetIamPolicyRequest = None,
+ *,
+ resource: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> policy.Policy:
+ r"""Sets the access control policy on the specified resource.
+ Replaces any existing policy.
+
+ Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and
+ ``PERMISSION_DENIED`` errors.
+
+ Args:
+ request (:class:`~.iam_policy.SetIamPolicyRequest`):
+ The request object. Request message for `SetIamPolicy`
+ method.
+ resource (:class:`str`):
+ REQUIRED: The resource for which the
+ policy is being specified. See the
+ operation documentation for the
+ appropriate value for this field.
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.policy.Policy:
+ Defines an Identity and Access Management (IAM) policy.
+ It is used to specify access control policies for Cloud
+ Platform resources.
+
+ A ``Policy`` is a collection of ``bindings``. A
+ ``binding`` binds one or more ``members`` to a single
+ ``role``. Members can be user accounts, service
+ accounts, Google groups, and domains (such as G Suite).
+ A ``role`` is a named list of permissions (defined by
+ IAM or configured by users). A ``binding`` can
+ optionally specify a ``condition``, which is a logic
+ expression that further constrains the role binding
+ based on attributes about the request and/or target
+ resource.
+
+ **JSON Example**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+
+ **YAML Example**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([resource]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy.SetIamPolicyRequest(**request)
+
+ elif not request:
+ request = iam_policy.SetIamPolicyRequest()
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if resource is not None:
+ request.resource = resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.set_iam_policy,
+ default_timeout=60.0,
+ client_info=_client_info,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def test_iam_permissions(
+ self,
+ request: iam_policy.TestIamPermissionsRequest = None,
+ *,
+ resource: str = None,
+ permissions: Sequence[str] = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> iam_policy.TestIamPermissionsResponse:
+ r"""Returns permissions that a caller has on the specified resource.
+ If the resource does not exist, this will return an empty set of
+ permissions, not a ``NOT_FOUND`` error.
+
+ Note: This operation is designed to be used for building
+ permission-aware UIs and command-line tools, not for
+ authorization checking. This operation may "fail open" without
+ warning.
+
+ Args:
+ request (:class:`~.iam_policy.TestIamPermissionsRequest`):
+ The request object. Request message for
+ `TestIamPermissions` method.
+ resource (:class:`str`):
+ REQUIRED: The resource for which the
+ policy detail is being requested. See
+ the operation documentation for the
+ appropriate value for this field.
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ permissions (:class:`Sequence[str]`):
+ The set of permissions to check for the ``resource``.
+ Permissions with wildcards (such as '*' or 'storage.*')
+ are not allowed. For more information see `IAM
+ Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+ This corresponds to the ``permissions`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.iam_policy.TestIamPermissionsResponse:
+ Response message for ``TestIamPermissions`` method.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([resource, permissions]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy.TestIamPermissionsRequest(**request)
+
+ elif not request:
+ request = iam_policy.TestIamPermissionsRequest()
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if resource is not None:
+ request.resource = resource
+
+ if permissions:
+ request.permissions.extend(permissions)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.test_iam_permissions,
+ default_timeout=60.0,
+ client_info=_client_info,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+
+try:
+ _client_info = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-bigquery-connection",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ _client_info = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("ConnectionServiceAsyncClient",)
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/client.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/client.py
index d3b7ef0578b3..28f6edeafad7 100644
--- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/client.py
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/client.py
@@ -16,6 +16,7 @@
#
from collections import OrderedDict
+import os
import re
from typing import Callable, Dict, Sequence, Tuple, Type, Union
import pkg_resources
@@ -25,6 +26,8 @@
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.bigquery.connection_v1.services.connection_service import pagers
@@ -36,6 +39,7 @@
from .transports.base import ConnectionServiceTransport
from .transports.grpc import ConnectionServiceGrpcTransport
+from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport
class ConnectionServiceClientMeta(type):
@@ -50,8 +54,11 @@ class ConnectionServiceClientMeta(type):
OrderedDict()
) # type: Dict[str, Type[ConnectionServiceTransport]]
_transport_registry["grpc"] = ConnectionServiceGrpcTransport
+ _transport_registry["grpc_asyncio"] = ConnectionServiceGrpcAsyncIOTransport
- def get_transport_class(cls, label: str = None) -> Type[ConnectionServiceTransport]:
+ def get_transport_class(
+ cls, label: str = None,
+ ) -> Type[ConnectionServiceTransport]:
"""Return an appropriate transport class.
Args:
@@ -128,10 +135,10 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
@staticmethod
- def connection_path(project: str, location: str, connection: str) -> str:
+ def connection_path(project: str, location: str, connection: str,) -> str:
"""Return a fully-qualified connection string."""
return "projects/{project}/locations/{location}/connections/{connection}".format(
- project=project, location=location, connection=connection
+ project=project, location=location, connection=connection,
)
@staticmethod
@@ -161,66 +168,76 @@ def __init__(
transport (Union[str, ~.ConnectionServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client.
- (2) If ``transport`` argument is None, ``client_options`` can be
- used to create a mutual TLS transport. If ``client_cert_source``
- is provided, mutual TLS transport will be created with the given
- ``api_endpoint`` or the default mTLS endpoint, and the client
- SSL credentials obtained from ``client_cert_source``.
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint, this is the default value for
+ the environment variable) and "auto" (auto switch to the default
+ mTLS endpoint if client SSL credentials are present). However,
+ the ``api_endpoint`` property takes precedence if provided.
+ (2) The ``client_cert_source`` property is used to provide client
+ SSL credentials for mutual TLS transport. If not provided, the
+ default SSL credentials will be used if present.
Raises:
- google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = ClientOptions.from_dict(client_options)
+ if client_options is None:
+ client_options = ClientOptions.ClientOptions()
+
+ if client_options.api_endpoint is None:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
+ if use_mtls_env == "never":
+ client_options.api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ has_client_cert_source = (
+ client_options.client_cert_source is not None
+ or mtls.has_default_client_cert_source()
+ )
+ client_options.api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT
+ if has_client_cert_source
+ else self.DEFAULT_ENDPOINT
+ )
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always"
+ )
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, ConnectionServiceTransport):
# transport is a ConnectionServiceTransport instance.
- if credentials:
+ if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its scopes directly."
+ )
self._transport = transport
- elif client_options is None or (
- client_options.api_endpoint is None
- and client_options.client_cert_source is None
- ):
- # Don't trigger mTLS if we get an empty ClientOptions.
+ else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
- credentials=credentials, host=self.DEFAULT_ENDPOINT
- )
- else:
- # We have a non-empty ClientOptions. If client_cert_source is
- # provided, trigger mTLS with user provided endpoint or the default
- # mTLS endpoint.
- if client_options.client_cert_source:
- api_mtls_endpoint = (
- client_options.api_endpoint
- if client_options.api_endpoint
- else self.DEFAULT_MTLS_ENDPOINT
- )
- else:
- api_mtls_endpoint = None
-
- api_endpoint = (
- client_options.api_endpoint
- if client_options.api_endpoint
- else self.DEFAULT_ENDPOINT
- )
-
- self._transport = ConnectionServiceGrpcTransport(
credentials=credentials,
- host=api_endpoint,
- api_mtls_endpoint=api_mtls_endpoint,
+ credentials_file=client_options.credentials_file,
+ host=client_options.api_endpoint,
+ scopes=client_options.scopes,
+ api_mtls_endpoint=client_options.api_endpoint,
client_cert_source=client_options.client_cert_source,
+ quota_project_id=client_options.quota_project_id,
)
def create_connection(
@@ -274,34 +291,42 @@ def create_connection(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, connection, connection_id]):
+ has_flattened_params = any([parent, connection, connection_id])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = gcbc_connection.CreateConnectionRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a gcbc_connection.CreateConnectionRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, gcbc_connection.CreateConnectionRequest):
+ request = gcbc_connection.CreateConnectionRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
- if parent is not None:
- request.parent = parent
- if connection is not None:
- request.connection = connection
- if connection_id is not None:
- request.connection_id = connection_id
+ if parent is not None:
+ request.parent = parent
+ if connection is not None:
+ request.connection = connection
+ if connection_id is not None:
+ request.connection_id = connection_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.create_connection,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.create_connection]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
@@ -344,27 +369,29 @@ def get_connection(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = connection.GetConnectionRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a connection.GetConnectionRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, connection.GetConnectionRequest):
+ request = connection.GetConnectionRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
- if name is not None:
- request.name = name
+ if name is not None:
+ request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.get_connection,
- default_timeout=None,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.get_connection]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -373,7 +400,7 @@ def get_connection(
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
@@ -382,6 +409,7 @@ def list_connections(
self,
request: connection.ListConnectionsRequest = None,
*,
+ parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -392,6 +420,12 @@ def list_connections(
request (:class:`~.connection.ListConnectionsRequest`):
The request object. The request for
[ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections].
+ parent (:class:`str`):
+ Required. Parent resource name. Must be in the form:
+ ``projects/{project_id}/locations/{location_id}``
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -409,16 +443,31 @@ def list_connections(
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a connection.ListConnectionsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, connection.ListConnectionsRequest):
+ request = connection.ListConnectionsRequest(request)
- request = connection.ListConnectionsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.list_connections,
- default_timeout=None,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.list_connections]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -427,12 +476,12 @@ def list_connections(
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListConnectionsPager(
- method=rpc, request=request, response=response
+ method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
@@ -492,34 +541,42 @@ def update_connection(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name, connection, update_mask]):
+ has_flattened_params = any([name, connection, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = gcbc_connection.UpdateConnectionRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a gcbc_connection.UpdateConnectionRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, gcbc_connection.UpdateConnectionRequest):
+ request = gcbc_connection.UpdateConnectionRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
- if name is not None:
- request.name = name
- if connection is not None:
- request.connection = connection
- if update_mask is not None:
- request.update_mask = update_mask
+ if name is not None:
+ request.name = name
+ if connection is not None:
+ request.connection = connection
+ if update_mask is not None:
+ request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.update_connection,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.update_connection]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
@@ -555,30 +612,40 @@ def delete_connection(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = connection.DeleteConnectionRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a connection.DeleteConnectionRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, connection.DeleteConnectionRequest):
+ request = connection.DeleteConnectionRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
- if name is not None:
- request.name = name
+ if name is not None:
+ request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.delete_connection,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.delete_connection]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
- rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
def get_iam_policy(
self,
@@ -684,7 +751,8 @@ def get_iam_policy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource]):
+ has_flattened_params = any([resource])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -698,22 +766,24 @@ def get_iam_policy(
elif not request:
request = iam_policy.GetIamPolicyRequest()
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
- if resource is not None:
- request.resource = resource
+ if resource is not None:
+ request.resource = resource
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.get_iam_policy,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
@@ -730,8 +800,8 @@ def set_iam_policy(
r"""Sets the access control policy on the specified resource.
Replaces any existing policy.
- Can return Public Errors: NOT_FOUND, INVALID_ARGUMENT and
- PERMISSION_DENIED
+ Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and
+ ``PERMISSION_DENIED`` errors.
Args:
request (:class:`~.iam_policy.SetIamPolicyRequest`):
@@ -824,7 +894,8 @@ def set_iam_policy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource]):
+ has_flattened_params = any([resource])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -838,22 +909,24 @@ def set_iam_policy(
elif not request:
request = iam_policy.SetIamPolicyRequest()
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
- if resource is not None:
- request.resource = resource
+ if resource is not None:
+ request.resource = resource
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.set_iam_policy,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
@@ -870,7 +943,7 @@ def test_iam_permissions(
) -> iam_policy.TestIamPermissionsResponse:
r"""Returns permissions that a caller has on the specified resource.
If the resource does not exist, this will return an empty set of
- permissions, not a NOT_FOUND error.
+ permissions, not a ``NOT_FOUND`` error.
Note: This operation is designed to be used for building
permission-aware UIs and command-line tools, not for
@@ -911,7 +984,8 @@ def test_iam_permissions(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource, permissions]):
+ has_flattened_params = any([resource, permissions])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -925,25 +999,27 @@ def test_iam_permissions(
elif not request:
request = iam_policy.TestIamPermissionsRequest()
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
- if resource is not None:
- request.resource = resource
+ if resource is not None:
+ request.resource = resource
- if permissions:
- request.permissions.extend(permissions)
+ if permissions:
+ request.permissions.extend(permissions)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.test_iam_permissions,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
@@ -952,8 +1028,8 @@ def test_iam_permissions(
try:
_client_info = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
- "google-cloud-bigquery-connection"
- ).version
+ "google-cloud-bigquery-connection",
+ ).version,
)
except pkg_resources.DistributionNotFound:
_client_info = gapic_v1.client_info.ClientInfo()
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/pagers.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/pagers.py
index 7a23de511a48..e6a2948372eb 100644
--- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/pagers.py
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/pagers.py
@@ -15,7 +15,7 @@
# limitations under the License.
#
-from typing import Any, Callable, Iterable
+from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
from google.cloud.bigquery.connection_v1.types import connection
@@ -40,11 +40,11 @@ class ListConnectionsPager:
def __init__(
self,
- method: Callable[
- [connection.ListConnectionsRequest], connection.ListConnectionsResponse
- ],
+ method: Callable[..., connection.ListConnectionsResponse],
request: connection.ListConnectionsRequest,
response: connection.ListConnectionsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiate the pager.
@@ -55,10 +55,13 @@ def __init__(
The initial request object.
response (:class:`~.connection.ListConnectionsResponse`):
The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
"""
self._method = method
self._request = connection.ListConnectionsRequest(request)
self._response = response
+ self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@@ -68,7 +71,7 @@ def pages(self) -> Iterable[connection.ListConnectionsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request)
+ self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[connection.Connection]:
@@ -77,3 +80,69 @@ def __iter__(self) -> Iterable[connection.Connection]:
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListConnectionsAsyncPager:
+ """A pager for iterating through ``list_connections`` requests.
+
+ This class thinly wraps an initial
+ :class:`~.connection.ListConnectionsResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``connections`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``ListConnections`` requests and continue to iterate
+ through the ``connections`` field on the
+ corresponding responses.
+
+ All the usual :class:`~.connection.ListConnectionsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., Awaitable[connection.ListConnectionsResponse]],
+ request: connection.ListConnectionsRequest,
+ response: connection.ListConnectionsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (:class:`~.connection.ListConnectionsRequest`):
+ The initial request object.
+ response (:class:`~.connection.ListConnectionsResponse`):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = connection.ListConnectionsRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(self) -> AsyncIterable[connection.ListConnectionsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterable[connection.Connection]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.connections:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/__init__.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/__init__.py
index 809a51e65914..e1321d3b9164 100644
--- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/__init__.py
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/__init__.py
@@ -20,11 +20,17 @@
from .base import ConnectionServiceTransport
from .grpc import ConnectionServiceGrpcTransport
+from .grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[ConnectionServiceTransport]]
_transport_registry["grpc"] = ConnectionServiceGrpcTransport
+_transport_registry["grpc_asyncio"] = ConnectionServiceGrpcAsyncIOTransport
-__all__ = ("ConnectionServiceTransport", "ConnectionServiceGrpcTransport")
+__all__ = (
+ "ConnectionServiceTransport",
+ "ConnectionServiceGrpcTransport",
+ "ConnectionServiceGrpcAsyncIOTransport",
+)
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/base.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/base.py
index b04d4b920c8e..c0e71595aa97 100644
--- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/base.py
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/base.py
@@ -17,8 +17,12 @@
import abc
import typing
+import pkg_resources
from google import auth
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.cloud.bigquery.connection_v1.types import connection
@@ -28,7 +32,17 @@
from google.protobuf import empty_pb2 as empty # type: ignore
-class ConnectionServiceTransport(metaclass=abc.ABCMeta):
+try:
+ _client_info = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-bigquery-connection",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ _client_info = gapic_v1.client_info.ClientInfo()
+
+
+class ConnectionServiceTransport(abc.ABC):
"""Abstract transport class for ConnectionService."""
AUTH_SCOPES = (
@@ -41,6 +55,10 @@ def __init__(
*,
host: str = "bigqueryconnection.googleapis.com",
credentials: credentials.Credentials = None,
+ credentials_file: typing.Optional[str] = None,
+ scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+ quota_project_id: typing.Optional[str] = None,
+ **kwargs,
) -> None:
"""Instantiate the transport.
@@ -51,6 +69,12 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
@@ -59,67 +83,169 @@ def __init__(
# If no credentials are provided, then determine the appropriate
# defaults.
- if credentials is None:
- credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = auth.load_credentials_from_file(
+ credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = auth.default(
+ scopes=scopes, quota_project_id=quota_project_id
+ )
# Save the credentials.
self._credentials = credentials
+ # Lifted into its own function so it can be stubbed out during tests.
+ self._prep_wrapped_messages()
+
+ def _prep_wrapped_messages(self):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.create_connection: gapic_v1.method.wrap_method(
+ self.create_connection, default_timeout=60.0, client_info=_client_info,
+ ),
+ self.get_connection: gapic_v1.method.wrap_method(
+ self.get_connection,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=_client_info,
+ ),
+ self.list_connections: gapic_v1.method.wrap_method(
+ self.list_connections,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=_client_info,
+ ),
+ self.update_connection: gapic_v1.method.wrap_method(
+ self.update_connection, default_timeout=60.0, client_info=_client_info,
+ ),
+ self.delete_connection: gapic_v1.method.wrap_method(
+ self.delete_connection,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+ ),
+ ),
+ default_timeout=60.0,
+ client_info=_client_info,
+ ),
+ self.get_iam_policy: gapic_v1.method.wrap_method(
+ self.get_iam_policy, default_timeout=60.0, client_info=_client_info,
+ ),
+ self.set_iam_policy: gapic_v1.method.wrap_method(
+ self.set_iam_policy, default_timeout=60.0, client_info=_client_info,
+ ),
+ self.test_iam_permissions: gapic_v1.method.wrap_method(
+ self.test_iam_permissions,
+ default_timeout=60.0,
+ client_info=_client_info,
+ ),
+ }
+
@property
def create_connection(
- self
+ self,
) -> typing.Callable[
- [gcbc_connection.CreateConnectionRequest], gcbc_connection.Connection
+ [gcbc_connection.CreateConnectionRequest],
+ typing.Union[
+ gcbc_connection.Connection, typing.Awaitable[gcbc_connection.Connection]
+ ],
]:
- raise NotImplementedError
+ raise NotImplementedError()
@property
def get_connection(
- self
- ) -> typing.Callable[[connection.GetConnectionRequest], connection.Connection]:
- raise NotImplementedError
+ self,
+ ) -> typing.Callable[
+ [connection.GetConnectionRequest],
+ typing.Union[connection.Connection, typing.Awaitable[connection.Connection]],
+ ]:
+ raise NotImplementedError()
@property
def list_connections(
- self
+ self,
) -> typing.Callable[
- [connection.ListConnectionsRequest], connection.ListConnectionsResponse
+ [connection.ListConnectionsRequest],
+ typing.Union[
+ connection.ListConnectionsResponse,
+ typing.Awaitable[connection.ListConnectionsResponse],
+ ],
]:
- raise NotImplementedError
+ raise NotImplementedError()
@property
def update_connection(
- self
+ self,
) -> typing.Callable[
- [gcbc_connection.UpdateConnectionRequest], gcbc_connection.Connection
+ [gcbc_connection.UpdateConnectionRequest],
+ typing.Union[
+ gcbc_connection.Connection, typing.Awaitable[gcbc_connection.Connection]
+ ],
]:
- raise NotImplementedError
+ raise NotImplementedError()
@property
def delete_connection(
- self
- ) -> typing.Callable[[connection.DeleteConnectionRequest], empty.Empty]:
- raise NotImplementedError
+ self,
+ ) -> typing.Callable[
+ [connection.DeleteConnectionRequest],
+ typing.Union[empty.Empty, typing.Awaitable[empty.Empty]],
+ ]:
+ raise NotImplementedError()
@property
def get_iam_policy(
- self
- ) -> typing.Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]:
- raise NotImplementedError
+ self,
+ ) -> typing.Callable[
+ [iam_policy.GetIamPolicyRequest],
+ typing.Union[policy.Policy, typing.Awaitable[policy.Policy]],
+ ]:
+ raise NotImplementedError()
@property
def set_iam_policy(
- self
- ) -> typing.Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]:
- raise NotImplementedError
+ self,
+ ) -> typing.Callable[
+ [iam_policy.SetIamPolicyRequest],
+ typing.Union[policy.Policy, typing.Awaitable[policy.Policy]],
+ ]:
+ raise NotImplementedError()
@property
def test_iam_permissions(
- self
+ self,
) -> typing.Callable[
- [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse
+ [iam_policy.TestIamPermissionsRequest],
+ typing.Union[
+ iam_policy.TestIamPermissionsResponse,
+ typing.Awaitable[iam_policy.TestIamPermissionsResponse],
+ ],
]:
- raise NotImplementedError
+ raise NotImplementedError()
__all__ = ("ConnectionServiceTransport",)
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc.py
index ed4bfc70f797..8c1a3f2b6fb7 100644
--- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc.py
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc.py
@@ -15,9 +15,10 @@
# limitations under the License.
#
-from typing import Callable, Dict, Tuple
+from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
@@ -46,14 +47,19 @@ class ConnectionServiceGrpcTransport(ConnectionServiceTransport):
top of HTTP/2); the ``grpcio`` package must be installed.
"""
+ _stubs: Dict[str, Callable]
+
def __init__(
self,
*,
host: str = "bigqueryconnection.googleapis.com",
credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
- client_cert_source: Callable[[], Tuple[bytes, bytes]] = None
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id: Optional[str] = None
) -> None:
"""Instantiate the transport.
@@ -65,6 +71,11 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
@@ -75,10 +86,14 @@ def __init__(
callback to provide client SSL certificate bytes and private key
bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
is None.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
Raises:
- google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
"""
if channel:
# Sanity check: Ensure that channel and credentials are not both
@@ -94,6 +109,11 @@ def __init__(
else api_mtls_endpoint + ":443"
)
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
@@ -105,22 +125,34 @@ def __init__(
ssl_credentials = SslCredentials().ssl_credentials
# create a new channel. The provided one is ignored.
- self._grpc_channel = grpc_helpers.create_channel(
+ self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
+ credentials_file=credentials_file,
ssl_credentials=ssl_credentials,
- scopes=self.AUTH_SCOPES,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
)
- # Run the base constructor.
- super().__init__(host=host, credentials=credentials)
self._stubs = {} # type: Dict[str, Callable]
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
@classmethod
def create_channel(
cls,
host: str = "bigqueryconnection.googleapis.com",
credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
**kwargs
) -> grpc.Channel:
"""Create and return a gRPC channel object.
@@ -131,13 +163,31 @@ def create_channel(
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
"""
+ scopes = scopes or cls.AUTH_SCOPES
return grpc_helpers.create_channel(
- host, credentials=credentials, scopes=cls.AUTH_SCOPES, **kwargs
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs
)
@property
@@ -151,7 +201,7 @@ def grpc_channel(self) -> grpc.Channel:
# have one.
if not hasattr(self, "_grpc_channel"):
self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials
+ self._host, credentials=self._credentials,
)
# Return the channel from cache.
@@ -159,7 +209,7 @@ def grpc_channel(self) -> grpc.Channel:
@property
def create_connection(
- self
+ self,
) -> Callable[
[gcbc_connection.CreateConnectionRequest], gcbc_connection.Connection
]:
@@ -187,7 +237,7 @@ def create_connection(
@property
def get_connection(
- self
+ self,
) -> Callable[[connection.GetConnectionRequest], connection.Connection]:
r"""Return a callable for the get connection method over gRPC.
@@ -213,7 +263,7 @@ def get_connection(
@property
def list_connections(
- self
+ self,
) -> Callable[
[connection.ListConnectionsRequest], connection.ListConnectionsResponse
]:
@@ -241,7 +291,7 @@ def list_connections(
@property
def update_connection(
- self
+ self,
) -> Callable[
[gcbc_connection.UpdateConnectionRequest], gcbc_connection.Connection
]:
@@ -271,7 +321,7 @@ def update_connection(
@property
def delete_connection(
- self
+ self,
) -> Callable[[connection.DeleteConnectionRequest], empty.Empty]:
r"""Return a callable for the delete connection method over gRPC.
@@ -297,7 +347,7 @@ def delete_connection(
@property
def get_iam_policy(
- self
+ self,
) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]:
r"""Return a callable for the get iam policy method over gRPC.
@@ -325,15 +375,15 @@ def get_iam_policy(
@property
def set_iam_policy(
- self
+ self,
) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]:
r"""Return a callable for the set iam policy method over gRPC.
Sets the access control policy on the specified resource.
Replaces any existing policy.
- Can return Public Errors: NOT_FOUND, INVALID_ARGUMENT and
- PERMISSION_DENIED
+ Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and
+ ``PERMISSION_DENIED`` errors.
Returns:
Callable[[~.SetIamPolicyRequest],
@@ -355,7 +405,7 @@ def set_iam_policy(
@property
def test_iam_permissions(
- self
+ self,
) -> Callable[
[iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse
]:
@@ -363,7 +413,7 @@ def test_iam_permissions(
Returns permissions that a caller has on the specified resource.
If the resource does not exist, this will return an empty set of
- permissions, not a NOT_FOUND error.
+ permissions, not a ``NOT_FOUND`` error.
Note: This operation is designed to be used for building
permission-aware UIs and command-line tools, not for
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..342bdc9f1cf6
--- /dev/null
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/services/connection_service/transports/grpc_asyncio.py
@@ -0,0 +1,437 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import grpc_helpers_async # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.bigquery.connection_v1.types import connection
+from google.cloud.bigquery.connection_v1.types import connection as gcbc_connection
+from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
+from google.iam.v1 import policy_pb2 as policy # type: ignore
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+from .base import ConnectionServiceTransport
+from .grpc import ConnectionServiceGrpcTransport
+
+
+class ConnectionServiceGrpcAsyncIOTransport(ConnectionServiceTransport):
+ """gRPC AsyncIO backend transport for ConnectionService.
+
+ Manages external data source connections and credentials.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "bigqueryconnection.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "bigqueryconnection.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id=None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
+ provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
+ callback to provide client SSL certificate bytes and private key
+ bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
+ is None.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ elif api_mtls_endpoint:
+ host = (
+ api_mtls_endpoint
+ if ":" in api_mtls_endpoint
+ else api_mtls_endpoint + ":443"
+ )
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ ssl_credentials = SslCredentials().ssl_credentials
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
+ self._stubs = {}
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Sanity check: Only create a new channel if we do not already
+ # have one.
+ if not hasattr(self, "_grpc_channel"):
+ self._grpc_channel = self.create_channel(
+ self._host, credentials=self._credentials,
+ )
+
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def create_connection(
+ self,
+ ) -> Callable[
+ [gcbc_connection.CreateConnectionRequest], Awaitable[gcbc_connection.Connection]
+ ]:
+ r"""Return a callable for the create connection method over gRPC.
+
+ Creates a new connection.
+
+ Returns:
+ Callable[[~.CreateConnectionRequest],
+ Awaitable[~.Connection]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_connection" not in self._stubs:
+ self._stubs["create_connection"] = self.grpc_channel.unary_unary(
+ "/google.cloud.bigquery.connection.v1.ConnectionService/CreateConnection",
+ request_serializer=gcbc_connection.CreateConnectionRequest.serialize,
+ response_deserializer=gcbc_connection.Connection.deserialize,
+ )
+ return self._stubs["create_connection"]
+
+ @property
+ def get_connection(
+ self,
+ ) -> Callable[[connection.GetConnectionRequest], Awaitable[connection.Connection]]:
+ r"""Return a callable for the get connection method over gRPC.
+
+ Returns specified connection.
+
+ Returns:
+ Callable[[~.GetConnectionRequest],
+ Awaitable[~.Connection]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_connection" not in self._stubs:
+ self._stubs["get_connection"] = self.grpc_channel.unary_unary(
+ "/google.cloud.bigquery.connection.v1.ConnectionService/GetConnection",
+ request_serializer=connection.GetConnectionRequest.serialize,
+ response_deserializer=connection.Connection.deserialize,
+ )
+ return self._stubs["get_connection"]
+
+ @property
+ def list_connections(
+ self,
+ ) -> Callable[
+ [connection.ListConnectionsRequest],
+ Awaitable[connection.ListConnectionsResponse],
+ ]:
+ r"""Return a callable for the list connections method over gRPC.
+
+ Returns a list of connections in the given project.
+
+ Returns:
+ Callable[[~.ListConnectionsRequest],
+ Awaitable[~.ListConnectionsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_connections" not in self._stubs:
+ self._stubs["list_connections"] = self.grpc_channel.unary_unary(
+ "/google.cloud.bigquery.connection.v1.ConnectionService/ListConnections",
+ request_serializer=connection.ListConnectionsRequest.serialize,
+ response_deserializer=connection.ListConnectionsResponse.deserialize,
+ )
+ return self._stubs["list_connections"]
+
+ @property
+ def update_connection(
+ self,
+ ) -> Callable[
+ [gcbc_connection.UpdateConnectionRequest], Awaitable[gcbc_connection.Connection]
+ ]:
+ r"""Return a callable for the update connection method over gRPC.
+
+ Updates the specified connection. For security
+ reasons, also resets credential if connection properties
+ are in the update field mask.
+
+ Returns:
+ Callable[[~.UpdateConnectionRequest],
+ Awaitable[~.Connection]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "update_connection" not in self._stubs:
+ self._stubs["update_connection"] = self.grpc_channel.unary_unary(
+ "/google.cloud.bigquery.connection.v1.ConnectionService/UpdateConnection",
+ request_serializer=gcbc_connection.UpdateConnectionRequest.serialize,
+ response_deserializer=gcbc_connection.Connection.deserialize,
+ )
+ return self._stubs["update_connection"]
+
+ @property
+ def delete_connection(
+ self,
+ ) -> Callable[[connection.DeleteConnectionRequest], Awaitable[empty.Empty]]:
+ r"""Return a callable for the delete connection method over gRPC.
+
+ Deletes connection and associated credential.
+
+ Returns:
+ Callable[[~.DeleteConnectionRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_connection" not in self._stubs:
+ self._stubs["delete_connection"] = self.grpc_channel.unary_unary(
+ "/google.cloud.bigquery.connection.v1.ConnectionService/DeleteConnection",
+ request_serializer=connection.DeleteConnectionRequest.serialize,
+ response_deserializer=empty.Empty.FromString,
+ )
+ return self._stubs["delete_connection"]
+
+ @property
+ def get_iam_policy(
+ self,
+ ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]:
+ r"""Return a callable for the get iam policy method over gRPC.
+
+ Gets the access control policy for a resource.
+ Returns an empty policy if the resource exists and does
+ not have a policy set.
+
+ Returns:
+ Callable[[~.GetIamPolicyRequest],
+ Awaitable[~.Policy]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_iam_policy" not in self._stubs:
+ self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
+ "/google.cloud.bigquery.connection.v1.ConnectionService/GetIamPolicy",
+ request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString,
+ response_deserializer=policy.Policy.FromString,
+ )
+ return self._stubs["get_iam_policy"]
+
+ @property
+ def set_iam_policy(
+ self,
+ ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]:
+ r"""Return a callable for the set iam policy method over gRPC.
+
+ Sets the access control policy on the specified resource.
+ Replaces any existing policy.
+
+ Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and
+ ``PERMISSION_DENIED`` errors.
+
+ Returns:
+ Callable[[~.SetIamPolicyRequest],
+ Awaitable[~.Policy]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "set_iam_policy" not in self._stubs:
+ self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
+ "/google.cloud.bigquery.connection.v1.ConnectionService/SetIamPolicy",
+ request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString,
+ response_deserializer=policy.Policy.FromString,
+ )
+ return self._stubs["set_iam_policy"]
+
+ @property
+ def test_iam_permissions(
+ self,
+ ) -> Callable[
+ [iam_policy.TestIamPermissionsRequest],
+ Awaitable[iam_policy.TestIamPermissionsResponse],
+ ]:
+ r"""Return a callable for the test iam permissions method over gRPC.
+
+ Returns permissions that a caller has on the specified resource.
+ If the resource does not exist, this will return an empty set of
+ permissions, not a ``NOT_FOUND`` error.
+
+ Note: This operation is designed to be used for building
+ permission-aware UIs and command-line tools, not for
+ authorization checking. This operation may "fail open" without
+ warning.
+
+ Returns:
+ Callable[[~.TestIamPermissionsRequest],
+ Awaitable[~.TestIamPermissionsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "test_iam_permissions" not in self._stubs:
+ self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary(
+ "/google.cloud.bigquery.connection.v1.ConnectionService/TestIamPermissions",
+ request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString,
+ response_deserializer=iam_policy.TestIamPermissionsResponse.FromString,
+ )
+ return self._stubs["test_iam_permissions"]
+
+
+__all__ = ("ConnectionServiceGrpcAsyncIOTransport",)
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/types/connection.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/types/connection.py
index 06aab1e84755..35c8c3963eee 100644
--- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/types/connection.py
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery/connection_v1/types/connection.py
@@ -53,8 +53,10 @@ class CreateConnectionRequest(proto.Message):
"""
parent = proto.Field(proto.STRING, number=1)
+
connection_id = proto.Field(proto.STRING, number=2)
- connection = proto.Field(proto.MESSAGE, number=3, message="Connection")
+
+ connection = proto.Field(proto.MESSAGE, number=3, message="Connection",)
class GetConnectionRequest(proto.Message):
@@ -85,7 +87,9 @@ class ListConnectionsRequest(proto.Message):
"""
parent = proto.Field(proto.STRING, number=1)
+
page_size = proto.Field(proto.INT32, number=4)
+
page_token = proto.Field(proto.STRING, number=3)
@@ -105,7 +109,8 @@ def raw_page(self):
return self
next_page_token = proto.Field(proto.STRING, number=1)
- connections = proto.RepeatedField(proto.MESSAGE, number=2, message="Connection")
+
+ connections = proto.RepeatedField(proto.MESSAGE, number=2, message="Connection",)
class UpdateConnectionRequest(proto.Message):
@@ -125,8 +130,10 @@ class UpdateConnectionRequest(proto.Message):
"""
name = proto.Field(proto.STRING, number=1)
- connection = proto.Field(proto.MESSAGE, number=2, message="Connection")
- update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask)
+
+ connection = proto.Field(proto.MESSAGE, number=2, message="Connection",)
+
+ update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,)
class DeleteConnectionRequest(proto.Message):
@@ -168,11 +175,19 @@ class Connection(proto.Message):
"""
name = proto.Field(proto.STRING, number=1)
+
friendly_name = proto.Field(proto.STRING, number=2)
+
description = proto.Field(proto.STRING, number=3)
- cloud_sql = proto.Field(proto.MESSAGE, number=4, message="CloudSqlProperties")
+
+ cloud_sql = proto.Field(
+ proto.MESSAGE, number=4, oneof="properties", message="CloudSqlProperties",
+ )
+
creation_time = proto.Field(proto.INT64, number=5)
+
last_modified_time = proto.Field(proto.INT64, number=6)
+
has_credential = proto.Field(proto.BOOL, number=7)
@@ -198,9 +213,12 @@ class DatabaseType(proto.Enum):
MYSQL = 2
instance_id = proto.Field(proto.STRING, number=1)
+
database = proto.Field(proto.STRING, number=2)
- type = proto.Field(proto.ENUM, number=3, enum=DatabaseType)
- credential = proto.Field(proto.MESSAGE, number=4, message="CloudSqlCredential")
+
+ type = proto.Field(proto.ENUM, number=3, enum=DatabaseType,)
+
+ credential = proto.Field(proto.MESSAGE, number=4, message="CloudSqlCredential",)
class CloudSqlCredential(proto.Message):
@@ -214,6 +232,7 @@ class CloudSqlCredential(proto.Message):
"""
username = proto.Field(proto.STRING, number=1)
+
password = proto.Field(proto.STRING, number=2)
diff --git a/packages/google-cloud-bigquery-connection/noxfile.py b/packages/google-cloud-bigquery-connection/noxfile.py
index a78102e66a27..7269a238d2c1 100644
--- a/packages/google-cloud-bigquery-connection/noxfile.py
+++ b/packages/google-cloud-bigquery-connection/noxfile.py
@@ -23,11 +23,11 @@
import nox
-BLACK_VERSION = "black==19.3b0"
+BLACK_VERSION = "black==19.10b0"
BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.7"
-SYSTEM_TEST_PYTHON_VERSIONS = ["3.7"]
+DEFAULT_PYTHON_VERSION = "3.8"
+SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
@@ -39,7 +39,9 @@ def lint(session):
serious code quality issues.
"""
session.install("flake8", BLACK_VERSION)
- session.run("black", "--check", *BLACK_PATHS)
+ session.run(
+ "black", "--check", *BLACK_PATHS,
+ )
session.run("flake8", "google", "tests")
@@ -54,7 +56,9 @@ def blacken(session):
check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
"""
session.install(BLACK_VERSION)
- session.run("black", *BLACK_PATHS)
+ session.run(
+ "black", *BLACK_PATHS,
+ )
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -66,6 +70,8 @@ def lint_setup_py(session):
def default(session):
# Install all test dependencies, then install this package in-place.
+ session.install("asyncmock", "pytest-asyncio")
+
session.install("mock", "pytest", "pytest-cov")
session.install("-e", ".")
@@ -96,6 +102,10 @@ def system(session):
"""Run the system test suite."""
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
+
+ # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
+ session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
@@ -111,7 +121,9 @@ def system(session):
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
- session.install("mock", "pytest", "google-cloud-testutils")
+ session.install(
+ "mock", "pytest", "google-cloud-testutils",
+ )
session.install("-e", ".")
# Run py.test against the system tests.
@@ -129,7 +141,7 @@ def cover(session):
test runs (not system test runs), and then erases coverage data.
"""
session.install("coverage", "pytest-cov")
- session.run("coverage", "report", "--show-missing", "--fail-under=100")
+ session.run("coverage", "report", "--show-missing", "--fail-under=99")
session.run("coverage", "erase")
@@ -154,3 +166,36 @@ def docs(session):
os.path.join("docs", ""),
os.path.join("docs", "_build", "html", ""),
)
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def docfx(session):
+ """Build the docfx yaml files for this library."""
+
+ session.install("-e", ".")
+ session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-D",
+ (
+ "extensions=sphinx.ext.autodoc,"
+ "sphinx.ext.autosummary,"
+ "docfx_yaml.extension,"
+ "sphinx.ext.intersphinx,"
+ "sphinx.ext.coverage,"
+ "sphinx.ext.napoleon,"
+ "sphinx.ext.todo,"
+ "sphinx.ext.viewcode,"
+ "recommonmark"
+ ),
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
diff --git a/packages/google-cloud-bigquery-connection/scripts/fixup_connection_v1_keywords.py b/packages/google-cloud-bigquery-connection/scripts/fixup_connection_v1_keywords.py
new file mode 100644
index 000000000000..b945c3007fe3
--- /dev/null
+++ b/packages/google-cloud-bigquery-connection/scripts/fixup_connection_v1_keywords.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+ predicate: Callable[[Any], bool],
+ iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+ """A stable, out-of-place partition."""
+ results = ([], [])
+
+ for i in iterator:
+ results[int(predicate(i))].append(i)
+
+ # Returns trueList, falseList
+ return results[1], results[0]
+
+
+class connectionCallTransformer(cst.CSTTransformer):
+ CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
+ METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
+ 'create_connection': ('parent', 'connection', 'connection_id', ),
+ 'delete_connection': ('name', ),
+ 'get_connection': ('name', ),
+ 'get_iam_policy': ('resource', 'options', ),
+ 'list_connections': ('parent', 'page_size', 'page_token', ),
+ 'set_iam_policy': ('resource', 'policy', ),
+ 'test_iam_permissions': ('resource', 'permissions', ),
+ 'update_connection': ('name', 'connection', 'update_mask', ),
+
+ }
+
+ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+ try:
+ key = original.func.attr.value
+ kword_params = self.METHOD_TO_PARAMS[key]
+ except (AttributeError, KeyError):
+ # Either not a method from the API or too convoluted to be sure.
+ return updated
+
+ # If the existing code is valid, keyword args come after positional args.
+ # Therefore, all positional args must map to the first parameters.
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+ if any(k.keyword.value == "request" for k in kwargs):
+ # We've already fixed this file, don't fix it again.
+ return updated
+
+ kwargs, ctrl_kwargs = partition(
+ lambda a: not a.keyword.value in self.CTRL_PARAMS,
+ kwargs
+ )
+
+ args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+ ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+ for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+ request_arg = cst.Arg(
+ value=cst.Dict([
+ cst.DictElement(
+ cst.SimpleString("'{}'".format(name)),
+ cst.Element(value=arg.value)
+ )
+ # Note: the args + kwargs looks silly, but keep in mind that
+ # the control parameters had to be stripped out, and that
+ # those could have been passed positionally or by keyword.
+ for name, arg in zip(kword_params, args + kwargs)]),
+ keyword=cst.Name("request")
+ )
+
+ return updated.with_changes(
+ args=[request_arg] + ctrl_kwargs
+ )
+
+
+def fix_files(
+ in_dir: pathlib.Path,
+ out_dir: pathlib.Path,
+ *,
+ transformer=connectionCallTransformer(),
+):
+ """Duplicate the input dir to the output dir, fixing file method calls.
+
+ Preconditions:
+ * in_dir is a real directory
+ * out_dir is a real, empty directory
+ """
+ pyfile_gen = (
+ pathlib.Path(os.path.join(root, f))
+ for root, _, files in os.walk(in_dir)
+ for f in files if os.path.splitext(f)[1] == ".py"
+ )
+
+ for fpath in pyfile_gen:
+ with open(fpath, 'r') as f:
+ src = f.read()
+
+ # Parse the code and insert method call fixes.
+ tree = cst.parse_module(src)
+ updated = tree.visit(transformer)
+
+ # Create the path and directory structure for the new file.
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+ updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+ # Generate the updated source file at the corresponding path.
+ with open(updated_path, 'w') as f:
+ f.write(updated.code)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description="""Fix up source that uses the connection client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+ parameters in client method calls to keyword based parameters.
+ Cases where it WILL FAIL include
+ A) * or ** expansion in a method call.
+ B) Calls via function or method alias (includes free function calls)
+ C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+ These all constitute false negatives. The tool will also detect false
+ positives when an API method shares a name with another method.
+""")
+ parser.add_argument(
+ '-d',
+ '--input-directory',
+ required=True,
+ dest='input_dir',
+ help='the input directory to walk for python files to fix up',
+ )
+ parser.add_argument(
+ '-o',
+ '--output-directory',
+ required=True,
+ dest='output_dir',
+ help='the directory to output files fixed via un-flattening',
+ )
+ args = parser.parse_args()
+ input_dir = pathlib.Path(args.input_dir)
+ output_dir = pathlib.Path(args.output_dir)
+ if not input_dir.is_dir():
+ print(
+ f"input directory '{input_dir}' does not exist or is not a directory",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ if not output_dir.is_dir():
+ print(
+ f"output directory '{output_dir}' does not exist or is not a directory",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ if os.listdir(output_dir):
+ print(
+ f"output directory '{output_dir}' is not empty",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ fix_files(input_dir, output_dir)
diff --git a/packages/google-cloud-bigquery-connection/setup.py b/packages/google-cloud-bigquery-connection/setup.py
index 69455c0d86e8..8547507e0671 100644
--- a/packages/google-cloud-bigquery-connection/setup.py
+++ b/packages/google-cloud-bigquery-connection/setup.py
@@ -40,15 +40,12 @@
platforms="Posix; MacOS X; Windows",
include_package_data=True,
install_requires=(
- "google-auth >= 1.14.0",
- "google-api-core >= 1.17.0, < 2.0.0dev",
- "googleapis-common-protos >= 1.5.8",
- "grpcio >= 1.10.0",
- "proto-plus >= 0.4.0",
+ "google-api-core >= 1.21.0, < 2.0.0dev",
+ "proto-plus >= 1.4.0",
"grpc-google-iam-v1",
+ "libcst >= 0.2.5",
),
python_requires=">=3.6",
- setup_requires=["libcst >= 0.2.5"],
scripts=["scripts/fixup_keywords.py"],
classifiers=[
"Development Status :: 4 - Beta",
diff --git a/packages/google-cloud-bigquery-connection/synth.metadata b/packages/google-cloud-bigquery-connection/synth.metadata
index c7f0c134e8ff..021ffc0a0769 100644
--- a/packages/google-cloud-bigquery-connection/synth.metadata
+++ b/packages/google-cloud-bigquery-connection/synth.metadata
@@ -4,29 +4,21 @@
"git": {
"name": ".",
"remote": "git@github.com:googleapis/python-bigquery-connection",
- "sha": "e01632cd5afd1adb1f546d90fb2f2bc9ada14070"
- }
- },
- {
- "git": {
- "name": "googleapis",
- "remote": "https://github.com/googleapis/googleapis.git",
- "sha": "9309ee73424d2798a2557d607dfb3e91c8a433fb",
- "internalRef": "313826599"
+ "sha": "d4ad8bbdb90ad0427d199cbc9a6db2fa2f402700"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "cb3c683e958a4b5c016bb3734436fc1cb887eb7b"
+ "sha": "5f2f711c91199ba2f609d3f06a2fe22aee4e5be3"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "cb3c683e958a4b5c016bb3734436fc1cb887eb7b"
+ "sha": "5f2f711c91199ba2f609d3f06a2fe22aee4e5be3"
}
}
],
@@ -37,7 +29,7 @@
"apiName": "bigquery/connection",
"apiVersion": "v1",
"language": "python",
- "generator": "gapic-generator-python"
+ "generator": "bazel"
}
}
]
diff --git a/packages/google-cloud-bigquery-connection/synth.py b/packages/google-cloud-bigquery-connection/synth.py
index 8059c4ed4c42..d773546251ee 100644
--- a/packages/google-cloud-bigquery-connection/synth.py
+++ b/packages/google-cloud-bigquery-connection/synth.py
@@ -19,34 +19,29 @@
import synthtool.gcp as gcp
from synthtool.languages import python
-gapic = gcp.GAPICMicrogenerator()
+gapic = gcp.GAPICBazel()
common = gcp.CommonTemplates()
# ----------------------------------------------------------------------------
# Generate access approval GAPIC layer
# ----------------------------------------------------------------------------
-library = gapic.py_library("bigquery/connection", "v1")
+library = gapic.py_library(
+ service="bigquery/connection",
+ version="v1",
+ bazel_target=f"//google/cloud/bigquery/connection/v1:bigquery-connection-v1-py"
+)
-s.move(library, excludes=["nox.py", "setup.py", "README.rst", "docs/index.rst"])
+s.move(library, excludes=["setup.py", "README.rst", "docs/index.rst"])
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
templated_files = common.py_library(
- cov_level=100,
- unit_test_python_versions=["3.6", "3.7", "3.8"],
- system_test_python_versions=["3.7"],
+ cov_level=99,
+ microgenerator=True,
)
s.move(
templated_files, excludes=[".coveragerc"]
) # the microgenerator has a good coveragerc file
-
-# Expand flake errors permitted to accomodate the Microgenerator
-# TODO: remove extra error codes once issues below are resolved
-# F401: https://github.com/googleapis/gapic-generator-python/issues/324
-# F841: local variable 'client'/'response' is assigned to but never use
-s.replace(".flake8", "ignore = .*", "ignore = E203, E266, E501, W503, F401, F841")
-
-
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/packages/google-cloud-bigquery-connection/tests/unit/connection_v1/test_connection_service.py b/packages/google-cloud-bigquery-connection/tests/unit/connection_v1/test_connection_service.py
deleted file mode 100644
index bafa298d3ccb..000000000000
--- a/packages/google-cloud-bigquery-connection/tests/unit/connection_v1/test_connection_service.py
+++ /dev/null
@@ -1,1036 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from unittest import mock
-
-import grpc
-import math
-import pytest
-
-from google import auth
-from google.api_core import client_options
-from google.api_core import grpc_helpers
-from google.auth import credentials
-from google.cloud.bigquery.connection_v1.services.connection_service import (
- ConnectionServiceClient,
-)
-from google.cloud.bigquery.connection_v1.services.connection_service import pagers
-from google.cloud.bigquery.connection_v1.services.connection_service import transports
-from google.cloud.bigquery.connection_v1.types import connection
-from google.cloud.bigquery.connection_v1.types import connection as gcbc_connection
-from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
-from google.iam.v1 import options_pb2 as options # type: ignore
-from google.iam.v1 import policy_pb2 as policy # type: ignore
-from google.oauth2 import service_account
-from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from google.type import expr_pb2 as expr # type: ignore
-
-
-def client_cert_source_callback():
- return b"cert bytes", b"key bytes"
-
-
-def test__get_default_mtls_endpoint():
- api_endpoint = "example.googleapis.com"
- api_mtls_endpoint = "example.mtls.googleapis.com"
- sandbox_endpoint = "example.sandbox.googleapis.com"
- sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
- non_googleapi = "api.example.com"
-
- assert ConnectionServiceClient._get_default_mtls_endpoint(None) is None
- assert (
- ConnectionServiceClient._get_default_mtls_endpoint(api_endpoint)
- == api_mtls_endpoint
- )
- assert (
- ConnectionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
- == api_mtls_endpoint
- )
- assert (
- ConnectionServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
- == sandbox_mtls_endpoint
- )
- assert (
- ConnectionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
- == sandbox_mtls_endpoint
- )
- assert (
- ConnectionServiceClient._get_default_mtls_endpoint(non_googleapi)
- == non_googleapi
- )
-
-
-def test_connection_service_client_from_service_account_file():
- creds = credentials.AnonymousCredentials()
- with mock.patch.object(
- service_account.Credentials, "from_service_account_file"
- ) as factory:
- factory.return_value = creds
- client = ConnectionServiceClient.from_service_account_file(
- "dummy/file/path.json"
- )
- assert client._transport._credentials == creds
-
- client = ConnectionServiceClient.from_service_account_json(
- "dummy/file/path.json"
- )
- assert client._transport._credentials == creds
-
- assert client._transport._host == "bigqueryconnection.googleapis.com:443"
-
-
-def test_connection_service_client_client_options():
- # Check that if channel is provided we won't create a new one.
- with mock.patch(
- "google.cloud.bigquery.connection_v1.services.connection_service.ConnectionServiceClient.get_transport_class"
- ) as gtc:
- transport = transports.ConnectionServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials()
- )
- client = ConnectionServiceClient(transport=transport)
- gtc.assert_not_called()
-
- # Check mTLS is not triggered with empty client options.
- options = client_options.ClientOptions()
- with mock.patch(
- "google.cloud.bigquery.connection_v1.services.connection_service.ConnectionServiceClient.get_transport_class"
- ) as gtc:
- transport = gtc.return_value = mock.MagicMock()
- client = ConnectionServiceClient(client_options=options)
- transport.assert_called_once_with(
- credentials=None, host=client.DEFAULT_ENDPOINT
- )
-
- # Check mTLS is not triggered if api_endpoint is provided but
- # client_cert_source is None.
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
- with mock.patch(
- "google.cloud.bigquery.connection_v1.services.connection_service.transports.ConnectionServiceGrpcTransport.__init__"
- ) as grpc_transport:
- grpc_transport.return_value = None
- client = ConnectionServiceClient(client_options=options)
- grpc_transport.assert_called_once_with(
- api_mtls_endpoint=None,
- client_cert_source=None,
- credentials=None,
- host="squid.clam.whelk",
- )
-
- # Check mTLS is triggered if client_cert_source is provided.
- options = client_options.ClientOptions(
- client_cert_source=client_cert_source_callback
- )
- with mock.patch(
- "google.cloud.bigquery.connection_v1.services.connection_service.transports.ConnectionServiceGrpcTransport.__init__"
- ) as grpc_transport:
- grpc_transport.return_value = None
- client = ConnectionServiceClient(client_options=options)
- grpc_transport.assert_called_once_with(
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=client_cert_source_callback,
- credentials=None,
- host=client.DEFAULT_ENDPOINT,
- )
-
- # Check mTLS is triggered if api_endpoint and client_cert_source are provided.
- options = client_options.ClientOptions(
- api_endpoint="squid.clam.whelk", client_cert_source=client_cert_source_callback
- )
- with mock.patch(
- "google.cloud.bigquery.connection_v1.services.connection_service.transports.ConnectionServiceGrpcTransport.__init__"
- ) as grpc_transport:
- grpc_transport.return_value = None
- client = ConnectionServiceClient(client_options=options)
- grpc_transport.assert_called_once_with(
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- credentials=None,
- host="squid.clam.whelk",
- )
-
-
-def test_connection_service_client_client_options_from_dict():
- with mock.patch(
- "google.cloud.bigquery.connection_v1.services.connection_service.transports.ConnectionServiceGrpcTransport.__init__"
- ) as grpc_transport:
- grpc_transport.return_value = None
- client = ConnectionServiceClient(
- client_options={"api_endpoint": "squid.clam.whelk"}
- )
- grpc_transport.assert_called_once_with(
- api_mtls_endpoint=None,
- client_cert_source=None,
- credentials=None,
- host="squid.clam.whelk",
- )
-
-
-def test_create_connection(transport: str = "grpc"):
- client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = gcbc_connection.CreateConnectionRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.create_connection), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = gcbc_connection.Connection(
- name="name_value",
- friendly_name="friendly_name_value",
- description="description_value",
- creation_time=1379,
- last_modified_time=1890,
- has_credential=True,
- )
-
- response = client.create_connection(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, gcbc_connection.Connection)
- assert response.name == "name_value"
- assert response.friendly_name == "friendly_name_value"
- assert response.description == "description_value"
- assert response.creation_time == 1379
- assert response.last_modified_time == 1890
-
- assert response.has_credential is True
-
-
-def test_create_connection_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.create_connection), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = gcbc_connection.Connection()
-
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = client.create_connection(
- parent="parent_value",
- connection=gcbc_connection.Connection(name="name_value"),
- connection_id="connection_id_value",
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0].parent == "parent_value"
- assert args[0].connection == gcbc_connection.Connection(name="name_value")
- assert args[0].connection_id == "connection_id_value"
-
-
-def test_create_connection_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_connection(
- gcbc_connection.CreateConnectionRequest(),
- parent="parent_value",
- connection=gcbc_connection.Connection(name="name_value"),
- connection_id="connection_id_value",
- )
-
-
-def test_get_connection(transport: str = "grpc"):
- client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = connection.GetConnectionRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_connection), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = connection.Connection(
- name="name_value",
- friendly_name="friendly_name_value",
- description="description_value",
- creation_time=1379,
- last_modified_time=1890,
- has_credential=True,
- )
-
- response = client.get_connection(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, connection.Connection)
- assert response.name == "name_value"
- assert response.friendly_name == "friendly_name_value"
- assert response.description == "description_value"
- assert response.creation_time == 1379
- assert response.last_modified_time == 1890
-
- assert response.has_credential is True
-
-
-def test_get_connection_field_headers():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = connection.GetConnectionRequest(name="name/value")
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_connection), "__call__") as call:
- call.return_value = connection.Connection()
- client.get_connection(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "name=name/value") in kw["metadata"]
-
-
-def test_get_connection_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_connection), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = connection.Connection()
-
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = client.get_connection(name="name_value")
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0].name == "name_value"
-
-
-def test_get_connection_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_connection(connection.GetConnectionRequest(), name="name_value")
-
-
-def test_list_connections(transport: str = "grpc"):
- client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = connection.ListConnectionsRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_connections), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = connection.ListConnectionsResponse(
- next_page_token="next_page_token_value"
- )
-
- response = client.list_connections(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListConnectionsPager)
- assert response.next_page_token == "next_page_token_value"
-
-
-def test_list_connections_field_headers():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = connection.ListConnectionsRequest(parent="parent/value")
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_connections), "__call__"
- ) as call:
- call.return_value = connection.ListConnectionsResponse()
- client.list_connections(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "parent=parent/value") in kw["metadata"]
-
-
-def test_list_connections_pager():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials)
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_connections), "__call__"
- ) as call:
- # Set the response to a series of pages.
- call.side_effect = (
- connection.ListConnectionsResponse(
- connections=[
- connection.Connection(),
- connection.Connection(),
- connection.Connection(),
- ],
- next_page_token="abc",
- ),
- connection.ListConnectionsResponse(connections=[], next_page_token="def"),
- connection.ListConnectionsResponse(
- connections=[connection.Connection()], next_page_token="ghi"
- ),
- connection.ListConnectionsResponse(
- connections=[connection.Connection(), connection.Connection()]
- ),
- RuntimeError,
- )
- results = [i for i in client.list_connections(request={})]
- assert len(results) == 6
- assert all(isinstance(i, connection.Connection) for i in results)
-
-
-def test_list_connections_pages():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials)
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_connections), "__call__"
- ) as call:
- # Set the response to a series of pages.
- call.side_effect = (
- connection.ListConnectionsResponse(
- connections=[
- connection.Connection(),
- connection.Connection(),
- connection.Connection(),
- ],
- next_page_token="abc",
- ),
- connection.ListConnectionsResponse(connections=[], next_page_token="def"),
- connection.ListConnectionsResponse(
- connections=[connection.Connection()], next_page_token="ghi"
- ),
- connection.ListConnectionsResponse(
- connections=[connection.Connection(), connection.Connection()]
- ),
- RuntimeError,
- )
- pages = list(client.list_connections(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
-
-
-def test_update_connection(transport: str = "grpc"):
- client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = gcbc_connection.UpdateConnectionRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.update_connection), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = gcbc_connection.Connection(
- name="name_value",
- friendly_name="friendly_name_value",
- description="description_value",
- creation_time=1379,
- last_modified_time=1890,
- has_credential=True,
- )
-
- response = client.update_connection(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, gcbc_connection.Connection)
- assert response.name == "name_value"
- assert response.friendly_name == "friendly_name_value"
- assert response.description == "description_value"
- assert response.creation_time == 1379
- assert response.last_modified_time == 1890
-
- assert response.has_credential is True
-
-
-def test_update_connection_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.update_connection), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = gcbc_connection.Connection()
-
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = client.update_connection(
- name="name_value",
- connection=gcbc_connection.Connection(name="name_value"),
- update_mask=field_mask.FieldMask(paths=["paths_value"]),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0].name == "name_value"
- assert args[0].connection == gcbc_connection.Connection(name="name_value")
- assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
-
-
-def test_update_connection_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_connection(
- gcbc_connection.UpdateConnectionRequest(),
- name="name_value",
- connection=gcbc_connection.Connection(name="name_value"),
- update_mask=field_mask.FieldMask(paths=["paths_value"]),
- )
-
-
-def test_delete_connection(transport: str = "grpc"):
- client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = connection.DeleteConnectionRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.delete_connection), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
-
- response = client.delete_connection(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert response is None
-
-
-def test_delete_connection_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.delete_connection), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
-
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = client.delete_connection(name="name_value")
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0].name == "name_value"
-
-
-def test_delete_connection_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_connection(
- connection.DeleteConnectionRequest(), name="name_value"
- )
-
-
-def test_get_iam_policy(transport: str = "grpc"):
- client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.GetIamPolicyRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy.Policy(version=774, etag=b"etag_blob")
-
- response = client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy.Policy)
- assert response.version == 774
- assert response.etag == b"etag_blob"
-
-
-def test_get_iam_policy_from_dict():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy.Policy()
-
- response = client.get_iam_policy(
- request={
- "resource": "resource_value",
- "options": options.GetPolicyOptions(requested_policy_version=2598),
- }
- )
- call.assert_called()
-
-
-def test_get_iam_policy_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy.Policy()
-
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = client.get_iam_policy(resource="resource_value")
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0].resource == "resource_value"
-
-
-def test_get_iam_policy_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_iam_policy(
- iam_policy.GetIamPolicyRequest(), resource="resource_value"
- )
-
-
-def test_set_iam_policy(transport: str = "grpc"):
- client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.SetIamPolicyRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy.Policy(version=774, etag=b"etag_blob")
-
- response = client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy.Policy)
- assert response.version == 774
- assert response.etag == b"etag_blob"
-
-
-def test_set_iam_policy_from_dict():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy.Policy()
-
- response = client.set_iam_policy(
- request={"resource": "resource_value", "policy": policy.Policy(version=774)}
- )
- call.assert_called()
-
-
-def test_set_iam_policy_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy.Policy()
-
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = client.set_iam_policy(resource="resource_value")
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0].resource == "resource_value"
-
-
-def test_set_iam_policy_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.set_iam_policy(
- iam_policy.SetIamPolicyRequest(), resource="resource_value"
- )
-
-
-def test_test_iam_permissions(transport: str = "grpc"):
- client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.TestIamPermissionsRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = iam_policy.TestIamPermissionsResponse(
- permissions=["permissions_value"]
- )
-
- response = client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, iam_policy.TestIamPermissionsResponse)
- assert response.permissions == ["permissions_value"]
-
-
-def test_test_iam_permissions_from_dict():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = iam_policy.TestIamPermissionsResponse()
-
- response = client.test_iam_permissions(
- request={"resource": "resource_value", "permissions": ["permissions_value"]}
- )
- call.assert_called()
-
-
-def test_test_iam_permissions_flattened():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = iam_policy.TestIamPermissionsResponse()
-
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = client.test_iam_permissions(
- resource="resource_value", permissions=["permissions_value"]
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0].resource == "resource_value"
- assert args[0].permissions == ["permissions_value"]
-
-
-def test_test_iam_permissions_flattened_error():
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.test_iam_permissions(
- iam_policy.TestIamPermissionsRequest(),
- resource="resource_value",
- permissions=["permissions_value"],
- )
-
-
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.ConnectionServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials()
- )
- with pytest.raises(ValueError):
- client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
-
-def test_transport_instance():
- # A client may be instantiated with a custom transport instance.
- transport = transports.ConnectionServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials()
- )
- client = ConnectionServiceClient(transport=transport)
- assert client._transport is transport
-
-
-def test_transport_grpc_default():
- # A client should use the gRPC transport by default.
- client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials())
- assert isinstance(client._transport, transports.ConnectionServiceGrpcTransport)
-
-
-def test_connection_service_base_transport():
- # Instantiate the base transport.
- transport = transports.ConnectionServiceTransport(
- credentials=credentials.AnonymousCredentials()
- )
-
- # Every method on the transport should just blindly
- # raise NotImplementedError.
- methods = (
- "create_connection",
- "get_connection",
- "list_connections",
- "update_connection",
- "delete_connection",
- "get_iam_policy",
- "set_iam_policy",
- "test_iam_permissions",
- )
- for method in methods:
- with pytest.raises(NotImplementedError):
- getattr(transport, method)(request=object())
-
-
-def test_connection_service_auth_adc():
- # If no credentials are provided, we should use ADC credentials.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
- ConnectionServiceClient()
- adc.assert_called_once_with(
- scopes=(
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform",
- )
- )
-
-
-def test_connection_service_host_no_port():
- client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(
- api_endpoint="bigqueryconnection.googleapis.com"
- ),
- transport="grpc",
- )
- assert client._transport._host == "bigqueryconnection.googleapis.com:443"
-
-
-def test_connection_service_host_with_port():
- client = ConnectionServiceClient(
- credentials=credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(
- api_endpoint="bigqueryconnection.googleapis.com:8000"
- ),
- transport="grpc",
- )
- assert client._transport._host == "bigqueryconnection.googleapis.com:8000"
-
-
-def test_connection_service_grpc_transport_channel():
- channel = grpc.insecure_channel("http://localhost/")
-
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
- transport = transports.ConnectionServiceGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_connection_service_grpc_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.ConnectionServiceGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- ssl_credentials=mock_ssl_cred,
- scopes=(
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform",
- ),
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
-)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_connection_service_grpc_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- mock_cred = mock.Mock()
- transport = transports.ConnectionServiceGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- ssl_credentials=mock_ssl_cred,
- scopes=(
- "https://www.googleapis.com/auth/bigquery",
- "https://www.googleapis.com/auth/cloud-platform",
- ),
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_connection_path():
- project = "squid"
- location = "clam"
- connection = "whelk"
-
- expected = "projects/{project}/locations/{location}/connections/{connection}".format(
- project=project, location=location, connection=connection
- )
- actual = ConnectionServiceClient.connection_path(project, location, connection)
- assert expected == actual
-
-
-def test_parse_connection_path():
- expected = {"project": "octopus", "location": "oyster", "connection": "nudibranch"}
- path = ConnectionServiceClient.connection_path(**expected)
-
- # Check that the path construction is reversible.
- actual = ConnectionServiceClient.parse_connection_path(path)
- assert expected == actual
diff --git a/packages/google-cloud-bigquery-connection/tests/unit/gapic/connection_v1/__init__.py b/packages/google-cloud-bigquery-connection/tests/unit/gapic/connection_v1/__init__.py
new file mode 100644
index 000000000000..8b137891791f
--- /dev/null
+++ b/packages/google-cloud-bigquery-connection/tests/unit/gapic/connection_v1/__init__.py
@@ -0,0 +1 @@
+
diff --git a/packages/google-cloud-bigquery-connection/tests/unit/gapic/connection_v1/test_connection_service.py b/packages/google-cloud-bigquery-connection/tests/unit/gapic/connection_v1/test_connection_service.py
new file mode 100644
index 000000000000..cdedca940a39
--- /dev/null
+++ b/packages/google-cloud-bigquery-connection/tests/unit/gapic/connection_v1/test_connection_service.py
@@ -0,0 +1,2692 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import mock
+
+import grpc
+from grpc.experimental import aio
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+
+from google import auth
+from google.api_core import client_options
+from google.api_core import exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.auth import credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.bigquery.connection_v1.services.connection_service import (
+ ConnectionServiceAsyncClient,
+)
+from google.cloud.bigquery.connection_v1.services.connection_service import (
+ ConnectionServiceClient,
+)
+from google.cloud.bigquery.connection_v1.services.connection_service import pagers
+from google.cloud.bigquery.connection_v1.services.connection_service import transports
+from google.cloud.bigquery.connection_v1.types import connection
+from google.cloud.bigquery.connection_v1.types import connection as gcbc_connection
+from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
+from google.iam.v1 import options_pb2 as options # type: ignore
+from google.iam.v1 import policy_pb2 as policy # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+from google.type import expr_pb2 as expr # type: ignore
+
+
+def client_cert_source_callback():
+ return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
+def test__get_default_mtls_endpoint():
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert ConnectionServiceClient._get_default_mtls_endpoint(None) is None
+ assert (
+ ConnectionServiceClient._get_default_mtls_endpoint(api_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ ConnectionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ ConnectionServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ ConnectionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ ConnectionServiceClient._get_default_mtls_endpoint(non_googleapi)
+ == non_googleapi
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class", [ConnectionServiceClient, ConnectionServiceAsyncClient]
+)
+def test_connection_service_client_from_service_account_file(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client._transport._credentials == creds
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client._transport._credentials == creds
+
+ assert client._transport._host == "bigqueryconnection.googleapis.com:443"
+
+
+def test_connection_service_client_get_transport_class():
+ transport = ConnectionServiceClient.get_transport_class()
+ assert transport == transports.ConnectionServiceGrpcTransport
+
+ transport = ConnectionServiceClient.get_transport_class("grpc")
+ assert transport == transports.ConnectionServiceGrpcTransport
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc"),
+ (
+ ConnectionServiceAsyncClient,
+ transports.ConnectionServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+@mock.patch.object(
+ ConnectionServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(ConnectionServiceClient),
+)
+@mock.patch.object(
+ ConnectionServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(ConnectionServiceAsyncClient),
+)
+def test_connection_service_client_client_options(
+ client_class, transport_class, transport_name
+):
+ # Check that if channel is provided we won't create a new one.
+ with mock.patch.object(ConnectionServiceClient, "get_transport_class") as gtc:
+ transport = transport_class(credentials=credentials.AnonymousCredentials())
+ client = client_class(transport=transport)
+ gtc.assert_not_called()
+
+ # Check that if channel is provided via str we will create a new one.
+ with mock.patch.object(ConnectionServiceClient, "get_transport_class") as gtc:
+ client = client_class(transport=transport_name)
+ gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ api_mtls_endpoint="squid.clam.whelk",
+ client_cert_source=None,
+ quota_project_id=None,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ api_mtls_endpoint=client.DEFAULT_ENDPOINT,
+ client_cert_source=None,
+ quota_project_id=None,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
+ client_cert_source=None,
+ quota_project_id=None,
+ )
+
+ # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
+ # "auto", and client_cert_source is provided.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ options = client_options.ClientOptions(
+ client_cert_source=client_cert_source_callback
+ )
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
+ client_cert_source=client_cert_source_callback,
+ quota_project_id=None,
+ )
+
+ # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
+ # "auto", and default_client_cert_source is provided.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
+ client_cert_source=None,
+ quota_project_id=None,
+ )
+
+ # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
+ # "auto", but client_cert_source and default_client_cert_source are None.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ api_mtls_endpoint=client.DEFAULT_ENDPOINT,
+ client_cert_source=None,
+ quota_project_id=None,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ api_mtls_endpoint=client.DEFAULT_ENDPOINT,
+ client_cert_source=None,
+ quota_project_id="octopus",
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc"),
+ (
+ ConnectionServiceAsyncClient,
+ transports.ConnectionServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_connection_service_client_client_options_scopes(
+ client_class, transport_class, transport_name
+):
+ # Check the case scopes are provided.
+ options = client_options.ClientOptions(scopes=["1", "2"],)
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=["1", "2"],
+ api_mtls_endpoint=client.DEFAULT_ENDPOINT,
+ client_cert_source=None,
+ quota_project_id=None,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc"),
+ (
+ ConnectionServiceAsyncClient,
+ transports.ConnectionServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_connection_service_client_client_options_credentials_file(
+ client_class, transport_class, transport_name
+):
+ # Check the case credentials file is provided.
+ options = client_options.ClientOptions(credentials_file="credentials.json")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ api_mtls_endpoint=client.DEFAULT_ENDPOINT,
+ client_cert_source=None,
+ quota_project_id=None,
+ )
+
+
def test_connection_service_client_client_options_from_dict():
    """client_options may be supplied as a plain dict instead of ClientOptions."""
    transport_path = (
        "google.cloud.bigquery.connection_v1.services.connection_service"
        ".transports.ConnectionServiceGrpcTransport.__init__"
    )
    with mock.patch(transport_path) as grpc_transport:
        grpc_transport.return_value = None
        client = ConnectionServiceClient(
            client_options={"api_endpoint": "squid.clam.whelk"}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            api_mtls_endpoint="squid.clam.whelk",
            client_cert_source=None,
            quota_project_id=None,
        )
+
+
def test_create_connection(
    transport: str = "grpc", request_type=gcbc_connection.CreateConnectionRequest
):
    client = ConnectionServiceClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API is mocked,
    # so an empty request is sufficient.
    request = request_type()

    # Patch the gRPC stub method and give it a canned response.
    with mock.patch.object(
        type(client._transport.create_connection), "__call__"
    ) as rpc:
        rpc.return_value = gcbc_connection.Connection(
            name="name_value",
            friendly_name="friendly_name_value",
            description="description_value",
            creation_time=1379,
            last_modified_time=1890,
            has_credential=True,
            cloud_sql=gcbc_connection.CloudSqlProperties(
                instance_id="instance_id_value"
            ),
        )

        response = client.create_connection(request)

        # Exactly one stub invocation, carrying the expected request type.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == gcbc_connection.CreateConnectionRequest()

    # The canned response fields must round-trip through the client.
    assert isinstance(response, gcbc_connection.Connection)
    assert response.name == "name_value"
    assert response.friendly_name == "friendly_name_value"
    assert response.description == "description_value"
    assert response.creation_time == 1379
    assert response.last_modified_time == 1890
    assert response.has_credential is True
+
+
def test_create_connection_from_dict():
    # Exercise the same code path with a dict-typed request.
    test_create_connection(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_create_connection_async(transport: str = "grpc_asyncio"):
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # The API is mocked, so an empty proto3 request suffices.
    request = gcbc_connection.CreateConnectionRequest()

    # Patch the async gRPC stub method and give it a canned awaitable response.
    with mock.patch.object(
        type(client._client._transport.create_connection), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcbc_connection.Connection(
                name="name_value",
                friendly_name="friendly_name_value",
                description="description_value",
                creation_time=1379,
                last_modified_time=1890,
                has_credential=True,
            )
        )

        response = await client.create_connection(request)

        # The stub was invoked with the request we sent.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == request

    # The canned response fields must round-trip through the client.
    assert isinstance(response, gcbc_connection.Connection)
    assert response.name == "name_value"
    assert response.friendly_name == "friendly_name_value"
    assert response.description == "description_value"
    assert response.creation_time == 1379
    assert response.last_modified_time == 1890
    assert response.has_credential is True
+
+
def test_create_connection_field_headers():
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Fields bound into the HTTP/1.1 URI must be propagated as
    # x-goog-request-params metadata; use a non-empty value.
    request = gcbc_connection.CreateConnectionRequest()
    request.parent = "parent/value"

    # Patch the gRPC stub method and give it a canned response.
    with mock.patch.object(
        type(client._transport.create_connection), "__call__"
    ) as rpc:
        rpc.return_value = gcbc_connection.Connection()

        client.create_connection(request)

        # One stub invocation carrying our request.
        assert len(rpc.mock_calls) == 1
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

    # The routing header for `parent` was attached.
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_create_connection_field_headers_async():
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Fields bound into the HTTP/1.1 URI must be propagated as
    # x-goog-request-params metadata; use a non-empty value.
    request = gcbc_connection.CreateConnectionRequest()
    request.parent = "parent/value"

    # Patch the async gRPC stub method and give it a canned awaitable response.
    with mock.patch.object(
        type(client._client._transport.create_connection), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcbc_connection.Connection()
        )

        await client.create_connection(request)

        # The stub was invoked with our request.
        assert len(rpc.mock_calls)
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

    # The routing header for `parent` was attached.
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
def test_create_connection_flattened():
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Patch the gRPC stub method and give it a canned response.
    with mock.patch.object(
        type(client._transport.create_connection), "__call__"
    ) as rpc:
        rpc.return_value = gcbc_connection.Connection()

        # Invoke with a truthy keyword (flattened) argument for each field.
        client.create_connection(
            parent="parent_value",
            connection=gcbc_connection.Connection(name="name_value"),
            connection_id="connection_id_value",
        )

        # Each flattened value must land on the request object.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].connection == gcbc_connection.Connection(name="name_value")
        assert args[0].connection_id == "connection_id_value"
+
+
def test_create_connection_flattened_error():
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Supplying both a request object and flattened fields is rejected.
    request = gcbc_connection.CreateConnectionRequest()
    with pytest.raises(ValueError):
        client.create_connection(
            request,
            parent="parent_value",
            connection=gcbc_connection.Connection(name="name_value"),
            connection_id="connection_id_value",
        )
+
+
@pytest.mark.asyncio
async def test_create_connection_flattened_async():
    """Flattened kwargs on the async client must populate the request object."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.create_connection), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (A previous plain-Connection assignment here was dead code: the
        # async client awaits the call, so only the FakeUnaryUnaryCall
        # return value is ever used.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcbc_connection.Connection()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_connection(
            parent="parent_value",
            connection=gcbc_connection.Connection(name="name_value"),
            connection_id="connection_id_value",
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"

        assert args[0].connection == gcbc_connection.Connection(name="name_value")

        assert args[0].connection_id == "connection_id_value"
+
+
@pytest.mark.asyncio
async def test_create_connection_flattened_error_async():
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Supplying both a request object and flattened fields is rejected.
    request = gcbc_connection.CreateConnectionRequest()
    with pytest.raises(ValueError):
        await client.create_connection(
            request,
            parent="parent_value",
            connection=gcbc_connection.Connection(name="name_value"),
            connection_id="connection_id_value",
        )
+
+
def test_get_connection(
    transport: str = "grpc", request_type=connection.GetConnectionRequest
):
    client = ConnectionServiceClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API is mocked,
    # so an empty request is sufficient.
    request = request_type()

    # Patch the gRPC stub method and give it a canned response.
    with mock.patch.object(type(client._transport.get_connection), "__call__") as rpc:
        rpc.return_value = connection.Connection(
            name="name_value",
            friendly_name="friendly_name_value",
            description="description_value",
            creation_time=1379,
            last_modified_time=1890,
            has_credential=True,
            cloud_sql=connection.CloudSqlProperties(instance_id="instance_id_value"),
        )

        response = client.get_connection(request)

        # Exactly one stub invocation, carrying the expected request type.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == connection.GetConnectionRequest()

    # The canned response fields must round-trip through the client.
    assert isinstance(response, connection.Connection)
    assert response.name == "name_value"
    assert response.friendly_name == "friendly_name_value"
    assert response.description == "description_value"
    assert response.creation_time == 1379
    assert response.last_modified_time == 1890
    assert response.has_credential is True
+
+
def test_get_connection_from_dict():
    # Exercise the same code path with a dict-typed request.
    test_get_connection(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_get_connection_async(transport: str = "grpc_asyncio"):
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # The API is mocked, so an empty proto3 request suffices.
    request = connection.GetConnectionRequest()

    # Patch the async gRPC stub method and give it a canned awaitable response.
    with mock.patch.object(
        type(client._client._transport.get_connection), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            connection.Connection(
                name="name_value",
                friendly_name="friendly_name_value",
                description="description_value",
                creation_time=1379,
                last_modified_time=1890,
                has_credential=True,
            )
        )

        response = await client.get_connection(request)

        # The stub was invoked with the request we sent.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == request

    # The canned response fields must round-trip through the client.
    assert isinstance(response, connection.Connection)
    assert response.name == "name_value"
    assert response.friendly_name == "friendly_name_value"
    assert response.description == "description_value"
    assert response.creation_time == 1379
    assert response.last_modified_time == 1890
    assert response.has_credential is True
+
+
def test_get_connection_field_headers():
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Fields bound into the HTTP/1.1 URI must be propagated as
    # x-goog-request-params metadata; use a non-empty value.
    request = connection.GetConnectionRequest()
    request.name = "name/value"

    # Patch the gRPC stub method and give it a canned response.
    with mock.patch.object(type(client._transport.get_connection), "__call__") as rpc:
        rpc.return_value = connection.Connection()

        client.get_connection(request)

        # One stub invocation carrying our request.
        assert len(rpc.mock_calls) == 1
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

    # The routing header for `name` was attached.
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_get_connection_field_headers_async():
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Fields bound into the HTTP/1.1 URI must be propagated as
    # x-goog-request-params metadata; use a non-empty value.
    request = connection.GetConnectionRequest()
    request.name = "name/value"

    # Patch the async gRPC stub method and give it a canned awaitable response.
    with mock.patch.object(
        type(client._client._transport.get_connection), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            connection.Connection()
        )

        await client.get_connection(request)

        # The stub was invoked with our request.
        assert len(rpc.mock_calls)
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

    # The routing header for `name` was attached.
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
def test_get_connection_flattened():
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Patch the gRPC stub method and give it a canned response.
    with mock.patch.object(type(client._transport.get_connection), "__call__") as rpc:
        rpc.return_value = connection.Connection()

        # Invoke with the flattened keyword argument only.
        client.get_connection(name="name_value")

        # The flattened value must land on the request object.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0].name == "name_value"
+
+
def test_get_connection_flattened_error():
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Supplying both a request object and flattened fields is rejected.
    request = connection.GetConnectionRequest()
    with pytest.raises(ValueError):
        client.get_connection(request, name="name_value")
+
+
@pytest.mark.asyncio
async def test_get_connection_flattened_async():
    """The flattened kwarg on the async client must populate the request object."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.get_connection), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (A previous plain-Connection assignment here was dead code: the
        # async client awaits the call, so only the FakeUnaryUnaryCall
        # return value is ever used.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            connection.Connection()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_connection(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"
+
+
@pytest.mark.asyncio
async def test_get_connection_flattened_error_async():
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Supplying both a request object and flattened fields is rejected.
    request = connection.GetConnectionRequest()
    with pytest.raises(ValueError):
        await client.get_connection(request, name="name_value")
+
+
def test_list_connections(
    transport: str = "grpc", request_type=connection.ListConnectionsRequest
):
    client = ConnectionServiceClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # The API is mocked, so an empty proto3 request suffices.
    request = request_type()

    # Patch the gRPC stub method and give it a canned response.
    with mock.patch.object(
        type(client._transport.list_connections), "__call__"
    ) as rpc:
        rpc.return_value = connection.ListConnectionsResponse(
            next_page_token="next_page_token_value",
        )

        response = client.list_connections(request)

        # Exactly one stub invocation, carrying the expected request type.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == connection.ListConnectionsRequest()

    # The client wraps the response in a pager exposing the page token.
    assert isinstance(response, pagers.ListConnectionsPager)
    assert response.next_page_token == "next_page_token_value"
+
+
def test_list_connections_from_dict():
    # Exercise the same code path with a dict-typed request.
    test_list_connections(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_list_connections_async(transport: str = "grpc_asyncio"):
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # The API is mocked, so an empty proto3 request suffices.
    request = connection.ListConnectionsRequest()

    # Patch the async gRPC stub method and give it a canned awaitable response.
    with mock.patch.object(
        type(client._client._transport.list_connections), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            connection.ListConnectionsResponse(next_page_token="next_page_token_value",)
        )

        response = await client.list_connections(request)

        # The stub was invoked with the request we sent.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == request

    # The client wraps the response in an async pager exposing the page token.
    assert isinstance(response, pagers.ListConnectionsAsyncPager)
    assert response.next_page_token == "next_page_token_value"
+
+
def test_list_connections_field_headers():
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Fields bound into the HTTP/1.1 URI must be propagated as
    # x-goog-request-params metadata; use a non-empty value.
    request = connection.ListConnectionsRequest()
    request.parent = "parent/value"

    # Patch the gRPC stub method and give it a canned response.
    with mock.patch.object(
        type(client._transport.list_connections), "__call__"
    ) as rpc:
        rpc.return_value = connection.ListConnectionsResponse()

        client.list_connections(request)

        # One stub invocation carrying our request.
        assert len(rpc.mock_calls) == 1
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

    # The routing header for `parent` was attached.
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_list_connections_field_headers_async():
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Fields bound into the HTTP/1.1 URI must be propagated as
    # x-goog-request-params metadata; use a non-empty value.
    request = connection.ListConnectionsRequest()
    request.parent = "parent/value"

    # Patch the async gRPC stub method and give it a canned awaitable response.
    with mock.patch.object(
        type(client._client._transport.list_connections), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            connection.ListConnectionsResponse()
        )

        await client.list_connections(request)

        # The stub was invoked with our request.
        assert len(rpc.mock_calls)
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

    # The routing header for `parent` was attached.
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
def test_list_connections_flattened():
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Patch the gRPC stub method and give it a canned response.
    with mock.patch.object(
        type(client._transport.list_connections), "__call__"
    ) as rpc:
        rpc.return_value = connection.ListConnectionsResponse()

        # Invoke with the flattened keyword argument only.
        client.list_connections(parent="parent_value")

        # The flattened value must land on the request object.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0].parent == "parent_value"
+
+
def test_list_connections_flattened_error():
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Supplying both a request object and flattened fields is rejected.
    request = connection.ListConnectionsRequest()
    with pytest.raises(ValueError):
        client.list_connections(request, parent="parent_value")
+
+
@pytest.mark.asyncio
async def test_list_connections_flattened_async():
    """The flattened kwarg on the async client must populate the request object."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_connections), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (A previous plain-ListConnectionsResponse assignment here was dead
        # code: the async client awaits the call, so only the
        # FakeUnaryUnaryCall return value is ever used.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            connection.ListConnectionsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_connections(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"
+
+
@pytest.mark.asyncio
async def test_list_connections_flattened_error_async():
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Supplying both a request object and flattened fields is rejected.
    request = connection.ListConnectionsRequest()
    with pytest.raises(ValueError):
        await client.list_connections(request, parent="parent_value")
+
+
def test_list_connections_pager():
    """Iterating the pager walks every connection across all mocked pages."""
    # BUG FIX: pass an AnonymousCredentials *instance*, not the class,
    # consistent with every other test in this file.
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.list_connections), "__call__"
    ) as call:
        # Set the response to a series of pages (3 + 0 + 1 + 2 = 6 items),
        # ending in RuntimeError so over-iteration fails loudly.
        call.side_effect = (
            connection.ListConnectionsResponse(
                connections=[
                    connection.Connection(),
                    connection.Connection(),
                    connection.Connection(),
                ],
                next_page_token="abc",
            ),
            connection.ListConnectionsResponse(connections=[], next_page_token="def",),
            connection.ListConnectionsResponse(
                connections=[connection.Connection(),], next_page_token="ghi",
            ),
            connection.ListConnectionsResponse(
                connections=[connection.Connection(), connection.Connection(),],
            ),
            RuntimeError,
        )

        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_connections(request={})

        # The pager carries the routing-header metadata for subsequent pages.
        assert pager._metadata == metadata

        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, connection.Connection) for i in results)
+
+
def test_list_connections_pages():
    """Iterating `.pages` yields each raw page with its page token."""
    # BUG FIX: pass an AnonymousCredentials *instance*, not the class,
    # consistent with every other test in this file.
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.list_connections), "__call__"
    ) as call:
        # Set the response to a series of pages, ending in RuntimeError so
        # over-iteration fails loudly.
        call.side_effect = (
            connection.ListConnectionsResponse(
                connections=[
                    connection.Connection(),
                    connection.Connection(),
                    connection.Connection(),
                ],
                next_page_token="abc",
            ),
            connection.ListConnectionsResponse(connections=[], next_page_token="def",),
            connection.ListConnectionsResponse(
                connections=[connection.Connection(),], next_page_token="ghi",
            ),
            connection.ListConnectionsResponse(
                connections=[connection.Connection(), connection.Connection(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_connections(request={}).pages)
        for page, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page.raw_page.next_page_token == token
+
+
@pytest.mark.asyncio
async def test_list_connections_async_pager():
    """Async-iterating the pager yields every connection across all pages."""
    # BUG FIX: pass an AnonymousCredentials *instance*, not the class,
    # consistent with every other test in this file.
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_connections),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages (3 + 0 + 1 + 2 = 6 items),
        # ending in RuntimeError so over-iteration fails loudly.
        call.side_effect = (
            connection.ListConnectionsResponse(
                connections=[
                    connection.Connection(),
                    connection.Connection(),
                    connection.Connection(),
                ],
                next_page_token="abc",
            ),
            connection.ListConnectionsResponse(connections=[], next_page_token="def",),
            connection.ListConnectionsResponse(
                connections=[connection.Connection(),], next_page_token="ghi",
            ),
            connection.ListConnectionsResponse(
                connections=[connection.Connection(), connection.Connection(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_connections(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, connection.Connection) for i in responses)
+
+
@pytest.mark.asyncio
async def test_list_connections_async_pages():
    """Async-iterating `.pages` yields each raw page with its page token."""
    # BUG FIX: pass an AnonymousCredentials *instance*, not the class,
    # consistent with every other test in this file.
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.list_connections),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages, ending in RuntimeError so
        # over-iteration fails loudly.
        call.side_effect = (
            connection.ListConnectionsResponse(
                connections=[
                    connection.Connection(),
                    connection.Connection(),
                    connection.Connection(),
                ],
                next_page_token="abc",
            ),
            connection.ListConnectionsResponse(connections=[], next_page_token="def",),
            connection.ListConnectionsResponse(
                connections=[connection.Connection(),], next_page_token="ghi",
            ),
            connection.ListConnectionsResponse(
                connections=[connection.Connection(), connection.Connection(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page in (await client.list_connections(request={})).pages:
            pages.append(page)
        for page, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page.raw_page.next_page_token == token
+
+
def test_update_connection(
    transport: str = "grpc", request_type=gcbc_connection.UpdateConnectionRequest
):
    client = ConnectionServiceClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API is mocked,
    # so an empty request is sufficient.
    request = request_type()

    # Patch the gRPC stub method and give it a canned response.
    with mock.patch.object(
        type(client._transport.update_connection), "__call__"
    ) as rpc:
        rpc.return_value = gcbc_connection.Connection(
            name="name_value",
            friendly_name="friendly_name_value",
            description="description_value",
            creation_time=1379,
            last_modified_time=1890,
            has_credential=True,
            cloud_sql=gcbc_connection.CloudSqlProperties(
                instance_id="instance_id_value"
            ),
        )

        response = client.update_connection(request)

        # Exactly one stub invocation, carrying the expected request type.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == gcbc_connection.UpdateConnectionRequest()

    # The canned response fields must round-trip through the client.
    assert isinstance(response, gcbc_connection.Connection)
    assert response.name == "name_value"
    assert response.friendly_name == "friendly_name_value"
    assert response.description == "description_value"
    assert response.creation_time == 1379
    assert response.last_modified_time == 1890
    assert response.has_credential is True
+
+
def test_update_connection_from_dict():
    # Exercise the same code path with a dict-typed request.
    test_update_connection(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_update_connection_async(transport: str = "grpc_asyncio"):
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # The API is mocked, so an empty proto3 request suffices.
    request = gcbc_connection.UpdateConnectionRequest()

    # Patch the async gRPC stub method and give it a canned awaitable response.
    with mock.patch.object(
        type(client._client._transport.update_connection), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcbc_connection.Connection(
                name="name_value",
                friendly_name="friendly_name_value",
                description="description_value",
                creation_time=1379,
                last_modified_time=1890,
                has_credential=True,
            )
        )

        response = await client.update_connection(request)

        # The stub was invoked with the request we sent.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == request

    # The canned response fields must round-trip through the client.
    assert isinstance(response, gcbc_connection.Connection)
    assert response.name == "name_value"
    assert response.friendly_name == "friendly_name_value"
    assert response.description == "description_value"
    assert response.creation_time == 1379
    assert response.last_modified_time == 1890
    assert response.has_credential is True
+
+
def test_update_connection_field_headers():
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Fields bound into the HTTP/1.1 URI must be propagated as
    # x-goog-request-params metadata; use a non-empty value.
    request = gcbc_connection.UpdateConnectionRequest()
    request.name = "name/value"

    # Patch the gRPC stub method and give it a canned response.
    with mock.patch.object(
        type(client._transport.update_connection), "__call__"
    ) as rpc:
        rpc.return_value = gcbc_connection.Connection()

        client.update_connection(request)

        # One stub invocation carrying our request.
        assert len(rpc.mock_calls) == 1
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

    # The routing header for `name` was attached.
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_update_connection_field_headers_async():
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Fields bound into the HTTP/1.1 URI must be propagated as
    # x-goog-request-params metadata; use a non-empty value.
    request = gcbc_connection.UpdateConnectionRequest()
    request.name = "name/value"

    # Patch the async gRPC stub method and give it a canned awaitable response.
    with mock.patch.object(
        type(client._client._transport.update_connection), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcbc_connection.Connection()
        )

        await client.update_connection(request)

        # The stub was invoked with our request.
        assert len(rpc.mock_calls)
        _, args, kw = rpc.mock_calls[0]
        assert args[0] == request

    # The routing header for `name` was attached.
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
def test_update_connection_flattened():
    """Verify flattened kwargs are packed into an UpdateConnectionRequest."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.update_connection), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = gcbc_connection.Connection()

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_connection(
            name="name_value",
            connection=gcbc_connection.Connection(name="name_value"),
            update_mask=field_mask.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"

        assert args[0].connection == gcbc_connection.Connection(name="name_value")

        assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
+
+
def test_update_connection_flattened_error():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_connection(
            gcbc_connection.UpdateConnectionRequest(),
            name="name_value",
            connection=gcbc_connection.Connection(name="name_value"),
            update_mask=field_mask.FieldMask(paths=["paths_value"]),
        )
+
+
@pytest.mark.asyncio
async def test_update_connection_flattened_async():
    """Async variant: verify flattened kwargs are packed into an UpdateConnectionRequest."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.update_connection), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (The plain Connection() assignment that used to precede this was
        # dead code: it was immediately overwritten, so it has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcbc_connection.Connection()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_connection(
            name="name_value",
            connection=gcbc_connection.Connection(name="name_value"),
            update_mask=field_mask.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"

        assert args[0].connection == gcbc_connection.Connection(name="name_value")

        assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
+
+
@pytest.mark.asyncio
async def test_update_connection_flattened_error_async():
    """Async variant: mixing a request object with flattened fields raises ValueError."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.update_connection(
            gcbc_connection.UpdateConnectionRequest(),
            name="name_value",
            connection=gcbc_connection.Connection(name="name_value"),
            update_mask=field_mask.FieldMask(paths=["paths_value"]),
        )
+
+
def test_delete_connection(
    transport: str = "grpc", request_type=connection.DeleteConnectionRequest
):
    """Verify delete_connection issues the RPC and returns None (Empty response)."""
    client = ConnectionServiceClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.delete_connection), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None

        response = client.delete_connection(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == connection.DeleteConnectionRequest()

    # Establish that the response is the type that we expect.
    assert response is None
+
+
def test_delete_connection_from_dict():
    """Re-run the delete_connection test with a dict-typed request."""
    test_delete_connection(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_delete_connection_async(transport: str = "grpc_asyncio"):
    """Async variant: verify delete_connection issues the RPC and returns None."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = connection.DeleteConnectionRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.delete_connection), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)

        response = await client.delete_connection(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert response is None
+
+
def test_delete_connection_field_headers():
    """Verify delete_connection sends ``request.name`` as an x-goog-request-params header."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = connection.DeleteConnectionRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.delete_connection), "__call__"
    ) as call:
        call.return_value = None

        client.delete_connection(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_delete_connection_field_headers_async():
    """Async variant: verify delete_connection sends ``request.name`` as a routing header."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = connection.DeleteConnectionRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.delete_connection), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)

        await client.delete_connection(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
def test_delete_connection_flattened():
    """Verify the flattened ``name`` kwarg is packed into the request."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.delete_connection), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.delete_connection(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"
+
+
def test_delete_connection_flattened_error():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete_connection(
            connection.DeleteConnectionRequest(), name="name_value",
        )
+
+
@pytest.mark.asyncio
async def test_delete_connection_flattened_async():
    """Async variant: verify the flattened ``name`` kwarg is packed into the request."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.delete_connection), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (The plain `= None` assignment that used to precede this was dead
        # code: it was immediately overwritten, so it has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_connection(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].name == "name_value"
+
+
@pytest.mark.asyncio
async def test_delete_connection_flattened_error_async():
    """Async variant: mixing a request object with flattened fields raises ValueError."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.delete_connection(
            connection.DeleteConnectionRequest(), name="name_value",
        )
+
+
def test_get_iam_policy(
    transport: str = "grpc", request_type=iam_policy.GetIamPolicyRequest
):
    """Verify get_iam_policy issues the RPC and unpacks the Policy response."""
    client = ConnectionServiceClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy.Policy(version=774, etag=b"etag_blob",)

        response = client.get_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == iam_policy.GetIamPolicyRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, policy.Policy)

    assert response.version == 774

    assert response.etag == b"etag_blob"
+
+
def test_get_iam_policy_from_dict():
    """Re-run the get_iam_policy test with a dict-typed request.

    NOTE(review): a second function with this exact name is defined later in
    the file; unless that duplicate is renamed, this definition is shadowed
    and never collected by pytest.
    """
    test_get_iam_policy(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
    """Async variant: verify get_iam_policy issues the RPC and unpacks the Policy."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = iam_policy.GetIamPolicyRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.get_iam_policy), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            policy.Policy(version=774, etag=b"etag_blob",)
        )

        response = await client.get_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, policy.Policy)

    assert response.version == 774

    assert response.etag == b"etag_blob"
+
+
def test_get_iam_policy_field_headers():
    """Verify get_iam_policy sends ``request.resource`` as an x-goog-request-params header."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = iam_policy.GetIamPolicyRequest()
    request.resource = "resource/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
        call.return_value = policy.Policy()

        client.get_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_get_iam_policy_field_headers_async():
    """Async variant: verify get_iam_policy sends ``request.resource`` as a routing header."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = iam_policy.GetIamPolicyRequest()
    request.resource = "resource/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.get_iam_policy), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())

        await client.get_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
def test_get_iam_policy_from_dict_foreign():
    """Verify get_iam_policy accepts a plain-dict request for the foreign IAM types.

    Renamed from ``test_get_iam_policy_from_dict``: that name was already
    defined earlier in the file, so this definition silently shadowed the
    earlier test and pytest never ran it.
    """
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy.Policy()

        response = client.get_iam_policy(
            request={
                "resource": "resource_value",
                "options": options.GetPolicyOptions(requested_policy_version=2598),
            }
        )
        call.assert_called()
+
+
def test_get_iam_policy_flattened():
    """Verify the flattened ``resource`` kwarg is packed into the request."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy.Policy()

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_iam_policy(resource="resource_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].resource == "resource_value"
+
+
def test_get_iam_policy_flattened_error():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_iam_policy(
            iam_policy.GetIamPolicyRequest(), resource="resource_value",
        )
+
+
@pytest.mark.asyncio
async def test_get_iam_policy_flattened_async():
    """Async variant: verify the flattened ``resource`` kwarg is packed into the request."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.get_iam_policy), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (The plain Policy() assignment that used to precede this was dead
        # code: it was immediately overwritten, so it has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_iam_policy(resource="resource_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].resource == "resource_value"
+
+
@pytest.mark.asyncio
async def test_get_iam_policy_flattened_error_async():
    """Async variant: mixing a request object with flattened fields raises ValueError."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_iam_policy(
            iam_policy.GetIamPolicyRequest(), resource="resource_value",
        )
+
+
def test_set_iam_policy(
    transport: str = "grpc", request_type=iam_policy.SetIamPolicyRequest
):
    """Verify set_iam_policy issues the RPC and unpacks the Policy response."""
    client = ConnectionServiceClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy.Policy(version=774, etag=b"etag_blob",)

        response = client.set_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == iam_policy.SetIamPolicyRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, policy.Policy)

    assert response.version == 774

    assert response.etag == b"etag_blob"
+
+
def test_set_iam_policy_from_dict():
    """Re-run the set_iam_policy test with a dict-typed request.

    NOTE(review): a second function with this exact name is defined later in
    the file; unless that duplicate is renamed, this definition is shadowed
    and never collected by pytest.
    """
    test_set_iam_policy(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
    """Async variant: verify set_iam_policy issues the RPC and unpacks the Policy."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = iam_policy.SetIamPolicyRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.set_iam_policy), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            policy.Policy(version=774, etag=b"etag_blob",)
        )

        response = await client.set_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, policy.Policy)

    assert response.version == 774

    assert response.etag == b"etag_blob"
+
+
def test_set_iam_policy_field_headers():
    """Verify set_iam_policy sends ``request.resource`` as an x-goog-request-params header."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = iam_policy.SetIamPolicyRequest()
    request.resource = "resource/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
        call.return_value = policy.Policy()

        client.set_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_set_iam_policy_field_headers_async():
    """Async variant: verify set_iam_policy sends ``request.resource`` as a routing header."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = iam_policy.SetIamPolicyRequest()
    request.resource = "resource/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.set_iam_policy), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())

        await client.set_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
def test_set_iam_policy_from_dict_foreign():
    """Verify set_iam_policy accepts a plain-dict request for the foreign IAM types.

    Renamed from ``test_set_iam_policy_from_dict``: that name was already
    defined earlier in the file, so this definition silently shadowed the
    earlier test and pytest never ran it.
    """
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy.Policy()

        response = client.set_iam_policy(
            request={
                "resource": "resource_value",
                "policy": policy.Policy(version=774),
            }
        )
        call.assert_called()
+
+
def test_set_iam_policy_flattened():
    """Verify the flattened ``resource`` kwarg is packed into the request."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy.Policy()

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.set_iam_policy(resource="resource_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].resource == "resource_value"
+
+
def test_set_iam_policy_flattened_error():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.set_iam_policy(
            iam_policy.SetIamPolicyRequest(), resource="resource_value",
        )
+
+
@pytest.mark.asyncio
async def test_set_iam_policy_flattened_async():
    """Async variant: verify the flattened ``resource`` kwarg is packed into the request."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.set_iam_policy), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (The plain Policy() assignment that used to precede this was dead
        # code: it was immediately overwritten, so it has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.set_iam_policy(resource="resource_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].resource == "resource_value"
+
+
@pytest.mark.asyncio
async def test_set_iam_policy_flattened_error_async():
    """Async variant: mixing a request object with flattened fields raises ValueError."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.set_iam_policy(
            iam_policy.SetIamPolicyRequest(), resource="resource_value",
        )
+
+
def test_test_iam_permissions(
    transport: str = "grpc", request_type=iam_policy.TestIamPermissionsRequest
):
    """Verify test_iam_permissions issues the RPC and unpacks the permissions list."""
    client = ConnectionServiceClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.test_iam_permissions), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = iam_policy.TestIamPermissionsResponse(
            permissions=["permissions_value"],
        )

        response = client.test_iam_permissions(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == iam_policy.TestIamPermissionsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, iam_policy.TestIamPermissionsResponse)

    assert response.permissions == ["permissions_value"]
+
+
def test_test_iam_permissions_from_dict():
    """Re-run the test_iam_permissions test with a dict-typed request.

    NOTE(review): a second function with this exact name is defined later in
    the file; unless that duplicate is renamed, this definition is shadowed
    and never collected by pytest.
    """
    test_test_iam_permissions(request_type=dict)
+
+
@pytest.mark.asyncio
async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
    """Async variant: verify test_iam_permissions issues the RPC and unpacks the response."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = iam_policy.TestIamPermissionsRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.test_iam_permissions), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],)
        )

        response = await client.test_iam_permissions(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, iam_policy.TestIamPermissionsResponse)

    assert response.permissions == ["permissions_value"]
+
+
def test_test_iam_permissions_field_headers():
    """Verify test_iam_permissions sends ``request.resource`` as an x-goog-request-params header."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = iam_policy.TestIamPermissionsRequest()
    request.resource = "resource/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.test_iam_permissions), "__call__"
    ) as call:
        call.return_value = iam_policy.TestIamPermissionsResponse()

        client.test_iam_permissions(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_test_iam_permissions_field_headers_async():
    """Async variant: verify test_iam_permissions sends ``request.resource`` as a routing header."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = iam_policy.TestIamPermissionsRequest()
    request.resource = "resource/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.test_iam_permissions), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            iam_policy.TestIamPermissionsResponse()
        )

        await client.test_iam_permissions(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
+
+
def test_test_iam_permissions_from_dict_foreign():
    """Verify test_iam_permissions accepts a plain-dict request for the foreign IAM types.

    Renamed from ``test_test_iam_permissions_from_dict``: that name was
    already defined earlier in the file, so this definition silently shadowed
    the earlier test and pytest never ran it.
    """
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.test_iam_permissions), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = iam_policy.TestIamPermissionsResponse()

        response = client.test_iam_permissions(
            request={
                "resource": "resource_value",
                "permissions": ["permissions_value"],
            }
        )
        call.assert_called()
+
+
def test_test_iam_permissions_flattened():
    """Verify the flattened ``resource``/``permissions`` kwargs are packed into the request."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.test_iam_permissions), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = iam_policy.TestIamPermissionsResponse()

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.test_iam_permissions(
            resource="resource_value", permissions=["permissions_value"],
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].resource == "resource_value"

        assert args[0].permissions == ["permissions_value"]
+
+
def test_test_iam_permissions_flattened_error():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.test_iam_permissions(
            iam_policy.TestIamPermissionsRequest(),
            resource="resource_value",
            permissions=["permissions_value"],
        )
+
+
@pytest.mark.asyncio
async def test_test_iam_permissions_flattened_async():
    """Async variant: verify the flattened kwargs are packed into the request."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.test_iam_permissions), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # (The plain TestIamPermissionsResponse() assignment that used to
        # precede this was dead code: it was immediately overwritten, so it
        # has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            iam_policy.TestIamPermissionsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.test_iam_permissions(
            resource="resource_value", permissions=["permissions_value"],
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].resource == "resource_value"

        assert args[0].permissions == ["permissions_value"]
+
+
@pytest.mark.asyncio
async def test_test_iam_permissions_flattened_error_async():
    """Async variant: mixing a request object with flattened fields raises ValueError."""
    client = ConnectionServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.test_iam_permissions(
            iam_policy.TestIamPermissionsRequest(),
            resource="resource_value",
            permissions=["permissions_value"],
        )
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.ConnectionServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = ConnectionServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.ConnectionServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = ConnectionServiceClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.ConnectionServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = ConnectionServiceClient(
+ client_options={"scopes": ["1", "2"]}, transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.ConnectionServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ client = ConnectionServiceClient(transport=transport)
+ assert client._transport is transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.ConnectionServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.ConnectionServiceGrpcAsyncIOTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+def test_transport_grpc_default():
+ # A client should use the gRPC transport by default.
+ client = ConnectionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ assert isinstance(client._transport, transports.ConnectionServiceGrpcTransport,)
+
+
+def test_connection_service_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(exceptions.DuplicateCredentialArgs):
+ transport = transports.ConnectionServiceTransport(
+ credentials=credentials.AnonymousCredentials(),
+ credentials_file="credentials.json",
+ )
+
+
+def test_connection_service_base_transport():
+ # Instantiate the base transport.
+ with mock.patch(
+ "google.cloud.bigquery.connection_v1.services.connection_service.transports.ConnectionServiceTransport.__init__"
+ ) as Transport:
+ Transport.return_value = None
+ transport = transports.ConnectionServiceTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Every method on the transport should just blindly
+ # raise NotImplementedError.
+ methods = (
+ "create_connection",
+ "get_connection",
+ "list_connections",
+ "update_connection",
+ "delete_connection",
+ "get_iam_policy",
+ "set_iam_policy",
+ "test_iam_permissions",
+ )
+ for method in methods:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, method)(request=object())
+
+
+def test_connection_service_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ auth, "load_credentials_from_file"
+ ) as load_creds, mock.patch(
+ "google.cloud.bigquery.connection_v1.services.connection_service.transports.ConnectionServiceTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.ConnectionServiceTransport(
+ credentials_file="credentials.json", quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ quota_project_id="octopus",
+ )
+
+
+def test_connection_service_auth_adc():
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ ConnectionServiceClient()
+ adc.assert_called_once_with(
+ scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ quota_project_id=None,
+ )
+
+
+def test_connection_service_transport_auth_adc():
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transports.ConnectionServiceGrpcTransport(
+ host="squid.clam.whelk", quota_project_id="octopus"
+ )
+ adc.assert_called_once_with(
+ scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ quota_project_id="octopus",
+ )
+
+
+def test_connection_service_host_no_port():
+ client = ConnectionServiceClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="bigqueryconnection.googleapis.com"
+ ),
+ )
+ assert client._transport._host == "bigqueryconnection.googleapis.com:443"
+
+
+def test_connection_service_host_with_port():
+ client = ConnectionServiceClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="bigqueryconnection.googleapis.com:8000"
+ ),
+ )
+ assert client._transport._host == "bigqueryconnection.googleapis.com:8000"
+
+
+def test_connection_service_grpc_transport_channel():
+ channel = grpc.insecure_channel("http://localhost/")
+
+ # Check that if channel is provided, mtls endpoint and client_cert_source
+ # won't be used.
+ callback = mock.MagicMock()
+ transport = transports.ConnectionServiceGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=callback,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert not callback.called
+
+
+def test_connection_service_grpc_asyncio_transport_channel():
+ channel = aio.insecure_channel("http://localhost/")
+
+ # Check that if channel is provided, mtls endpoint and client_cert_source
+ # won't be used.
+ callback = mock.MagicMock()
+ transport = transports.ConnectionServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=callback,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert not callback.called
+
+
+@mock.patch("grpc.ssl_channel_credentials", autospec=True)
+@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
+def test_connection_service_grpc_transport_channel_mtls_with_client_cert_source(
+ grpc_create_channel, grpc_ssl_channel_cred
+):
+ # Check that if channel is None, but api_mtls_endpoint and client_cert_source
+ # are provided, then an mTLS channel will be created.
+ mock_cred = mock.Mock()
+
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ transport = transports.ConnectionServiceGrpcTransport(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+@mock.patch("grpc.ssl_channel_credentials", autospec=True)
+@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
+def test_connection_service_grpc_asyncio_transport_channel_mtls_with_client_cert_source(
+ grpc_create_channel, grpc_ssl_channel_cred
+):
+ # Check that if channel is None, but api_mtls_endpoint and client_cert_source
+ # are provided, then an mTLS channel will be created.
+ mock_cred = mock.Mock()
+
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ transport = transports.ConnectionServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+@pytest.mark.parametrize(
+ "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+)
+@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
+def test_connection_service_grpc_transport_channel_mtls_with_adc(
+ grpc_create_channel, api_mtls_endpoint
+):
+ # Check that if channel and client_cert_source are None, but api_mtls_endpoint
+ # is provided, then an mTLS channel will be created with SSL ADC.
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ # Mock google.auth.transport.grpc.SslCredentials class.
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ mock_cred = mock.Mock()
+ transport = transports.ConnectionServiceGrpcTransport(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint=api_mtls_endpoint,
+ client_cert_source=None,
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+@pytest.mark.parametrize(
+ "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+)
+@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
+def test_connection_service_grpc_asyncio_transport_channel_mtls_with_adc(
+ grpc_create_channel, api_mtls_endpoint
+):
+ # Check that if channel and client_cert_source are None, but api_mtls_endpoint
+ # is provided, then an mTLS channel will be created with SSL ADC.
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ # Mock google.auth.transport.grpc.SslCredentials class.
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ mock_cred = mock.Mock()
+ transport = transports.ConnectionServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint=api_mtls_endpoint,
+ client_cert_source=None,
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/bigquery",
+ "https://www.googleapis.com/auth/cloud-platform",
+ ),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_connection_path():
+ project = "squid"
+ location = "clam"
+ connection = "whelk"
+
+ expected = "projects/{project}/locations/{location}/connections/{connection}".format(
+ project=project, location=location, connection=connection,
+ )
+ actual = ConnectionServiceClient.connection_path(project, location, connection)
+ assert expected == actual
+
+
+def test_parse_connection_path():
+ expected = {
+ "project": "octopus",
+ "location": "oyster",
+ "connection": "nudibranch",
+ }
+ path = ConnectionServiceClient.connection_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = ConnectionServiceClient.parse_connection_path(path)
+ assert expected == actual