From 2642721a733957530e9fb1ac315af645191038cd Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Fri, 25 Jun 2021 13:10:24 +0000
Subject: [PATCH] chore: prepare v1beta3 (#2)

PiperOrigin-RevId: 381356178
Source-Link: https://github.com/googleapis/googleapis/commit/6fa6c00ba207f927999d80a3c6ef2542d2739eeb
Source-Link: https://github.com/googleapis/googleapis-gen/commit/ba89d923e4f6ed02d9fdce93fa4a817578063ace
---
 .../google-cloud-dataflow-client/.coveragerc | 2 +-
 .../flex_templates_service.rst | 2 +-
 .../docs/dataflow_v1beta3/jobs_v1_beta3.rst | 4 +-
 .../dataflow_v1beta3/messages_v1_beta3.rst | 4 +-
 .../dataflow_v1beta3/metrics_v1_beta3.rst | 4 +-
 .../docs/dataflow_v1beta3/services.rst | 4 +-
 .../dataflow_v1beta3/snapshots_v1_beta3.rst | 2 +-
 .../dataflow_v1beta3/templates_service.rst | 2 +-
 .../docs/dataflow_v1beta3/types.rst | 6 +-
 .../google/cloud/dataflow/__init__.py | 280 +++++
 .../google/cloud/dataflow/py.typed | 2 +
 .../google/cloud/dataflow_v1beta3/__init__.py | 254 +++++
 .../dataflow_v1beta3/gapic_metadata.json | 273 +++++
 .../google/cloud/dataflow_v1beta3/py.typed | 2 +
 .../dataflow_v1beta3/services/__init__.py | 15 +
 .../flex_templates_service/__init__.py | 22 +
 .../flex_templates_service/async_client.py | 217 ++++
 .../services/flex_templates_service/client.py | 390 +++++++
 .../transports/__init__.py | 35 +
 .../flex_templates_service/transports/base.py | 180 ++++
 .../flex_templates_service/transports/grpc.py | 255 +++++
 .../transports/grpc_asyncio.py | 259 +++++
 .../services/jobs_v1_beta3/__init__.py | 22 +
 .../services/jobs_v1_beta3/async_client.py | 529 ++++++++++
 .../services/jobs_v1_beta3/client.py | 709 +++++++++++++
 .../services/jobs_v1_beta3/pagers.py | 283 +++++
 .../jobs_v1_beta3/transports/__init__.py | 33 +
 .../services/jobs_v1_beta3/transports/base.py | 241 +++++
 .../services/jobs_v1_beta3/transports/grpc.py | 429 ++++++++
 .../jobs_v1_beta3/transports/grpc_asyncio.py | 438 ++++++++
 .../services/messages_v1_beta3/__init__.py | 22 +
 .../messages_v1_beta3/async_client.py | 236 +++++
 .../services/messages_v1_beta3/client.py | 408 +++++++
 .../services/messages_v1_beta3/pagers.py | 155 +++
 .../messages_v1_beta3/transports/__init__.py | 33 +
 .../messages_v1_beta3/transports/base.py | 178 ++++
 .../messages_v1_beta3/transports/grpc.py | 261 +++++
 .../transports/grpc_asyncio.py | 266 +++++
 .../services/metrics_v1_beta3/__init__.py | 22 +
 .../services/metrics_v1_beta3/async_client.py | 345 ++++++
 .../services/metrics_v1_beta3/client.py | 523 +++++++++
 .../services/metrics_v1_beta3/pagers.py | 283 +++++
 .../metrics_v1_beta3/transports/__init__.py | 33 +
 .../metrics_v1_beta3/transports/base.py | 203 ++++
 .../metrics_v1_beta3/transports/grpc.py | 322 ++++++
 .../transports/grpc_asyncio.py | 328 ++++++
 .../services/snapshots_v1_beta3/__init__.py | 22 +
 .../snapshots_v1_beta3/async_client.py | 295 ++++++
 .../services/snapshots_v1_beta3/client.py | 469 +++++++++
 .../snapshots_v1_beta3/transports/__init__.py | 33 +
 .../snapshots_v1_beta3/transports/base.py | 204 ++++
 .../snapshots_v1_beta3/transports/grpc.py | 305 ++++++
 .../transports/grpc_asyncio.py | 312 ++++++
.../cloud/dataflow_v1beta3/types/__init__.py | 242 +++++ .../dataflow_v1beta3/types/environment.py | 646 ++++++++++++ .../cloud/dataflow_v1beta3/types/jobs.py | 991 ++++++++++++++++++ .../cloud/dataflow_v1beta3/types/messages.py | 211 ++++ .../cloud/dataflow_v1beta3/types/metrics.py | 412 ++++++++ .../cloud/dataflow_v1beta3/types/snapshots.py | 173 +++ .../cloud/dataflow_v1beta3/types/streaming.py | 354 +++++++ .../cloud/dataflow_v1beta3/types/templates.py | 676 ++++++++++++ .../google-cloud-dataflow-client/setup.py | 4 +- .../test_flex_templates_service.py | 24 +- .../dataflow_v1beta3/test_jobs_v1_beta3.py | 26 +- .../test_messages_v1_beta3.py | 24 +- .../dataflow_v1beta3/test_metrics_v1_beta3.py | 24 +- .../test_snapshots_v1_beta3.py | 22 +- .../test_templates_service.py | 26 +- 75 files changed, 15585 insertions(+), 86 deletions(-) create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow/py.typed create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_metadata.json create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/py.typed create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/pagers.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/__init__.py create mode 
100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/pagers.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/pagers.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py create mode 100644 
packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/__init__.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/environment.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/jobs.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/messages.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/metrics.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/snapshots.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/streaming.py create mode 100644 packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/templates.py diff --git a/packages/google-cloud-dataflow-client/.coveragerc b/packages/google-cloud-dataflow-client/.coveragerc index f9ea028315e3..da7caa1b092a 100644 --- a/packages/google-cloud-dataflow-client/.coveragerc +++ b/packages/google-cloud-dataflow-client/.coveragerc @@ -4,7 +4,7 @@ branch = True [report] show_missing = True omit = - google/dataflow/__init__.py + google/cloud/dataflow/__init__.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/flex_templates_service.rst b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/flex_templates_service.rst index f7c98a28be9c..5fc44615d3bb 100644 --- a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/flex_templates_service.rst +++ b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/flex_templates_service.rst @@ -1,6 +1,6 @@ FlexTemplatesService -------------------------------------- -.. automodule:: google.dataflow_v1beta3.services.flex_templates_service +.. automodule:: google.cloud.dataflow_v1beta3.services.flex_templates_service :members: :inherited-members: diff --git a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/jobs_v1_beta3.rst b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/jobs_v1_beta3.rst index 3bd2ecce9695..d2d95a880de6 100644 --- a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/jobs_v1_beta3.rst +++ b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/jobs_v1_beta3.rst @@ -1,10 +1,10 @@ JobsV1Beta3 ----------------------------- -.. automodule:: google.dataflow_v1beta3.services.jobs_v1_beta3 +.. automodule:: google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 :members: :inherited-members: -.. automodule:: google.dataflow_v1beta3.services.jobs_v1_beta3.pagers +.. automodule:: google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers :members: :inherited-members: diff --git a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/messages_v1_beta3.rst b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/messages_v1_beta3.rst index 9f33c24fbc11..09152056298c 100644 --- a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/messages_v1_beta3.rst +++ b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/messages_v1_beta3.rst @@ -1,10 +1,10 @@ MessagesV1Beta3 --------------------------------- -.. automodule:: google.dataflow_v1beta3.services.messages_v1_beta3 +.. 
automodule:: google.cloud.dataflow_v1beta3.services.messages_v1_beta3 :members: :inherited-members: -.. automodule:: google.dataflow_v1beta3.services.messages_v1_beta3.pagers +.. automodule:: google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers :members: :inherited-members: diff --git a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/metrics_v1_beta3.rst b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/metrics_v1_beta3.rst index a5c63d04c89f..8ca559432141 100644 --- a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/metrics_v1_beta3.rst +++ b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/metrics_v1_beta3.rst @@ -1,10 +1,10 @@ MetricsV1Beta3 -------------------------------- -.. automodule:: google.dataflow_v1beta3.services.metrics_v1_beta3 +.. automodule:: google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 :members: :inherited-members: -.. automodule:: google.dataflow_v1beta3.services.metrics_v1_beta3.pagers +.. automodule:: google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers :members: :inherited-members: diff --git a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/services.rst b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/services.rst index ec07934a8306..d890af6b2f41 100644 --- a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/services.rst +++ b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/services.rst @@ -1,5 +1,5 @@ -Services for Google Dataflow v1beta3 API -======================================== +Services for Google Cloud Dataflow v1beta3 API +============================================== .. toctree:: :maxdepth: 2 diff --git a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/snapshots_v1_beta3.rst b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/snapshots_v1_beta3.rst index 5b5b7744d470..4619e4d64523 100644 --- a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/snapshots_v1_beta3.rst +++ b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/snapshots_v1_beta3.rst @@ -1,6 +1,6 @@ SnapshotsV1Beta3 ---------------------------------- -.. automodule:: google.dataflow_v1beta3.services.snapshots_v1_beta3 +.. automodule:: google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 :members: :inherited-members: diff --git a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/templates_service.rst b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/templates_service.rst index 2a0c8d225cb3..ad832aa28e80 100644 --- a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/templates_service.rst +++ b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/templates_service.rst @@ -1,6 +1,6 @@ TemplatesService ---------------------------------- -.. automodule:: google.dataflow_v1beta3.services.templates_service +.. automodule:: google.cloud.dataflow_v1beta3.services.templates_service :members: :inherited-members: diff --git a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/types.rst b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/types.rst index 1838f357f10e..368642a795bd 100644 --- a/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/types.rst +++ b/packages/google-cloud-dataflow-client/docs/dataflow_v1beta3/types.rst @@ -1,7 +1,7 @@ -Types for Google Dataflow v1beta3 API -===================================== +Types for Google Cloud Dataflow v1beta3 API +=========================================== -.. automodule:: google.dataflow_v1beta3.types +.. 
automodule:: google.cloud.dataflow_v1beta3.types :members: :undoc-members: :show-inheritance: diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow/__init__.py new file mode 100644 index 000000000000..598b957e807c --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow/__init__.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.cloud.dataflow_v1beta3.services.flex_templates_service.client import ( + FlexTemplatesServiceClient, +) +from google.cloud.dataflow_v1beta3.services.flex_templates_service.async_client import ( + FlexTemplatesServiceAsyncClient, +) +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.client import ( + JobsV1Beta3Client, +) +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.async_client import ( + JobsV1Beta3AsyncClient, +) +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3.client import ( + MessagesV1Beta3Client, +) +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3.async_client import ( + MessagesV1Beta3AsyncClient, +) +from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.client import ( + MetricsV1Beta3Client, +) +from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.async_client import ( + MetricsV1Beta3AsyncClient, +) +from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.client import ( + SnapshotsV1Beta3Client, +) +from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.async_client import ( + SnapshotsV1Beta3AsyncClient, +) +from google.cloud.dataflow_v1beta3.services.templates_service.client import ( + TemplatesServiceClient, +) +from google.cloud.dataflow_v1beta3.services.templates_service.async_client import ( + TemplatesServiceAsyncClient, +) + +from google.cloud.dataflow_v1beta3.types.environment import AutoscalingSettings +from google.cloud.dataflow_v1beta3.types.environment import DebugOptions +from google.cloud.dataflow_v1beta3.types.environment import Disk +from google.cloud.dataflow_v1beta3.types.environment import Environment +from google.cloud.dataflow_v1beta3.types.environment import Package +from google.cloud.dataflow_v1beta3.types.environment import SdkHarnessContainerImage +from google.cloud.dataflow_v1beta3.types.environment import TaskRunnerSettings +from google.cloud.dataflow_v1beta3.types.environment import WorkerPool +from google.cloud.dataflow_v1beta3.types.environment import WorkerSettings +from google.cloud.dataflow_v1beta3.types.environment import AutoscalingAlgorithm +from google.cloud.dataflow_v1beta3.types.environment import DefaultPackageSet +from google.cloud.dataflow_v1beta3.types.environment import FlexResourceSchedulingGoal +from google.cloud.dataflow_v1beta3.types.environment import JobType +from google.cloud.dataflow_v1beta3.types.environment import ShuffleMode +from google.cloud.dataflow_v1beta3.types.environment import TeardownPolicy +from 
google.cloud.dataflow_v1beta3.types.environment import WorkerIPAddressConfiguration +from google.cloud.dataflow_v1beta3.types.jobs import BigQueryIODetails +from google.cloud.dataflow_v1beta3.types.jobs import BigTableIODetails +from google.cloud.dataflow_v1beta3.types.jobs import CheckActiveJobsRequest +from google.cloud.dataflow_v1beta3.types.jobs import CheckActiveJobsResponse +from google.cloud.dataflow_v1beta3.types.jobs import CreateJobRequest +from google.cloud.dataflow_v1beta3.types.jobs import DatastoreIODetails +from google.cloud.dataflow_v1beta3.types.jobs import DisplayData +from google.cloud.dataflow_v1beta3.types.jobs import ExecutionStageState +from google.cloud.dataflow_v1beta3.types.jobs import ExecutionStageSummary +from google.cloud.dataflow_v1beta3.types.jobs import FailedLocation +from google.cloud.dataflow_v1beta3.types.jobs import FileIODetails +from google.cloud.dataflow_v1beta3.types.jobs import GetJobRequest +from google.cloud.dataflow_v1beta3.types.jobs import Job +from google.cloud.dataflow_v1beta3.types.jobs import JobExecutionInfo +from google.cloud.dataflow_v1beta3.types.jobs import JobExecutionStageInfo +from google.cloud.dataflow_v1beta3.types.jobs import JobMetadata +from google.cloud.dataflow_v1beta3.types.jobs import ListJobsRequest +from google.cloud.dataflow_v1beta3.types.jobs import ListJobsResponse +from google.cloud.dataflow_v1beta3.types.jobs import PipelineDescription +from google.cloud.dataflow_v1beta3.types.jobs import PubSubIODetails +from google.cloud.dataflow_v1beta3.types.jobs import SdkVersion +from google.cloud.dataflow_v1beta3.types.jobs import SnapshotJobRequest +from google.cloud.dataflow_v1beta3.types.jobs import SpannerIODetails +from google.cloud.dataflow_v1beta3.types.jobs import Step +from google.cloud.dataflow_v1beta3.types.jobs import TransformSummary +from google.cloud.dataflow_v1beta3.types.jobs import UpdateJobRequest +from google.cloud.dataflow_v1beta3.types.jobs import JobState +from google.cloud.dataflow_v1beta3.types.jobs import JobView +from google.cloud.dataflow_v1beta3.types.jobs import KindType +from google.cloud.dataflow_v1beta3.types.messages import AutoscalingEvent +from google.cloud.dataflow_v1beta3.types.messages import JobMessage +from google.cloud.dataflow_v1beta3.types.messages import ListJobMessagesRequest +from google.cloud.dataflow_v1beta3.types.messages import ListJobMessagesResponse +from google.cloud.dataflow_v1beta3.types.messages import StructuredMessage +from google.cloud.dataflow_v1beta3.types.messages import JobMessageImportance +from google.cloud.dataflow_v1beta3.types.metrics import GetJobExecutionDetailsRequest +from google.cloud.dataflow_v1beta3.types.metrics import GetJobMetricsRequest +from google.cloud.dataflow_v1beta3.types.metrics import GetStageExecutionDetailsRequest +from google.cloud.dataflow_v1beta3.types.metrics import JobExecutionDetails +from google.cloud.dataflow_v1beta3.types.metrics import JobMetrics +from google.cloud.dataflow_v1beta3.types.metrics import MetricStructuredName +from google.cloud.dataflow_v1beta3.types.metrics import MetricUpdate +from google.cloud.dataflow_v1beta3.types.metrics import ProgressTimeseries +from google.cloud.dataflow_v1beta3.types.metrics import StageExecutionDetails +from google.cloud.dataflow_v1beta3.types.metrics import StageSummary +from google.cloud.dataflow_v1beta3.types.metrics import WorkerDetails +from google.cloud.dataflow_v1beta3.types.metrics import WorkItemDetails +from google.cloud.dataflow_v1beta3.types.metrics import ExecutionState 
+from google.cloud.dataflow_v1beta3.types.snapshots import DeleteSnapshotRequest +from google.cloud.dataflow_v1beta3.types.snapshots import DeleteSnapshotResponse +from google.cloud.dataflow_v1beta3.types.snapshots import GetSnapshotRequest +from google.cloud.dataflow_v1beta3.types.snapshots import ListSnapshotsRequest +from google.cloud.dataflow_v1beta3.types.snapshots import ListSnapshotsResponse +from google.cloud.dataflow_v1beta3.types.snapshots import PubsubSnapshotMetadata +from google.cloud.dataflow_v1beta3.types.snapshots import Snapshot +from google.cloud.dataflow_v1beta3.types.snapshots import SnapshotState +from google.cloud.dataflow_v1beta3.types.streaming import ComputationTopology +from google.cloud.dataflow_v1beta3.types.streaming import CustomSourceLocation +from google.cloud.dataflow_v1beta3.types.streaming import DataDiskAssignment +from google.cloud.dataflow_v1beta3.types.streaming import KeyRangeDataDiskAssignment +from google.cloud.dataflow_v1beta3.types.streaming import KeyRangeLocation +from google.cloud.dataflow_v1beta3.types.streaming import MountedDataDisk +from google.cloud.dataflow_v1beta3.types.streaming import PubsubLocation +from google.cloud.dataflow_v1beta3.types.streaming import StateFamilyConfig +from google.cloud.dataflow_v1beta3.types.streaming import ( + StreamingApplianceSnapshotConfig, +) +from google.cloud.dataflow_v1beta3.types.streaming import StreamingComputationRanges +from google.cloud.dataflow_v1beta3.types.streaming import StreamingSideInputLocation +from google.cloud.dataflow_v1beta3.types.streaming import StreamingStageLocation +from google.cloud.dataflow_v1beta3.types.streaming import StreamLocation +from google.cloud.dataflow_v1beta3.types.streaming import TopologyConfig +from google.cloud.dataflow_v1beta3.types.templates import ContainerSpec +from google.cloud.dataflow_v1beta3.types.templates import CreateJobFromTemplateRequest +from google.cloud.dataflow_v1beta3.types.templates import DynamicTemplateLaunchParams +from google.cloud.dataflow_v1beta3.types.templates import FlexTemplateRuntimeEnvironment +from google.cloud.dataflow_v1beta3.types.templates import GetTemplateRequest +from google.cloud.dataflow_v1beta3.types.templates import GetTemplateResponse +from google.cloud.dataflow_v1beta3.types.templates import InvalidTemplateParameters +from google.cloud.dataflow_v1beta3.types.templates import LaunchFlexTemplateParameter +from google.cloud.dataflow_v1beta3.types.templates import LaunchFlexTemplateRequest +from google.cloud.dataflow_v1beta3.types.templates import LaunchFlexTemplateResponse +from google.cloud.dataflow_v1beta3.types.templates import LaunchTemplateParameters +from google.cloud.dataflow_v1beta3.types.templates import LaunchTemplateRequest +from google.cloud.dataflow_v1beta3.types.templates import LaunchTemplateResponse +from google.cloud.dataflow_v1beta3.types.templates import ParameterMetadata +from google.cloud.dataflow_v1beta3.types.templates import RuntimeEnvironment +from google.cloud.dataflow_v1beta3.types.templates import RuntimeMetadata +from google.cloud.dataflow_v1beta3.types.templates import SDKInfo +from google.cloud.dataflow_v1beta3.types.templates import TemplateMetadata +from google.cloud.dataflow_v1beta3.types.templates import ParameterType + +__all__ = ( + "FlexTemplatesServiceClient", + "FlexTemplatesServiceAsyncClient", + "JobsV1Beta3Client", + "JobsV1Beta3AsyncClient", + "MessagesV1Beta3Client", + "MessagesV1Beta3AsyncClient", + "MetricsV1Beta3Client", + "MetricsV1Beta3AsyncClient", + 
"SnapshotsV1Beta3Client", + "SnapshotsV1Beta3AsyncClient", + "TemplatesServiceClient", + "TemplatesServiceAsyncClient", + "AutoscalingSettings", + "DebugOptions", + "Disk", + "Environment", + "Package", + "SdkHarnessContainerImage", + "TaskRunnerSettings", + "WorkerPool", + "WorkerSettings", + "AutoscalingAlgorithm", + "DefaultPackageSet", + "FlexResourceSchedulingGoal", + "JobType", + "ShuffleMode", + "TeardownPolicy", + "WorkerIPAddressConfiguration", + "BigQueryIODetails", + "BigTableIODetails", + "CheckActiveJobsRequest", + "CheckActiveJobsResponse", + "CreateJobRequest", + "DatastoreIODetails", + "DisplayData", + "ExecutionStageState", + "ExecutionStageSummary", + "FailedLocation", + "FileIODetails", + "GetJobRequest", + "Job", + "JobExecutionInfo", + "JobExecutionStageInfo", + "JobMetadata", + "ListJobsRequest", + "ListJobsResponse", + "PipelineDescription", + "PubSubIODetails", + "SdkVersion", + "SnapshotJobRequest", + "SpannerIODetails", + "Step", + "TransformSummary", + "UpdateJobRequest", + "JobState", + "JobView", + "KindType", + "AutoscalingEvent", + "JobMessage", + "ListJobMessagesRequest", + "ListJobMessagesResponse", + "StructuredMessage", + "JobMessageImportance", + "GetJobExecutionDetailsRequest", + "GetJobMetricsRequest", + "GetStageExecutionDetailsRequest", + "JobExecutionDetails", + "JobMetrics", + "MetricStructuredName", + "MetricUpdate", + "ProgressTimeseries", + "StageExecutionDetails", + "StageSummary", + "WorkerDetails", + "WorkItemDetails", + "ExecutionState", + "DeleteSnapshotRequest", + "DeleteSnapshotResponse", + "GetSnapshotRequest", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "PubsubSnapshotMetadata", + "Snapshot", + "SnapshotState", + "ComputationTopology", + "CustomSourceLocation", + "DataDiskAssignment", + "KeyRangeDataDiskAssignment", + "KeyRangeLocation", + "MountedDataDisk", + "PubsubLocation", + "StateFamilyConfig", + "StreamingApplianceSnapshotConfig", + "StreamingComputationRanges", + "StreamingSideInputLocation", + "StreamingStageLocation", + "StreamLocation", + "TopologyConfig", + "ContainerSpec", + "CreateJobFromTemplateRequest", + "DynamicTemplateLaunchParams", + "FlexTemplateRuntimeEnvironment", + "GetTemplateRequest", + "GetTemplateResponse", + "InvalidTemplateParameters", + "LaunchFlexTemplateParameter", + "LaunchFlexTemplateRequest", + "LaunchFlexTemplateResponse", + "LaunchTemplateParameters", + "LaunchTemplateRequest", + "LaunchTemplateResponse", + "ParameterMetadata", + "RuntimeEnvironment", + "RuntimeMetadata", + "SDKInfo", + "TemplateMetadata", + "ParameterType", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow/py.typed b/packages/google-cloud-dataflow-client/google/cloud/dataflow/py.typed new file mode 100644 index 000000000000..6d4450bfca06 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dataflow package uses inline types. diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/__init__.py new file mode 100644 index 000000000000..caa2dafff3b7 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/__init__.py @@ -0,0 +1,254 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.flex_templates_service import FlexTemplatesServiceClient +from .services.flex_templates_service import FlexTemplatesServiceAsyncClient +from .services.jobs_v1_beta3 import JobsV1Beta3Client +from .services.jobs_v1_beta3 import JobsV1Beta3AsyncClient +from .services.messages_v1_beta3 import MessagesV1Beta3Client +from .services.messages_v1_beta3 import MessagesV1Beta3AsyncClient +from .services.metrics_v1_beta3 import MetricsV1Beta3Client +from .services.metrics_v1_beta3 import MetricsV1Beta3AsyncClient +from .services.snapshots_v1_beta3 import SnapshotsV1Beta3Client +from .services.snapshots_v1_beta3 import SnapshotsV1Beta3AsyncClient +from .services.templates_service import TemplatesServiceClient +from .services.templates_service import TemplatesServiceAsyncClient + +from .types.environment import AutoscalingSettings +from .types.environment import DebugOptions +from .types.environment import Disk +from .types.environment import Environment +from .types.environment import Package +from .types.environment import SdkHarnessContainerImage +from .types.environment import TaskRunnerSettings +from .types.environment import WorkerPool +from .types.environment import WorkerSettings +from .types.environment import AutoscalingAlgorithm +from .types.environment import DefaultPackageSet +from .types.environment import FlexResourceSchedulingGoal +from .types.environment import JobType +from .types.environment import ShuffleMode +from .types.environment import TeardownPolicy +from .types.environment import WorkerIPAddressConfiguration +from .types.jobs import BigQueryIODetails +from .types.jobs import BigTableIODetails +from .types.jobs import CheckActiveJobsRequest +from .types.jobs import CheckActiveJobsResponse +from .types.jobs import CreateJobRequest +from .types.jobs import DatastoreIODetails +from .types.jobs import DisplayData +from .types.jobs import ExecutionStageState +from .types.jobs import ExecutionStageSummary +from .types.jobs import FailedLocation +from .types.jobs import FileIODetails +from .types.jobs import GetJobRequest +from .types.jobs import Job +from .types.jobs import JobExecutionInfo +from .types.jobs import JobExecutionStageInfo +from .types.jobs import JobMetadata +from .types.jobs import ListJobsRequest +from .types.jobs import ListJobsResponse +from .types.jobs import PipelineDescription +from .types.jobs import PubSubIODetails +from .types.jobs import SdkVersion +from .types.jobs import SnapshotJobRequest +from .types.jobs import SpannerIODetails +from .types.jobs import Step +from .types.jobs import TransformSummary +from .types.jobs import UpdateJobRequest +from .types.jobs import JobState +from .types.jobs import JobView +from .types.jobs import KindType +from .types.messages import AutoscalingEvent +from .types.messages import JobMessage +from .types.messages import ListJobMessagesRequest +from .types.messages import ListJobMessagesResponse +from .types.messages import StructuredMessage +from .types.messages import JobMessageImportance +from .types.metrics import GetJobExecutionDetailsRequest +from .types.metrics import 
GetJobMetricsRequest +from .types.metrics import GetStageExecutionDetailsRequest +from .types.metrics import JobExecutionDetails +from .types.metrics import JobMetrics +from .types.metrics import MetricStructuredName +from .types.metrics import MetricUpdate +from .types.metrics import ProgressTimeseries +from .types.metrics import StageExecutionDetails +from .types.metrics import StageSummary +from .types.metrics import WorkerDetails +from .types.metrics import WorkItemDetails +from .types.metrics import ExecutionState +from .types.snapshots import DeleteSnapshotRequest +from .types.snapshots import DeleteSnapshotResponse +from .types.snapshots import GetSnapshotRequest +from .types.snapshots import ListSnapshotsRequest +from .types.snapshots import ListSnapshotsResponse +from .types.snapshots import PubsubSnapshotMetadata +from .types.snapshots import Snapshot +from .types.snapshots import SnapshotState +from .types.streaming import ComputationTopology +from .types.streaming import CustomSourceLocation +from .types.streaming import DataDiskAssignment +from .types.streaming import KeyRangeDataDiskAssignment +from .types.streaming import KeyRangeLocation +from .types.streaming import MountedDataDisk +from .types.streaming import PubsubLocation +from .types.streaming import StateFamilyConfig +from .types.streaming import StreamingApplianceSnapshotConfig +from .types.streaming import StreamingComputationRanges +from .types.streaming import StreamingSideInputLocation +from .types.streaming import StreamingStageLocation +from .types.streaming import StreamLocation +from .types.streaming import TopologyConfig +from .types.templates import ContainerSpec +from .types.templates import CreateJobFromTemplateRequest +from .types.templates import DynamicTemplateLaunchParams +from .types.templates import FlexTemplateRuntimeEnvironment +from .types.templates import GetTemplateRequest +from .types.templates import GetTemplateResponse +from .types.templates import InvalidTemplateParameters +from .types.templates import LaunchFlexTemplateParameter +from .types.templates import LaunchFlexTemplateRequest +from .types.templates import LaunchFlexTemplateResponse +from .types.templates import LaunchTemplateParameters +from .types.templates import LaunchTemplateRequest +from .types.templates import LaunchTemplateResponse +from .types.templates import ParameterMetadata +from .types.templates import RuntimeEnvironment +from .types.templates import RuntimeMetadata +from .types.templates import SDKInfo +from .types.templates import TemplateMetadata +from .types.templates import ParameterType + +__all__ = ( + "FlexTemplatesServiceAsyncClient", + "JobsV1Beta3AsyncClient", + "MessagesV1Beta3AsyncClient", + "MetricsV1Beta3AsyncClient", + "SnapshotsV1Beta3AsyncClient", + "TemplatesServiceAsyncClient", + "AutoscalingAlgorithm", + "AutoscalingEvent", + "AutoscalingSettings", + "BigQueryIODetails", + "BigTableIODetails", + "CheckActiveJobsRequest", + "CheckActiveJobsResponse", + "ComputationTopology", + "ContainerSpec", + "CreateJobFromTemplateRequest", + "CreateJobRequest", + "CustomSourceLocation", + "DataDiskAssignment", + "DatastoreIODetails", + "DebugOptions", + "DefaultPackageSet", + "DeleteSnapshotRequest", + "DeleteSnapshotResponse", + "Disk", + "DisplayData", + "DynamicTemplateLaunchParams", + "Environment", + "ExecutionStageState", + "ExecutionStageSummary", + "ExecutionState", + "FailedLocation", + "FileIODetails", + "FlexResourceSchedulingGoal", + "FlexTemplateRuntimeEnvironment", + "FlexTemplatesServiceClient", 
+ "GetJobExecutionDetailsRequest", + "GetJobMetricsRequest", + "GetJobRequest", + "GetSnapshotRequest", + "GetStageExecutionDetailsRequest", + "GetTemplateRequest", + "GetTemplateResponse", + "InvalidTemplateParameters", + "Job", + "JobExecutionDetails", + "JobExecutionInfo", + "JobExecutionStageInfo", + "JobMessage", + "JobMessageImportance", + "JobMetadata", + "JobMetrics", + "JobState", + "JobType", + "JobView", + "JobsV1Beta3Client", + "KeyRangeDataDiskAssignment", + "KeyRangeLocation", + "KindType", + "LaunchFlexTemplateParameter", + "LaunchFlexTemplateRequest", + "LaunchFlexTemplateResponse", + "LaunchTemplateParameters", + "LaunchTemplateRequest", + "LaunchTemplateResponse", + "ListJobMessagesRequest", + "ListJobMessagesResponse", + "ListJobsRequest", + "ListJobsResponse", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "MessagesV1Beta3Client", + "MetricStructuredName", + "MetricUpdate", + "MetricsV1Beta3Client", + "MountedDataDisk", + "Package", + "ParameterMetadata", + "ParameterType", + "PipelineDescription", + "ProgressTimeseries", + "PubSubIODetails", + "PubsubLocation", + "PubsubSnapshotMetadata", + "RuntimeEnvironment", + "RuntimeMetadata", + "SDKInfo", + "SdkHarnessContainerImage", + "SdkVersion", + "ShuffleMode", + "Snapshot", + "SnapshotJobRequest", + "SnapshotState", + "SnapshotsV1Beta3Client", + "SpannerIODetails", + "StageExecutionDetails", + "StageSummary", + "StateFamilyConfig", + "Step", + "StreamLocation", + "StreamingApplianceSnapshotConfig", + "StreamingComputationRanges", + "StreamingSideInputLocation", + "StreamingStageLocation", + "StructuredMessage", + "TaskRunnerSettings", + "TeardownPolicy", + "TemplateMetadata", + "TemplatesServiceClient", + "TopologyConfig", + "TransformSummary", + "UpdateJobRequest", + "WorkItemDetails", + "WorkerDetails", + "WorkerIPAddressConfiguration", + "WorkerPool", + "WorkerSettings", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_metadata.json b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_metadata.json new file mode 100644 index 000000000000..8d172722c457 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_metadata.json @@ -0,0 +1,273 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dataflow_v1beta3", + "protoPackage": "google.dataflow.v1beta3", + "schema": "1.0", + "services": { + "FlexTemplatesService": { + "clients": { + "grpc": { + "libraryClient": "FlexTemplatesServiceClient", + "rpcs": { + "LaunchFlexTemplate": { + "methods": [ + "launch_flex_template" + ] + } + } + }, + "grpc-async": { + "libraryClient": "FlexTemplatesServiceAsyncClient", + "rpcs": { + "LaunchFlexTemplate": { + "methods": [ + "launch_flex_template" + ] + } + } + } + } + }, + "JobsV1Beta3": { + "clients": { + "grpc": { + "libraryClient": "JobsV1Beta3Client", + "rpcs": { + "AggregatedListJobs": { + "methods": [ + "aggregated_list_jobs" + ] + }, + "CheckActiveJobs": { + "methods": [ + "check_active_jobs" + ] + }, + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SnapshotJob": { + "methods": [ + "snapshot_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + }, + "grpc-async": { + "libraryClient": "JobsV1Beta3AsyncClient", + "rpcs": { + "AggregatedListJobs": { + "methods": [ + "aggregated_list_jobs" 
+ ] + }, + "CheckActiveJobs": { + "methods": [ + "check_active_jobs" + ] + }, + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SnapshotJob": { + "methods": [ + "snapshot_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + } + } + }, + "MessagesV1Beta3": { + "clients": { + "grpc": { + "libraryClient": "MessagesV1Beta3Client", + "rpcs": { + "ListJobMessages": { + "methods": [ + "list_job_messages" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MessagesV1Beta3AsyncClient", + "rpcs": { + "ListJobMessages": { + "methods": [ + "list_job_messages" + ] + } + } + } + } + }, + "MetricsV1Beta3": { + "clients": { + "grpc": { + "libraryClient": "MetricsV1Beta3Client", + "rpcs": { + "GetJobExecutionDetails": { + "methods": [ + "get_job_execution_details" + ] + }, + "GetJobMetrics": { + "methods": [ + "get_job_metrics" + ] + }, + "GetStageExecutionDetails": { + "methods": [ + "get_stage_execution_details" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MetricsV1Beta3AsyncClient", + "rpcs": { + "GetJobExecutionDetails": { + "methods": [ + "get_job_execution_details" + ] + }, + "GetJobMetrics": { + "methods": [ + "get_job_metrics" + ] + }, + "GetStageExecutionDetails": { + "methods": [ + "get_stage_execution_details" + ] + } + } + } + } + }, + "SnapshotsV1Beta3": { + "clients": { + "grpc": { + "libraryClient": "SnapshotsV1Beta3Client", + "rpcs": { + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SnapshotsV1Beta3AsyncClient", + "rpcs": { + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + } + } + } + } + }, + "TemplatesService": { + "clients": { + "grpc": { + "libraryClient": "TemplatesServiceClient", + "rpcs": { + "CreateJobFromTemplate": { + "methods": [ + "create_job_from_template" + ] + }, + "GetTemplate": { + "methods": [ + "get_template" + ] + }, + "LaunchTemplate": { + "methods": [ + "launch_template" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TemplatesServiceAsyncClient", + "rpcs": { + "CreateJobFromTemplate": { + "methods": [ + "create_job_from_template" + ] + }, + "GetTemplate": { + "methods": [ + "get_template" + ] + }, + "LaunchTemplate": { + "methods": [ + "launch_template" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/py.typed b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/py.typed new file mode 100644 index 000000000000..6d4450bfca06 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dataflow package uses inline types. 
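The gapic_metadata.json file added above maps each proto service and RPC to the generated client method that implements it, keyed by transport ("grpc" for the synchronous client, "grpc-async" for the async one). A minimal sketch of consuming that mapping (not part of this patch; the helper name and file path are illustrative):

import json

def resolve_method(metadata_path: str, service: str, rpc: str, transport: str = "grpc") -> str:
    """Return the library method name registered for a proto RPC."""
    with open(metadata_path) as f:
        metadata = json.load(f)
    # Walk services -> clients -> rpcs, mirroring the JSON structure above.
    client = metadata["services"][service]["clients"][transport]
    # Each RPC maps to a list of method names; the generated clients
    # register exactly one method per RPC.
    return client["rpcs"][rpc]["methods"][0]

# With the metadata above, resolve_method("gapic_metadata.json",
# "FlexTemplatesService", "LaunchFlexTemplate") returns "launch_flex_template".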
diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/__init__.py new file mode 100644 index 000000000000..4de65971c238 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/__init__.py new file mode 100644 index 000000000000..afd8353acdb4 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import FlexTemplatesServiceClient +from .async_client import FlexTemplatesServiceAsyncClient + +__all__ = ( + "FlexTemplatesServiceClient", + "FlexTemplatesServiceAsyncClient", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py new file mode 100644 index 000000000000..df642525e788 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates +from .transports.base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport +from .client import FlexTemplatesServiceClient + + +class FlexTemplatesServiceAsyncClient: + """Provides a service for Flex templates. This feature is not + ready yet. + """ + + _client: FlexTemplatesServiceClient + + DEFAULT_ENDPOINT = FlexTemplatesServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = FlexTemplatesServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + FlexTemplatesServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + FlexTemplatesServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(FlexTemplatesServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + FlexTemplatesServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + FlexTemplatesServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + FlexTemplatesServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(FlexTemplatesServiceClient.common_project_path) + parse_common_project_path = staticmethod( + FlexTemplatesServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(FlexTemplatesServiceClient.common_location_path) + parse_common_location_path = staticmethod( + FlexTemplatesServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FlexTemplatesServiceAsyncClient: The constructed client. + """ + return FlexTemplatesServiceClient.from_service_account_info.__func__(FlexTemplatesServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FlexTemplatesServiceAsyncClient: The constructed client. + """ + return FlexTemplatesServiceClient.from_service_account_file.__func__(FlexTemplatesServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> FlexTemplatesServiceTransport: + """Returns the transport used by the client instance. 
+ + Returns: + FlexTemplatesServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(FlexTemplatesServiceClient).get_transport_class, + type(FlexTemplatesServiceClient), + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, FlexTemplatesServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the flex templates service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.FlexTemplatesServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = FlexTemplatesServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def launch_flex_template( + self, + request: templates.LaunchFlexTemplateRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.LaunchFlexTemplateResponse: + r"""Launch a job with a FlexTemplate. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateRequest`): + The request object. A request to launch a Cloud Dataflow + job from a FlexTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateResponse: + Response to the request to launch a + job from Flex Template. + + """ + # Create or coerce a protobuf request object. + request = templates.LaunchFlexTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.launch_flex_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("FlexTemplatesServiceAsyncClient",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py new file mode 100644 index 000000000000..9df242a7f09f --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates +from .transports.base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import FlexTemplatesServiceGrpcTransport +from .transports.grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport + + +class FlexTemplatesServiceClientMeta(type): + """Metaclass for the FlexTemplatesService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[FlexTemplatesServiceTransport]] + _transport_registry["grpc"] = FlexTemplatesServiceGrpcTransport + _transport_registry["grpc_asyncio"] = FlexTemplatesServiceGrpcAsyncIOTransport + + def get_transport_class( + cls, label: str = None, + ) -> Type[FlexTemplatesServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class FlexTemplatesServiceClient(metaclass=FlexTemplatesServiceClientMeta): + """Provides a service for Flex templates. This feature is not + ready yet. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FlexTemplatesServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FlexTemplatesServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> FlexTemplatesServiceTransport: + """Returns the transport used by the client instance. + + Returns: + FlexTemplatesServiceTransport: The transport used by the client + instance.
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, FlexTemplatesServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the flex templates service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, FlexTemplatesServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, FlexTemplatesServiceTransport): + # transport is a FlexTemplatesServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def launch_flex_template( + self, + request: templates.LaunchFlexTemplateRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.LaunchFlexTemplateResponse: + r"""Launch a job with a FlexTemplate. + + Args: + request (google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateRequest): + The request object. A request to launch a Cloud Dataflow + job from a FlexTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateResponse: + Response to the request to launch a + job from Flex Template. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a templates.LaunchFlexTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, templates.LaunchFlexTemplateRequest): + request = templates.LaunchFlexTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.launch_flex_template] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("FlexTemplatesServiceClient",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py new file mode 100644 index 000000000000..2d03f11e2b8e --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
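+# The registry assembled below mirrors the label -> class mapping kept by the +# client metaclass: "grpc" and "grpc_asyncio" are the strings accepted for the +# client's ``transport`` argument, and insertion order makes "grpc" the default.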
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import FlexTemplatesServiceTransport +from .grpc import FlexTemplatesServiceGrpcTransport +from .grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[FlexTemplatesServiceTransport]] +_transport_registry["grpc"] = FlexTemplatesServiceGrpcTransport +_transport_registry["grpc_asyncio"] = FlexTemplatesServiceGrpcAsyncIOTransport + +__all__ = ( + "FlexTemplatesServiceTransport", + "FlexTemplatesServiceGrpcTransport", + "FlexTemplatesServiceGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py new file mode 100644 index 000000000000..04a39bc364ce --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import templates + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class FlexTemplatesServiceTransport(abc.ABC): + """Abstract transport class for FlexTemplatesService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/compute.readonly", + "https://www.googleapis.com/auth/userinfo.email", + ) + + DEFAULT_HOST: str = "dataflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self-signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + + # If the credentials are service account credentials, then always try to use self-signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required version of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods.
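+ # Keys are the abstract RPC properties defined below; the concrete gRPC + # transports resolve them to real stubs, and wrap_method layers the + # default retry/timeout plus user-agent metadata around each call.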
+ self._wrapped_methods = { + self.launch_flex_template: gapic_v1.method.wrap_method( + self.launch_flex_template, + default_timeout=None, + client_info=client_info, + ), + } + + @property + def launch_flex_template( + self, + ) -> Callable[ + [templates.LaunchFlexTemplateRequest], + Union[ + templates.LaunchFlexTemplateResponse, + Awaitable[templates.LaunchFlexTemplateResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("FlexTemplatesServiceTransport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py new file mode 100644 index 000000000000..2a3e611569ab --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py @@ -0,0 +1,255 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dataflow_v1beta3.types import templates +from .base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO + + +class FlexTemplatesServiceGrpcTransport(FlexTemplatesServiceTransport): + """gRPC backend transport for FlexTemplatesService. + + Provides a service for Flex templates. This feature is not + ready yet. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials.
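+ # (This branch serves only the deprecated ``api_mtls_endpoint`` flow; when + # no ``client_cert_source`` callback is given, ``SslCredentials`` falls + # back to the application-default client certificate.)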
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def launch_flex_template( + self, + ) -> Callable[ + [templates.LaunchFlexTemplateRequest], templates.LaunchFlexTemplateResponse + ]: + r"""Return a callable for the launch flex template method over gRPC. + + Launch a job with a FlexTemplate. + + Returns: + Callable[[~.LaunchFlexTemplateRequest], + ~.LaunchFlexTemplateResponse]: + A function that, when called, will call the underlying RPC + on the server.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "launch_flex_template" not in self._stubs: + self._stubs["launch_flex_template"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.FlexTemplatesService/LaunchFlexTemplate", + request_serializer=templates.LaunchFlexTemplateRequest.serialize, + response_deserializer=templates.LaunchFlexTemplateResponse.deserialize, + ) + return self._stubs["launch_flex_template"] + + +__all__ = ("FlexTemplatesServiceGrpcTransport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc_asyncio.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..5bd684509336 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc_asyncio.py @@ -0,0 +1,259 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataflow_v1beta3.types import templates +from .base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import FlexTemplatesServiceGrpcTransport + + +class FlexTemplatesServiceGrpcAsyncIOTransport(FlexTemplatesServiceTransport): + """gRPC AsyncIO backend transport for FlexTemplatesService. + + Provides a service for Flex templates. This feature is not + ready yet. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def launch_flex_template( + self, + ) -> Callable[ + [templates.LaunchFlexTemplateRequest], + Awaitable[templates.LaunchFlexTemplateResponse], + ]: + r"""Return a callable for the launch flex template method over gRPC. + + Launch a job with a FlexTemplate. + + Returns: + Callable[[~.LaunchFlexTemplateRequest], + Awaitable[~.LaunchFlexTemplateResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
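+ # The stub is cached in ``self._stubs``, so repeated property accesses + # reuse a single multicallable registered on the underlying channel.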
+ if "launch_flex_template" not in self._stubs: + self._stubs["launch_flex_template"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.FlexTemplatesService/LaunchFlexTemplate", + request_serializer=templates.LaunchFlexTemplateRequest.serialize, + response_deserializer=templates.LaunchFlexTemplateResponse.deserialize, + ) + return self._stubs["launch_flex_template"] + + +__all__ = ("FlexTemplatesServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/__init__.py new file mode 100644 index 000000000000..92101f6f4385 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import JobsV1Beta3Client +from .async_client import JobsV1Beta3AsyncClient + +__all__ = ( + "JobsV1Beta3Client", + "JobsV1Beta3AsyncClient", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py new file mode 100644 index 000000000000..21508dd8f85b --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py @@ -0,0 +1,529 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.types import environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import snapshots +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport +from .client import JobsV1Beta3Client + + +class JobsV1Beta3AsyncClient: + """Provides a method to create and modify Google Cloud Dataflow + jobs. A Job is a multi-stage computation graph run by the Cloud + Dataflow service. + """ + + _client: JobsV1Beta3Client + + DEFAULT_ENDPOINT = JobsV1Beta3Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = JobsV1Beta3Client.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + JobsV1Beta3Client.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + JobsV1Beta3Client.parse_common_billing_account_path + ) + common_folder_path = staticmethod(JobsV1Beta3Client.common_folder_path) + parse_common_folder_path = staticmethod(JobsV1Beta3Client.parse_common_folder_path) + common_organization_path = staticmethod(JobsV1Beta3Client.common_organization_path) + parse_common_organization_path = staticmethod( + JobsV1Beta3Client.parse_common_organization_path + ) + common_project_path = staticmethod(JobsV1Beta3Client.common_project_path) + parse_common_project_path = staticmethod( + JobsV1Beta3Client.parse_common_project_path + ) + common_location_path = staticmethod(JobsV1Beta3Client.common_location_path) + parse_common_location_path = staticmethod( + JobsV1Beta3Client.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobsV1Beta3AsyncClient: The constructed client. + """ + return JobsV1Beta3Client.from_service_account_info.__func__(JobsV1Beta3AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobsV1Beta3AsyncClient: The constructed client. 
+ """ + return JobsV1Beta3Client.from_service_account_file.__func__(JobsV1Beta3AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> JobsV1Beta3Transport: + """Returns the transport used by the client instance. + + Returns: + JobsV1Beta3Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(JobsV1Beta3Client).get_transport_class, type(JobsV1Beta3Client) + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, JobsV1Beta3Transport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the jobs v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.JobsV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = JobsV1Beta3Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_job( + self, + request: jobs.CreateJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Creates a Cloud Dataflow job. + + To create a job, we recommend using + ``projects.locations.jobs.create`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.create`` is not recommended, as your job + will always start in ``us-central1``. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.CreateJobRequest`): + The request object. Request to create a Cloud Dataflow + job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. nextID: 26 + + """ + # Create or coerce a protobuf request object. + request = jobs.CreateJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_job( + self, + request: jobs.GetJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Gets the state of the specified Cloud Dataflow job. + + To get the state of a job, we recommend using + ``projects.locations.jobs.get`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.get`` is not recommended, as you can only + get the state of jobs that are running in ``us-central1``. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.GetJobRequest`): + The request object. Request to get the state of a Cloud + Dataflow job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. nextID: 26 + + """ + # Create or coerce a protobuf request object. + request = jobs.GetJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_job( + self, + request: jobs.UpdateJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Updates the state of an existing Cloud Dataflow job. + + To update the state of an existing job, we recommend using + ``projects.locations.jobs.update`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.update`` is not recommended, as you can + only update the state of jobs that are running in + ``us-central1``. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.UpdateJobRequest`): + The request object. Request to update a Cloud Dataflow + job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. nextID: 26 + + """ + # Create or coerce a protobuf request object. 
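+ # (jobs.UpdateJobRequest(request) accepts None, a dict, or an existing + # message; proto-plus coerces a dict field-by-field and copies a message, + # so the caller's object is never mutated.)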
+ request = jobs.UpdateJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_job, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_jobs( + self, + request: jobs.ListJobsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsAsyncPager: + r"""List the jobs of a project. + + To list the jobs of a project in a region, we recommend using + ``projects.locations.jobs.list`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + To list all jobs across all regions, use + ``projects.jobs.aggregated``. Using ``projects.jobs.list`` is + not recommended, as you can only get the list of jobs that are + running in ``us-central1``. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.ListJobsRequest`): + The request object. Request to list Cloud Dataflow jobs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsAsyncPager: + Response to a request to list Cloud + Dataflow jobs in a project. This might + be a partial response, depending on the + page size in the ListJobsRequest. + However, if the project does not have + any jobs, an instance of + ListJobsResponse is not returned and the + request's response body is empty {}. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = jobs.ListJobsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_jobs, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def aggregated_list_jobs( + self, + request: jobs.ListJobsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListJobsAsyncPager: + r"""List the jobs of a project across all regions. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.ListJobsRequest`): + The request object. Request to list Cloud Dataflow jobs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata.
+ + Returns: + google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsAsyncPager: + Response to a request to list Cloud + Dataflow jobs in a project. This might + be a partial response, depending on the + page size in the ListJobsRequest. + However, if the project does not have + any jobs, an instance of + ListJobsResponse is not returned and the + request's response body is empty {}. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = jobs.ListJobsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.aggregated_list_jobs, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.AggregatedListJobsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def check_active_jobs( + self, + request: jobs.CheckActiveJobsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.CheckActiveJobsResponse: + r"""Check for existence of active jobs in the given + project across all regions. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest`): + The request object. Request to check if active jobs + exist for a project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse: + Response for CheckActiveJobsRequest. + """ + # Create or coerce a protobuf request object. + request = jobs.CheckActiveJobsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.check_active_jobs, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def snapshot_job( + self, + request: jobs.SnapshotJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.Snapshot: + r"""Snapshot the state of a streaming job. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.SnapshotJobRequest`): + The request object. Request to create a snapshot of a + job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Snapshot: + Represents a snapshot of a job. + """ + # Create or coerce a protobuf request object.
+ request = jobs.SnapshotJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.snapshot_job, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("JobsV1Beta3AsyncClient",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py new file mode 100644 index 000000000000..5a0e9f0228d2 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py @@ -0,0 +1,709 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.types import environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import snapshots +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import JobsV1Beta3GrpcTransport +from .transports.grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport + + +class JobsV1Beta3ClientMeta(type): + """Metaclass for the JobsV1Beta3 client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
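+
+    For example (an illustrative sketch; ``get_transport_class`` is the
+    class-level helper defined below)::
+
+        transport_cls = JobsV1Beta3Client.get_transport_class("grpc")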
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[JobsV1Beta3Transport]]
+    _transport_registry["grpc"] = JobsV1Beta3GrpcTransport
+    _transport_registry["grpc_asyncio"] = JobsV1Beta3GrpcAsyncIOTransport
+
+    def get_transport_class(cls, label: str = None,) -> Type[JobsV1Beta3Transport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class JobsV1Beta3Client(metaclass=JobsV1Beta3ClientMeta):
+    """Provides a method to create and modify Google Cloud Dataflow
+    jobs. A Job is a multi-stage computation graph run by the Cloud
+    Dataflow service.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "dataflow.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            JobsV1Beta3Client: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            JobsV1Beta3Client: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> JobsV1Beta3Transport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            JobsV1Beta3Transport: The transport used by the client
+                instance.
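+
+        For example (an illustrative sketch; assumes default credentials
+        are available in the environment)::
+
+            client = JobsV1Beta3Client()
+            transport = client.transport  # a JobsV1Beta3GrpcTransport by default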
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str,) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str,) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder,)
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str,) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization,)
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str,) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project,)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str,) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project, location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, JobsV1Beta3Transport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the jobs v1 beta3 client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, JobsV1Beta3Transport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value).
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, JobsV1Beta3Transport): + # transport is a JobsV1Beta3Transport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def create_job( + self, + request: jobs.CreateJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Creates a Cloud Dataflow job. 
+ + To create a job, we recommend using + ``projects.locations.jobs.create`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.create`` is not recommended, as your job + will always start in ``us-central1``. + + Args: + request (google.cloud.dataflow_v1beta3.types.CreateJobRequest): + The request object. Request to create a Cloud Dataflow + job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. nextID: 26 + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a jobs.CreateJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.CreateJobRequest): + request = jobs.CreateJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_job( + self, + request: jobs.GetJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Gets the state of the specified Cloud Dataflow job. + + To get the state of a job, we recommend using + ``projects.locations.jobs.get`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.get`` is not recommended, as you can only + get the state of jobs that are running in ``us-central1``. + + Args: + request (google.cloud.dataflow_v1beta3.types.GetJobRequest): + The request object. Request to get the state of a Cloud + Dataflow job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. nextID: 26 + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a jobs.GetJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.GetJobRequest): + request = jobs.GetJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_job( + self, + request: jobs.UpdateJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Updates the state of an existing Cloud Dataflow job. 
+
+        To update the state of an existing job, we recommend using
+        ``projects.locations.jobs.update`` with a [regional endpoint]
+        (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
+        Using ``projects.jobs.update`` is not recommended, as you can
+        only update the state of jobs that are running in
+        ``us-central1``.
+
+        Args:
+            request (google.cloud.dataflow_v1beta3.types.UpdateJobRequest):
+                The request object. Request to update a Cloud Dataflow
+                job.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.types.Job:
+                Defines a job to be run by the Cloud
+                Dataflow service. nextID: 26
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a jobs.UpdateJobRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, jobs.UpdateJobRequest):
+            request = jobs.UpdateJobRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_job]
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def list_jobs(
+        self,
+        request: jobs.ListJobsRequest = None,
+        *,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListJobsPager:
+        r"""List the jobs of a project.
+
+        To list the jobs of a project in a region, we recommend using
+        ``projects.locations.jobs.list`` with a [regional endpoint]
+        (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
+        To list all jobs across all regions, use
+        ``projects.jobs.aggregated``. Using ``projects.jobs.list`` is
+        not recommended, as you can only get the list of jobs that are
+        running in ``us-central1``.
+
+        Args:
+            request (google.cloud.dataflow_v1beta3.types.ListJobsRequest):
+                The request object. Request to list Cloud Dataflow jobs.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsPager:
+                Response to a request to list Cloud
+                Dataflow jobs in a project. This might
+                be a partial response, depending on the
+                page size in the ListJobsRequest.
+                However, if the project does not have
+                any jobs, an instance of
+                ListJobsResponse is not returned and the
+                request's response body is empty {}.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a jobs.ListJobsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, jobs.ListJobsRequest):
+            request = jobs.ListJobsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_jobs]
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListJobsPager(
+            method=rpc, request=request, response=response, metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def aggregated_list_jobs(
+        self,
+        request: jobs.ListJobsRequest = None,
+        *,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.AggregatedListJobsPager:
+        r"""List the jobs of a project across all regions.
+
+        Args:
+            request (google.cloud.dataflow_v1beta3.types.ListJobsRequest):
+                The request object. Request to list Cloud Dataflow jobs.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsPager:
+                Response to a request to list Cloud
+                Dataflow jobs in a project. This might
+                be a partial response, depending on the
+                page size in the ListJobsRequest.
+                However, if the project does not have
+                any jobs, an instance of
+                ListJobsResponse is not returned and the
+                request's response body is empty {}.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a jobs.ListJobsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, jobs.ListJobsRequest):
+            request = jobs.ListJobsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.aggregated_list_jobs]
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.AggregatedListJobsPager(
+            method=rpc, request=request, response=response, metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def check_active_jobs(
+        self,
+        request: jobs.CheckActiveJobsRequest = None,
+        *,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> jobs.CheckActiveJobsResponse:
+        r"""Check for existence of active jobs in the given
+        project across all regions.
+
+        Args:
+            request (google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest):
+                The request object. Request to check whether active
+                jobs exist for a project.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse:
+                Response for CheckActiveJobsRequest.
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a jobs.CheckActiveJobsRequest.
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.CheckActiveJobsRequest): + request = jobs.CheckActiveJobsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.check_active_jobs] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def snapshot_job( + self, + request: jobs.SnapshotJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.Snapshot: + r"""Snapshot the state of a streaming job. + + Args: + request (google.cloud.dataflow_v1beta3.types.SnapshotJobRequest): + The request object. Request to create a snapshot of a + job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Snapshot: + Represents a snapshot of a job. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a jobs.SnapshotJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.SnapshotJobRequest): + request = jobs.SnapshotJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.snapshot_job] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("JobsV1Beta3Client",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/pagers.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/pagers.py new file mode 100644 index 000000000000..aff439e04274 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/pagers.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) + +from google.cloud.dataflow_v1beta3.types import jobs + + +class ListJobsPager: + """A pager for iterating through ``list_jobs`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., jobs.ListJobsResponse], + request: jobs.ListJobsRequest, + response: jobs.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = jobs.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[jobs.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[jobs.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListJobsAsyncPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[jobs.ListJobsResponse]], + request: jobs.ListJobsRequest, + response: jobs.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
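+
+        Example (an illustrative sketch; ``client`` is assumed to be an
+        already-constructed ``JobsV1Beta3AsyncClient``)::
+
+            pager = await client.list_jobs(jobs.ListJobsRequest())
+            async for job in pager:
+                print(job.id)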
+ """ + self._method = method + self._request = jobs.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[jobs.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[jobs.Job]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class AggregatedListJobsPager: + """A pager for iterating through ``aggregated_list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., jobs.ListJobsResponse], + request: jobs.ListJobsRequest, + response: jobs.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = jobs.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[jobs.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[jobs.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class AggregatedListJobsAsyncPager: + """A pager for iterating through ``aggregated_list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``AggregatedListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[jobs.ListJobsResponse]], + request: jobs.ListJobsRequest, + response: jobs.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = jobs.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[jobs.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[jobs.Job]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py new file mode 100644 index 000000000000..04a67835003a --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import JobsV1Beta3Transport +from .grpc import JobsV1Beta3GrpcTransport +from .grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport + + +# Compile a registry of transports. 
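+# For example, a lookup against this registry (an illustrative sketch;
+# "grpc" and "grpc_asyncio" are the two labels registered below):
+#
+#     transport_cls = _transport_registry["grpc"]  # JobsV1Beta3GrpcTransport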
+_transport_registry = OrderedDict() # type: Dict[str, Type[JobsV1Beta3Transport]] +_transport_registry["grpc"] = JobsV1Beta3GrpcTransport +_transport_registry["grpc_asyncio"] = JobsV1Beta3GrpcAsyncIOTransport + +__all__ = ( + "JobsV1Beta3Transport", + "JobsV1Beta3GrpcTransport", + "JobsV1Beta3GrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py new file mode 100644 index 000000000000..13637589236b --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py @@ -0,0 +1,241 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import snapshots + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class JobsV1Beta3Transport(abc.ABC): + """Abstract transport class for JobsV1Beta3.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/compute.readonly", + "https://www.googleapis.com/auth/userinfo.email", + ) + + DEFAULT_HOST: str = "dataflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+                be used for service account credentials.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
+
+        # Save the scopes.
+        self._scopes = scopes or self.AUTH_SCOPES
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+
+        # If the credentials are service account credentials, then always try to use a self-signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+    # TODO(busunkim): This method is in the base transport
+    # to avoid duplicating code across the transport classes. These functions
+    # should be deleted once the minimum required version of google-auth is increased.
+
+    # TODO: Remove this function once google-auth >= 1.25.0 is required
+    @classmethod
+    def _get_scopes_kwargs(
+        cls, host: str, scopes: Optional[Sequence[str]]
+    ) -> Dict[str, Optional[Sequence[str]]]:
+        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
+
+        scopes_kwargs = {}
+
+        if _GOOGLE_AUTH_VERSION and (
+            packaging.version.parse(_GOOGLE_AUTH_VERSION)
+            >= packaging.version.parse("1.25.0")
+        ):
+            scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+        else:
+            scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+        return scopes_kwargs
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
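+        # Each wrap_method() call below is roughly equivalent to this
+        # sketch: it decorates the bare transport callable with the
+        # configured retry/timeout defaults and the user-agent metadata
+        # from client_info (the 30-second override shown is purely
+        # illustrative):
+        #
+        #     wrapped = gapic_v1.method.wrap_method(
+        #         self.create_job, default_timeout=None, client_info=client_info,
+        #     )
+        #     response = wrapped(request, timeout=30.0)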
+ self._wrapped_methods = { + self.create_job: gapic_v1.method.wrap_method( + self.create_job, default_timeout=None, client_info=client_info, + ), + self.get_job: gapic_v1.method.wrap_method( + self.get_job, default_timeout=None, client_info=client_info, + ), + self.update_job: gapic_v1.method.wrap_method( + self.update_job, default_timeout=None, client_info=client_info, + ), + self.list_jobs: gapic_v1.method.wrap_method( + self.list_jobs, default_timeout=None, client_info=client_info, + ), + self.aggregated_list_jobs: gapic_v1.method.wrap_method( + self.aggregated_list_jobs, + default_timeout=None, + client_info=client_info, + ), + self.check_active_jobs: gapic_v1.method.wrap_method( + self.check_active_jobs, default_timeout=None, client_info=client_info, + ), + self.snapshot_job: gapic_v1.method.wrap_method( + self.snapshot_job, default_timeout=None, client_info=client_info, + ), + } + + @property + def create_job( + self, + ) -> Callable[[jobs.CreateJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: + raise NotImplementedError() + + @property + def get_job( + self, + ) -> Callable[[jobs.GetJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: + raise NotImplementedError() + + @property + def update_job( + self, + ) -> Callable[[jobs.UpdateJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: + raise NotImplementedError() + + @property + def list_jobs( + self, + ) -> Callable[ + [jobs.ListJobsRequest], + Union[jobs.ListJobsResponse, Awaitable[jobs.ListJobsResponse]], + ]: + raise NotImplementedError() + + @property + def aggregated_list_jobs( + self, + ) -> Callable[ + [jobs.ListJobsRequest], + Union[jobs.ListJobsResponse, Awaitable[jobs.ListJobsResponse]], + ]: + raise NotImplementedError() + + @property + def check_active_jobs( + self, + ) -> Callable[ + [jobs.CheckActiveJobsRequest], + Union[jobs.CheckActiveJobsResponse, Awaitable[jobs.CheckActiveJobsResponse]], + ]: + raise NotImplementedError() + + @property + def snapshot_job( + self, + ) -> Callable[ + [jobs.SnapshotJobRequest], + Union[snapshots.Snapshot, Awaitable[snapshots.Snapshot]], + ]: + raise NotImplementedError() + + +__all__ = ("JobsV1Beta3Transport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py new file mode 100644 index 000000000000..1d1be29b6e5c --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py @@ -0,0 +1,429 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import jobs
+from google.cloud.dataflow_v1beta3.types import snapshots
+from .base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO
+
+
+class JobsV1Beta3GrpcTransport(JobsV1Beta3Transport):
+    """gRPC backend transport for JobsV1Beta3.
+
+    Provides a method to create and modify Google Cloud Dataflow
+    jobs. A Job is a multi-stage computation graph run by the Cloud
+    Dataflow service.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "dataflow.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=True,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                credentials=self._credentials,
+                credentials_file=credentials_file,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "dataflow.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_job(self) -> Callable[[jobs.CreateJobRequest], jobs.Job]: + r"""Return a callable for the create job method over gRPC. + + Creates a Cloud Dataflow job. + + To create a job, we recommend using + ``projects.locations.jobs.create`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.create`` is not recommended, as your job + will always start in ``us-central1``. + + Returns: + Callable[[~.CreateJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_job" not in self._stubs: + self._stubs["create_job"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/CreateJob", + request_serializer=jobs.CreateJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["create_job"] + + @property + def get_job(self) -> Callable[[jobs.GetJobRequest], jobs.Job]: + r"""Return a callable for the get job method over gRPC. + + Gets the state of the specified Cloud Dataflow job. + + To get the state of a job, we recommend using + ``projects.locations.jobs.get`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.get`` is not recommended, as you can only + get the state of jobs that are running in ``us-central1``. + + Returns: + Callable[[~.GetJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_job" not in self._stubs: + self._stubs["get_job"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/GetJob", + request_serializer=jobs.GetJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["get_job"] + + @property + def update_job(self) -> Callable[[jobs.UpdateJobRequest], jobs.Job]: + r"""Return a callable for the update job method over gRPC. + + Updates the state of an existing Cloud Dataflow job. + + To update the state of an existing job, we recommend using + ``projects.locations.jobs.update`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.update`` is not recommended, as you can + only update the state of jobs that are running in + ``us-central1``. 
+ + Returns: + Callable[[~.UpdateJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_job" not in self._stubs: + self._stubs["update_job"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/UpdateJob", + request_serializer=jobs.UpdateJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["update_job"] + + @property + def list_jobs(self) -> Callable[[jobs.ListJobsRequest], jobs.ListJobsResponse]: + r"""Return a callable for the list jobs method over gRPC. + + List the jobs of a project. + + To list the jobs of a project in a region, we recommend using + ``projects.locations.jobs.list`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + To list all jobs across all regions, use + ``projects.jobs.aggregated``. Using ``projects.jobs.list`` is + not recommended, as you can only get the list of jobs that are + running in ``us-central1``. + + Returns: + Callable[[~.ListJobsRequest], + ~.ListJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_jobs" not in self._stubs: + self._stubs["list_jobs"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/ListJobs", + request_serializer=jobs.ListJobsRequest.serialize, + response_deserializer=jobs.ListJobsResponse.deserialize, + ) + return self._stubs["list_jobs"] + + @property + def aggregated_list_jobs( + self, + ) -> Callable[[jobs.ListJobsRequest], jobs.ListJobsResponse]: + r"""Return a callable for the aggregated list jobs method over gRPC. + + List the jobs of a project across all regions. + + Returns: + Callable[[~.ListJobsRequest], + ~.ListJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "aggregated_list_jobs" not in self._stubs: + self._stubs["aggregated_list_jobs"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/AggregatedListJobs", + request_serializer=jobs.ListJobsRequest.serialize, + response_deserializer=jobs.ListJobsResponse.deserialize, + ) + return self._stubs["aggregated_list_jobs"] + + @property + def check_active_jobs( + self, + ) -> Callable[[jobs.CheckActiveJobsRequest], jobs.CheckActiveJobsResponse]: + r"""Return a callable for the check active jobs method over gRPC. + + Check for existence of active jobs in the given + project across all regions. + + Returns: + Callable[[~.CheckActiveJobsRequest], + ~.CheckActiveJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
+ if "check_active_jobs" not in self._stubs: + self._stubs["check_active_jobs"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/CheckActiveJobs", + request_serializer=jobs.CheckActiveJobsRequest.serialize, + response_deserializer=jobs.CheckActiveJobsResponse.deserialize, + ) + return self._stubs["check_active_jobs"] + + @property + def snapshot_job(self) -> Callable[[jobs.SnapshotJobRequest], snapshots.Snapshot]: + r"""Return a callable for the snapshot job method over gRPC. + + Snapshot the state of a streaming job. + + Returns: + Callable[[~.SnapshotJobRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "snapshot_job" not in self._stubs: + self._stubs["snapshot_job"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/SnapshotJob", + request_serializer=jobs.SnapshotJobRequest.serialize, + response_deserializer=snapshots.Snapshot.deserialize, + ) + return self._stubs["snapshot_job"] + + +__all__ = ("JobsV1Beta3GrpcTransport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py new file mode 100644 index 000000000000..e559b89fafae --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py @@ -0,0 +1,438 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import snapshots +from .base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .grpc import JobsV1Beta3GrpcTransport + + +class JobsV1Beta3GrpcAsyncIOTransport(JobsV1Beta3Transport): + """gRPC AsyncIO backend transport for JobsV1Beta3. + + Provides a method to create and modify Google Cloud Dataflow + jobs. A Job is a multi-stage computation graph run by the Cloud + Dataflow service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
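+
+    Example (a minimal sketch, assuming the companion
+    ``JobsV1Beta3AsyncClient`` generated in this package, which selects
+    this transport via ``transport="grpc_asyncio"``; the ids are
+    placeholders)::
+
+        from google.cloud import dataflow_v1beta3
+
+        async def get_one_job():
+            client = dataflow_v1beta3.JobsV1Beta3AsyncClient()
+            request = dataflow_v1beta3.GetJobRequest(
+                project_id="my-project",
+                job_id="my-job-id",
+            )
+            return await client.get_job(request=request)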
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format.
It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_job(self) -> Callable[[jobs.CreateJobRequest], Awaitable[jobs.Job]]: + r"""Return a callable for the create job method over gRPC. + + Creates a Cloud Dataflow job.
+ + To create a job, we recommend using + ``projects.locations.jobs.create`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.create`` is not recommended, as your job + will always start in ``us-central1``. + + Returns: + Callable[[~.CreateJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_job" not in self._stubs: + self._stubs["create_job"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/CreateJob", + request_serializer=jobs.CreateJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["create_job"] + + @property + def get_job(self) -> Callable[[jobs.GetJobRequest], Awaitable[jobs.Job]]: + r"""Return a callable for the get job method over gRPC. + + Gets the state of the specified Cloud Dataflow job. + + To get the state of a job, we recommend using + ``projects.locations.jobs.get`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.get`` is not recommended, as you can only + get the state of jobs that are running in ``us-central1``. + + Returns: + Callable[[~.GetJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_job" not in self._stubs: + self._stubs["get_job"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/GetJob", + request_serializer=jobs.GetJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["get_job"] + + @property + def update_job(self) -> Callable[[jobs.UpdateJobRequest], Awaitable[jobs.Job]]: + r"""Return a callable for the update job method over gRPC. + + Updates the state of an existing Cloud Dataflow job. + + To update the state of an existing job, we recommend using + ``projects.locations.jobs.update`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.update`` is not recommended, as you can + only update the state of jobs that are running in + ``us-central1``. + + Returns: + Callable[[~.UpdateJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_job" not in self._stubs: + self._stubs["update_job"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/UpdateJob", + request_serializer=jobs.UpdateJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["update_job"] + + @property + def list_jobs( + self, + ) -> Callable[[jobs.ListJobsRequest], Awaitable[jobs.ListJobsResponse]]: + r"""Return a callable for the list jobs method over gRPC. + + List the jobs of a project. 
+ + To list the jobs of a project in a region, we recommend using + ``projects.locations.jobs.list`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + To list all jobs across all regions, use + ``projects.jobs.aggregated``. Using ``projects.jobs.list`` is + not recommended, as you can only get the list of jobs that are + running in ``us-central1``. + + Returns: + Callable[[~.ListJobsRequest], + Awaitable[~.ListJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_jobs" not in self._stubs: + self._stubs["list_jobs"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/ListJobs", + request_serializer=jobs.ListJobsRequest.serialize, + response_deserializer=jobs.ListJobsResponse.deserialize, + ) + return self._stubs["list_jobs"] + + @property + def aggregated_list_jobs( + self, + ) -> Callable[[jobs.ListJobsRequest], Awaitable[jobs.ListJobsResponse]]: + r"""Return a callable for the aggregated list jobs method over gRPC. + + List the jobs of a project across all regions. + + Returns: + Callable[[~.ListJobsRequest], + Awaitable[~.ListJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "aggregated_list_jobs" not in self._stubs: + self._stubs["aggregated_list_jobs"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/AggregatedListJobs", + request_serializer=jobs.ListJobsRequest.serialize, + response_deserializer=jobs.ListJobsResponse.deserialize, + ) + return self._stubs["aggregated_list_jobs"] + + @property + def check_active_jobs( + self, + ) -> Callable[ + [jobs.CheckActiveJobsRequest], Awaitable[jobs.CheckActiveJobsResponse] + ]: + r"""Return a callable for the check active jobs method over gRPC. + + Check for existence of active jobs in the given + project across all regions. + + Returns: + Callable[[~.CheckActiveJobsRequest], + Awaitable[~.CheckActiveJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "check_active_jobs" not in self._stubs: + self._stubs["check_active_jobs"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/CheckActiveJobs", + request_serializer=jobs.CheckActiveJobsRequest.serialize, + response_deserializer=jobs.CheckActiveJobsResponse.deserialize, + ) + return self._stubs["check_active_jobs"] + + @property + def snapshot_job( + self, + ) -> Callable[[jobs.SnapshotJobRequest], Awaitable[snapshots.Snapshot]]: + r"""Return a callable for the snapshot job method over gRPC. + + Snapshot the state of a streaming job. + + Returns: + Callable[[~.SnapshotJobRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "snapshot_job" not in self._stubs: + self._stubs["snapshot_job"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.JobsV1Beta3/SnapshotJob", + request_serializer=jobs.SnapshotJobRequest.serialize, + response_deserializer=snapshots.Snapshot.deserialize, + ) + return self._stubs["snapshot_job"] + + +__all__ = ("JobsV1Beta3GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/__init__.py new file mode 100644 index 000000000000..a4018f1bc019 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import MessagesV1Beta3Client +from .async_client import MessagesV1Beta3AsyncClient + +__all__ = ( + "MessagesV1Beta3Client", + "MessagesV1Beta3AsyncClient", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py new file mode 100644 index 000000000000..7897798f0f13 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py @@ -0,0 +1,236 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.types import messages +from .transports.base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport +from .client import MessagesV1Beta3Client + + +class MessagesV1Beta3AsyncClient: + """The Dataflow Messages API is used for monitoring the progress + of Dataflow jobs. + """ + + _client: MessagesV1Beta3Client + + DEFAULT_ENDPOINT = MessagesV1Beta3Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MessagesV1Beta3Client.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + MessagesV1Beta3Client.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MessagesV1Beta3Client.parse_common_billing_account_path + ) + common_folder_path = staticmethod(MessagesV1Beta3Client.common_folder_path) + parse_common_folder_path = staticmethod( + MessagesV1Beta3Client.parse_common_folder_path + ) + common_organization_path = staticmethod( + MessagesV1Beta3Client.common_organization_path + ) + parse_common_organization_path = staticmethod( + MessagesV1Beta3Client.parse_common_organization_path + ) + common_project_path = staticmethod(MessagesV1Beta3Client.common_project_path) + parse_common_project_path = staticmethod( + MessagesV1Beta3Client.parse_common_project_path + ) + common_location_path = staticmethod(MessagesV1Beta3Client.common_location_path) + parse_common_location_path = staticmethod( + MessagesV1Beta3Client.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MessagesV1Beta3AsyncClient: The constructed client. + """ + return MessagesV1Beta3Client.from_service_account_info.__func__(MessagesV1Beta3AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MessagesV1Beta3AsyncClient: The constructed client. + """ + return MessagesV1Beta3Client.from_service_account_file.__func__(MessagesV1Beta3AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MessagesV1Beta3Transport: + """Returns the transport used by the client instance. + + Returns: + MessagesV1Beta3Transport: The transport used by the client instance. 
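+
+        For example (an illustrative sketch; credentials are assumed to
+        come from the environment, and ``grpc_channel`` is only present
+        on the default "grpc_asyncio" transport)::
+
+            client = MessagesV1Beta3AsyncClient()
+            transport = client.transport
+            channel = transport.grpc_channel  # underlying aio.Channel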
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(MessagesV1Beta3Client).get_transport_class, type(MessagesV1Beta3Client) + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, MessagesV1Beta3Transport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the messages v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.MessagesV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MessagesV1Beta3Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_job_messages( + self, + request: messages.ListJobMessagesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobMessagesAsyncPager: + r"""Request the job status. + + To request the status of a job, we recommend using + ``projects.locations.jobs.messages.list`` with a [regional + endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.messages.list`` is not recommended, as you + can only request the status of jobs that are running in + ``us-central1``. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest`): + The request object. Request to list job messages. + Up to max_results messages will be returned in the time + range specified starting with the oldest messages first. + If no time range is specified the results will start + with the oldest message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata.
+ + Returns: + google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesAsyncPager: + Response to a request to list job + messages. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = messages.ListJobMessagesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_job_messages, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobMessagesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("MessagesV1Beta3AsyncClient",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py new file mode 100644 index 000000000000..37852401dd8f --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py @@ -0,0 +1,408 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.types import messages +from .transports.base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import MessagesV1Beta3GrpcTransport +from .transports.grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport + + +class MessagesV1Beta3ClientMeta(type): + """Metaclass for the MessagesV1Beta3 client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[MessagesV1Beta3Transport]] + _transport_registry["grpc"] = MessagesV1Beta3GrpcTransport + _transport_registry["grpc_asyncio"] = MessagesV1Beta3GrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[MessagesV1Beta3Transport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MessagesV1Beta3Client(metaclass=MessagesV1Beta3ClientMeta): + """The Dataflow Messages API is used for monitoring the progress + of Dataflow jobs. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MessagesV1Beta3Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MessagesV1Beta3Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MessagesV1Beta3Transport: + """Returns the transport used by the client instance. + + Returns: + MessagesV1Beta3Transport: The transport used by the client + instance.
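+
+        For example (illustrative only; the key path is a placeholder)::
+
+            client = MessagesV1Beta3Client.from_service_account_file(
+                "service-account.json"
+            )
+            assert isinstance(client.transport, MessagesV1Beta3Transport)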
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse an organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, MessagesV1Beta3Transport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the messages v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, MessagesV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value).
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MessagesV1Beta3Transport): + # transport is a MessagesV1Beta3Transport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_job_messages( + self, + request: messages.ListJobMessagesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobMessagesPager: + r"""Request the job status. 
+ + To request the status of a job, we recommend using + ``projects.locations.jobs.messages.list`` with a [regional + endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.messages.list`` is not recommended, as you + can only request the status of jobs that are running in + ``us-central1``. + + Args: + request (google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest): + The request object. Request to list job messages. + Up to max_results messages will be returned in the time + range specified starting with the oldest messages first. + If no time range is specified the results will start + with the oldest message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesPager: + Response to a request to list job + messages. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a messages.ListJobMessagesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, messages.ListJobMessagesRequest): + request = messages.ListJobMessagesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_job_messages] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobMessagesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("MessagesV1Beta3Client",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/pagers.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/pagers.py new file mode 100644 index 000000000000..681c4e9870dc --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) + +from google.cloud.dataflow_v1beta3.types import messages + + +class ListJobMessagesPager: + """A pager for iterating through ``list_job_messages`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``job_messages`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobMessages`` requests and continue to iterate + through the ``job_messages`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., messages.ListJobMessagesResponse], + request: messages.ListJobMessagesRequest, + response: messages.ListJobMessagesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = messages.ListJobMessagesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[messages.ListJobMessagesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[messages.JobMessage]: + for page in self.pages: + yield from page.job_messages + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListJobMessagesAsyncPager: + """A pager for iterating through ``list_job_messages`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``job_messages`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobMessages`` requests and continue to iterate + through the ``job_messages`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[messages.ListJobMessagesResponse]], + request: messages.ListJobMessagesRequest, + response: messages.ListJobMessagesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = messages.ListJobMessagesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[messages.ListJobMessagesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[messages.JobMessage]: + async def async_generator(): + async for page in self.pages: + for response in page.job_messages: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py new file mode 100644 index 000000000000..f8cb3f74224c --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MessagesV1Beta3Transport +from .grpc import MessagesV1Beta3GrpcTransport +from .grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[MessagesV1Beta3Transport]] +_transport_registry["grpc"] = MessagesV1Beta3GrpcTransport +_transport_registry["grpc_asyncio"] = MessagesV1Beta3GrpcAsyncIOTransport + +__all__ = ( + "MessagesV1Beta3Transport", + "MessagesV1Beta3GrpcTransport", + "MessagesV1Beta3GrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py new file mode 100644 index 000000000000..fc6e00bb25ec --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import messages + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class MessagesV1Beta3Transport(abc.ABC): + """Abstract transport class for MessagesV1Beta3.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/compute.readonly", + "https://www.googleapis.com/auth/userinfo.email", + ) + + DEFAULT_HOST: str = "dataflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
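+        # For example, "dataflow.googleapis.com" becomes
+        # "dataflow.googleapis.com:443", while a host that already
+        # includes a port (such as "localhost:8080") is left unchanged.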
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
+
+        # Save the scopes.
+        self._scopes = scopes or self.AUTH_SCOPES
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+
+        # If the credentials are service account credentials, then always try to use self-signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+    # TODO(busunkim): This method is in the base transport
+    # to avoid duplicating code across the transport classes. These functions
+    # should be deleted once the minimum required version of google-auth is increased.
+
+    # TODO: Remove this function once google-auth >= 1.25.0 is required
+    @classmethod
+    def _get_scopes_kwargs(
+        cls, host: str, scopes: Optional[Sequence[str]]
+    ) -> Dict[str, Optional[Sequence[str]]]:
+        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
+
+        scopes_kwargs = {}
+
+        if _GOOGLE_AUTH_VERSION and (
+            packaging.version.parse(_GOOGLE_AUTH_VERSION)
+            >= packaging.version.parse("1.25.0")
+        ):
+            scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+        else:
+            scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+        return scopes_kwargs
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.list_job_messages: gapic_v1.method.wrap_method(
+                self.list_job_messages, default_timeout=None, client_info=client_info,
+            ),
+        }
+
+    @property
+    def list_job_messages(
+        self,
+    ) -> Callable[
+        [messages.ListJobMessagesRequest],
+        Union[
+            messages.ListJobMessagesResponse,
+            Awaitable[messages.ListJobMessagesResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+
+__all__ = ("MessagesV1Beta3Transport",)
diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py
new file mode 100644
index 000000000000..0a8cf914fff3
--- /dev/null
+++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py
@@ -0,0 +1,261 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
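# (Editor's sketch, not part of the generated patch.) The base transport
# above gates its auth kwargs on the installed google-auth version: 1.25.0+
# accepts ``default_scopes`` while older releases only accept ``scopes``.
# A self-contained restatement of that decision using packaging.version:
import packaging.version

AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)


def scopes_kwargs(detected_version, scopes):
    if detected_version and (
        packaging.version.parse(detected_version) >= packaging.version.parse("1.25.0")
    ):
        return {"scopes": scopes, "default_scopes": AUTH_SCOPES}
    return {"scopes": scopes or AUTH_SCOPES}


assert scopes_kwargs("1.30.0", None) == {"scopes": None, "default_scopes": AUTH_SCOPES}
assert scopes_kwargs("1.20.0", None) == {"scopes": AUTH_SCOPES}
assert scopes_kwargs(None, None) == {"scopes": AUTH_SCOPES}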
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import messages
+from .base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO
+
+
+class MessagesV1Beta3GrpcTransport(MessagesV1Beta3Transport):
+    """gRPC backend transport for MessagesV1Beta3.
+
+    The Dataflow Messages API is used for monitoring the progress
+    of Dataflow jobs.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "dataflow.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=True,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                credentials=self._credentials,
+                credentials_file=credentials_file,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "dataflow.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def list_job_messages( + self, + ) -> Callable[[messages.ListJobMessagesRequest], messages.ListJobMessagesResponse]: + r"""Return a callable for the list job messages method over gRPC. + + Request the job status. + + To request the status of a job, we recommend using + ``projects.locations.jobs.messages.list`` with a [regional + endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.messages.list`` is not recommended, as you + can only request the status of jobs that are running in + ``us-central1``. + + Returns: + Callable[[~.ListJobMessagesRequest], + ~.ListJobMessagesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_job_messages" not in self._stubs: + self._stubs["list_job_messages"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.MessagesV1Beta3/ListJobMessages", + request_serializer=messages.ListJobMessagesRequest.serialize, + response_deserializer=messages.ListJobMessagesResponse.deserialize, + ) + return self._stubs["list_job_messages"] + + +__all__ = ("MessagesV1Beta3GrpcTransport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc_asyncio.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc_asyncio.py new file mode 100644 index 000000000000..ac98a09bfd09 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc_asyncio.py @@ -0,0 +1,266 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
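# (Editor's sketch, not part of the generated patch.) Channel setup in the
# gRPC transport above (and mirrored in the AsyncIO transport below) follows
# a strict precedence: an explicit channel wins and disables credentials;
# the deprecated api_mtls_endpoint path builds SSL credentials from
# client_cert_source or application-default certs; otherwise
# client_cert_source_for_mtls is consulted only when no
# ssl_channel_credentials were passed. The same ordering, with strings
# standing in for credential objects:
def channel_credentials(channel, api_mtls_endpoint, client_cert_source,
                        ssl_channel_credentials, client_cert_source_for_mtls):
    if channel:
        return "explicit-channel"
    if api_mtls_endpoint:
        return "mtls-from-callback" if client_cert_source else "mtls-from-adc"
    if client_cert_source_for_mtls and not ssl_channel_credentials:
        return "mtls-from-mtls-callback"
    return ssl_channel_credentials or "no-client-cert"


assert channel_credentials("ch", "ep", None, None, None) == "explicit-channel"
assert channel_credentials(None, "ep", lambda: (b"c", b"k"), None, None) == "mtls-from-callback"
assert channel_credentials(None, None, None, "ssl-creds", lambda: (b"c", b"k")) == "ssl-creds"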
+#
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers_async  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+import packaging.version
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import messages
+from .base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO
+from .grpc import MessagesV1Beta3GrpcTransport
+
+
+class MessagesV1Beta3GrpcAsyncIOTransport(MessagesV1Beta3Transport):
+    """gRPC AsyncIO backend transport for MessagesV1Beta3.
+
+    The Dataflow Messages API is used for monitoring the progress
+    of Dataflow jobs.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "dataflow.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "dataflow.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: aio.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id=None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_job_messages( + self, + ) -> Callable[ + [messages.ListJobMessagesRequest], Awaitable[messages.ListJobMessagesResponse] + ]: + r"""Return a callable for the list job messages method over gRPC. + + Request the job status. + + To request the status of a job, we recommend using + ``projects.locations.jobs.messages.list`` with a [regional + endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.messages.list`` is not recommended, as you + can only request the status of jobs that are running in + ``us-central1``. + + Returns: + Callable[[~.ListJobMessagesRequest], + Awaitable[~.ListJobMessagesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_job_messages" not in self._stubs: + self._stubs["list_job_messages"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.MessagesV1Beta3/ListJobMessages", + request_serializer=messages.ListJobMessagesRequest.serialize, + response_deserializer=messages.ListJobMessagesResponse.deserialize, + ) + return self._stubs["list_job_messages"] + + +__all__ = ("MessagesV1Beta3GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/__init__.py new file mode 100644 index 000000000000..dc4ea9ebd29d --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
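# (Editor's sketch, not part of the generated patch.) The list_job_messages
# property above creates its gRPC stub lazily and caches it in self._stubs,
# so repeated property access reuses one callable. The same memoization
# pattern in miniature:
class LazyStubs:
    def __init__(self):
        self._stubs = {}
        self.created = 0

    @property
    def list_job_messages(self):
        if "list_job_messages" not in self._stubs:
            self.created += 1
            self._stubs["list_job_messages"] = lambda request: ("ok", request)
        return self._stubs["list_job_messages"]


t = LazyStubs()
t.list_job_messages
t.list_job_messages
assert t.created == 1  # stub built exactly once, then reused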
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import MetricsV1Beta3Client +from .async_client import MetricsV1Beta3AsyncClient + +__all__ = ( + "MetricsV1Beta3Client", + "MetricsV1Beta3AsyncClient", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py new file mode 100644 index 000000000000..f665dddd02c0 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py @@ -0,0 +1,345 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.types import metrics +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport +from .client import MetricsV1Beta3Client + + +class MetricsV1Beta3AsyncClient: + """The Dataflow Metrics API lets you monitor the progress of + Dataflow jobs. 
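# (Editor's sketch, not part of the generated patch.) A hedged usage example
# for the async client defined below. It assumes the package-level exports
# created elsewhere in this patch and valid Application Default Credentials;
# "my-project" and the job id are placeholder values, and the request fields
# follow GetJobMetricsRequest as defined in this patch's types module.
import asyncio

from google.cloud import dataflow_v1beta3


async def show_job_metrics():
    client = dataflow_v1beta3.MetricsV1Beta3AsyncClient()
    job_metrics = await client.get_job_metrics(
        request={"project_id": "my-project", "job_id": "2021-06-25_00_00_00-42"}
    )
    print(job_metrics)


# asyncio.run(show_job_metrics())  # needs real credentials and a real job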
+ """ + + _client: MetricsV1Beta3Client + + DEFAULT_ENDPOINT = MetricsV1Beta3Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MetricsV1Beta3Client.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + MetricsV1Beta3Client.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MetricsV1Beta3Client.parse_common_billing_account_path + ) + common_folder_path = staticmethod(MetricsV1Beta3Client.common_folder_path) + parse_common_folder_path = staticmethod( + MetricsV1Beta3Client.parse_common_folder_path + ) + common_organization_path = staticmethod( + MetricsV1Beta3Client.common_organization_path + ) + parse_common_organization_path = staticmethod( + MetricsV1Beta3Client.parse_common_organization_path + ) + common_project_path = staticmethod(MetricsV1Beta3Client.common_project_path) + parse_common_project_path = staticmethod( + MetricsV1Beta3Client.parse_common_project_path + ) + common_location_path = staticmethod(MetricsV1Beta3Client.common_location_path) + parse_common_location_path = staticmethod( + MetricsV1Beta3Client.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsV1Beta3AsyncClient: The constructed client. + """ + return MetricsV1Beta3Client.from_service_account_info.__func__(MetricsV1Beta3AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsV1Beta3AsyncClient: The constructed client. + """ + return MetricsV1Beta3Client.from_service_account_file.__func__(MetricsV1Beta3AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MetricsV1Beta3Transport: + """Returns the transport used by the client instance. + + Returns: + MetricsV1Beta3Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(MetricsV1Beta3Client).get_transport_class, type(MetricsV1Beta3Client) + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, MetricsV1Beta3Transport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metrics v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.MetricsV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. 
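# (Editor's sketch, not part of the generated patch.) from_service_account_info
# above reuses the sync classmethod's body via ``__func__`` while passing the
# async class as ``cls``, so the constructed object is the async type. The
# same trick in isolation:
class SyncClient:
    def __init__(self, tag):
        self.tag = tag

    @classmethod
    def from_tag(cls, tag):
        return cls(tag)


class AsyncClient:
    def __init__(self, tag):
        self.tag = tag

    @classmethod
    def from_tag(cls, tag):
        # Invoke the sync implementation, but bind it to AsyncClient.
        return SyncClient.from_tag.__func__(AsyncClient, tag)


assert isinstance(AsyncClient.from_tag("x"), AsyncClient)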
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = MetricsV1Beta3Client(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+    async def get_job_metrics(
+        self,
+        request: metrics.GetJobMetricsRequest = None,
+        *,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> metrics.JobMetrics:
+        r"""Request the job status.
+
+        To request the status of a job, we recommend using
+        ``projects.locations.jobs.getMetrics`` with a [regional
+        endpoint]
+        (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
+        Using ``projects.jobs.getMetrics`` is not recommended, as you
+        can only request the status of jobs that are running in
+        ``us-central1``.
+
+        Args:
+            request (:class:`google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest`):
+                The request object. Request to get job metrics.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.types.JobMetrics:
+                JobMetrics contains a collection of
+                metrics describing the detailed progress
+                of a Dataflow job. Metrics correspond to
+                user-defined and system-defined metrics
+                in the job.
+
+                This resource captures only the most
+                recent values of each metric; time-
+                series data can be queried for them
+                (under the same metric names) from Cloud
+                Monitoring.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = metrics.GetJobMetricsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_job_metrics,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def get_job_execution_details(
+        self,
+        request: metrics.GetJobExecutionDetailsRequest = None,
+        *,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.GetJobExecutionDetailsAsyncPager:
+        r"""Request detailed information about the execution
+        status of the job.
+        EXPERIMENTAL. This API is subject to change or removal
+        without notice.
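# (Editor's sketch, not part of the generated patch.) wrap_method above layers
# a default timeout (and, in the real helper, retry and metadata handling)
# onto the raw transport callable. A much-reduced wrapper showing just the
# default-timeout shape:
import asyncio


def wrap(func, default_timeout=None):
    async def wrapped(request, *, timeout=None):
        # The caller's timeout, if given, overrides the wrapped default.
        effective = timeout if timeout is not None else default_timeout
        return await asyncio.wait_for(func(request), effective)

    return wrapped


async def echo(request):
    return request


rpc = wrap(echo, default_timeout=5.0)
assert asyncio.run(rpc("req")) == "req"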
+ + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest`): + The request object. Request to get job execution + details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsAsyncPager: + Information about the execution of a + job. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = metrics.GetJobExecutionDetailsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job_execution_details, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.GetJobExecutionDetailsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_stage_execution_details( + self, + request: metrics.GetStageExecutionDetailsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.GetStageExecutionDetailsAsyncPager: + r"""Request detailed information about the execution + status of a stage of the job. + + EXPERIMENTAL. This API is subject to change or removal + without notice. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest`): + The request object. Request to get information about a + particular execution stage of a job. Currently only + tracked for Batch jobs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsAsyncPager: + Information about the workers and + work items within a stage. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = metrics.GetStageExecutionDetailsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_stage_execution_details, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.GetStageExecutionDetailsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("MetricsV1Beta3AsyncClient",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py new file mode 100644 index 000000000000..0597bed173db --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py @@ -0,0 +1,523 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.types import metrics +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import MetricsV1Beta3GrpcTransport +from .transports.grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport + + +class MetricsV1Beta3ClientMeta(type): + """Metaclass for the MetricsV1Beta3 client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[MetricsV1Beta3Transport]] + _transport_registry["grpc"] = MetricsV1Beta3GrpcTransport + _transport_registry["grpc_asyncio"] = MetricsV1Beta3GrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[MetricsV1Beta3Transport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
+        return next(iter(cls._transport_registry.values()))
+
+
+class MetricsV1Beta3Client(metaclass=MetricsV1Beta3ClientMeta):
+    """The Dataflow Metrics API lets you monitor the progress of
+    Dataflow jobs.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "dataflow.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MetricsV1Beta3Client: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MetricsV1Beta3Client: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> MetricsV1Beta3Transport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            MetricsV1Beta3Transport: The transport used by the client
+                instance.
+        """
+        return self._transport
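# (Editor's sketch, not part of the generated patch.) A runnable mini-version
# of the endpoint rewrite above, demonstrating the expected conversions:
import re


def to_mtls(api_endpoint):
    if not api_endpoint:
        return api_endpoint
    m = re.compile(
        r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
    ).match(api_endpoint)
    name, mtls, sandbox, googledomain = m.groups()
    if mtls or not googledomain:
        return api_endpoint  # already mTLS, or not a googleapis domain
    if sandbox:
        return api_endpoint.replace(
            "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
        )
    return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")


assert to_mtls("dataflow.googleapis.com") == "dataflow.mtls.googleapis.com"
assert to_mtls("dataflow.sandbox.googleapis.com") == "dataflow.mtls.sandbox.googleapis.com"
assert to_mtls("dataflow.mtls.googleapis.com") == "dataflow.mtls.googleapis.com"
assert to_mtls("example.com") == "example.com"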
+    @staticmethod
+    def common_billing_account_path(billing_account: str,) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str,) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder,)
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str,) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization,)
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str,) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project,)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str,) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project, location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, MetricsV1Beta3Transport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the metrics v1 beta3 client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, MetricsV1Beta3Transport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value).
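# (Editor's sketch, not part of the generated patch.) The path helpers above
# are inverse pairs: build a fully-qualified resource name, then parse it
# back into its segments. A self-contained round-trip:
import re


def common_location_path(project, location):
    return "projects/{project}/locations/{location}".format(
        project=project, location=location
    )


def parse_common_location_path(path):
    m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
    return m.groupdict() if m else {}


p = common_location_path("my-project", "us-central1")
assert p == "projects/my-project/locations/us-central1"
assert parse_common_location_path(p) == {
    "project": "my-project",
    "location": "us-central1",
}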
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MetricsV1Beta3Transport): + # transport is a MetricsV1Beta3Transport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def get_job_metrics( + self, + request: metrics.GetJobMetricsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metrics.JobMetrics: + r"""Request the job status. 
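# (Editor's sketch, not part of the generated patch.) The endpoint decision
# above: an explicit api_endpoint always wins; otherwise the
# GOOGLE_API_USE_MTLS_ENDPOINT value picks between the regular and mTLS
# endpoints, with "auto" keyed on whether a client certificate is available.
# A compact restatement (ValueError stands in for MutualTLSChannelError):
DEFAULT = "dataflow.googleapis.com"
DEFAULT_MTLS = "dataflow.mtls.googleapis.com"


def pick_endpoint(explicit, use_mtls_env, is_mtls):
    if explicit is not None:
        return explicit
    if use_mtls_env == "never":
        return DEFAULT
    if use_mtls_env == "always":
        return DEFAULT_MTLS
    if use_mtls_env == "auto":
        return DEFAULT_MTLS if is_mtls else DEFAULT
    raise ValueError("Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value.")


assert pick_endpoint(None, "auto", False) == DEFAULT
assert pick_endpoint(None, "auto", True) == DEFAULT_MTLS
assert pick_endpoint("localhost:8080", "always", False) == "localhost:8080"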
+ + To request the status of a job, we recommend using + ``projects.locations.jobs.getMetrics`` with a [regional + endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.getMetrics`` is not recommended, as you + can only request the status of jobs that are running in + ``us-central1``. + + Args: + request (google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest): + The request object. Request to get job metrics. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.JobMetrics: + JobMetrics contains a collection of + metrics describing the detailed progress + of a Dataflow job. Metrics correspond to + user-defined and system-defined metrics + in the job. + + This resource captures only the most + recent values of each metric; time- + series data can be queried for them + (under the same metric names) from Cloud + Monitoring. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a metrics.GetJobMetricsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metrics.GetJobMetricsRequest): + request = metrics.GetJobMetricsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_metrics] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_job_execution_details( + self, + request: metrics.GetJobExecutionDetailsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.GetJobExecutionDetailsPager: + r"""Request detailed information about the execution + status of the job. + EXPERIMENTAL. This API is subject to change or removal + without notice. + + Args: + request (google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest): + The request object. Request to get job execution + details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsPager: + Information about the execution of a + job. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a metrics.GetJobExecutionDetailsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metrics.GetJobExecutionDetailsRequest): + request = metrics.GetJobExecutionDetailsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_job_execution_details + ] + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.GetJobExecutionDetailsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_stage_execution_details( + self, + request: metrics.GetStageExecutionDetailsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.GetStageExecutionDetailsPager: + r"""Request detailed information about the execution + status of a stage of the job. + + EXPERIMENTAL. This API is subject to change or removal + without notice. + + Args: + request (google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest): + The request object. Request to get information about a + particular execution stage of a job. Currently only + tracked for Batch jobs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsPager: + Information about the workers and + work items within a stage. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a metrics.GetStageExecutionDetailsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metrics.GetStageExecutionDetailsRequest): + request = metrics.GetStageExecutionDetailsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_stage_execution_details + ] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.GetStageExecutionDetailsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("MetricsV1Beta3Client",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/pagers.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/pagers.py new file mode 100644 index 000000000000..104291c4d35b --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/pagers.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
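# (Editor's sketch, not part of the generated patch.) A hedged usage example
# for the sync pager returned by get_job_execution_details above; the ids are
# placeholders, the dict request relies on the proto-plus coercion shown in
# the method body, and the call needs real credentials to succeed.
from google.cloud import dataflow_v1beta3


def print_stage_ids(project_id: str, job_id: str):
    client = dataflow_v1beta3.MetricsV1Beta3Client()
    pager = client.get_job_execution_details(
        request={"project_id": project_id, "job_id": job_id}
    )
    for stage in pager:  # the pager fetches further pages transparently
        print(stage.stage_id)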
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) + +from google.cloud.dataflow_v1beta3.types import metrics + + +class GetJobExecutionDetailsPager: + """A pager for iterating through ``get_job_execution_details`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.JobExecutionDetails` object, and + provides an ``__iter__`` method to iterate through its + ``stages`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``GetJobExecutionDetails`` requests and continue to iterate + through the ``stages`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.JobExecutionDetails` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., metrics.JobExecutionDetails], + request: metrics.GetJobExecutionDetailsRequest, + response: metrics.JobExecutionDetails, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.JobExecutionDetails): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metrics.GetJobExecutionDetailsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[metrics.JobExecutionDetails]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[metrics.StageSummary]: + for page in self.pages: + yield from page.stages + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class GetJobExecutionDetailsAsyncPager: + """A pager for iterating through ``get_job_execution_details`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.JobExecutionDetails` object, and + provides an ``__aiter__`` method to iterate through its + ``stages`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``GetJobExecutionDetails`` requests and continue to iterate + through the ``stages`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.JobExecutionDetails` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
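# (Editor's sketch, not part of the generated patch.) The pagers above
# delegate unknown attributes to the most recent response via __getattr__,
# while __iter__/__aiter__ flatten items across pages. A toy version of that
# delegation and flattening:
class Page:
    def __init__(self, stages, next_page_token=""):
        self.stages = stages
        self.next_page_token = next_page_token


class ToyPager:
    def __init__(self, method, first_page):
        self._method = method
        self._response = first_page

    def __getattr__(self, name):
        # Fall back to the latest response for unknown attributes.
        return getattr(self._response, name)

    @property
    def pages(self):
        yield self._response
        while self._response.next_page_token:
            self._response = self._method(self._response.next_page_token)
            yield self._response

    def __iter__(self):
        for page in self.pages:
            yield from page.stages


pages = {"": Page(["s1"], "tok"), "tok": Page(["s2"])}
pager = ToyPager(lambda token: pages[token], pages[""])
assert list(pager) == ["s1", "s2"]
assert pager.next_page_token == ""  # delegated to the latest response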
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[metrics.JobExecutionDetails]], + request: metrics.GetJobExecutionDetailsRequest, + response: metrics.JobExecutionDetails, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.JobExecutionDetails): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metrics.GetJobExecutionDetailsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[metrics.JobExecutionDetails]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[metrics.StageSummary]: + async def async_generator(): + async for page in self.pages: + for response in page.stages: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class GetStageExecutionDetailsPager: + """A pager for iterating through ``get_stage_execution_details`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.StageExecutionDetails` object, and + provides an ``__iter__`` method to iterate through its + ``workers`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``GetStageExecutionDetails`` requests and continue to iterate + through the ``workers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.StageExecutionDetails` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., metrics.StageExecutionDetails], + request: metrics.GetStageExecutionDetailsRequest, + response: metrics.StageExecutionDetails, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.StageExecutionDetails): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = metrics.GetStageExecutionDetailsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[metrics.StageExecutionDetails]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[metrics.WorkerDetails]: + for page in self.pages: + yield from page.workers + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class GetStageExecutionDetailsAsyncPager: + """A pager for iterating through ``get_stage_execution_details`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.StageExecutionDetails` object, and + provides an ``__aiter__`` method to iterate through its + ``workers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``GetStageExecutionDetails`` requests and continue to iterate + through the ``workers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.StageExecutionDetails` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[metrics.StageExecutionDetails]], + request: metrics.GetStageExecutionDetailsRequest, + response: metrics.StageExecutionDetails, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.StageExecutionDetails): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = metrics.GetStageExecutionDetailsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[metrics.StageExecutionDetails]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[metrics.WorkerDetails]: + async def async_generator(): + async for page in self.pages: + for response in page.workers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py new file mode 100644 index 000000000000..94b291dd6117 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MetricsV1Beta3Transport +from .grpc import MetricsV1Beta3GrpcTransport +from .grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsV1Beta3Transport]] +_transport_registry["grpc"] = MetricsV1Beta3GrpcTransport +_transport_registry["grpc_asyncio"] = MetricsV1Beta3GrpcAsyncIOTransport + +__all__ = ( + "MetricsV1Beta3Transport", + "MetricsV1Beta3GrpcTransport", + "MetricsV1Beta3GrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py new file mode 100644 index 000000000000..2c9ca68459c1 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py @@ -0,0 +1,203 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import metrics + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class MetricsV1Beta3Transport(abc.ABC): + """Abstract transport class for MetricsV1Beta3.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/compute.readonly", + "https://www.googleapis.com/auth/userinfo.email", + ) + + DEFAULT_HOST: str = "dataflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + + # If no credentials are provided, then determine the appropriate + # defaults. 
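A sketch of what the scopes plumbing that follows amounts to in practice (assuming google-auth >= 1.25.0, where the ``default_scopes`` keyword is understood): explicitly passed scopes win, otherwise AUTH_SCOPES is offered to google-auth as the default set.

import google.auth

# Application Default Credentials; google-auth falls back to the
# service's default scopes when the caller supplied none.
credentials, project_id = google.auth.default(
    scopes=None,
    default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
)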
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+
+ # If the credentials are service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # TODO(busunkim): This method is in the base transport
+ # to avoid duplicating code across the transport classes. These functions
+ # should be deleted once the minimum required version of google-auth is increased.
+
+ # TODO: Remove this function once google-auth >= 1.25.0 is required
+ @classmethod
+ def _get_scopes_kwargs(
+ cls, host: str, scopes: Optional[Sequence[str]]
+ ) -> Dict[str, Optional[Sequence[str]]]:
+ """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
+
+ scopes_kwargs = {}
+
+ if _GOOGLE_AUTH_VERSION and (
+ packaging.version.parse(_GOOGLE_AUTH_VERSION)
+ >= packaging.version.parse("1.25.0")
+ ):
+ scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+ else:
+ scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+ return scopes_kwargs
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.get_job_metrics: gapic_v1.method.wrap_method(
+ self.get_job_metrics, default_timeout=None, client_info=client_info,
+ ),
+ self.get_job_execution_details: gapic_v1.method.wrap_method(
+ self.get_job_execution_details,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_stage_execution_details: gapic_v1.method.wrap_method(
+ self.get_stage_execution_details,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ }
+
+ @property
+ def get_job_metrics(
+ self,
+ ) -> Callable[
+ [metrics.GetJobMetricsRequest],
+ Union[metrics.JobMetrics, Awaitable[metrics.JobMetrics]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def get_job_execution_details(
+ self,
+ ) -> Callable[
+ [metrics.GetJobExecutionDetailsRequest],
+ Union[metrics.JobExecutionDetails, Awaitable[metrics.JobExecutionDetails]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def get_stage_execution_details(
+ self,
+ ) -> Callable[
+ [metrics.GetStageExecutionDetailsRequest],
+ Union[metrics.StageExecutionDetails, Awaitable[metrics.StageExecutionDetails]],
+ ]:
+ raise NotImplementedError()
+
+
+__all__ = ("MetricsV1Beta3Transport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py new file mode 100644 index 000000000000..ffad4c33602f --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py @@ -0,0 +1,322 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance
with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import metrics
+from .base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO
+
+
+class MetricsV1Beta3GrpcTransport(MetricsV1Beta3Transport):
+ """gRPC backend transport for MetricsV1Beta3.
+
+ The Dataflow Metrics API lets you monitor the progress of
+ Dataflow jobs.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _stubs: Dict[str, Callable]
+
+ def __init__(
+ self,
+ *,
+ host: str = "dataflow.googleapis.com",
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service.
+ """
+ return self._grpc_channel
+
+ @property
+ def get_job_metrics(
+ self,
+ ) -> Callable[[metrics.GetJobMetricsRequest], metrics.JobMetrics]:
+ r"""Return a callable for the get job metrics method over gRPC.
+
+ Request the job status.
+
+ To request the status of a job, we recommend using
+ ``projects.locations.jobs.getMetrics`` with a [regional
+ endpoint]
+ (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
+ Using ``projects.jobs.getMetrics`` is not recommended, as you
+ can only request the status of jobs that are running in
+ ``us-central1``.
+
+ Returns:
+ Callable[[~.GetJobMetricsRequest],
+ ~.JobMetrics]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_job_metrics" not in self._stubs:
+ self._stubs["get_job_metrics"] = self.grpc_channel.unary_unary(
+ "/google.dataflow.v1beta3.MetricsV1Beta3/GetJobMetrics",
+ request_serializer=metrics.GetJobMetricsRequest.serialize,
+ response_deserializer=metrics.JobMetrics.deserialize,
+ )
+ return self._stubs["get_job_metrics"]
+
+ @property
+ def get_job_execution_details(
+ self,
+ ) -> Callable[[metrics.GetJobExecutionDetailsRequest], metrics.JobExecutionDetails]:
+ r"""Return a callable for the get job execution details method over gRPC.
+
+ Request detailed information about the execution
+ status of the job.
+ EXPERIMENTAL. This API is subject to change or removal
+ without notice.
+
+ Returns:
+ Callable[[~.GetJobExecutionDetailsRequest],
+ ~.JobExecutionDetails]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
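+ # A hedged aside (not part of the generated patch): because each
+ # property here returns a bare callable, the RPC can be exercised
+ # without the high-level client; ids below are placeholders.
+ #
+ # from google.cloud import dataflow_v1beta3
+ #
+ # client = dataflow_v1beta3.MetricsV1Beta3Client()
+ # get_metrics = client.transport.get_job_metrics
+ # job_metrics = get_metrics(
+ # dataflow_v1beta3.GetJobMetricsRequest(
+ # project_id="my-project", job_id="my-job-id"
+ # )
+ # )
+ # print(job_metrics.metric_time)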
+ if "get_job_execution_details" not in self._stubs: + self._stubs["get_job_execution_details"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.MetricsV1Beta3/GetJobExecutionDetails", + request_serializer=metrics.GetJobExecutionDetailsRequest.serialize, + response_deserializer=metrics.JobExecutionDetails.deserialize, + ) + return self._stubs["get_job_execution_details"] + + @property + def get_stage_execution_details( + self, + ) -> Callable[ + [metrics.GetStageExecutionDetailsRequest], metrics.StageExecutionDetails + ]: + r"""Return a callable for the get stage execution details method over gRPC. + + Request detailed information about the execution + status of a stage of the job. + + EXPERIMENTAL. This API is subject to change or removal + without notice. + + Returns: + Callable[[~.GetStageExecutionDetailsRequest], + ~.StageExecutionDetails]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_stage_execution_details" not in self._stubs: + self._stubs["get_stage_execution_details"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.MetricsV1Beta3/GetStageExecutionDetails", + request_serializer=metrics.GetStageExecutionDetailsRequest.serialize, + response_deserializer=metrics.StageExecutionDetails.deserialize, + ) + return self._stubs["get_stage_execution_details"] + + +__all__ = ("MetricsV1Beta3GrpcTransport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc_asyncio.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc_asyncio.py new file mode 100644 index 000000000000..2b69e91be9da --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc_asyncio.py @@ -0,0 +1,328 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataflow_v1beta3.types import metrics +from .base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .grpc import MetricsV1Beta3GrpcTransport + + +class MetricsV1Beta3GrpcAsyncIOTransport(MetricsV1Beta3Transport): + """gRPC AsyncIO backend transport for MetricsV1Beta3. + + The Dataflow Metrics API lets you monitor the progress of + Dataflow jobs. 
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "dataflow.googleapis.com",
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "dataflow.googleapis.com",
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=True,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+ + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_job_metrics( + self, + ) -> Callable[[metrics.GetJobMetricsRequest], Awaitable[metrics.JobMetrics]]: + r"""Return a callable for the get job metrics method over gRPC. + + Request the job status. + + To request the status of a job, we recommend using + ``projects.locations.jobs.getMetrics`` with a [regional + endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.getMetrics`` is not recommended, as you + can only request the status of jobs that are running in + ``us-central1``. + + Returns: + Callable[[~.GetJobMetricsRequest], + Awaitable[~.JobMetrics]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_job_metrics" not in self._stubs: + self._stubs["get_job_metrics"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.MetricsV1Beta3/GetJobMetrics", + request_serializer=metrics.GetJobMetricsRequest.serialize, + response_deserializer=metrics.JobMetrics.deserialize, + ) + return self._stubs["get_job_metrics"] + + @property + def get_job_execution_details( + self, + ) -> Callable[ + [metrics.GetJobExecutionDetailsRequest], Awaitable[metrics.JobExecutionDetails] + ]: + r"""Return a callable for the get job execution details method over gRPC. + + Request detailed information about the execution + status of the job. + EXPERIMENTAL. This API is subject to change or removal + without notice. + + Returns: + Callable[[~.GetJobExecutionDetailsRequest], + Awaitable[~.JobExecutionDetails]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_job_execution_details" not in self._stubs: + self._stubs["get_job_execution_details"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.MetricsV1Beta3/GetJobExecutionDetails", + request_serializer=metrics.GetJobExecutionDetailsRequest.serialize, + response_deserializer=metrics.JobExecutionDetails.deserialize, + ) + return self._stubs["get_job_execution_details"] + + @property + def get_stage_execution_details( + self, + ) -> Callable[ + [metrics.GetStageExecutionDetailsRequest], + Awaitable[metrics.StageExecutionDetails], + ]: + r"""Return a callable for the get stage execution details method over gRPC. + + Request detailed information about the execution + status of a stage of the job. + + EXPERIMENTAL. This API is subject to change or removal + without notice. + + Returns: + Callable[[~.GetStageExecutionDetailsRequest], + Awaitable[~.StageExecutionDetails]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
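+ # Hedged sketch (placeholders throughout, not part of the patch): a
+ # standalone AsyncIO channel built with the classmethod above can be
+ # injected via ``channel=``, in which case the transport skips its
+ # own credential resolution, as documented in __init__.
+ #
+ # from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports import (
+ # MetricsV1Beta3GrpcAsyncIOTransport,
+ # )
+ #
+ # channel = MetricsV1Beta3GrpcAsyncIOTransport.create_channel(
+ # "dataflow.googleapis.com", quota_project_id="my-billing-project"
+ # )
+ # transport = MetricsV1Beta3GrpcAsyncIOTransport(channel=channel)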
+ if "get_stage_execution_details" not in self._stubs: + self._stubs["get_stage_execution_details"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.MetricsV1Beta3/GetStageExecutionDetails", + request_serializer=metrics.GetStageExecutionDetailsRequest.serialize, + response_deserializer=metrics.StageExecutionDetails.deserialize, + ) + return self._stubs["get_stage_execution_details"] + + +__all__ = ("MetricsV1Beta3GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/__init__.py new file mode 100644 index 000000000000..23f0594f5163 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import SnapshotsV1Beta3Client +from .async_client import SnapshotsV1Beta3AsyncClient + +__all__ = ( + "SnapshotsV1Beta3Client", + "SnapshotsV1Beta3AsyncClient", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py new file mode 100644 index 000000000000..9249d52bac24 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import snapshots +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport +from .client import SnapshotsV1Beta3Client + + +class SnapshotsV1Beta3AsyncClient: + """Provides methods to manage snapshots of Google Cloud Dataflow + jobs. + """ + + _client: SnapshotsV1Beta3Client + + DEFAULT_ENDPOINT = SnapshotsV1Beta3Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SnapshotsV1Beta3Client.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + SnapshotsV1Beta3Client.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SnapshotsV1Beta3Client.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SnapshotsV1Beta3Client.common_folder_path) + parse_common_folder_path = staticmethod( + SnapshotsV1Beta3Client.parse_common_folder_path + ) + common_organization_path = staticmethod( + SnapshotsV1Beta3Client.common_organization_path + ) + parse_common_organization_path = staticmethod( + SnapshotsV1Beta3Client.parse_common_organization_path + ) + common_project_path = staticmethod(SnapshotsV1Beta3Client.common_project_path) + parse_common_project_path = staticmethod( + SnapshotsV1Beta3Client.parse_common_project_path + ) + common_location_path = staticmethod(SnapshotsV1Beta3Client.common_location_path) + parse_common_location_path = staticmethod( + SnapshotsV1Beta3Client.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SnapshotsV1Beta3AsyncClient: The constructed client. + """ + return SnapshotsV1Beta3Client.from_service_account_info.__func__(SnapshotsV1Beta3AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SnapshotsV1Beta3AsyncClient: The constructed client. + """ + return SnapshotsV1Beta3Client.from_service_account_file.__func__(SnapshotsV1Beta3AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SnapshotsV1Beta3Transport: + """Returns the transport used by the client instance. 
+
+ Returns:
+ SnapshotsV1Beta3Transport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(
+ type(SnapshotsV1Beta3Client).get_transport_class, type(SnapshotsV1Beta3Client)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: ga_credentials.Credentials = None,
+ transport: Union[str, SnapshotsV1Beta3Transport] = "grpc_asyncio",
+ client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the snapshots v1 beta3 client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.SnapshotsV1Beta3Transport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ self._client = SnapshotsV1Beta3Client(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def get_snapshot(
+ self,
+ request: snapshots.GetSnapshotRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> snapshots.Snapshot:
+ r"""Gets information about a snapshot.
+
+ Args:
+ request (:class:`google.cloud.dataflow_v1beta3.types.GetSnapshotRequest`):
+ The request object. Request to get information about a
+ snapshot
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.dataflow_v1beta3.types.Snapshot:
+ Represents a snapshot of a job.
+ """
+ # Create or coerce a protobuf request object.
+ request = snapshots.GetSnapshotRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_snapshot,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response + + async def delete_snapshot( + self, + request: snapshots.DeleteSnapshotRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.DeleteSnapshotResponse: + r"""Deletes a snapshot. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest`): + The request object. Request to delete a snapshot. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse: + Response from deleting a snapshot. + """ + # Create or coerce a protobuf request object. + request = snapshots.DeleteSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_snapshots( + self, + request: snapshots.ListSnapshotsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.ListSnapshotsResponse: + r"""Lists snapshots. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest`): + The request object. Request to list snapshots. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse: + List of snapshots. + """ + # Create or coerce a protobuf request object. + request = snapshots.ListSnapshotsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_snapshots, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("SnapshotsV1Beta3AsyncClient",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py new file mode 100644 index 000000000000..3358c934cd6f --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py @@ -0,0 +1,469 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
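A hedged end-to-end sketch of the snapshot RPCs defined above (not part of the generated patch; ids are placeholders, and ``list_snapshots`` returns a plain response rather than a pager):

import asyncio

from google.cloud import dataflow_v1beta3


async def main() -> None:
    client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient()
    listing = await client.list_snapshots(
        dataflow_v1beta3.ListSnapshotsRequest(
            project_id="my-project", location="us-central1",
        )
    )
    for snap in listing.snapshots:
        detail = await client.get_snapshot(
            dataflow_v1beta3.GetSnapshotRequest(
                project_id="my-project",
                location="us-central1",
                snapshot_id=snap.id,
            )
        )
        print(detail.id, detail.state)


asyncio.run(main())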
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import snapshots
+from google.protobuf import duration_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from .transports.base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO
+from .transports.grpc import SnapshotsV1Beta3GrpcTransport
+from .transports.grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport
+
+
+class SnapshotsV1Beta3ClientMeta(type):
+ """Metaclass for the SnapshotsV1Beta3 client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = (
+ OrderedDict()
+ ) # type: Dict[str, Type[SnapshotsV1Beta3Transport]]
+ _transport_registry["grpc"] = SnapshotsV1Beta3GrpcTransport
+ _transport_registry["grpc_asyncio"] = SnapshotsV1Beta3GrpcAsyncIOTransport
+
+ def get_transport_class(cls, label: str = None,) -> Type[SnapshotsV1Beta3Transport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class SnapshotsV1Beta3Client(metaclass=SnapshotsV1Beta3ClientMeta):
+ """Provides methods to manage snapshots of Google Cloud Dataflow
+ jobs.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SnapshotsV1Beta3Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SnapshotsV1Beta3Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SnapshotsV1Beta3Transport: + """Returns the transport used by the client instance. + + Returns: + SnapshotsV1Beta3Transport: The transport used by the client + instance. 
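A sketch of the endpoint rewrite implemented by ``_get_default_mtls_endpoint`` above (the method is private; calling it directly here is for illustration only, and the hostnames are examples):

from google.cloud.dataflow_v1beta3 import SnapshotsV1Beta3Client

convert = SnapshotsV1Beta3Client._get_default_mtls_endpoint
print(convert("dataflow.googleapis.com"))          # dataflow.mtls.googleapis.com
print(convert("dataflow.sandbox.googleapis.com"))  # dataflow.mtls.sandbox.googleapis.com
print(convert("dataflow.mtls.googleapis.com"))     # unchanged: already an mTLS endpoint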
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SnapshotsV1Beta3Transport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the snapshots v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SnapshotsV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). 
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SnapshotsV1Beta3Transport): + # transport is a SnapshotsV1Beta3Transport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def get_snapshot( + self, + request: snapshots.GetSnapshotRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.Snapshot: + r"""Gets information about a snapshot. + + Args: + request (google.cloud.dataflow_v1beta3.types.GetSnapshotRequest): + The request object. 
Request to get information about a + snapshot + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Snapshot: + Represents a snapshot of a job. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a snapshots.GetSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, snapshots.GetSnapshotRequest): + request = snapshots.GetSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_snapshot] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_snapshot( + self, + request: snapshots.DeleteSnapshotRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.DeleteSnapshotResponse: + r"""Deletes a snapshot. + + Args: + request (google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest): + The request object. Request to delete a snapshot. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse: + Response from deleting a snapshot. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a snapshots.DeleteSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, snapshots.DeleteSnapshotRequest): + request = snapshots.DeleteSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_snapshot] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_snapshots( + self, + request: snapshots.ListSnapshotsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.ListSnapshotsResponse: + r"""Lists snapshots. + + Args: + request (google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest): + The request object. Request to list snapshots. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse: + List of snapshots. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a snapshots.ListSnapshotsRequest. 
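Taken together, the three snapshot RPCs generated here share one shape: coerce the request into a proto, look up the wrapped method on the transport, invoke it. A minimal usage sketch of the synchronous client, assuming the top-level package re-exports the client (as these generated `__init__` modules typically do), Application Default Credentials are configured, and all project/region/snapshot IDs are placeholders:

```python
from google.cloud.dataflow_v1beta3 import SnapshotsV1Beta3Client
from google.cloud.dataflow_v1beta3.types import snapshots

# Credentials are resolved from the environment (ADC); the IDs below
# are hypothetical and only illustrate the request field names.
client = SnapshotsV1Beta3Client()

listing = client.list_snapshots(
    snapshots.ListSnapshotsRequest(project_id="my-project", location="us-central1")
)
for snapshot in listing.snapshots:
    print(snapshot.id, snapshot.state)

one = client.get_snapshot(
    snapshots.GetSnapshotRequest(
        project_id="my-project", location="us-central1", snapshot_id="snap-123"
    )
)
print(one.creation_time)
```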
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, snapshots.ListSnapshotsRequest): + request = snapshots.ListSnapshotsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_snapshots] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("SnapshotsV1Beta3Client",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py new file mode 100644 index 000000000000..d79a29536ffd --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SnapshotsV1Beta3Transport +from .grpc import SnapshotsV1Beta3GrpcTransport +from .grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SnapshotsV1Beta3Transport]] +_transport_registry["grpc"] = SnapshotsV1Beta3GrpcTransport +_transport_registry["grpc_asyncio"] = SnapshotsV1Beta3GrpcAsyncIOTransport + +__all__ = ( + "SnapshotsV1Beta3Transport", + "SnapshotsV1Beta3GrpcTransport", + "SnapshotsV1Beta3GrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py new file mode 100644 index 000000000000..fd55eb474483 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py @@ -0,0 +1,204 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import snapshots + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class SnapshotsV1Beta3Transport(abc.ABC): + """Abstract transport class for SnapshotsV1Beta3.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/compute.readonly", + "https://www.googleapis.com/auth/userinfo.email", + ) + + DEFAULT_HOST: str = "dataflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + + # If no credentials are provided, then determine the appropriate + # defaults. 
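The constructor body below mixes credential resolution with a google-auth version gate (see `_get_scopes_kwargs` further down). A standalone sketch of that gate, under the same assumption the transport encodes, namely that `default_scopes` only exists in google-auth >= 1.25.0; the helper name is invented for illustration:

```python
import packaging.version

def scopes_kwargs_for(auth_version, scopes, default_scopes):
    # google-auth >= 1.25.0 understands both ``scopes`` and
    # ``default_scopes``; older releases only accept ``scopes``,
    # so the defaults are substituted in directly.
    if packaging.version.parse(auth_version) >= packaging.version.parse("1.25.0"):
        return {"scopes": scopes, "default_scopes": default_scopes}
    return {"scopes": scopes or default_scopes}

assert scopes_kwargs_for("1.30.0", None, ("scope-a",)) == {
    "scopes": None,
    "default_scopes": ("scope-a",),
}
assert scopes_kwargs_for("1.20.0", None, ("scope-a",)) == {"scopes": ("scope-a",)}
```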
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required version of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_snapshot: gapic_v1.method.wrap_method( + self.get_snapshot, default_timeout=None, client_info=client_info, + ), + self.delete_snapshot: gapic_v1.method.wrap_method( + self.delete_snapshot, default_timeout=None, client_info=client_info, + ), + self.list_snapshots: gapic_v1.method.wrap_method( + self.list_snapshots, default_timeout=None, client_info=client_info, + ), + } + + @property + def get_snapshot( + self, + ) -> Callable[ + [snapshots.GetSnapshotRequest], + Union[snapshots.Snapshot, Awaitable[snapshots.Snapshot]], + ]: + raise NotImplementedError() + + @property + def delete_snapshot( + self, + ) -> Callable[ + [snapshots.DeleteSnapshotRequest], + Union[ + snapshots.DeleteSnapshotResponse, + Awaitable[snapshots.DeleteSnapshotResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_snapshots( + self, + ) -> Callable[ + [snapshots.ListSnapshotsRequest], + Union[ + snapshots.ListSnapshotsResponse, Awaitable[snapshots.ListSnapshotsResponse] + ], + ]: + raise NotImplementedError() + + +__all__ = ("SnapshotsV1Beta3Transport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py new file mode 100644 index 000000000000..5a46c9559570 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dataflow_v1beta3.types import snapshots +from .base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO + + +class SnapshotsV1Beta3GrpcTransport(SnapshotsV1Beta3Transport): + """gRPC backend transport for SnapshotsV1Beta3. + + Provides methods to manage snapshots of Google Cloud Dataflow + jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided.
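As the argument docs above note, a pre-built `channel` short-circuits all credential handling. A sketch of wiring one in; the endpoint is the service default, but the wiring itself is illustrative (ordinarily the client builds and authenticates the channel on its own):

```python
import grpc

from google.cloud.dataflow_v1beta3 import SnapshotsV1Beta3Client
from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.grpc import (
    SnapshotsV1Beta3GrpcTransport,
)

# When a channel is supplied, credentials/credentials_file are ignored,
# so this channel must already carry whatever auth the service needs.
channel = grpc.secure_channel(
    "dataflow.googleapis.com:443", grpc.ssl_channel_credentials()
)
transport = SnapshotsV1Beta3GrpcTransport(channel=channel)
client = SnapshotsV1Beta3Client(transport=transport)
```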
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def get_snapshot( + self, + ) -> Callable[[snapshots.GetSnapshotRequest], snapshots.Snapshot]: + r"""Return a callable for the get snapshot method over gRPC. + + Gets information about a snapshot. + + Returns: + Callable[[~.GetSnapshotRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_snapshot" not in self._stubs: + self._stubs["get_snapshot"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.SnapshotsV1Beta3/GetSnapshot", + request_serializer=snapshots.GetSnapshotRequest.serialize, + response_deserializer=snapshots.Snapshot.deserialize, + ) + return self._stubs["get_snapshot"] + + @property + def delete_snapshot( + self, + ) -> Callable[[snapshots.DeleteSnapshotRequest], snapshots.DeleteSnapshotResponse]: + r"""Return a callable for the delete snapshot method over gRPC. + + Deletes a snapshot. + + Returns: + Callable[[~.DeleteSnapshotRequest], + ~.DeleteSnapshotResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_snapshot" not in self._stubs: + self._stubs["delete_snapshot"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.SnapshotsV1Beta3/DeleteSnapshot", + request_serializer=snapshots.DeleteSnapshotRequest.serialize, + response_deserializer=snapshots.DeleteSnapshotResponse.deserialize, + ) + return self._stubs["delete_snapshot"] + + @property + def list_snapshots( + self, + ) -> Callable[[snapshots.ListSnapshotsRequest], snapshots.ListSnapshotsResponse]: + r"""Return a callable for the list snapshots method over gRPC. + + Lists snapshots. + + Returns: + Callable[[~.ListSnapshotsRequest], + ~.ListSnapshotsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
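Each of these stub properties follows the same cache-on-first-use shape. Distilled out of context into a sketch (the class name is invented, and the serializers are elided since `unary_unary` accepts bare bytes by default):

```python
import grpc

class LazyStubCache:
    """Distilled sketch of the stub-caching pattern used by the
    properties above; not part of the generated file."""

    def __init__(self, channel: grpc.Channel):
        self._channel = channel
        self._stubs = {}

    @property
    def list_snapshots(self):
        # Build the callable once on first access, then reuse it.
        if "list_snapshots" not in self._stubs:
            self._stubs["list_snapshots"] = self._channel.unary_unary(
                "/google.dataflow.v1beta3.SnapshotsV1Beta3/ListSnapshots"
            )
        return self._stubs["list_snapshots"]
```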
+ if "list_snapshots" not in self._stubs: + self._stubs["list_snapshots"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.SnapshotsV1Beta3/ListSnapshots", + request_serializer=snapshots.ListSnapshotsRequest.serialize, + response_deserializer=snapshots.ListSnapshotsResponse.deserialize, + ) + return self._stubs["list_snapshots"] + + +__all__ = ("SnapshotsV1Beta3GrpcTransport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc_asyncio.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc_asyncio.py new file mode 100644 index 000000000000..4204a7631fe0 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc_asyncio.py @@ -0,0 +1,312 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataflow_v1beta3.types import snapshots +from .base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .grpc import SnapshotsV1Beta3GrpcTransport + + +class SnapshotsV1Beta3GrpcAsyncIOTransport(SnapshotsV1Beta3Transport): + """gRPC AsyncIO backend transport for SnapshotsV1Beta3. + + Provides methods to manage snapshots of Google Cloud Dataflow + jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library.
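A sketch of driving this AsyncIO transport directly; ordinarily the generated async client wraps it, but the transport is usable on its own. The project ID is a placeholder and Application Default Credentials are assumed:

```python
import asyncio

from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.grpc_asyncio import (
    SnapshotsV1Beta3GrpcAsyncIOTransport,
)
from google.cloud.dataflow_v1beta3.types import snapshots

async def main():
    # With no channel supplied, the transport builds its own aio
    # channel from default credentials.
    transport = SnapshotsV1Beta3GrpcAsyncIOTransport()
    try:
        response = await transport.list_snapshots(
            snapshots.ListSnapshotsRequest(project_id="my-project")
        )
        for snapshot in response.snapshots:
            print(snapshot.id)
    finally:
        await transport.grpc_channel.close()

asyncio.run(main())
```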
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_snapshot( + self, + ) -> Callable[[snapshots.GetSnapshotRequest], Awaitable[snapshots.Snapshot]]: + r"""Return a callable for the get snapshot method over gRPC. + + Gets information about a snapshot. + + Returns: + Callable[[~.GetSnapshotRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_snapshot" not in self._stubs: + self._stubs["get_snapshot"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.SnapshotsV1Beta3/GetSnapshot", + request_serializer=snapshots.GetSnapshotRequest.serialize, + response_deserializer=snapshots.Snapshot.deserialize, + ) + return self._stubs["get_snapshot"] + + @property + def delete_snapshot( + self, + ) -> Callable[ + [snapshots.DeleteSnapshotRequest], Awaitable[snapshots.DeleteSnapshotResponse] + ]: + r"""Return a callable for the delete snapshot method over gRPC.
+ + Deletes a snapshot. + + Returns: + Callable[[~.DeleteSnapshotRequest], + Awaitable[~.DeleteSnapshotResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_snapshot" not in self._stubs: + self._stubs["delete_snapshot"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.SnapshotsV1Beta3/DeleteSnapshot", + request_serializer=snapshots.DeleteSnapshotRequest.serialize, + response_deserializer=snapshots.DeleteSnapshotResponse.deserialize, + ) + return self._stubs["delete_snapshot"] + + @property + def list_snapshots( + self, + ) -> Callable[ + [snapshots.ListSnapshotsRequest], Awaitable[snapshots.ListSnapshotsResponse] + ]: + r"""Return a callable for the list snapshots method over gRPC. + + Lists snapshots. + + Returns: + Callable[[~.ListSnapshotsRequest], + Awaitable[~.ListSnapshotsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_snapshots" not in self._stubs: + self._stubs["list_snapshots"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.SnapshotsV1Beta3/ListSnapshots", + request_serializer=snapshots.ListSnapshotsRequest.serialize, + response_deserializer=snapshots.ListSnapshotsResponse.deserialize, + ) + return self._stubs["list_snapshots"] + + +__all__ = ("SnapshotsV1Beta3GrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/__init__.py new file mode 100644 index 000000000000..7cd41bf2e954 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
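The templates service that follows has the same layout as the snapshots service above. Assuming the package-level `__init__` re-exports these names (as these generated packages typically do), both import paths below resolve to the same classes:

```python
from google.cloud.dataflow_v1beta3.services.templates_service import (
    TemplatesServiceAsyncClient,
    TemplatesServiceClient,
)
from google.cloud import dataflow_v1beta3

# Hypothetical check that the top-level package re-exports the client.
assert dataflow_v1beta3.TemplatesServiceClient is TemplatesServiceClient
```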
+# +from .client import TemplatesServiceClient +from .async_client import TemplatesServiceAsyncClient + +__all__ = ( + "TemplatesServiceClient", + "TemplatesServiceAsyncClient", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py new file mode 100644 index 000000000000..6f1fd28f953f --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py @@ -0,0 +1,304 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport +from .client import TemplatesServiceClient + + +class TemplatesServiceAsyncClient: + """Provides a method to create Cloud Dataflow jobs from + templates. 
+ """ + + _client: TemplatesServiceClient + + DEFAULT_ENDPOINT = TemplatesServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TemplatesServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + TemplatesServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TemplatesServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(TemplatesServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + TemplatesServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + TemplatesServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + TemplatesServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(TemplatesServiceClient.common_project_path) + parse_common_project_path = staticmethod( + TemplatesServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(TemplatesServiceClient.common_location_path) + parse_common_location_path = staticmethod( + TemplatesServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TemplatesServiceAsyncClient: The constructed client. + """ + return TemplatesServiceClient.from_service_account_info.__func__(TemplatesServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TemplatesServiceAsyncClient: The constructed client. + """ + return TemplatesServiceClient.from_service_account_file.__func__(TemplatesServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TemplatesServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TemplatesServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(TemplatesServiceClient).get_transport_class, type(TemplatesServiceClient) + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, TemplatesServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the templates service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.TemplatesServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = TemplatesServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_job_from_template( + self, + request: templates.CreateJobFromTemplateRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Creates a Cloud Dataflow job from a template. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest`): + The request object. A request to create a Cloud Dataflow + job from a template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. nextID: 26 + + """ + # Create or coerce a protobuf request object. + request = templates.CreateJobFromTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job_from_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def launch_template( + self, + request: templates.LaunchTemplateRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.LaunchTemplateResponse: + r"""Launch a template. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.LaunchTemplateRequest`): + The request object. A request to launch a template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.LaunchTemplateResponse: + Response to the request to launch a + template. + + """ + # Create or coerce a protobuf request object.
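`create_job_from_template` above, like `launch_template` and `get_template`, reduces to the same await-the-wrapped-RPC shape. A usage sketch; the project, bucket, and job name are placeholders, and the public word-count template path is the one commonly used for smoke tests:

```python
import asyncio

from google.cloud.dataflow_v1beta3 import TemplatesServiceAsyncClient
from google.cloud.dataflow_v1beta3.types import templates

async def main():
    client = TemplatesServiceAsyncClient()
    job = await client.create_job_from_template(
        templates.CreateJobFromTemplateRequest(
            project_id="my-project",
            location="us-central1",
            job_name="wordcount-from-template",
            gcs_path="gs://dataflow-templates/latest/Word_Count",
            parameters={
                "inputFile": "gs://dataflow-samples/shakespeare/kinglear.txt",
                "output": "gs://my-bucket/wordcount/output",
            },
        )
    )
    print(job.id, job.current_state)

asyncio.run(main())
```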
+ request = templates.LaunchTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.launch_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_template( + self, + request: templates.GetTemplateRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.GetTemplateResponse: + r"""Get the template associated with a template. + + Args: + request (:class:`google.cloud.dataflow_v1beta3.types.GetTemplateRequest`): + The request object. A request to retrieve a Cloud + Dataflow job template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.GetTemplateResponse: + The response to a GetTemplate + request. + + """ + # Create or coerce a protobuf request object. + request = templates.GetTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("TemplatesServiceAsyncClient",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py new file mode 100644 index 000000000000..354a80ac6db0 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py @@ -0,0 +1,478 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
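The metaclass defined just below is what makes `get_transport_class` available on the client class itself: an explicit label picks a registered transport, while omitting it falls back to the first registry entry ("grpc"). A small sketch of that lookup:

```python
from google.cloud.dataflow_v1beta3.services.templates_service.client import (
    TemplatesServiceClient,
)

grpc_cls = TemplatesServiceClient.get_transport_class("grpc")
default_cls = TemplatesServiceClient.get_transport_class()

# "grpc" is registered first, so it doubles as the default.
assert default_cls is grpc_cls
```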
+#
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import environment
+from google.cloud.dataflow_v1beta3.types import jobs
+from google.cloud.dataflow_v1beta3.types import templates
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.rpc import status_pb2  # type: ignore
+from .transports.base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import TemplatesServiceGrpcTransport
+from .transports.grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport
+
+
+class TemplatesServiceClientMeta(type):
+    """Metaclass for the TemplatesService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[TemplatesServiceTransport]]
+    _transport_registry["grpc"] = TemplatesServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = TemplatesServiceGrpcAsyncIOTransport
+
+    def get_transport_class(cls, label: str = None,) -> Type[TemplatesServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class TemplatesServiceClient(metaclass=TemplatesServiceClientMeta):
+    """Provides a method to create Cloud Dataflow jobs from
+    templates.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts an api endpoint to an mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TemplatesServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TemplatesServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TemplatesServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TemplatesServiceTransport: The transport used by the client + instance. 
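+
+        For example (a sketch, assuming ``client`` is an already-constructed
+        ``TemplatesServiceClient``)::
+
+            channel = client.transport.grpc_channel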
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, TemplatesServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the templates service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TemplatesServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). 
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TemplatesServiceTransport): + # transport is a TemplatesServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def create_job_from_template( + self, + request: templates.CreateJobFromTemplateRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Creates a Cloud Dataflow job from a template. 
+
+        Args:
+            request (google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest):
+                The request object. A request to create a Cloud Dataflow
+                job from a template.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.types.Job:
+                Defines a job to be run by the Cloud
+                Dataflow service.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a templates.CreateJobFromTemplateRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, templates.CreateJobFromTemplateRequest):
+            request = templates.CreateJobFromTemplateRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_job_from_template]
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def launch_template(
+        self,
+        request: templates.LaunchTemplateRequest = None,
+        *,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> templates.LaunchTemplateResponse:
+        r"""Launch a template.
+
+        Args:
+            request (google.cloud.dataflow_v1beta3.types.LaunchTemplateRequest):
+                The request object. A request to launch a template.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.types.LaunchTemplateResponse:
+                Response to the request to launch a
+                template.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a templates.LaunchTemplateRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, templates.LaunchTemplateRequest):
+            request = templates.LaunchTemplateRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.launch_template]
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def get_template(
+        self,
+        request: templates.GetTemplateRequest = None,
+        *,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> templates.GetTemplateResponse:
+        r"""Get the metadata associated with a template.
+
+        Args:
+            request (google.cloud.dataflow_v1beta3.types.GetTemplateRequest):
+                The request object. A request to retrieve a Cloud
+                Dataflow job template.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+ + Returns: + google.cloud.dataflow_v1beta3.types.GetTemplateResponse: + The response to a GetTemplate + request. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a templates.GetTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, templates.GetTemplateRequest): + request = templates.GetTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_template] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("TemplatesServiceClient",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py new file mode 100644 index 000000000000..f4ff50c64722 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TemplatesServiceTransport +from .grpc import TemplatesServiceGrpcTransport +from .grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[TemplatesServiceTransport]] +_transport_registry["grpc"] = TemplatesServiceGrpcTransport +_transport_registry["grpc_asyncio"] = TemplatesServiceGrpcAsyncIOTransport + +__all__ = ( + "TemplatesServiceTransport", + "TemplatesServiceGrpcTransport", + "TemplatesServiceGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py new file mode 100644 index 000000000000..73c2788e2a7d --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py @@ -0,0 +1,204 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataflow",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class TemplatesServiceTransport(abc.ABC): + """Abstract transport class for TemplatesService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/compute.readonly", + "https://www.googleapis.com/auth/userinfo.email", + ) + + DEFAULT_HOST: str = "dataflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. 
Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
+
+        # Save the scopes.
+        self._scopes = scopes or self.AUTH_SCOPES
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+
+        # If the credentials are service account credentials, then always try to use a self-signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+    # TODO(busunkim): This method is in the base transport
+    # to avoid duplicating code across the transport classes. These functions
+    # should be deleted once the minimum required version of google-auth is increased.
+
+    # TODO: Remove this function once google-auth >= 1.25.0 is required
+    @classmethod
+    def _get_scopes_kwargs(
+        cls, host: str, scopes: Optional[Sequence[str]]
+    ) -> Dict[str, Optional[Sequence[str]]]:
+        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version."""
+
+        scopes_kwargs = {}
+
+        if _GOOGLE_AUTH_VERSION and (
+            packaging.version.parse(_GOOGLE_AUTH_VERSION)
+            >= packaging.version.parse("1.25.0")
+        ):
+            scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+        else:
+            scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+        return scopes_kwargs
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
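+        # Each RPC is wrapped exactly once per transport instance, so the
+        # retry/timeout defaults below (no default timeout; callers may
+        # still pass their own per call) apply to every call made through it.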
+ self._wrapped_methods = { + self.create_job_from_template: gapic_v1.method.wrap_method( + self.create_job_from_template, + default_timeout=None, + client_info=client_info, + ), + self.launch_template: gapic_v1.method.wrap_method( + self.launch_template, default_timeout=None, client_info=client_info, + ), + self.get_template: gapic_v1.method.wrap_method( + self.get_template, default_timeout=None, client_info=client_info, + ), + } + + @property + def create_job_from_template( + self, + ) -> Callable[ + [templates.CreateJobFromTemplateRequest], Union[jobs.Job, Awaitable[jobs.Job]] + ]: + raise NotImplementedError() + + @property + def launch_template( + self, + ) -> Callable[ + [templates.LaunchTemplateRequest], + Union[ + templates.LaunchTemplateResponse, + Awaitable[templates.LaunchTemplateResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_template( + self, + ) -> Callable[ + [templates.GetTemplateRequest], + Union[templates.GetTemplateResponse, Awaitable[templates.GetTemplateResponse]], + ]: + raise NotImplementedError() + + +__all__ = ("TemplatesServiceTransport",) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py new file mode 100644 index 000000000000..406019880760 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py @@ -0,0 +1,306 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates +from .base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO + + +class TemplatesServiceGrpcTransport(TemplatesServiceTransport): + """gRPC backend transport for TemplatesService. + + Provides a method to create Cloud Dataflow jobs from + templates. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
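+
+    A minimal construction sketch (illustrative only; assumes application
+    default credentials are available in the environment)::
+
+        transport = TemplatesServiceGrpcTransport(
+            host="dataflow.googleapis.com",
+        )
+        client = TemplatesServiceClient(transport=transport)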
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=True,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                credentials=self._credentials,
+                credentials_file=credentials_file,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists.
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "dataflow.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def create_job_from_template(
+        self,
+    ) -> Callable[[templates.CreateJobFromTemplateRequest], jobs.Job]:
+        r"""Return a callable for the create job from template method over gRPC.
+
+        Creates a Cloud Dataflow job from a template.
+
+        Returns:
+            Callable[[~.CreateJobFromTemplateRequest],
+                    ~.Job]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_job_from_template" not in self._stubs:
+            self._stubs["create_job_from_template"] = self.grpc_channel.unary_unary(
+                "/google.dataflow.v1beta3.TemplatesService/CreateJobFromTemplate",
+                request_serializer=templates.CreateJobFromTemplateRequest.serialize,
+                response_deserializer=jobs.Job.deserialize,
+            )
+        return self._stubs["create_job_from_template"]
+
+    @property
+    def launch_template(
+        self,
+    ) -> Callable[[templates.LaunchTemplateRequest], templates.LaunchTemplateResponse]:
+        r"""Return a callable for the launch template method over gRPC.
+
+        Launch a template.
+
+        Returns:
+            Callable[[~.LaunchTemplateRequest],
+                    ~.LaunchTemplateResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "launch_template" not in self._stubs:
+            self._stubs["launch_template"] = self.grpc_channel.unary_unary(
+                "/google.dataflow.v1beta3.TemplatesService/LaunchTemplate",
+                request_serializer=templates.LaunchTemplateRequest.serialize,
+                response_deserializer=templates.LaunchTemplateResponse.deserialize,
+            )
+        return self._stubs["launch_template"]
+
+    @property
+    def get_template(
+        self,
+    ) -> Callable[[templates.GetTemplateRequest], templates.GetTemplateResponse]:
+        r"""Return a callable for the get template method over gRPC.
+
+        Get the metadata associated with a template.
+
+        Returns:
+            Callable[[~.GetTemplateRequest],
+                    ~.GetTemplateResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_template" not in self._stubs:
+            self._stubs["get_template"] = self.grpc_channel.unary_unary(
+                "/google.dataflow.v1beta3.TemplatesService/GetTemplate",
+                request_serializer=templates.GetTemplateRequest.serialize,
+                response_deserializer=templates.GetTemplateResponse.deserialize,
+            )
+        return self._stubs["get_template"]
+
+
+__all__ = ("TemplatesServiceGrpcTransport",)
diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc_asyncio.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..a974ba1c89be
--- /dev/null
+++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc_asyncio.py
@@ -0,0 +1,313 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers_async  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+import packaging.version
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import jobs
+from google.cloud.dataflow_v1beta3.types import templates
+from .base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import TemplatesServiceGrpcTransport
+
+
+class TemplatesServiceGrpcAsyncIOTransport(TemplatesServiceTransport):
+    """gRPC AsyncIO backend transport for TemplatesService.
+
+    Provides a method to create Cloud Dataflow jobs from
+    templates.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "dataflow.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
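+
+        For example (a sketch; assumes default credentials can be resolved
+        from the environment)::
+
+            channel = TemplatesServiceGrpcAsyncIOTransport.create_channel(
+                "dataflow.googleapis.com",
+            )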
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_job_from_template( + self, + ) -> Callable[[templates.CreateJobFromTemplateRequest], Awaitable[jobs.Job]]: + r"""Return a callable for the create job from template method over gRPC. + + Creates a Cloud Dataflow job from a template. + + Returns: + Callable[[~.CreateJobFromTemplateRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_job_from_template" not in self._stubs: + self._stubs["create_job_from_template"] = self.grpc_channel.unary_unary( + "/google.dataflow.v1beta3.TemplatesService/CreateJobFromTemplate", + request_serializer=templates.CreateJobFromTemplateRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["create_job_from_template"] + + @property + def launch_template( + self, + ) -> Callable[ + [templates.LaunchTemplateRequest], Awaitable[templates.LaunchTemplateResponse] + ]: + r"""Return a callable for the launch template method over gRPC. + + Launch a template. 
+
+        Returns:
+            Callable[[~.LaunchTemplateRequest],
+                    Awaitable[~.LaunchTemplateResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "launch_template" not in self._stubs:
+            self._stubs["launch_template"] = self.grpc_channel.unary_unary(
+                "/google.dataflow.v1beta3.TemplatesService/LaunchTemplate",
+                request_serializer=templates.LaunchTemplateRequest.serialize,
+                response_deserializer=templates.LaunchTemplateResponse.deserialize,
+            )
+        return self._stubs["launch_template"]
+
+    @property
+    def get_template(
+        self,
+    ) -> Callable[
+        [templates.GetTemplateRequest], Awaitable[templates.GetTemplateResponse]
+    ]:
+        r"""Return a callable for the get template method over gRPC.
+
+        Get the metadata associated with a template.
+
+        Returns:
+            Callable[[~.GetTemplateRequest],
+                    Awaitable[~.GetTemplateResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_template" not in self._stubs:
+            self._stubs["get_template"] = self.grpc_channel.unary_unary(
+                "/google.dataflow.v1beta3.TemplatesService/GetTemplate",
+                request_serializer=templates.GetTemplateRequest.serialize,
+                response_deserializer=templates.GetTemplateResponse.deserialize,
+            )
+        return self._stubs["get_template"]
+
+
+__all__ = ("TemplatesServiceGrpcAsyncIOTransport",)
diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/__init__.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/__init__.py
new file mode 100644
index 000000000000..569b19ddddbb
--- /dev/null
+++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/__init__.py
@@ -0,0 +1,242 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from .environment import ( + AutoscalingSettings, + DebugOptions, + Disk, + Environment, + Package, + SdkHarnessContainerImage, + TaskRunnerSettings, + WorkerPool, + WorkerSettings, + AutoscalingAlgorithm, + DefaultPackageSet, + FlexResourceSchedulingGoal, + JobType, + ShuffleMode, + TeardownPolicy, + WorkerIPAddressConfiguration, +) +from .jobs import ( + BigQueryIODetails, + BigTableIODetails, + CheckActiveJobsRequest, + CheckActiveJobsResponse, + CreateJobRequest, + DatastoreIODetails, + DisplayData, + ExecutionStageState, + ExecutionStageSummary, + FailedLocation, + FileIODetails, + GetJobRequest, + Job, + JobExecutionInfo, + JobExecutionStageInfo, + JobMetadata, + ListJobsRequest, + ListJobsResponse, + PipelineDescription, + PubSubIODetails, + SdkVersion, + SnapshotJobRequest, + SpannerIODetails, + Step, + TransformSummary, + UpdateJobRequest, + JobState, + JobView, + KindType, +) +from .messages import ( + AutoscalingEvent, + JobMessage, + ListJobMessagesRequest, + ListJobMessagesResponse, + StructuredMessage, + JobMessageImportance, +) +from .metrics import ( + GetJobExecutionDetailsRequest, + GetJobMetricsRequest, + GetStageExecutionDetailsRequest, + JobExecutionDetails, + JobMetrics, + MetricStructuredName, + MetricUpdate, + ProgressTimeseries, + StageExecutionDetails, + StageSummary, + WorkerDetails, + WorkItemDetails, + ExecutionState, +) +from .snapshots import ( + DeleteSnapshotRequest, + DeleteSnapshotResponse, + GetSnapshotRequest, + ListSnapshotsRequest, + ListSnapshotsResponse, + PubsubSnapshotMetadata, + Snapshot, + SnapshotState, +) +from .streaming import ( + ComputationTopology, + CustomSourceLocation, + DataDiskAssignment, + KeyRangeDataDiskAssignment, + KeyRangeLocation, + MountedDataDisk, + PubsubLocation, + StateFamilyConfig, + StreamingApplianceSnapshotConfig, + StreamingComputationRanges, + StreamingSideInputLocation, + StreamingStageLocation, + StreamLocation, + TopologyConfig, +) +from .templates import ( + ContainerSpec, + CreateJobFromTemplateRequest, + DynamicTemplateLaunchParams, + FlexTemplateRuntimeEnvironment, + GetTemplateRequest, + GetTemplateResponse, + InvalidTemplateParameters, + LaunchFlexTemplateParameter, + LaunchFlexTemplateRequest, + LaunchFlexTemplateResponse, + LaunchTemplateParameters, + LaunchTemplateRequest, + LaunchTemplateResponse, + ParameterMetadata, + RuntimeEnvironment, + RuntimeMetadata, + SDKInfo, + TemplateMetadata, + ParameterType, +) + +__all__ = ( + "AutoscalingSettings", + "DebugOptions", + "Disk", + "Environment", + "Package", + "SdkHarnessContainerImage", + "TaskRunnerSettings", + "WorkerPool", + "WorkerSettings", + "AutoscalingAlgorithm", + "DefaultPackageSet", + "FlexResourceSchedulingGoal", + "JobType", + "ShuffleMode", + "TeardownPolicy", + "WorkerIPAddressConfiguration", + "BigQueryIODetails", + "BigTableIODetails", + "CheckActiveJobsRequest", + "CheckActiveJobsResponse", + "CreateJobRequest", + "DatastoreIODetails", + "DisplayData", + "ExecutionStageState", + "ExecutionStageSummary", + "FailedLocation", + "FileIODetails", + "GetJobRequest", + "Job", + "JobExecutionInfo", + "JobExecutionStageInfo", + "JobMetadata", + "ListJobsRequest", + "ListJobsResponse", + "PipelineDescription", + "PubSubIODetails", + "SdkVersion", + "SnapshotJobRequest", + "SpannerIODetails", + "Step", + "TransformSummary", + "UpdateJobRequest", + "JobState", + "JobView", + "KindType", + "AutoscalingEvent", + "JobMessage", + "ListJobMessagesRequest", + "ListJobMessagesResponse", + "StructuredMessage", + "JobMessageImportance", + 
"GetJobExecutionDetailsRequest", + "GetJobMetricsRequest", + "GetStageExecutionDetailsRequest", + "JobExecutionDetails", + "JobMetrics", + "MetricStructuredName", + "MetricUpdate", + "ProgressTimeseries", + "StageExecutionDetails", + "StageSummary", + "WorkerDetails", + "WorkItemDetails", + "ExecutionState", + "DeleteSnapshotRequest", + "DeleteSnapshotResponse", + "GetSnapshotRequest", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "PubsubSnapshotMetadata", + "Snapshot", + "SnapshotState", + "ComputationTopology", + "CustomSourceLocation", + "DataDiskAssignment", + "KeyRangeDataDiskAssignment", + "KeyRangeLocation", + "MountedDataDisk", + "PubsubLocation", + "StateFamilyConfig", + "StreamingApplianceSnapshotConfig", + "StreamingComputationRanges", + "StreamingSideInputLocation", + "StreamingStageLocation", + "StreamLocation", + "TopologyConfig", + "ContainerSpec", + "CreateJobFromTemplateRequest", + "DynamicTemplateLaunchParams", + "FlexTemplateRuntimeEnvironment", + "GetTemplateRequest", + "GetTemplateResponse", + "InvalidTemplateParameters", + "LaunchFlexTemplateParameter", + "LaunchFlexTemplateRequest", + "LaunchFlexTemplateResponse", + "LaunchTemplateParameters", + "LaunchTemplateRequest", + "LaunchTemplateResponse", + "ParameterMetadata", + "RuntimeEnvironment", + "RuntimeMetadata", + "SDKInfo", + "TemplateMetadata", + "ParameterType", +) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/environment.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/environment.py new file mode 100644 index 000000000000..98fe1a71abee --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/environment.py @@ -0,0 +1,646 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.dataflow.v1beta3", + manifest={ + "JobType", + "FlexResourceSchedulingGoal", + "TeardownPolicy", + "DefaultPackageSet", + "AutoscalingAlgorithm", + "WorkerIPAddressConfiguration", + "ShuffleMode", + "Environment", + "Package", + "Disk", + "WorkerSettings", + "TaskRunnerSettings", + "AutoscalingSettings", + "SdkHarnessContainerImage", + "WorkerPool", + "DebugOptions", + }, +) + + +class JobType(proto.Enum): + r"""Specifies the processing model used by a + [google.dataflow.v1beta3.Job], which determines the way the Job is + managed by the Cloud Dataflow service (how workers are scheduled, + how inputs are sharded, etc). + """ + JOB_TYPE_UNKNOWN = 0 + JOB_TYPE_BATCH = 1 + JOB_TYPE_STREAMING = 2 + + +class FlexResourceSchedulingGoal(proto.Enum): + r"""Specifies the resource to optimize for in Flexible Resource + Scheduling. 
+ """ + FLEXRS_UNSPECIFIED = 0 + FLEXRS_SPEED_OPTIMIZED = 1 + FLEXRS_COST_OPTIMIZED = 2 + + +class TeardownPolicy(proto.Enum): + r"""Specifies what happens to a resource when a Cloud Dataflow + [google.dataflow.v1beta3.Job][google.dataflow.v1beta3.Job] has + completed. + """ + TEARDOWN_POLICY_UNKNOWN = 0 + TEARDOWN_ALWAYS = 1 + TEARDOWN_ON_SUCCESS = 2 + TEARDOWN_NEVER = 3 + + +class DefaultPackageSet(proto.Enum): + r"""The default set of packages to be staged on a pool of + workers. + """ + DEFAULT_PACKAGE_SET_UNKNOWN = 0 + DEFAULT_PACKAGE_SET_NONE = 1 + DEFAULT_PACKAGE_SET_JAVA = 2 + DEFAULT_PACKAGE_SET_PYTHON = 3 + + +class AutoscalingAlgorithm(proto.Enum): + r"""Specifies the algorithm used to determine the number of + worker processes to run at any given point in time, based on the + amount of data left to process, the number of workers, and how + quickly existing workers are processing data. + """ + AUTOSCALING_ALGORITHM_UNKNOWN = 0 + AUTOSCALING_ALGORITHM_NONE = 1 + AUTOSCALING_ALGORITHM_BASIC = 2 + + +class WorkerIPAddressConfiguration(proto.Enum): + r"""Specifies how IP addresses should be allocated to the worker + machines. + """ + WORKER_IP_UNSPECIFIED = 0 + WORKER_IP_PUBLIC = 1 + WORKER_IP_PRIVATE = 2 + + +class ShuffleMode(proto.Enum): + r"""Specifies the shuffle mode used by a [google.dataflow.v1beta3.Job], + which determines the approach data is shuffled during processing. + More details in: + https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#dataflow-shuffle + """ + SHUFFLE_MODE_UNSPECIFIED = 0 + VM_BASED = 1 + SERVICE_BASED = 2 + + +class Environment(proto.Message): + r"""Describes the environment in which a Dataflow Job runs. + Attributes: + temp_storage_prefix (str): + The prefix of the resources the system should use for + temporary storage. The system will append the suffix + "/temp-{JOBNAME} to this resource prefix, where {JOBNAME} is + the value of the job_name field. The resulting bucket and + object prefix is used as the prefix of the resources used to + store temporary data needed during the job execution. NOTE: + This will override the value in taskrunner_settings. The + supported resource type is: + + Google Cloud Storage: + + storage.googleapis.com/{bucket}/{object} + bucket.storage.googleapis.com/{object} + cluster_manager_api_service (str): + The type of cluster manager API to use. If + unknown or unspecified, the service will attempt + to choose a reasonable default. This should be + in the form of the API service name, e.g. + "compute.googleapis.com". + experiments (Sequence[str]): + The list of experiments to enable. This field should be used + for SDK related experiments and not for service related + experiments. The proper field for service related + experiments is service_options. + service_options (Sequence[str]): + The list of service options to enable. This + field should be used for service related + experiments only. These experiments, when + graduating to GA, should be replaced by + dedicated fields or become default (i.e. always + on). + service_kms_key_name (str): + If set, contains the Cloud KMS key identifier used to + encrypt data at rest, AKA a Customer Managed Encryption Key + (CMEK). + + Format: + projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY + worker_pools (Sequence[google.cloud.dataflow_v1beta3.types.WorkerPool]): + The worker pools. At least one "harness" + worker pool must be specified in order for the + job to have workers. 
+ user_agent (google.protobuf.struct_pb2.Struct): + A description of the process that generated + the request. + version (google.protobuf.struct_pb2.Struct): + A structure describing which components and + their versions of the service are required in + order to run the job. + dataset (str): + The dataset for the current project where + various workflow related tables are stored. + + The supported resource type is: + + Google BigQuery: + bigquery.googleapis.com/{dataset} + sdk_pipeline_options (google.protobuf.struct_pb2.Struct): + The Cloud Dataflow SDK pipeline options + specified by the user. These options are passed + through the service and are used to recreate the + SDK pipeline options on the worker in a language + agnostic and platform independent way. + internal_experiments (google.protobuf.any_pb2.Any): + Experimental settings. + service_account_email (str): + Identity to run virtual machines as. Defaults + to the default account. + flex_resource_scheduling_goal (google.cloud.dataflow_v1beta3.types.FlexResourceSchedulingGoal): + Which Flexible Resource Scheduling mode to + run in. + worker_region (str): + The Compute Engine region + (https://cloud.google.com/compute/docs/regions-zones/regions-zones) + in which worker processing should occur, e.g. "us-west1". + Mutually exclusive with worker_zone. If neither + worker_region nor worker_zone is specified, default to the + control plane's region. + worker_zone (str): + The Compute Engine zone + (https://cloud.google.com/compute/docs/regions-zones/regions-zones) + in which worker processing should occur, e.g. "us-west1-a". + Mutually exclusive with worker_region. If neither + worker_region nor worker_zone is specified, a zone in the + control plane's region is chosen based on available + capacity. + shuffle_mode (google.cloud.dataflow_v1beta3.types.ShuffleMode): + Output only. The shuffle mode used for the + job. + debug_options (google.cloud.dataflow_v1beta3.types.DebugOptions): + Any debugging options to be supplied to the + job. + """ + + temp_storage_prefix = proto.Field(proto.STRING, number=1,) + cluster_manager_api_service = proto.Field(proto.STRING, number=2,) + experiments = proto.RepeatedField(proto.STRING, number=3,) + service_options = proto.RepeatedField(proto.STRING, number=16,) + service_kms_key_name = proto.Field(proto.STRING, number=12,) + worker_pools = proto.RepeatedField(proto.MESSAGE, number=4, message="WorkerPool",) + user_agent = proto.Field(proto.MESSAGE, number=5, message=struct_pb2.Struct,) + version = proto.Field(proto.MESSAGE, number=6, message=struct_pb2.Struct,) + dataset = proto.Field(proto.STRING, number=7,) + sdk_pipeline_options = proto.Field( + proto.MESSAGE, number=8, message=struct_pb2.Struct, + ) + internal_experiments = proto.Field(proto.MESSAGE, number=9, message=any_pb2.Any,) + service_account_email = proto.Field(proto.STRING, number=10,) + flex_resource_scheduling_goal = proto.Field( + proto.ENUM, number=11, enum="FlexResourceSchedulingGoal", + ) + worker_region = proto.Field(proto.STRING, number=13,) + worker_zone = proto.Field(proto.STRING, number=14,) + shuffle_mode = proto.Field(proto.ENUM, number=15, enum="ShuffleMode",) + debug_options = proto.Field(proto.MESSAGE, number=17, message="DebugOptions",) + + +class Package(proto.Message): + r"""The packages that must be installed in order for a worker to + run the steps of the Cloud Dataflow job that will be assigned to + its worker pool. + + This is the mechanism by which the Cloud Dataflow SDK causes + code to be loaded onto the workers. 
For example, the Cloud + Dataflow Java SDK might use this to install jars containing the + user's code and all of the various dependencies (libraries, data + files, etc.) required in order for that code to run. + + Attributes: + name (str): + The name of the package. + location (str): + The resource to read the package from. The + supported resource type is: + Google Cloud Storage: + + storage.googleapis.com/{bucket} + bucket.storage.googleapis.com/ + """ + + name = proto.Field(proto.STRING, number=1,) + location = proto.Field(proto.STRING, number=2,) + + +class Disk(proto.Message): + r"""Describes the data disk used by a workflow job. + Attributes: + size_gb (int): + Size of disk in GB. If zero or unspecified, + the service will attempt to choose a reasonable + default. + disk_type (str): + Disk storage type, as defined by Google + Compute Engine. This must be a disk type + appropriate to the project and zone in which the + workers will run. If unknown or unspecified, + the service will attempt to choose a reasonable + default. + + For example, the standard persistent disk type + is a resource name typically ending in + "pd-standard". If SSD persistent disks are + available, the resource name typically ends with + "pd-ssd". The actual valid values are defined by + the Google Compute Engine API, not by the Cloud + Dataflow API; consult the Google Compute Engine + documentation for more information about + determining the set of available disk types for + a particular project and zone. + Google Compute Engine Disk types are local to a + particular project in a particular zone, and so + the resource name will typically look something + like this: + + compute.googleapis.com/projects/project-id/zones/zone/diskTypes/pd-standard + mount_point (str): + Directory in a VM where disk is mounted. + """ + + size_gb = proto.Field(proto.INT32, number=1,) + disk_type = proto.Field(proto.STRING, number=2,) + mount_point = proto.Field(proto.STRING, number=3,) + + +class WorkerSettings(proto.Message): + r"""Provides data to pass through to the worker harness. + Attributes: + base_url (str): + The base URL for accessing Google Cloud APIs. + When workers access Google Cloud APIs, they + logically do so via relative URLs. If this + field is specified, it supplies the base URL to + use for resolving these relative URLs. The + normative algorithm used is defined by RFC 1808, + "Relative Uniform Resource Locators". + + If not specified, the default value is + "http://www.googleapis.com/". + reporting_enabled (bool): + Whether to send work progress updates to the + service. + service_path (str): + The Cloud Dataflow service path relative to + the root URL, for example, + "dataflow/v1b3/projects". + shuffle_service_path (str): + The Shuffle service path relative to the root + URL, for example, "shuffle/v1beta1". + worker_id (str): + The ID of the worker running this pipeline. + temp_storage_prefix (str): + The prefix of the resources the system should + use for temporary storage.
+ + The supported resource type is: + + Google Cloud Storage: + + storage.googleapis.com/{bucket}/{object} + bucket.storage.googleapis.com/{object} + """ + + base_url = proto.Field(proto.STRING, number=1,) + reporting_enabled = proto.Field(proto.BOOL, number=2,) + service_path = proto.Field(proto.STRING, number=3,) + shuffle_service_path = proto.Field(proto.STRING, number=4,) + worker_id = proto.Field(proto.STRING, number=5,) + temp_storage_prefix = proto.Field(proto.STRING, number=6,) + + +class TaskRunnerSettings(proto.Message): + r"""Taskrunner configuration settings. + Attributes: + task_user (str): + The UNIX user ID on the worker VM to use for + tasks launched by taskrunner; e.g. "root". + task_group (str): + The UNIX group ID on the worker VM to use for + tasks launched by taskrunner; e.g. "wheel". + oauth_scopes (Sequence[str]): + The OAuth2 scopes to be requested by the + taskrunner in order to access the Cloud Dataflow + API. + base_url (str): + The base URL for the taskrunner to use when + accessing Google Cloud APIs. + When workers access Google Cloud APIs, they + logically do so via relative URLs. If this + field is specified, it supplies the base URL to + use for resolving these relative URLs. The + normative algorithm used is defined by RFC 1808, + "Relative Uniform Resource Locators". + + If not specified, the default value is + "http://www.googleapis.com/". + dataflow_api_version (str): + The API version of the endpoint, e.g. "v1b3". + parallel_worker_settings (google.cloud.dataflow_v1beta3.types.WorkerSettings): + The settings to pass to the parallel worker + harness. + base_task_dir (str): + The location on the worker for task-specific + subdirectories. + continue_on_exception (bool): + Whether to continue the taskrunner if an + exception is hit. + log_to_serialconsole (bool): + Whether to send taskrunner log info to the Google + Compute Engine VM serial console. + alsologtostderr (bool): + Whether to also send taskrunner log info to + stderr. + log_upload_location (str): + Indicates where to put logs. If this is not + specified, the logs will not be uploaded. + + The supported resource type is: + + Google Cloud Storage: + storage.googleapis.com/{bucket}/{object} + bucket.storage.googleapis.com/{object} + log_dir (str): + The directory on the VM to store logs. + temp_storage_prefix (str): + The prefix of the resources the taskrunner + should use for temporary storage. + + The supported resource type is: + + Google Cloud Storage: + storage.googleapis.com/{bucket}/{object} + bucket.storage.googleapis.com/{object} + harness_command (str): + The command to launch the worker harness. + workflow_file_name (str): + The file to store the workflow in. + commandlines_file_name (str): + The file to store preprocessing commands in. + vm_id (str): + The ID string of the VM. + language_hint (str): + The suggested backend language. + streaming_worker_main_class (str): + The streaming worker main class name.
+ """ + + task_user = proto.Field(proto.STRING, number=1,) + task_group = proto.Field(proto.STRING, number=2,) + oauth_scopes = proto.RepeatedField(proto.STRING, number=3,) + base_url = proto.Field(proto.STRING, number=4,) + dataflow_api_version = proto.Field(proto.STRING, number=5,) + parallel_worker_settings = proto.Field( + proto.MESSAGE, number=6, message="WorkerSettings", + ) + base_task_dir = proto.Field(proto.STRING, number=7,) + continue_on_exception = proto.Field(proto.BOOL, number=8,) + log_to_serialconsole = proto.Field(proto.BOOL, number=9,) + alsologtostderr = proto.Field(proto.BOOL, number=10,) + log_upload_location = proto.Field(proto.STRING, number=11,) + log_dir = proto.Field(proto.STRING, number=12,) + temp_storage_prefix = proto.Field(proto.STRING, number=13,) + harness_command = proto.Field(proto.STRING, number=14,) + workflow_file_name = proto.Field(proto.STRING, number=15,) + commandlines_file_name = proto.Field(proto.STRING, number=16,) + vm_id = proto.Field(proto.STRING, number=17,) + language_hint = proto.Field(proto.STRING, number=18,) + streaming_worker_main_class = proto.Field(proto.STRING, number=19,) + + +class AutoscalingSettings(proto.Message): + r"""Settings for WorkerPool autoscaling. + Attributes: + algorithm (google.cloud.dataflow_v1beta3.types.AutoscalingAlgorithm): + The algorithm to use for autoscaling. + max_num_workers (int): + The maximum number of workers to cap scaling + at. + """ + + algorithm = proto.Field(proto.ENUM, number=1, enum="AutoscalingAlgorithm",) + max_num_workers = proto.Field(proto.INT32, number=2,) + + +class SdkHarnessContainerImage(proto.Message): + r"""Defines a SDK harness container for executing Dataflow + pipelines. + + Attributes: + container_image (str): + A docker container image that resides in + Google Container Registry. + use_single_core_per_container (bool): + If true, recommends the Dataflow service to + use only one core per SDK container instance + with this image. If false (or unset) recommends + using more than one core per SDK container + instance with this image for efficiency. Note + that Dataflow service may choose to override + this property if needed. + environment_id (str): + Environment ID for the Beam runner API proto + Environment that corresponds to the current SDK + Harness. + """ + + container_image = proto.Field(proto.STRING, number=1,) + use_single_core_per_container = proto.Field(proto.BOOL, number=2,) + environment_id = proto.Field(proto.STRING, number=3,) + + +class WorkerPool(proto.Message): + r"""Describes one particular pool of Cloud Dataflow workers to be + instantiated by the Cloud Dataflow service in order to perform + the computations required by a job. Note that a workflow job + may use multiple pools, in order to match the various + computational requirements of the various stages of the job. + + Attributes: + kind (str): + The kind of the worker pool; currently only ``harness`` and + ``shuffle`` are supported. + num_workers (int): + Number of Google Compute Engine workers in + this pool needed to execute the job. If zero or + unspecified, the service will attempt to choose + a reasonable default. + packages (Sequence[google.cloud.dataflow_v1beta3.types.Package]): + Packages to be installed on workers. + default_package_set (google.cloud.dataflow_v1beta3.types.DefaultPackageSet): + The default package set to install. This + allows the service to select a default set of + packages which are useful to worker harnesses + written in a particular language. 
+ machine_type (str): + Machine type (e.g. "n1-standard-1"). If + empty or unspecified, the service will attempt + to choose a reasonable default. + teardown_policy (google.cloud.dataflow_v1beta3.types.TeardownPolicy): + Sets the policy for determining when to turn down the worker + pool. Allowed values are: ``TEARDOWN_ALWAYS``, + ``TEARDOWN_ON_SUCCESS``, and ``TEARDOWN_NEVER``. + ``TEARDOWN_ALWAYS`` means workers are always torn down + regardless of whether the job succeeds. + ``TEARDOWN_ON_SUCCESS`` means workers are torn down if the + job succeeds. ``TEARDOWN_NEVER`` means the workers are never + torn down. + + If the workers are not torn down by the service, they will + continue to run and use Google Compute Engine VM resources + in the user's project until they are explicitly terminated + by the user. Because of this, Google recommends using the + ``TEARDOWN_ALWAYS`` policy except for small, manually + supervised test jobs. + + If unknown or unspecified, the service will attempt to + choose a reasonable default. + disk_size_gb (int): + Size of root disk for VMs, in GB. If zero or + unspecified, the service will attempt to choose + a reasonable default. + disk_type (str): + Type of root disk for VMs. If empty or + unspecified, the service will attempt to choose + a reasonable default. + disk_source_image (str): + Fully qualified source image for disks. + zone (str): + Zone to run the worker pools in. If empty or + unspecified, the service will attempt to choose + a reasonable default. + taskrunner_settings (google.cloud.dataflow_v1beta3.types.TaskRunnerSettings): + Settings passed through to Google Compute + Engine workers when using the standard Dataflow + task runner. Users should ignore this field. + on_host_maintenance (str): + The action to take on host maintenance, as + defined by the Google Compute Engine API. + data_disks (Sequence[google.cloud.dataflow_v1beta3.types.Disk]): + Data disks that are used by a VM in this + workflow. + metadata (Sequence[google.cloud.dataflow_v1beta3.types.WorkerPool.MetadataEntry]): + Metadata to set on the Google Compute Engine + VMs. + autoscaling_settings (google.cloud.dataflow_v1beta3.types.AutoscalingSettings): + Settings for autoscaling of this WorkerPool. + pool_args (google.protobuf.any_pb2.Any): + Extra arguments for this worker pool. + network (str): + Network to which VMs will be assigned. If + empty or unspecified, the service will use the + network "default". + subnetwork (str): + Subnetwork to which VMs will be assigned, if + desired. Expected to be of the form + "regions/REGION/subnetworks/SUBNETWORK". + worker_harness_container_image (str): + Required. Docker container image that executes the Cloud + Dataflow worker harness, residing in Google Container + Registry. + + Deprecated for the Fn API path. Use + sdk_harness_container_images instead. + num_threads_per_worker (int): + The number of threads per worker harness. If + empty or unspecified, the service will choose a + number of threads (according to the number of + cores on the selected machine type for batch, or + 1 by convention for streaming). + ip_configuration (google.cloud.dataflow_v1beta3.types.WorkerIPAddressConfiguration): + Configuration for VM IPs. + sdk_harness_container_images (Sequence[google.cloud.dataflow_v1beta3.types.SdkHarnessContainerImage]): + Set of SDK harness containers needed to + execute this pipeline. This will only be set in + the Fn API path. For non-cross-language + pipelines this should have only one entry.
+ Cross-language pipelines will have two or more + entries. + """ + + kind = proto.Field(proto.STRING, number=1,) + num_workers = proto.Field(proto.INT32, number=2,) + packages = proto.RepeatedField(proto.MESSAGE, number=3, message="Package",) + default_package_set = proto.Field(proto.ENUM, number=4, enum="DefaultPackageSet",) + machine_type = proto.Field(proto.STRING, number=5,) + teardown_policy = proto.Field(proto.ENUM, number=6, enum="TeardownPolicy",) + disk_size_gb = proto.Field(proto.INT32, number=7,) + disk_type = proto.Field(proto.STRING, number=16,) + disk_source_image = proto.Field(proto.STRING, number=8,) + zone = proto.Field(proto.STRING, number=9,) + taskrunner_settings = proto.Field( + proto.MESSAGE, number=10, message="TaskRunnerSettings", + ) + on_host_maintenance = proto.Field(proto.STRING, number=11,) + data_disks = proto.RepeatedField(proto.MESSAGE, number=12, message="Disk",) + metadata = proto.MapField(proto.STRING, proto.STRING, number=13,) + autoscaling_settings = proto.Field( + proto.MESSAGE, number=14, message="AutoscalingSettings", + ) + pool_args = proto.Field(proto.MESSAGE, number=15, message=any_pb2.Any,) + network = proto.Field(proto.STRING, number=17,) + subnetwork = proto.Field(proto.STRING, number=19,) + worker_harness_container_image = proto.Field(proto.STRING, number=18,) + num_threads_per_worker = proto.Field(proto.INT32, number=20,) + ip_configuration = proto.Field( + proto.ENUM, number=21, enum="WorkerIPAddressConfiguration", + ) + sdk_harness_container_images = proto.RepeatedField( + proto.MESSAGE, number=22, message="SdkHarnessContainerImage", + ) + + +class DebugOptions(proto.Message): + r"""Describes any options that have an effect on the debugging of + pipelines. + + Attributes: + enable_hot_key_logging (bool): + When true, enables the logging of the literal + hot key to the user's Cloud Logging. + """ + + enable_hot_key_logging = proto.Field(proto.BOOL, number=1,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/jobs.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/jobs.py new file mode 100644 index 000000000000..12d968386b55 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/jobs.py @@ -0,0 +1,991 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
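The environment messages above are generated proto-plus classes, so they can be populated directly with keyword arguments and nested messages before being attached to a job. A minimal sketch, assuming the versioned package re-exports these types at the top level as generated GAPIC packages usually do; the bucket, machine type, and worker counts are illustrative placeholders, not service defaults:

from google.cloud import dataflow_v1beta3

# Build an environment with one autoscaled "harness" worker pool.
environment = dataflow_v1beta3.Environment(
    temp_storage_prefix="storage.googleapis.com/example-bucket/temp",  # placeholder bucket
    worker_pools=[
        dataflow_v1beta3.WorkerPool(
            kind="harness",
            machine_type="n1-standard-1",
            autoscaling_settings=dataflow_v1beta3.AutoscalingSettings(
                algorithm=dataflow_v1beta3.AutoscalingAlgorithm.AUTOSCALING_ALGORITHM_BASIC,
                max_num_workers=10,
            ),
        )
    ],
)

Leaving fields such as disk_type and zone unset defers them to the service's "reasonable default" behavior described in the docstrings above.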
+# +import proto # type: ignore + +from google.cloud.dataflow_v1beta3.types import environment as gd_environment +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.dataflow.v1beta3", + manifest={ + "KindType", + "JobState", + "JobView", + "Job", + "DatastoreIODetails", + "PubSubIODetails", + "FileIODetails", + "BigTableIODetails", + "BigQueryIODetails", + "SpannerIODetails", + "SdkVersion", + "JobMetadata", + "ExecutionStageState", + "PipelineDescription", + "TransformSummary", + "ExecutionStageSummary", + "DisplayData", + "Step", + "JobExecutionInfo", + "JobExecutionStageInfo", + "CreateJobRequest", + "GetJobRequest", + "UpdateJobRequest", + "ListJobsRequest", + "FailedLocation", + "ListJobsResponse", + "SnapshotJobRequest", + "CheckActiveJobsRequest", + "CheckActiveJobsResponse", + }, +) + + +class KindType(proto.Enum): + r"""Type of transform or stage operation.""" + UNKNOWN_KIND = 0 + PAR_DO_KIND = 1 + GROUP_BY_KEY_KIND = 2 + FLATTEN_KIND = 3 + READ_KIND = 4 + WRITE_KIND = 5 + CONSTANT_KIND = 6 + SINGLETON_KIND = 7 + SHUFFLE_KIND = 8 + + +class JobState(proto.Enum): + r"""Describes the overall state of a + [google.dataflow.v1beta3.Job][google.dataflow.v1beta3.Job]. + """ + JOB_STATE_UNKNOWN = 0 + JOB_STATE_STOPPED = 1 + JOB_STATE_RUNNING = 2 + JOB_STATE_DONE = 3 + JOB_STATE_FAILED = 4 + JOB_STATE_CANCELLED = 5 + JOB_STATE_UPDATED = 6 + JOB_STATE_DRAINING = 7 + JOB_STATE_DRAINED = 8 + JOB_STATE_PENDING = 9 + JOB_STATE_CANCELLING = 10 + JOB_STATE_QUEUED = 11 + JOB_STATE_RESOURCE_CLEANING_UP = 12 + + +class JobView(proto.Enum): + r"""Selector for how much information is returned in Job + responses. + """ + JOB_VIEW_UNKNOWN = 0 + JOB_VIEW_SUMMARY = 1 + JOB_VIEW_ALL = 2 + JOB_VIEW_DESCRIPTION = 3 + + +class Job(proto.Message): + r"""Defines a job to be run by the Cloud Dataflow service. + nextID: 26 + + Attributes: + id (str): + The unique ID of this job. + This field is set by the Cloud Dataflow service + when the Job is created, and is immutable for + the life of the job. + project_id (str): + The ID of the Cloud Platform project that the + job belongs to. + name (str): + The user-specified Cloud Dataflow job name. + + Only one Job with a given name may exist in a project at any + given time. If a caller attempts to create a Job with the + same name as an already-existing Job, the attempt returns + the existing Job. + + The name must match the regular expression + ``[a-z]([-a-z0-9]{0,38}[a-z0-9])?`` + type_ (google.cloud.dataflow_v1beta3.types.JobType): + The type of Cloud Dataflow job. + environment (google.cloud.dataflow_v1beta3.types.Environment): + The environment for the job. + steps (Sequence[google.cloud.dataflow_v1beta3.types.Step]): + Exactly one of step or steps_location should be specified. + + The top-level steps that constitute the entire job. Only + retrieved with JOB_VIEW_ALL. + steps_location (str): + The Cloud Storage location where the steps + are stored. + current_state (google.cloud.dataflow_v1beta3.types.JobState): + The current state of the job. + + Jobs are created in the ``JOB_STATE_STOPPED`` state unless + otherwise specified. + + A job in the ``JOB_STATE_RUNNING`` state may asynchronously + enter a terminal state. After a job has reached a terminal + state, no further state updates may be made. + + This field may be mutated by the Cloud Dataflow service; + callers cannot mutate it. 
+ current_state_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp associated with the current + state. + requested_state (google.cloud.dataflow_v1beta3.types.JobState): + The job's requested state. + + ``UpdateJob`` may be used to switch between the + ``JOB_STATE_STOPPED`` and ``JOB_STATE_RUNNING`` states, by + setting requested_state. ``UpdateJob`` may also be used to + directly set a job's requested state to + ``JOB_STATE_CANCELLED`` or ``JOB_STATE_DONE``, irrevocably + terminating the job if it has not already reached a terminal + state. + execution_info (google.cloud.dataflow_v1beta3.types.JobExecutionInfo): + Deprecated. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp when the job was initially + created. Immutable and set by the Cloud Dataflow + service. + replace_job_id (str): + If this job is an update of an existing job, this field is + the job ID of the job it replaced. + + When sending a ``CreateJobRequest``, you can update a job by + specifying it here. The job named here is stopped, and its + intermediate state is transferred to this job. + transform_name_mapping (Sequence[google.cloud.dataflow_v1beta3.types.Job.TransformNameMappingEntry]): + The map of transform name prefixes of the job + to be replaced to the corresponding name + prefixes of the new job. + client_request_id (str): + The client's unique identifier of the job, + re-used across retried attempts. If this field + is set, the service will ensure its uniqueness. + The request to create a job will fail if the + service has knowledge of a previously submitted + job with the same client's ID and job name. The + caller may use this field to ensure idempotence + of job creation across retried attempts to + create a job. By default, the field is empty + and, in that case, the service ignores it. + replaced_by_job_id (str): + If another job is an update of this job (and thus, this job + is in ``JOB_STATE_UPDATED``), this field contains the ID of + that job. + temp_files (Sequence[str]): + A set of files the system should be aware of + that are used for temporary storage. These + temporary files will be removed on job + completion. + No duplicates are allowed. + No file patterns are supported. + + The supported files are: + + Google Cloud Storage: + + storage.googleapis.com/{bucket}/{object} + bucket.storage.googleapis.com/{object} + labels (Sequence[google.cloud.dataflow_v1beta3.types.Job.LabelsEntry]): + User-defined labels for this job. + + The labels map can contain no more than 64 entries. Entries + of the labels map are UTF8 strings that comply with the + following restrictions: + + - Keys must conform to regexp: + [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} + - Values must conform to regexp: + [\p{Ll}\p{Lo}\p{N}_-]{0,63} + - Both keys and values are additionally constrained to be + <= 128 bytes in size. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains this job. + pipeline_description (google.cloud.dataflow_v1beta3.types.PipelineDescription): + Preliminary field: The format of this data may change at any + time. A description of the user pipeline and stages through + which it is executed. Created by Cloud Dataflow service. + Only retrieved with JOB_VIEW_DESCRIPTION or JOB_VIEW_ALL. + stage_states (Sequence[google.cloud.dataflow_v1beta3.types.ExecutionStageState]): + This field may be mutated by the Cloud + Dataflow service; callers cannot mutate it. 
+ job_metadata (google.cloud.dataflow_v1beta3.types.JobMetadata): + This field is populated by the Dataflow + service to support filtering jobs by the + metadata values provided here. Populated for + ListJobs and all GetJob views SUMMARY and + higher. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp when the job was started (transitioned to + JOB_STATE_PENDING). Flexible resource scheduling jobs are + started with some delay after job creation, so start_time is + unset before start and is updated when the job is started by + the Cloud Dataflow service. For other jobs, start_time + always equals create_time and is immutable and set by the + Cloud Dataflow service. + created_from_snapshot_id (str): + If this is specified, the job's initial state + is populated from the given snapshot. + satisfies_pzs (bool): + Reserved for future use. This field is set + only in responses from the server; it is ignored + if it is set in any requests. + """ + + id = proto.Field(proto.STRING, number=1,) + project_id = proto.Field(proto.STRING, number=2,) + name = proto.Field(proto.STRING, number=3,) + type_ = proto.Field(proto.ENUM, number=4, enum=gd_environment.JobType,) + environment = proto.Field( + proto.MESSAGE, number=5, message=gd_environment.Environment, + ) + steps = proto.RepeatedField(proto.MESSAGE, number=6, message="Step",) + steps_location = proto.Field(proto.STRING, number=24,) + current_state = proto.Field(proto.ENUM, number=7, enum="JobState",) + current_state_time = proto.Field( + proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp, + ) + requested_state = proto.Field(proto.ENUM, number=9, enum="JobState",) + execution_info = proto.Field(proto.MESSAGE, number=10, message="JobExecutionInfo",) + create_time = proto.Field( + proto.MESSAGE, number=11, message=timestamp_pb2.Timestamp, + ) + replace_job_id = proto.Field(proto.STRING, number=12,) + transform_name_mapping = proto.MapField(proto.STRING, proto.STRING, number=13,) + client_request_id = proto.Field(proto.STRING, number=14,) + replaced_by_job_id = proto.Field(proto.STRING, number=15,) + temp_files = proto.RepeatedField(proto.STRING, number=16,) + labels = proto.MapField(proto.STRING, proto.STRING, number=17,) + location = proto.Field(proto.STRING, number=18,) + pipeline_description = proto.Field( + proto.MESSAGE, number=19, message="PipelineDescription", + ) + stage_states = proto.RepeatedField( + proto.MESSAGE, number=20, message="ExecutionStageState", + ) + job_metadata = proto.Field(proto.MESSAGE, number=21, message="JobMetadata",) + start_time = proto.Field(proto.MESSAGE, number=22, message=timestamp_pb2.Timestamp,) + created_from_snapshot_id = proto.Field(proto.STRING, number=23,) + satisfies_pzs = proto.Field(proto.BOOL, number=25,) + + +class DatastoreIODetails(proto.Message): + r"""Metadata for a Datastore connector used by the job. + Attributes: + namespace (str): + Namespace used in the connection. + project_id (str): + ProjectId accessed in the connection. + """ + + namespace = proto.Field(proto.STRING, number=1,) + project_id = proto.Field(proto.STRING, number=2,) + + +class PubSubIODetails(proto.Message): + r"""Metadata for a Pub/Sub connector used by the job. + Attributes: + topic (str): + Topic accessed in the connection. + subscription (str): + Subscription used in the connection. + """ + + topic = proto.Field(proto.STRING, number=1,) + subscription = proto.Field(proto.STRING, number=2,) + + +class FileIODetails(proto.Message): + r"""Metadata for a File connector used by the job.
+ Attributes: + file_pattern (str): + File Pattern used to access files by the + connector. + """ + + file_pattern = proto.Field(proto.STRING, number=1,) + + +class BigTableIODetails(proto.Message): + r"""Metadata for a Cloud BigTable connector used by the job. + Attributes: + project_id (str): + ProjectId accessed in the connection. + instance_id (str): + InstanceId accessed in the connection. + table_id (str): + TableId accessed in the connection. + """ + + project_id = proto.Field(proto.STRING, number=1,) + instance_id = proto.Field(proto.STRING, number=2,) + table_id = proto.Field(proto.STRING, number=3,) + + +class BigQueryIODetails(proto.Message): + r"""Metadata for a BigQuery connector used by the job. + Attributes: + table (str): + Table accessed in the connection. + dataset (str): + Dataset accessed in the connection. + project_id (str): + Project accessed in the connection. + query (str): + Query used to access data in the connection. + """ + + table = proto.Field(proto.STRING, number=1,) + dataset = proto.Field(proto.STRING, number=2,) + project_id = proto.Field(proto.STRING, number=3,) + query = proto.Field(proto.STRING, number=4,) + + +class SpannerIODetails(proto.Message): + r"""Metadata for a Spanner connector used by the job. + Attributes: + project_id (str): + ProjectId accessed in the connection. + instance_id (str): + InstanceId accessed in the connection. + database_id (str): + DatabaseId accessed in the connection. + """ + + project_id = proto.Field(proto.STRING, number=1,) + instance_id = proto.Field(proto.STRING, number=2,) + database_id = proto.Field(proto.STRING, number=3,) + + +class SdkVersion(proto.Message): + r"""The version of the SDK used to run the job. + Attributes: + version (str): + The version of the SDK used to run the job. + version_display_name (str): + A readable string describing the version of + the SDK. + sdk_support_status (google.cloud.dataflow_v1beta3.types.SdkVersion.SdkSupportStatus): + The support status for this SDK version. + """ + + class SdkSupportStatus(proto.Enum): + r"""The support status of the SDK used to run the job.""" + UNKNOWN = 0 + SUPPORTED = 1 + STALE = 2 + DEPRECATED = 3 + UNSUPPORTED = 4 + + version = proto.Field(proto.STRING, number=1,) + version_display_name = proto.Field(proto.STRING, number=2,) + sdk_support_status = proto.Field(proto.ENUM, number=3, enum=SdkSupportStatus,) + + +class JobMetadata(proto.Message): + r"""Metadata available primarily for filtering jobs. Will be + included in the ListJob response and Job SUMMARY view. + + Attributes: + sdk_version (google.cloud.dataflow_v1beta3.types.SdkVersion): + The SDK version used to run the job. + spanner_details (Sequence[google.cloud.dataflow_v1beta3.types.SpannerIODetails]): + Identification of a Spanner source used in + the Dataflow job. + bigquery_details (Sequence[google.cloud.dataflow_v1beta3.types.BigQueryIODetails]): + Identification of a BigQuery source used in + the Dataflow job. + big_table_details (Sequence[google.cloud.dataflow_v1beta3.types.BigTableIODetails]): + Identification of a Cloud BigTable source + used in the Dataflow job. + pubsub_details (Sequence[google.cloud.dataflow_v1beta3.types.PubSubIODetails]): + Identification of a PubSub source used in the + Dataflow job. + file_details (Sequence[google.cloud.dataflow_v1beta3.types.FileIODetails]): + Identification of a File source used in the + Dataflow job. 
+ datastore_details (Sequence[google.cloud.dataflow_v1beta3.types.DatastoreIODetails]): + Identification of a Datastore source used in + the Dataflow job. + """ + + sdk_version = proto.Field(proto.MESSAGE, number=1, message="SdkVersion",) + spanner_details = proto.RepeatedField( + proto.MESSAGE, number=2, message="SpannerIODetails", + ) + bigquery_details = proto.RepeatedField( + proto.MESSAGE, number=3, message="BigQueryIODetails", + ) + big_table_details = proto.RepeatedField( + proto.MESSAGE, number=4, message="BigTableIODetails", + ) + pubsub_details = proto.RepeatedField( + proto.MESSAGE, number=5, message="PubSubIODetails", + ) + file_details = proto.RepeatedField( + proto.MESSAGE, number=6, message="FileIODetails", + ) + datastore_details = proto.RepeatedField( + proto.MESSAGE, number=7, message="DatastoreIODetails", + ) + + +class ExecutionStageState(proto.Message): + r"""A message describing the state of a particular execution + stage. + + Attributes: + execution_stage_name (str): + The name of the execution stage. + execution_stage_state (google.cloud.dataflow_v1beta3.types.JobState): + Execution stage states allow the same set of + values as JobState. + current_state_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the stage transitioned to + this state. + """ + + execution_stage_name = proto.Field(proto.STRING, number=1,) + execution_stage_state = proto.Field(proto.ENUM, number=2, enum="JobState",) + current_state_time = proto.Field( + proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp, + ) + + +class PipelineDescription(proto.Message): + r"""A descriptive representation of the submitted pipeline as well as + the executed form. This data is provided by the Dataflow + service for ease of visualizing the pipeline and interpreting + Dataflow provided metrics. + + Attributes: + original_pipeline_transform (Sequence[google.cloud.dataflow_v1beta3.types.TransformSummary]): + Description of each transform in the pipeline + and collections between them. + execution_pipeline_stage (Sequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary]): + Description of each stage of execution of the + pipeline. + display_data (Sequence[google.cloud.dataflow_v1beta3.types.DisplayData]): + Pipeline level display data. + """ + + original_pipeline_transform = proto.RepeatedField( + proto.MESSAGE, number=1, message="TransformSummary", + ) + execution_pipeline_stage = proto.RepeatedField( + proto.MESSAGE, number=2, message="ExecutionStageSummary", + ) + display_data = proto.RepeatedField(proto.MESSAGE, number=3, message="DisplayData",) + + +class TransformSummary(proto.Message): + r"""Description of the type, names/ids, and input/outputs for a + transform. + + Attributes: + kind (google.cloud.dataflow_v1beta3.types.KindType): + Type of transform. + id (str): + SDK generated id of this transform instance. + name (str): + User provided name for this transform + instance. + display_data (Sequence[google.cloud.dataflow_v1beta3.types.DisplayData]): + Transform-specific display data. + output_collection_name (Sequence[str]): + User names for all collection outputs to + this transform. + input_collection_name (Sequence[str]): + User names for all collection inputs to this + transform.
+ """ + + kind = proto.Field(proto.ENUM, number=1, enum="KindType",) + id = proto.Field(proto.STRING, number=2,) + name = proto.Field(proto.STRING, number=3,) + display_data = proto.RepeatedField(proto.MESSAGE, number=4, message="DisplayData",) + output_collection_name = proto.RepeatedField(proto.STRING, number=5,) + input_collection_name = proto.RepeatedField(proto.STRING, number=6,) + + +class ExecutionStageSummary(proto.Message): + r"""Description of the composing transforms, names/ids, and + input/outputs of a stage of execution. Some composing + transforms and sources may have been generated by the Dataflow + service during execution planning. + + Attributes: + name (str): + Dataflow service generated name for this + stage. + id (str): + Dataflow service generated id for this stage. + kind (google.cloud.dataflow_v1beta3.types.KindType): + Type of transform this stage is executing. + input_source (Sequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary.StageSource]): + Input sources for this stage. + output_source (Sequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary.StageSource]): + Output sources for this stage. + prerequisite_stage (Sequence[str]): + Other stages that must complete before this + stage can run. + component_transform (Sequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary.ComponentTransform]): + Transforms that comprise this execution + stage. + component_source (Sequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary.ComponentSource]): + Collections produced and consumed by + component transforms of this stage. + """ + + class StageSource(proto.Message): + r"""Description of an input or output of an execution stage. + Attributes: + user_name (str): + Human-readable name for this source; may be + user or system generated. + name (str): + Dataflow service generated name for this + source. + original_transform_or_collection (str): + User name for the original user transform or + collection with which this source is most + closely associated. + size_bytes (int): + Size of the source, if measurable. + """ + + user_name = proto.Field(proto.STRING, number=1,) + name = proto.Field(proto.STRING, number=2,) + original_transform_or_collection = proto.Field(proto.STRING, number=3,) + size_bytes = proto.Field(proto.INT64, number=4,) + + class ComponentTransform(proto.Message): + r"""Description of a transform executed as part of an execution + stage. + + Attributes: + user_name (str): + Human-readable name for this transform; may + be user or system generated. + name (str): + Dataflow service generated name for this + source. + original_transform (str): + User name for the original user transform + with which this transform is most closely + associated. + """ + + user_name = proto.Field(proto.STRING, number=1,) + name = proto.Field(proto.STRING, number=2,) + original_transform = proto.Field(proto.STRING, number=3,) + + class ComponentSource(proto.Message): + r"""Description of an interstitial value between transforms in an + execution stage. + + Attributes: + user_name (str): + Human-readable name for this transform; may + be user or system generated. + name (str): + Dataflow service generated name for this + source. + original_transform_or_collection (str): + User name for the original user transform or + collection with which this source is most + closely associated. 
+ """ + + user_name = proto.Field(proto.STRING, number=1,) + name = proto.Field(proto.STRING, number=2,) + original_transform_or_collection = proto.Field(proto.STRING, number=3,) + + name = proto.Field(proto.STRING, number=1,) + id = proto.Field(proto.STRING, number=2,) + kind = proto.Field(proto.ENUM, number=3, enum="KindType",) + input_source = proto.RepeatedField(proto.MESSAGE, number=4, message=StageSource,) + output_source = proto.RepeatedField(proto.MESSAGE, number=5, message=StageSource,) + prerequisite_stage = proto.RepeatedField(proto.STRING, number=8,) + component_transform = proto.RepeatedField( + proto.MESSAGE, number=6, message=ComponentTransform, + ) + component_source = proto.RepeatedField( + proto.MESSAGE, number=7, message=ComponentSource, + ) + + +class DisplayData(proto.Message): + r"""Data provided with a pipeline or transform to provide + descriptive info. + + Attributes: + key (str): + The key identifying the display data. + This is intended to be used as a label for the + display data when viewed in a dax monitoring + system. + namespace (str): + The namespace for the key. This is usually a + class name or programming language namespace + (i.e. python module) which defines the display + data. This allows a dax monitoring system to + specially handle the data and perform custom + rendering. + str_value (str): + Contains value if the data is of string type. + int64_value (int): + Contains value if the data is of int64 type. + float_value (float): + Contains value if the data is of float type. + java_class_value (str): + Contains value if the data is of java class + type. + timestamp_value (google.protobuf.timestamp_pb2.Timestamp): + Contains value if the data is of timestamp + type. + duration_value (google.protobuf.duration_pb2.Duration): + Contains value if the data is of duration + type. + bool_value (bool): + Contains value if the data is of a boolean + type. + short_str_value (str): + A possible additional shorter value to display. For example + a java_class_name_value of com.mypackage.MyDoFn will be + stored with MyDoFn as the short_str_value and + com.mypackage.MyDoFn as the java_class_name value. + short_str_value can be displayed and java_class_name_value + will be displayed as a tooltip. + url (str): + An optional full URL. + label (str): + An optional label to display in a dax UI for + the element. + """ + + key = proto.Field(proto.STRING, number=1,) + namespace = proto.Field(proto.STRING, number=2,) + str_value = proto.Field(proto.STRING, number=4, oneof="Value",) + int64_value = proto.Field(proto.INT64, number=5, oneof="Value",) + float_value = proto.Field(proto.FLOAT, number=6, oneof="Value",) + java_class_value = proto.Field(proto.STRING, number=7, oneof="Value",) + timestamp_value = proto.Field( + proto.MESSAGE, number=8, oneof="Value", message=timestamp_pb2.Timestamp, + ) + duration_value = proto.Field( + proto.MESSAGE, number=9, oneof="Value", message=duration_pb2.Duration, + ) + bool_value = proto.Field(proto.BOOL, number=10, oneof="Value",) + short_str_value = proto.Field(proto.STRING, number=11,) + url = proto.Field(proto.STRING, number=12,) + label = proto.Field(proto.STRING, number=13,) + + +class Step(proto.Message): + r"""Defines a particular step within a Cloud Dataflow job. + + A job consists of multiple steps, each of which performs some + specific operation as part of the overall job. Data is typically + passed from one step to another as part of the job. 
+ + Here's an example of a sequence of steps which together implement a + Map-Reduce job: + + - Read a collection of data from some source, parsing the + collection's elements. + + - Validate the elements. + + - Apply a user-defined function to map each element to some value + and extract an element-specific key value. + + - Group elements with the same key into a single element with that + key, transforming a multiply-keyed collection into a + uniquely-keyed collection. + + - Write the elements out to some data sink. + + Note that the Cloud Dataflow service may be used to run many + different types of jobs, not just Map-Reduce. + + Attributes: + kind (str): + The kind of step in the Cloud Dataflow job. + name (str): + The name that identifies the step. This must + be unique for each step with respect to all + other steps in the Cloud Dataflow job. + properties (google.protobuf.struct_pb2.Struct): + Named properties associated with the step. Each kind of + predefined step has its own required set of properties. Must + be provided on Create. Only retrieved with JOB_VIEW_ALL. + """ + + kind = proto.Field(proto.STRING, number=1,) + name = proto.Field(proto.STRING, number=2,) + properties = proto.Field(proto.MESSAGE, number=3, message=struct_pb2.Struct,) + + +class JobExecutionInfo(proto.Message): + r"""Additional information about how a Cloud Dataflow job will be + executed that isn't contained in the submitted job. + + Attributes: + stages (Sequence[google.cloud.dataflow_v1beta3.types.JobExecutionInfo.StagesEntry]): + A mapping from each stage to the information + about that stage. + """ + + stages = proto.MapField( + proto.STRING, proto.MESSAGE, number=1, message="JobExecutionStageInfo", + ) + + +class JobExecutionStageInfo(proto.Message): + r"""Contains information about how a particular + [google.dataflow.v1beta3.Step][google.dataflow.v1beta3.Step] will be + executed. + + Attributes: + step_name (Sequence[str]): + The steps associated with the execution + stage. Note that stages may have several steps, + and that a given step might be run by more than + one stage. + """ + + step_name = proto.RepeatedField(proto.STRING, number=1,) + + +class CreateJobRequest(proto.Message): + r"""Request to create a Cloud Dataflow job. + Attributes: + project_id (str): + The ID of the Cloud Platform project that the + job belongs to. + job (google.cloud.dataflow_v1beta3.types.Job): + The job to create. + view (google.cloud.dataflow_v1beta3.types.JobView): + The level of information requested in + response. + replace_job_id (str): + Deprecated. This field is now in the Job + message. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains this job. + """ + + project_id = proto.Field(proto.STRING, number=1,) + job = proto.Field(proto.MESSAGE, number=2, message="Job",) + view = proto.Field(proto.ENUM, number=3, enum="JobView",) + replace_job_id = proto.Field(proto.STRING, number=4,) + location = proto.Field(proto.STRING, number=5,) + + +class GetJobRequest(proto.Message): + r"""Request to get the state of a Cloud Dataflow job. + Attributes: + project_id (str): + The ID of the Cloud Platform project that the + job belongs to. + job_id (str): + The job ID. + view (google.cloud.dataflow_v1beta3.types.JobView): + The level of information requested in + response. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains this job. 
+ """ + + project_id = proto.Field(proto.STRING, number=1,) + job_id = proto.Field(proto.STRING, number=2,) + view = proto.Field(proto.ENUM, number=3, enum="JobView",) + location = proto.Field(proto.STRING, number=4,) + + +class UpdateJobRequest(proto.Message): + r"""Request to update a Cloud Dataflow job. + Attributes: + project_id (str): + The ID of the Cloud Platform project that the + job belongs to. + job_id (str): + The job ID. + job (google.cloud.dataflow_v1beta3.types.Job): + The updated job. + Only the job state is updatable; other fields + will be ignored. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains this job. + """ + + project_id = proto.Field(proto.STRING, number=1,) + job_id = proto.Field(proto.STRING, number=2,) + job = proto.Field(proto.MESSAGE, number=3, message="Job",) + location = proto.Field(proto.STRING, number=4,) + + +class ListJobsRequest(proto.Message): + r"""Request to list Cloud Dataflow jobs. + Attributes: + filter (google.cloud.dataflow_v1beta3.types.ListJobsRequest.Filter): + The kind of filter to use. + project_id (str): + The project which owns the jobs. + view (google.cloud.dataflow_v1beta3.types.JobView): + Deprecated. ListJobs always returns summaries + now. Use GetJob for other JobViews. + page_size (int): + If there are many jobs, limit response to at most this many. + The actual number of jobs returned will be the lesser of + max_responses and an unspecified server-defined limit. + page_token (str): + Set this to the 'next_page_token' field of a previous + response to request additional results in a long list. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains this job. + """ + + class Filter(proto.Enum): + r"""This field filters out and returns jobs in the specified job + state. The order of data returned is determined by the filter + used, and is subject to change. + """ + UNKNOWN = 0 + ALL = 1 + TERMINATED = 2 + ACTIVE = 3 + + filter = proto.Field(proto.ENUM, number=5, enum=Filter,) + project_id = proto.Field(proto.STRING, number=1,) + view = proto.Field(proto.ENUM, number=2, enum="JobView",) + page_size = proto.Field(proto.INT32, number=3,) + page_token = proto.Field(proto.STRING, number=4,) + location = proto.Field(proto.STRING, number=17,) + + +class FailedLocation(proto.Message): + r"""Indicates which [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + failed to respond to a request for data. + + Attributes: + name (str): + The name of the [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that failed to respond. + """ + + name = proto.Field(proto.STRING, number=1,) + + +class ListJobsResponse(proto.Message): + r"""Response to a request to list Cloud Dataflow jobs in a + project. This might be a partial response, depending on the page + size in the ListJobsRequest. However, if the project does not + have any jobs, an instance of ListJobsResponse is not returned + and the requests's response body is empty {}. + + Attributes: + jobs (Sequence[google.cloud.dataflow_v1beta3.types.Job]): + A subset of the requested job information. + next_page_token (str): + Set if there may be more results than fit in + this response. 
+ failed_location (Sequence[google.cloud.dataflow_v1beta3.types.FailedLocation]): + Zero or more messages describing the [regional endpoints] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that failed to respond. + """ + + @property + def raw_page(self): + return self + + jobs = proto.RepeatedField(proto.MESSAGE, number=1, message="Job",) + next_page_token = proto.Field(proto.STRING, number=2,) + failed_location = proto.RepeatedField( + proto.MESSAGE, number=3, message="FailedLocation", + ) + + +class SnapshotJobRequest(proto.Message): + r"""Request to create a snapshot of a job. + Attributes: + project_id (str): + The project which owns the job to be + snapshotted. + job_id (str): + The job to be snapshotted. + ttl (google.protobuf.duration_pb2.Duration): + TTL for the snapshot. + location (str): + The location that contains this job. + snapshot_sources (bool): + If true, perform snapshots for sources which + support this. + description (str): + User-specified description of the snapshot. + May be empty. + """ + + project_id = proto.Field(proto.STRING, number=1,) + job_id = proto.Field(proto.STRING, number=2,) + ttl = proto.Field(proto.MESSAGE, number=3, message=duration_pb2.Duration,) + location = proto.Field(proto.STRING, number=4,) + snapshot_sources = proto.Field(proto.BOOL, number=5,) + description = proto.Field(proto.STRING, number=6,) + + +class CheckActiveJobsRequest(proto.Message): + r"""Request to check whether active jobs exist for a project. + Attributes: + project_id (str): + The project which owns the jobs. + """ + + project_id = proto.Field(proto.STRING, number=1,) + + +class CheckActiveJobsResponse(proto.Message): + r"""Response for CheckActiveJobsRequest. + Attributes: + active_jobs_exist (bool): + If True, active jobs exist for the project; + False otherwise. + """ + + active_jobs_exist = proto.Field(proto.BOOL, number=1,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/messages.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/messages.py new file mode 100644 index 000000000000..a3dca4508bfe --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/messages.py @@ -0,0 +1,211 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
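The job request messages defined above are consumed by the generated JobsV1Beta3Client. A rough sketch of creating a job and polling its state, assuming the standard GAPIC surface (CreateJob maps to create_job, GetJob to get_job) and placeholder project/region values:

from google.cloud import dataflow_v1beta3

client = dataflow_v1beta3.JobsV1Beta3Client()

# Submit a batch job to a regional endpoint (all identifiers are placeholders).
job = client.create_job(
    request=dataflow_v1beta3.CreateJobRequest(
        project_id="example-project",
        location="us-central1",
        job=dataflow_v1beta3.Job(
            name="example-job",
            type_=dataflow_v1beta3.JobType.JOB_TYPE_BATCH,
        ),
    )
)

# Fetch the job back with a summary view to keep the response small.
latest = client.get_job(
    request=dataflow_v1beta3.GetJobRequest(
        project_id="example-project",
        job_id=job.id,
        view=dataflow_v1beta3.JobView.JOB_VIEW_SUMMARY,
        location="us-central1",
    )
)
print(latest.current_state)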
+# +import proto # type: ignore + +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.dataflow.v1beta3", + manifest={ + "JobMessageImportance", + "JobMessage", + "StructuredMessage", + "AutoscalingEvent", + "ListJobMessagesRequest", + "ListJobMessagesResponse", + }, +) + + +class JobMessageImportance(proto.Enum): + r"""Indicates the importance of the message.""" + JOB_MESSAGE_IMPORTANCE_UNKNOWN = 0 + JOB_MESSAGE_DEBUG = 1 + JOB_MESSAGE_DETAILED = 2 + JOB_MESSAGE_BASIC = 5 + JOB_MESSAGE_WARNING = 3 + JOB_MESSAGE_ERROR = 4 + + +class JobMessage(proto.Message): + r"""A particular message pertaining to a Dataflow job. + Attributes: + id (str): + Deprecated. + time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the message. + message_text (str): + The text of the message. + message_importance (google.cloud.dataflow_v1beta3.types.JobMessageImportance): + Importance level of the message. + """ + + id = proto.Field(proto.STRING, number=1,) + time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + message_text = proto.Field(proto.STRING, number=3,) + message_importance = proto.Field(proto.ENUM, number=4, enum="JobMessageImportance",) + + +class StructuredMessage(proto.Message): + r"""A rich message format, including a human readable string, a + key for identifying the message, and structured data associated + with the message for programmatic consumption. + + Attributes: + message_text (str): + Human-readable version of message. + message_key (str): + Identifier for this message type. Used by + external systems to internationalize or + personalize message. + parameters (Sequence[google.cloud.dataflow_v1beta3.types.StructuredMessage.Parameter]): + The structured data associated with this + message. + """ + + class Parameter(proto.Message): + r"""Structured data associated with this message. + Attributes: + key (str): + Key or name for this parameter. + value (google.protobuf.struct_pb2.Value): + Value for this parameter. + """ + + key = proto.Field(proto.STRING, number=1,) + value = proto.Field(proto.MESSAGE, number=2, message=struct_pb2.Value,) + + message_text = proto.Field(proto.STRING, number=1,) + message_key = proto.Field(proto.STRING, number=2,) + parameters = proto.RepeatedField(proto.MESSAGE, number=3, message=Parameter,) + + +class AutoscalingEvent(proto.Message): + r"""A structured message reporting an autoscaling decision made + by the Dataflow service. + + Attributes: + current_num_workers (int): + The current number of workers the job has. + target_num_workers (int): + The target number of workers the worker pool + wants to resize to use. + event_type (google.cloud.dataflow_v1beta3.types.AutoscalingEvent.AutoscalingEventType): + The type of autoscaling event to report. + description (google.cloud.dataflow_v1beta3.types.StructuredMessage): + A message describing why the system decided + to adjust the current number of workers, why it + failed, or why the system decided to not make + any changes to the number of workers. + time (google.protobuf.timestamp_pb2.Timestamp): + The time this event was emitted to indicate a new target or + current num_workers value. + worker_pool (str): + A short and friendly name for the worker pool this event + refers to, populated from the value of + PoolStageRelation::user_pool_name. 
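+
+    A minimal sketch of reading these events, assuming the generated
+    ``MessagesV1Beta3Client`` and hypothetical project and job ids; the
+    pager's ``pages`` view exposes each raw response, which carries the
+    ``autoscaling_events`` field::
+
+        from google.cloud import dataflow_v1beta3
+
+        client = dataflow_v1beta3.MessagesV1Beta3Client()
+        pager = client.list_job_messages(
+            request=dataflow_v1beta3.ListJobMessagesRequest(
+                project_id="my-project",  # hypothetical
+                job_id="my-job-id",       # hypothetical
+                location="us-central1",
+            )
+        )
+        for page in pager.pages:
+            for event in page.autoscaling_events:
+                print(event.event_type, event.target_num_workers)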
+ """ + + class AutoscalingEventType(proto.Enum): + r"""Indicates the type of autoscaling event.""" + TYPE_UNKNOWN = 0 + TARGET_NUM_WORKERS_CHANGED = 1 + CURRENT_NUM_WORKERS_CHANGED = 2 + ACTUATION_FAILURE = 3 + NO_CHANGE = 4 + + current_num_workers = proto.Field(proto.INT64, number=1,) + target_num_workers = proto.Field(proto.INT64, number=2,) + event_type = proto.Field(proto.ENUM, number=3, enum=AutoscalingEventType,) + description = proto.Field(proto.MESSAGE, number=4, message="StructuredMessage",) + time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + worker_pool = proto.Field(proto.STRING, number=7,) + + +class ListJobMessagesRequest(proto.Message): + r"""Request to list job messages. Up to max_results messages will be + returned in the time range specified starting with the oldest + messages first. If no time range is specified the results with start + with the oldest message. + + Attributes: + project_id (str): + A project id. + job_id (str): + The job to get messages about. + minimum_importance (google.cloud.dataflow_v1beta3.types.JobMessageImportance): + Filter to only get messages with importance + >= level + page_size (int): + If specified, determines the maximum number + of messages to return. If unspecified, the + service may choose an appropriate default, or + may return an arbitrarily large number of + results. + page_token (str): + If supplied, this should be the value of next_page_token + returned by an earlier call. This will cause the next page + of results to be returned. + start_time (google.protobuf.timestamp_pb2.Timestamp): + If specified, return only messages with timestamps >= + start_time. The default is the job creation time (i.e. + beginning of messages). + end_time (google.protobuf.timestamp_pb2.Timestamp): + Return only messages with timestamps < end_time. The default + is now (i.e. return up to the latest messages available). + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains the job specified by job_id. + """ + + project_id = proto.Field(proto.STRING, number=1,) + job_id = proto.Field(proto.STRING, number=2,) + minimum_importance = proto.Field(proto.ENUM, number=3, enum="JobMessageImportance",) + page_size = proto.Field(proto.INT32, number=4,) + page_token = proto.Field(proto.STRING, number=5,) + start_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,) + location = proto.Field(proto.STRING, number=8,) + + +class ListJobMessagesResponse(proto.Message): + r"""Response to a request to list job messages. + Attributes: + job_messages (Sequence[google.cloud.dataflow_v1beta3.types.JobMessage]): + Messages in ascending timestamp order. + next_page_token (str): + The token to obtain the next page of results + if there are more. + autoscaling_events (Sequence[google.cloud.dataflow_v1beta3.types.AutoscalingEvent]): + Autoscaling events in ascending timestamp + order. 
+ """ + + @property + def raw_page(self): + return self + + job_messages = proto.RepeatedField(proto.MESSAGE, number=1, message="JobMessage",) + next_page_token = proto.Field(proto.STRING, number=2,) + autoscaling_events = proto.RepeatedField( + proto.MESSAGE, number=3, message="AutoscalingEvent", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/metrics.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/metrics.py new file mode 100644 index 000000000000..85b2e2272c54 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/metrics.py @@ -0,0 +1,412 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.dataflow.v1beta3", + manifest={ + "ExecutionState", + "MetricStructuredName", + "MetricUpdate", + "GetJobMetricsRequest", + "JobMetrics", + "GetJobExecutionDetailsRequest", + "ProgressTimeseries", + "StageSummary", + "JobExecutionDetails", + "GetStageExecutionDetailsRequest", + "WorkItemDetails", + "WorkerDetails", + "StageExecutionDetails", + }, +) + + +class ExecutionState(proto.Enum): + r"""The state of some component of job execution.""" + EXECUTION_STATE_UNKNOWN = 0 + EXECUTION_STATE_NOT_STARTED = 1 + EXECUTION_STATE_RUNNING = 2 + EXECUTION_STATE_SUCCEEDED = 3 + EXECUTION_STATE_FAILED = 4 + EXECUTION_STATE_CANCELLED = 5 + + +class MetricStructuredName(proto.Message): + r"""Identifies a metric, by describing the source which generated + the metric. + + Attributes: + origin (str): + Origin (namespace) of metric name. May be + blank for user-define metrics; will be + "dataflow" for metrics defined by the Dataflow + service or SDK. + name (str): + Worker-defined metric name. + context (Sequence[google.cloud.dataflow_v1beta3.types.MetricStructuredName.ContextEntry]): + Zero or more labeled fields which identify the part of the + job this metric is associated with, such as the name of a + step or collection. + + For example, built-in counters associated with steps will + have context['step'] = . Counters associated with + PCollections in the SDK will have context['pcollection'] = . + """ + + origin = proto.Field(proto.STRING, number=1,) + name = proto.Field(proto.STRING, number=2,) + context = proto.MapField(proto.STRING, proto.STRING, number=3,) + + +class MetricUpdate(proto.Message): + r"""Describes the state of a metric. + Attributes: + name (google.cloud.dataflow_v1beta3.types.MetricStructuredName): + Name of the metric. + kind (str): + Metric aggregation kind. The possible metric + aggregation kinds are "Sum", "Max", "Min", + "Mean", "Set", "And", "Or", and "Distribution". + The specified aggregation kind is case- + insensitive. 
+            If omitted, this is not an aggregated value but
+            instead a single metric sample value.
+        cumulative (bool):
+            True if this metric is reported as the total
+            cumulative aggregate value accumulated since the
+            worker started working on this WorkItem. By
+            default this is false, indicating that this
+            metric is reported as a delta that is not
+            associated with any WorkItem.
+        scalar (google.protobuf.struct_pb2.Value):
+            Worker-computed aggregate value for
+            aggregation kinds "Sum", "Max", "Min", "And",
+            and "Or". The possible value types are Long,
+            Double, and Boolean.
+        mean_sum (google.protobuf.struct_pb2.Value):
+            Worker-computed aggregate value for the "Mean" aggregation
+            kind. This holds the sum of the aggregated values and is
+            used in combination with mean_count below to obtain the
+            actual mean aggregate value. The only possible value types
+            are Long and Double.
+        mean_count (google.protobuf.struct_pb2.Value):
+            Worker-computed aggregate value for the "Mean" aggregation
+            kind. This holds the count of the aggregated values and is
+            used in combination with mean_sum above to obtain the actual
+            mean aggregate value. The only possible value type is Long.
+        set_ (google.protobuf.struct_pb2.Value):
+            Worker-computed aggregate value for the "Set"
+            aggregation kind. The only possible value type
+            is a list of Values whose type can be Long,
+            Double, or String, according to the metric's
+            type. All Values in the list must be of the
+            same type.
+        distribution (google.protobuf.struct_pb2.Value):
+            A struct value describing properties of a
+            distribution of numeric values.
+        gauge (google.protobuf.struct_pb2.Value):
+            A struct value describing properties of a
+            Gauge. Metrics of gauge type show the value of a
+            metric across time, and are aggregated based on
+            the newest value.
+        internal (google.protobuf.struct_pb2.Value):
+            Worker-computed aggregate value for internal
+            use by the Dataflow service.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Timestamp associated with the metric value.
+            Optional when workers are reporting work
+            progress; it will be filled in responses from
+            the metrics API.
+    """
+
+    name = proto.Field(proto.MESSAGE, number=1, message="MetricStructuredName",)
+    kind = proto.Field(proto.STRING, number=2,)
+    cumulative = proto.Field(proto.BOOL, number=3,)
+    scalar = proto.Field(proto.MESSAGE, number=4, message=struct_pb2.Value,)
+    mean_sum = proto.Field(proto.MESSAGE, number=5, message=struct_pb2.Value,)
+    mean_count = proto.Field(proto.MESSAGE, number=6, message=struct_pb2.Value,)
+    set_ = proto.Field(proto.MESSAGE, number=7, message=struct_pb2.Value,)
+    distribution = proto.Field(proto.MESSAGE, number=11, message=struct_pb2.Value,)
+    gauge = proto.Field(proto.MESSAGE, number=12, message=struct_pb2.Value,)
+    internal = proto.Field(proto.MESSAGE, number=8, message=struct_pb2.Value,)
+    update_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,)
+
+
+class GetJobMetricsRequest(proto.Message):
+    r"""Request to get job metrics.
+    Attributes:
+        project_id (str):
+            A project id.
+        job_id (str):
+            The job to get metrics for.
+        start_time (google.protobuf.timestamp_pb2.Timestamp):
+            Return only metric data that has changed
+            since this time. Default is to return all
+            information about all metrics for the job.
+        location (str):
+            The [regional endpoint]
+            (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints)
+            that contains the job specified by job_id.
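+
+    A minimal usage sketch, assuming the generated
+    ``MetricsV1Beta3Client`` (project and job ids are hypothetical)::
+
+        from google.cloud import dataflow_v1beta3
+
+        client = dataflow_v1beta3.MetricsV1Beta3Client()
+        metrics = client.get_job_metrics(
+            request=dataflow_v1beta3.GetJobMetricsRequest(
+                project_id="my-project",  # hypothetical
+                job_id="my-job-id",       # hypothetical
+                location="us-central1",
+            )
+        )
+        for update in metrics.metrics:
+            print(update.name.name, update.kind)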
+ """ + + project_id = proto.Field(proto.STRING, number=1,) + job_id = proto.Field(proto.STRING, number=2,) + start_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + location = proto.Field(proto.STRING, number=4,) + + +class JobMetrics(proto.Message): + r"""JobMetrics contains a collection of metrics describing the + detailed progress of a Dataflow job. Metrics correspond to user- + defined and system-defined metrics in the job. + + This resource captures only the most recent values of each + metric; time-series data can be queried for them (under the same + metric names) from Cloud Monitoring. + + Attributes: + metric_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp as of which metric values are + current. + metrics (Sequence[google.cloud.dataflow_v1beta3.types.MetricUpdate]): + All metrics for this job. + """ + + metric_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + metrics = proto.RepeatedField(proto.MESSAGE, number=2, message="MetricUpdate",) + + +class GetJobExecutionDetailsRequest(proto.Message): + r"""Request to get job execution details. + Attributes: + project_id (str): + A project id. + job_id (str): + The job to get execution details for. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains the job specified by job_id. + page_size (int): + If specified, determines the maximum number + of stages to return. If unspecified, the + service may choose an appropriate default, or + may return an arbitrarily large number of + results. + page_token (str): + If supplied, this should be the value of next_page_token + returned by an earlier call. This will cause the next page + of results to be returned. + """ + + project_id = proto.Field(proto.STRING, number=1,) + job_id = proto.Field(proto.STRING, number=2,) + location = proto.Field(proto.STRING, number=3,) + page_size = proto.Field(proto.INT32, number=4,) + page_token = proto.Field(proto.STRING, number=5,) + + +class ProgressTimeseries(proto.Message): + r"""Information about the progress of some component of job + execution. + + Attributes: + current_progress (float): + The current progress of the component, in the range [0,1]. + data_points (Sequence[google.cloud.dataflow_v1beta3.types.ProgressTimeseries.Point]): + History of progress for the component. + Points are sorted by time. + """ + + class Point(proto.Message): + r"""A point in the timeseries. + Attributes: + time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the point. + value (float): + The value of the point. + """ + + time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + value = proto.Field(proto.DOUBLE, number=2,) + + current_progress = proto.Field(proto.DOUBLE, number=1,) + data_points = proto.RepeatedField(proto.MESSAGE, number=2, message=Point,) + + +class StageSummary(proto.Message): + r"""Information about a particular execution stage of a job. + Attributes: + stage_id (str): + ID of this stage + state (google.cloud.dataflow_v1beta3.types.ExecutionState): + State of this stage. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start time of this stage. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End time of this stage. + If the work item is completed, this is the + actual end time of the stage. Otherwise, it is + the predicted end time. + progress (google.cloud.dataflow_v1beta3.types.ProgressTimeseries): + Progress for this stage. 
+ Only applicable to Batch jobs. + metrics (Sequence[google.cloud.dataflow_v1beta3.types.MetricUpdate]): + Metrics for this stage. + """ + + stage_id = proto.Field(proto.STRING, number=1,) + state = proto.Field(proto.ENUM, number=2, enum="ExecutionState",) + start_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + progress = proto.Field(proto.MESSAGE, number=5, message="ProgressTimeseries",) + metrics = proto.RepeatedField(proto.MESSAGE, number=6, message="MetricUpdate",) + + +class JobExecutionDetails(proto.Message): + r"""Information about the execution of a job. + Attributes: + stages (Sequence[google.cloud.dataflow_v1beta3.types.StageSummary]): + The stages of the job execution. + next_page_token (str): + If present, this response does not contain all requested + tasks. To obtain the next page of results, repeat the + request with page_token set to this value. + """ + + @property + def raw_page(self): + return self + + stages = proto.RepeatedField(proto.MESSAGE, number=1, message="StageSummary",) + next_page_token = proto.Field(proto.STRING, number=2,) + + +class GetStageExecutionDetailsRequest(proto.Message): + r"""Request to get information about a particular execution stage + of a job. Currently only tracked for Batch jobs. + + Attributes: + project_id (str): + A project id. + job_id (str): + The job to get execution details for. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains the job specified by job_id. + stage_id (str): + The stage for which to fetch information. + page_size (int): + If specified, determines the maximum number + of work items to return. If unspecified, the + service may choose an appropriate default, or + may return an arbitrarily large number of + results. + page_token (str): + If supplied, this should be the value of next_page_token + returned by an earlier call. This will cause the next page + of results to be returned. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Lower time bound of work items to include, by + start time. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Upper time bound of work items to include, by + start time. + """ + + project_id = proto.Field(proto.STRING, number=1,) + job_id = proto.Field(proto.STRING, number=2,) + location = proto.Field(proto.STRING, number=3,) + stage_id = proto.Field(proto.STRING, number=4,) + page_size = proto.Field(proto.INT32, number=5,) + page_token = proto.Field(proto.STRING, number=6,) + start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp,) + + +class WorkItemDetails(proto.Message): + r"""Information about an individual work item execution. + Attributes: + task_id (str): + Name of this work item. + attempt_id (str): + Attempt ID of this work item + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start time of this work item attempt. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End time of this work item attempt. + If the work item is completed, this is the + actual end time of the work item. Otherwise, it + is the predicted end time. + state (google.cloud.dataflow_v1beta3.types.ExecutionState): + State of this work item. + progress (google.cloud.dataflow_v1beta3.types.ProgressTimeseries): + Progress of this work item. 
+ metrics (Sequence[google.cloud.dataflow_v1beta3.types.MetricUpdate]): + Metrics for this work item. + """ + + task_id = proto.Field(proto.STRING, number=1,) + attempt_id = proto.Field(proto.STRING, number=2,) + start_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + state = proto.Field(proto.ENUM, number=5, enum="ExecutionState",) + progress = proto.Field(proto.MESSAGE, number=6, message="ProgressTimeseries",) + metrics = proto.RepeatedField(proto.MESSAGE, number=7, message="MetricUpdate",) + + +class WorkerDetails(proto.Message): + r"""Information about a worker + Attributes: + worker_name (str): + Name of this worker + work_items (Sequence[google.cloud.dataflow_v1beta3.types.WorkItemDetails]): + Work items processed by this worker, sorted + by time. + """ + + worker_name = proto.Field(proto.STRING, number=1,) + work_items = proto.RepeatedField( + proto.MESSAGE, number=2, message="WorkItemDetails", + ) + + +class StageExecutionDetails(proto.Message): + r"""Information about the workers and work items within a stage. + Attributes: + workers (Sequence[google.cloud.dataflow_v1beta3.types.WorkerDetails]): + Workers that have done work on the stage. + next_page_token (str): + If present, this response does not contain all requested + tasks. To obtain the next page of results, repeat the + request with page_token set to this value. + """ + + @property + def raw_page(self): + return self + + workers = proto.RepeatedField(proto.MESSAGE, number=1, message="WorkerDetails",) + next_page_token = proto.Field(proto.STRING, number=2,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/snapshots.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/snapshots.py new file mode 100644 index 000000000000..ebe7b7978d8f --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/snapshots.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.dataflow.v1beta3", + manifest={ + "SnapshotState", + "PubsubSnapshotMetadata", + "Snapshot", + "GetSnapshotRequest", + "DeleteSnapshotRequest", + "DeleteSnapshotResponse", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + }, +) + + +class SnapshotState(proto.Enum): + r"""Snapshot state.""" + UNKNOWN_SNAPSHOT_STATE = 0 + PENDING = 1 + RUNNING = 2 + READY = 3 + FAILED = 4 + DELETED = 5 + + +class PubsubSnapshotMetadata(proto.Message): + r"""Represents a Pubsub snapshot. + Attributes: + topic_name (str): + The name of the Pubsub topic. + snapshot_name (str): + The name of the Pubsub snapshot. 
+        expire_time (google.protobuf.timestamp_pb2.Timestamp):
+            The expire time of the Pubsub snapshot.
+    """
+
+    topic_name = proto.Field(proto.STRING, number=1,)
+    snapshot_name = proto.Field(proto.STRING, number=2,)
+    expire_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,)
+
+
+class Snapshot(proto.Message):
+    r"""Represents a snapshot of a job.
+    Attributes:
+        id (str):
+            The unique ID of this snapshot.
+        project_id (str):
+            The project this snapshot belongs to.
+        source_job_id (str):
+            The job this snapshot was created from.
+        creation_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time this snapshot was created.
+        ttl (google.protobuf.duration_pb2.Duration):
+            The time after which this snapshot will be
+            automatically deleted.
+        state (google.cloud.dataflow_v1beta3.types.SnapshotState):
+            State of the snapshot.
+        pubsub_metadata (Sequence[google.cloud.dataflow_v1beta3.types.PubsubSnapshotMetadata]):
+            PubSub snapshot metadata.
+        description (str):
+            User specified description of the snapshot.
+            May be empty.
+        disk_size_bytes (int):
+            The disk byte size of the snapshot. Only
+            available for snapshots in READY state.
+        region (str):
+            Cloud region where this snapshot lives,
+            e.g., "us-central1".
+    """
+
+    id = proto.Field(proto.STRING, number=1,)
+    project_id = proto.Field(proto.STRING, number=2,)
+    source_job_id = proto.Field(proto.STRING, number=3,)
+    creation_time = proto.Field(
+        proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,
+    )
+    ttl = proto.Field(proto.MESSAGE, number=5, message=duration_pb2.Duration,)
+    state = proto.Field(proto.ENUM, number=6, enum="SnapshotState",)
+    pubsub_metadata = proto.RepeatedField(
+        proto.MESSAGE, number=7, message="PubsubSnapshotMetadata",
+    )
+    description = proto.Field(proto.STRING, number=8,)
+    disk_size_bytes = proto.Field(proto.INT64, number=9,)
+    region = proto.Field(proto.STRING, number=10,)
+
+
+class GetSnapshotRequest(proto.Message):
+    r"""Request to get information about a snapshot.
+    Attributes:
+        project_id (str):
+            The ID of the Cloud Platform project that the
+            snapshot belongs to.
+        snapshot_id (str):
+            The ID of the snapshot.
+        location (str):
+            The location that contains this snapshot.
+    """
+
+    project_id = proto.Field(proto.STRING, number=1,)
+    snapshot_id = proto.Field(proto.STRING, number=2,)
+    location = proto.Field(proto.STRING, number=3,)
+
+
+class DeleteSnapshotRequest(proto.Message):
+    r"""Request to delete a snapshot.
+    Attributes:
+        project_id (str):
+            The ID of the Cloud Platform project that the
+            snapshot belongs to.
+        snapshot_id (str):
+            The ID of the snapshot.
+        location (str):
+            The location that contains this snapshot.
+    """
+
+    project_id = proto.Field(proto.STRING, number=1,)
+    snapshot_id = proto.Field(proto.STRING, number=2,)
+    location = proto.Field(proto.STRING, number=3,)
+
+
+class DeleteSnapshotResponse(proto.Message):
+    r"""Response from deleting a snapshot."""
+
+
+class ListSnapshotsRequest(proto.Message):
+    r"""Request to list snapshots.
+    Attributes:
+        project_id (str):
+            The project ID to list snapshots for.
+        job_id (str):
+            If specified, list snapshots created from
+            this job.
+        location (str):
+            The location to list snapshots in.
+    """
+
+    project_id = proto.Field(proto.STRING, number=1,)
+    job_id = proto.Field(proto.STRING, number=3,)
+    location = proto.Field(proto.STRING, number=2,)
+
+
+class ListSnapshotsResponse(proto.Message):
+    r"""List of snapshots.
+    Attributes:
+        snapshots (Sequence[google.cloud.dataflow_v1beta3.types.Snapshot]):
+            Returned snapshots.
+    """
+
+    snapshots = proto.RepeatedField(proto.MESSAGE, number=1, message="Snapshot",)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/streaming.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/streaming.py
new file mode 100644
index 000000000000..255a62e01eaa
--- /dev/null
+++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/streaming.py
@@ -0,0 +1,354 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import proto  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package="google.dataflow.v1beta3",
+    manifest={
+        "TopologyConfig",
+        "PubsubLocation",
+        "StreamingStageLocation",
+        "StreamingSideInputLocation",
+        "CustomSourceLocation",
+        "StreamLocation",
+        "StateFamilyConfig",
+        "ComputationTopology",
+        "KeyRangeLocation",
+        "MountedDataDisk",
+        "DataDiskAssignment",
+        "KeyRangeDataDiskAssignment",
+        "StreamingComputationRanges",
+        "StreamingApplianceSnapshotConfig",
+    },
+)
+
+
+class TopologyConfig(proto.Message):
+    r"""Global topology of the streaming Dataflow job, including all
+    computations and their sharded locations.
+
+    Attributes:
+        computations (Sequence[google.cloud.dataflow_v1beta3.types.ComputationTopology]):
+            The computations associated with a streaming
+            Dataflow job.
+        data_disk_assignments (Sequence[google.cloud.dataflow_v1beta3.types.DataDiskAssignment]):
+            The disks assigned to a streaming Dataflow
+            job.
+        user_stage_to_computation_name_map (Sequence[google.cloud.dataflow_v1beta3.types.TopologyConfig.UserStageToComputationNameMapEntry]):
+            Maps user stage names to stable computation
+            names.
+        forwarding_key_bits (int):
+            The size (in bits) of keys that will be
+            assigned to source messages.
+        persistent_state_version (int):
+            Version number for persistent state.
+    """
+
+    computations = proto.RepeatedField(
+        proto.MESSAGE, number=1, message="ComputationTopology",
+    )
+    data_disk_assignments = proto.RepeatedField(
+        proto.MESSAGE, number=2, message="DataDiskAssignment",
+    )
+    user_stage_to_computation_name_map = proto.MapField(
+        proto.STRING, proto.STRING, number=3,
+    )
+    forwarding_key_bits = proto.Field(proto.INT32, number=4,)
+    persistent_state_version = proto.Field(proto.INT32, number=5,)
+
+
+class PubsubLocation(proto.Message):
+    r"""Identifies a pubsub location to use for transferring data
+    into or out of a streaming Dataflow job.
+
+    Attributes:
+        topic (str):
+            A pubsub topic, in the form of
+            "pubsub.googleapis.com/topics/<project-id>/<topic-name>".
+        subscription (str):
+            A pubsub subscription, in the form of
+            "pubsub.googleapis.com/subscriptions/<project-id>/<subscription-name>".
+        timestamp_label (str):
+            If set, contains a pubsub label from which to
+            extract record timestamps. If left empty, record
+            timestamps will be generated upon arrival.
+        id_label (str):
+            If set, contains a pubsub label from which to
+            extract record ids. If left empty, record
+            deduplication will be strictly best effort.
+        drop_late_data (bool):
+            Indicates whether the pipeline allows
+            late-arriving data.
+        tracking_subscription (str):
+            If set, specifies the pubsub subscription
+            that will be used for tracking custom time
+            timestamps for watermark estimation.
+        with_attributes (bool):
+            If true, then the client has requested to get
+            pubsub attributes.
+    """
+
+    topic = proto.Field(proto.STRING, number=1,)
+    subscription = proto.Field(proto.STRING, number=2,)
+    timestamp_label = proto.Field(proto.STRING, number=3,)
+    id_label = proto.Field(proto.STRING, number=4,)
+    drop_late_data = proto.Field(proto.BOOL, number=5,)
+    tracking_subscription = proto.Field(proto.STRING, number=6,)
+    with_attributes = proto.Field(proto.BOOL, number=7,)
+
+
+class StreamingStageLocation(proto.Message):
+    r"""Identifies the location of a streaming computation stage, for
+    stage-to-stage communication.
+
+    Attributes:
+        stream_id (str):
+            Identifies the particular stream within the
+            streaming Dataflow job.
+    """
+
+    stream_id = proto.Field(proto.STRING, number=1,)
+
+
+class StreamingSideInputLocation(proto.Message):
+    r"""Identifies the location of a streaming side input.
+    Attributes:
+        tag (str):
+            Identifies the particular side input within
+            the streaming Dataflow job.
+        state_family (str):
+            Identifies the state family where this side
+            input is stored.
+    """
+
+    tag = proto.Field(proto.STRING, number=1,)
+    state_family = proto.Field(proto.STRING, number=2,)
+
+
+class CustomSourceLocation(proto.Message):
+    r"""Identifies the location of a custom source.
+    Attributes:
+        stateful (bool):
+            Whether this source is stateful.
+    """
+
+    stateful = proto.Field(proto.BOOL, number=1,)
+
+
+class StreamLocation(proto.Message):
+    r"""Describes a stream of data, either as input to be processed
+    or as output of a streaming Dataflow job.
+
+    Attributes:
+        streaming_stage_location (google.cloud.dataflow_v1beta3.types.StreamingStageLocation):
+            The stream is part of another computation
+            within the current streaming Dataflow job.
+        pubsub_location (google.cloud.dataflow_v1beta3.types.PubsubLocation):
+            The stream is a pubsub stream.
+        side_input_location (google.cloud.dataflow_v1beta3.types.StreamingSideInputLocation):
+            The stream is a streaming side input.
+        custom_source_location (google.cloud.dataflow_v1beta3.types.CustomSourceLocation):
+            The stream is a custom source.
+    """
+
+    streaming_stage_location = proto.Field(
+        proto.MESSAGE, number=1, oneof="location", message="StreamingStageLocation",
+    )
+    pubsub_location = proto.Field(
+        proto.MESSAGE, number=2, oneof="location", message="PubsubLocation",
+    )
+    side_input_location = proto.Field(
+        proto.MESSAGE, number=3, oneof="location", message="StreamingSideInputLocation",
+    )
+    custom_source_location = proto.Field(
+        proto.MESSAGE, number=4, oneof="location", message="CustomSourceLocation",
+    )
+
+
+class StateFamilyConfig(proto.Message):
+    r"""State family configuration.
+    Attributes:
+        state_family (str):
+            The state family value.
+        is_read (bool):
+            If true, this family corresponds to a read
+            operation.
+    """
+
+    state_family = proto.Field(proto.STRING, number=1,)
+    is_read = proto.Field(proto.BOOL, number=2,)
+
+
+class ComputationTopology(proto.Message):
+    r"""All configuration data for a particular Computation.
+    Attributes:
+        system_stage_name (str):
+            The system stage name.
+        computation_id (str):
+            The ID of the computation.
+        key_ranges (Sequence[google.cloud.dataflow_v1beta3.types.KeyRangeLocation]):
+            The key ranges processed by the computation.
+        inputs (Sequence[google.cloud.dataflow_v1beta3.types.StreamLocation]):
+            The inputs to the computation.
+        outputs (Sequence[google.cloud.dataflow_v1beta3.types.StreamLocation]):
+            The outputs from the computation.
+        state_families (Sequence[google.cloud.dataflow_v1beta3.types.StateFamilyConfig]):
+            The state family values.
+    """
+
+    system_stage_name = proto.Field(proto.STRING, number=1,)
+    computation_id = proto.Field(proto.STRING, number=5,)
+    key_ranges = proto.RepeatedField(
+        proto.MESSAGE, number=2, message="KeyRangeLocation",
+    )
+    inputs = proto.RepeatedField(proto.MESSAGE, number=3, message="StreamLocation",)
+    outputs = proto.RepeatedField(proto.MESSAGE, number=4, message="StreamLocation",)
+    state_families = proto.RepeatedField(
+        proto.MESSAGE, number=7, message="StateFamilyConfig",
+    )
+
+
+class KeyRangeLocation(proto.Message):
+    r"""Location information for a specific key-range of a sharded
+    computation. Currently we only support UTF-8 character splits to
+    simplify encoding into JSON.
+
+    Attributes:
+        start (str):
+            The start (inclusive) of the key range.
+        end (str):
+            The end (exclusive) of the key range.
+        delivery_endpoint (str):
+            The physical location of this range
+            assignment to be used for streaming computation
+            cross-worker message delivery.
+        data_disk (str):
+            The name of the data disk where data for this
+            range is stored. This name is local to the
+            Google Cloud Platform project and uniquely
+            identifies the disk within that project, for
+            example
+            "myproject-1014-104817-4c2-harness-0-disk-1".
+        deprecated_persistent_directory (str):
+            DEPRECATED. The location of the persistent
+            state for this range, as a persistent directory
+            in the worker local filesystem.
+    """
+
+    start = proto.Field(proto.STRING, number=1,)
+    end = proto.Field(proto.STRING, number=2,)
+    delivery_endpoint = proto.Field(proto.STRING, number=3,)
+    data_disk = proto.Field(proto.STRING, number=5,)
+    deprecated_persistent_directory = proto.Field(proto.STRING, number=4,)
+
+
+class MountedDataDisk(proto.Message):
+    r"""Describes mounted data disk.
+    Attributes:
+        data_disk (str):
+            The name of the data disk.
+            This name is local to the Google Cloud Platform
+            project and uniquely identifies the disk within
+            that project, for example
+            "myproject-1014-104817-4c2-harness-0-disk-1".
+    """
+
+    data_disk = proto.Field(proto.STRING, number=1,)
+
+
+class DataDiskAssignment(proto.Message):
+    r"""Data disk assignment for a given VM instance.
+    Attributes:
+        vm_instance (str):
+            VM instance name the data disks mounted to,
+            for example
+            "myproject-1014-104817-4c2-harness-0".
+        data_disks (Sequence[str]):
+            Mounted data disks. The order is important: a
+            data disk's 0-based index in this list defines
+            which persistent directory the disk is mounted
+            to, for example the list of {
+            "myproject-1014-104817-4c2-harness-0-disk-0" },
+            { "myproject-1014-104817-4c2-harness-0-disk-1"
+            }.
+    """
+
+    vm_instance = proto.Field(proto.STRING, number=1,)
+    data_disks = proto.RepeatedField(proto.STRING, number=2,)
+
+
+class KeyRangeDataDiskAssignment(proto.Message):
+    r"""Data disk assignment information for a specific key-range of
+    a sharded computation.
+    Currently we only support UTF-8 character splits to simplify
+    encoding into JSON.
+
+    Attributes:
+        start (str):
+            The start (inclusive) of the key range.
+ end (str): + The end (exclusive) of the key range. + data_disk (str): + The name of the data disk where data for this + range is stored. This name is local to the + Google Cloud Platform project and uniquely + identifies the disk within that project, for + example + "myproject-1014-104817-4c2-harness-0-disk-1". + """ + + start = proto.Field(proto.STRING, number=1,) + end = proto.Field(proto.STRING, number=2,) + data_disk = proto.Field(proto.STRING, number=3,) + + +class StreamingComputationRanges(proto.Message): + r"""Describes full or partial data disk assignment information of + the computation ranges. + + Attributes: + computation_id (str): + The ID of the computation. + range_assignments (Sequence[google.cloud.dataflow_v1beta3.types.KeyRangeDataDiskAssignment]): + Data disk assignments for ranges from this + computation. + """ + + computation_id = proto.Field(proto.STRING, number=1,) + range_assignments = proto.RepeatedField( + proto.MESSAGE, number=2, message="KeyRangeDataDiskAssignment", + ) + + +class StreamingApplianceSnapshotConfig(proto.Message): + r"""Streaming appliance snapshot configuration. + Attributes: + snapshot_id (str): + If set, indicates the snapshot id for the + snapshot being performed. + import_state_endpoint (str): + Indicates which endpoint is used to import + appliance state. + """ + + snapshot_id = proto.Field(proto.STRING, number=1,) + import_state_endpoint = proto.Field(proto.STRING, number=2,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/templates.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/templates.py new file mode 100644 index 000000000000..c8b4e623ffe0 --- /dev/null +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/types/templates.py @@ -0,0 +1,676 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.dataflow_v1beta3.types import environment as gd_environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.dataflow.v1beta3", + manifest={ + "ParameterType", + "LaunchFlexTemplateResponse", + "ContainerSpec", + "LaunchFlexTemplateParameter", + "FlexTemplateRuntimeEnvironment", + "LaunchFlexTemplateRequest", + "RuntimeEnvironment", + "ParameterMetadata", + "TemplateMetadata", + "SDKInfo", + "RuntimeMetadata", + "CreateJobFromTemplateRequest", + "GetTemplateRequest", + "GetTemplateResponse", + "LaunchTemplateParameters", + "LaunchTemplateRequest", + "LaunchTemplateResponse", + "InvalidTemplateParameters", + "DynamicTemplateLaunchParams", + }, +) + + +class ParameterType(proto.Enum): + r"""ParameterType specifies what kind of input we need for this + parameter. 
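+
+    For example, a template author might tag a parameter as a Pub/Sub
+    topic so consoles can render a matching input picker (an
+    illustrative sketch; the field values are hypothetical)::
+
+        from google.cloud import dataflow_v1beta3
+
+        metadata = dataflow_v1beta3.ParameterMetadata(
+            name="inputTopic",                         # hypothetical
+            label="Input Pub/Sub topic",               # hypothetical
+            help_text="Topic to read messages from.",  # hypothetical
+            param_type=dataflow_v1beta3.ParameterType.PUBSUB_TOPIC,
+        )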
+ """ + DEFAULT = 0 + TEXT = 1 + GCS_READ_BUCKET = 2 + GCS_WRITE_BUCKET = 3 + GCS_READ_FILE = 4 + GCS_WRITE_FILE = 5 + GCS_READ_FOLDER = 6 + GCS_WRITE_FOLDER = 7 + PUBSUB_TOPIC = 8 + PUBSUB_SUBSCRIPTION = 9 + + +class LaunchFlexTemplateResponse(proto.Message): + r"""Response to the request to launch a job from Flex Template. + Attributes: + job (google.cloud.dataflow_v1beta3.types.Job): + The job that was launched, if the request was + not a dry run and the job was successfully + launched. + """ + + job = proto.Field(proto.MESSAGE, number=1, message=jobs.Job,) + + +class ContainerSpec(proto.Message): + r"""Container Spec. + Attributes: + image (str): + Name of the docker container image. E.g., + gcr.io/project/some-image + metadata (google.cloud.dataflow_v1beta3.types.TemplateMetadata): + Metadata describing a template including + description and validation rules. + sdk_info (google.cloud.dataflow_v1beta3.types.SDKInfo): + Required. SDK info of the Flex Template. + default_environment (google.cloud.dataflow_v1beta3.types.FlexTemplateRuntimeEnvironment): + Default runtime environment for the job. + """ + + image = proto.Field(proto.STRING, number=1,) + metadata = proto.Field(proto.MESSAGE, number=2, message="TemplateMetadata",) + sdk_info = proto.Field(proto.MESSAGE, number=3, message="SDKInfo",) + default_environment = proto.Field( + proto.MESSAGE, number=4, message="FlexTemplateRuntimeEnvironment", + ) + + +class LaunchFlexTemplateParameter(proto.Message): + r"""Launch FlexTemplate Parameter. + Attributes: + job_name (str): + Required. The job name to use for the created + job. For update job request, job name should be + same as the existing running job. + container_spec (google.cloud.dataflow_v1beta3.types.ContainerSpec): + Spec about the container image to launch. + container_spec_gcs_path (str): + Cloud Storage path to a file with json + serialized ContainerSpec as content. + parameters (Sequence[google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateParameter.ParametersEntry]): + The parameters for FlexTemplate. Ex. {"num_workers":"5"} + launch_options (Sequence[google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateParameter.LaunchOptionsEntry]): + Launch options for this flex template job. + This is a common set of options across languages + and templates. This should not be used to pass + job parameters. + environment (google.cloud.dataflow_v1beta3.types.FlexTemplateRuntimeEnvironment): + The runtime environment for the FlexTemplate + job + update (bool): + Set this to true if you are sending a request + to update a running streaming job. When set, the + job name should be the same as the running job. + transform_name_mappings (Sequence[google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateParameter.TransformNameMappingsEntry]): + Use this to pass transform_name_mappings for streaming + update jobs. 
Ex:{"oldTransformName":"newTransformName",...}' + """ + + job_name = proto.Field(proto.STRING, number=1,) + container_spec = proto.Field( + proto.MESSAGE, number=4, oneof="template", message="ContainerSpec", + ) + container_spec_gcs_path = proto.Field(proto.STRING, number=5, oneof="template",) + parameters = proto.MapField(proto.STRING, proto.STRING, number=2,) + launch_options = proto.MapField(proto.STRING, proto.STRING, number=6,) + environment = proto.Field( + proto.MESSAGE, number=7, message="FlexTemplateRuntimeEnvironment", + ) + update = proto.Field(proto.BOOL, number=8,) + transform_name_mappings = proto.MapField(proto.STRING, proto.STRING, number=9,) + + +class FlexTemplateRuntimeEnvironment(proto.Message): + r"""The environment values to be set at runtime for flex + template. + + Attributes: + num_workers (int): + The initial number of Google Compute Engine + instances for the job. + max_workers (int): + The maximum number of Google Compute Engine + instances to be made available to your pipeline + during execution, from 1 to 1000. + zone (str): + The Compute Engine `availability + zone `__ + for launching worker instances to run your pipeline. In the + future, worker_zone will take precedence. + service_account_email (str): + The email address of the service account to + run the job as. + temp_location (str): + The Cloud Storage path to use for temporary files. Must be a + valid Cloud Storage URL, beginning with ``gs://``. + machine_type (str): + The machine type to use for the job. Defaults + to the value from the template if not specified. + additional_experiments (Sequence[str]): + Additional experiment flags for the job. + network (str): + Network to which VMs will be assigned. If + empty or unspecified, the service will use the + network "default". + subnetwork (str): + Subnetwork to which VMs will be assigned, if desired. You + can specify a subnetwork using either a complete URL or an + abbreviated path. Expected to be of the form + "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" + or "regions/REGION/subnetworks/SUBNETWORK". If the + subnetwork is located in a Shared VPC network, you must use + the complete URL. + additional_user_labels (Sequence[google.cloud.dataflow_v1beta3.types.FlexTemplateRuntimeEnvironment.AdditionalUserLabelsEntry]): + Additional user labels to be specified for the job. Keys and + values must follow the restrictions specified in the + `labeling + restrictions `__ + page. An object containing a list of "key": value pairs. + Example: { "name": "wrench", "mass": "1kg", "count": "3" }. + kms_key_name (str): + Name for the Cloud KMS key for the job. + Key format is: + projects//locations//keyRings//cryptoKeys/ + ip_configuration (google.cloud.dataflow_v1beta3.types.WorkerIPAddressConfiguration): + Configuration for VM IPs. + worker_region (str): + The Compute Engine region + (https://cloud.google.com/compute/docs/regions-zones/regions-zones) + in which worker processing should occur, e.g. "us-west1". + Mutually exclusive with worker_zone. If neither + worker_region nor worker_zone is specified, default to the + control plane's region. + worker_zone (str): + The Compute Engine zone + (https://cloud.google.com/compute/docs/regions-zones/regions-zones) + in which worker processing should occur, e.g. "us-west1-a". + Mutually exclusive with worker_region. If neither + worker_region nor worker_zone is specified, a zone in the + control plane's region is chosen based on available + capacity. 
If both ``worker_zone`` and ``zone`` are set,
+            ``worker_zone`` takes precedence.
+        enable_streaming_engine (bool):
+            Whether to enable Streaming Engine for the
+            job.
+        flexrs_goal (google.cloud.dataflow_v1beta3.types.FlexResourceSchedulingGoal):
+            Set FlexRS goal for the job.
+            https://cloud.google.com/dataflow/docs/guides/flexrs
+        staging_location (str):
+            The Cloud Storage path for staging local files. Must be a
+            valid Cloud Storage URL, beginning with ``gs://``.
+        sdk_container_image (str):
+            Docker registry location of container image
+            to use for the worker harness. Default is the
+            container for the version of the SDK. Note this
+            field is only valid for portable pipelines.
+    """
+
+    num_workers = proto.Field(proto.INT32, number=1,)
+    max_workers = proto.Field(proto.INT32, number=2,)
+    zone = proto.Field(proto.STRING, number=3,)
+    service_account_email = proto.Field(proto.STRING, number=4,)
+    temp_location = proto.Field(proto.STRING, number=5,)
+    machine_type = proto.Field(proto.STRING, number=6,)
+    additional_experiments = proto.RepeatedField(proto.STRING, number=7,)
+    network = proto.Field(proto.STRING, number=8,)
+    subnetwork = proto.Field(proto.STRING, number=9,)
+    additional_user_labels = proto.MapField(proto.STRING, proto.STRING, number=10,)
+    kms_key_name = proto.Field(proto.STRING, number=11,)
+    ip_configuration = proto.Field(
+        proto.ENUM, number=12, enum=gd_environment.WorkerIPAddressConfiguration,
+    )
+    worker_region = proto.Field(proto.STRING, number=13,)
+    worker_zone = proto.Field(proto.STRING, number=14,)
+    enable_streaming_engine = proto.Field(proto.BOOL, number=15,)
+    flexrs_goal = proto.Field(
+        proto.ENUM, number=16, enum=gd_environment.FlexResourceSchedulingGoal,
+    )
+    staging_location = proto.Field(proto.STRING, number=17,)
+    sdk_container_image = proto.Field(proto.STRING, number=18,)
+
+
+class LaunchFlexTemplateRequest(proto.Message):
+    r"""A request to launch a Cloud Dataflow job from a FlexTemplate.
+    Attributes:
+        project_id (str):
+            Required. The ID of the Cloud Platform
+            project that the job belongs to.
+        launch_parameter (google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateParameter):
+            Required. Parameter to launch a job from a
+            Flex Template.
+        location (str):
+            Required. The [regional endpoint]
+            (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints)
+            to which to direct the request. E.g., us-central1, us-west1.
+        validate_only (bool):
+            If true, the request is validated but not
+            actually executed. Defaults to false.
+    """
+
+    project_id = proto.Field(proto.STRING, number=1,)
+    launch_parameter = proto.Field(
+        proto.MESSAGE, number=2, message="LaunchFlexTemplateParameter",
+    )
+    location = proto.Field(proto.STRING, number=3,)
+    validate_only = proto.Field(proto.BOOL, number=4,)
+
+
+class RuntimeEnvironment(proto.Message):
+    r"""The environment values to set at runtime.
+    Attributes:
+        num_workers (int):
+            The initial number of Google Compute Engine
+            instances for the job.
+        max_workers (int):
+            The maximum number of Google Compute Engine
+            instances to be made available to your pipeline
+            during execution, from 1 to 1000.
+        zone (str):
+            The Compute Engine `availability zone
+            <https://cloud.google.com/compute/docs/regions-zones/regions-zones>`__
+            for launching worker instances to run your pipeline. In the
+            future, worker_zone will take precedence.
+        service_account_email (str):
+            The email address of the service account to
+            run the job as.
+        temp_location (str):
+            The Cloud Storage path to use for temporary files. Must be a
+            valid Cloud Storage URL, beginning with ``gs://``.
+        bypass_temp_dir_validation (bool):
+            Whether to bypass the safety checks for the
+            job's temporary directory. Use with caution.
+        machine_type (str):
+            The machine type to use for the job. Defaults
+            to the value from the template if not specified.
+        additional_experiments (Sequence[str]):
+            Additional experiment flags for the job.
+        network (str):
+            Network to which VMs will be assigned. If
+            empty or unspecified, the service will use the
+            network "default".
+        subnetwork (str):
+            Subnetwork to which VMs will be assigned, if desired. You
+            can specify a subnetwork using either a complete URL or an
+            abbreviated path. Expected to be of the form
+            "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK"
+            or "regions/REGION/subnetworks/SUBNETWORK". If the
+            subnetwork is located in a Shared VPC network, you must use
+            the complete URL.
+        additional_user_labels (Sequence[google.cloud.dataflow_v1beta3.types.RuntimeEnvironment.AdditionalUserLabelsEntry]):
+            Additional user labels to be specified for the job. Keys and
+            values should follow the restrictions specified in the
+            `labeling restrictions
+            <https://cloud.google.com/compute/docs/labeling-resources>`__
+            page. An object containing a list of "key": value pairs.
+            Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
+        kms_key_name (str):
+            Name for the Cloud KMS key for the job.
+            Key format is:
+            projects/<project>/locations/<location>/keyRings/<keyring>/cryptoKeys/<key>
+        ip_configuration (google.cloud.dataflow_v1beta3.types.WorkerIPAddressConfiguration):
+            Configuration for VM IPs.
+        worker_region (str):
+            The Compute Engine region
+            (https://cloud.google.com/compute/docs/regions-zones/regions-zones)
+            in which worker processing should occur, e.g. "us-west1".
+            Mutually exclusive with worker_zone. If neither
+            worker_region nor worker_zone is specified, default to the
+            control plane's region.
+        worker_zone (str):
+            The Compute Engine zone
+            (https://cloud.google.com/compute/docs/regions-zones/regions-zones)
+            in which worker processing should occur, e.g. "us-west1-a".
+            Mutually exclusive with worker_region. If neither
+            worker_region nor worker_zone is specified, a zone in the
+            control plane's region is chosen based on available
+            capacity. If both ``worker_zone`` and ``zone`` are set,
+            ``worker_zone`` takes precedence.
+        enable_streaming_engine (bool):
+            Whether to enable Streaming Engine for the
+            job.
+    """
+
+    num_workers = proto.Field(proto.INT32, number=11,)
+    max_workers = proto.Field(proto.INT32, number=1,)
+    zone = proto.Field(proto.STRING, number=2,)
+    service_account_email = proto.Field(proto.STRING, number=3,)
+    temp_location = proto.Field(proto.STRING, number=4,)
+    bypass_temp_dir_validation = proto.Field(proto.BOOL, number=5,)
+    machine_type = proto.Field(proto.STRING, number=6,)
+    additional_experiments = proto.RepeatedField(proto.STRING, number=7,)
+    network = proto.Field(proto.STRING, number=8,)
+    subnetwork = proto.Field(proto.STRING, number=9,)
+    additional_user_labels = proto.MapField(proto.STRING, proto.STRING, number=10,)
+    kms_key_name = proto.Field(proto.STRING, number=12,)
+    ip_configuration = proto.Field(
+        proto.ENUM, number=14, enum=gd_environment.WorkerIPAddressConfiguration,
+    )
+    worker_region = proto.Field(proto.STRING, number=15,)
+    worker_zone = proto.Field(proto.STRING, number=16,)
+    enable_streaming_engine = proto.Field(proto.BOOL, number=17,)
+
+
+class ParameterMetadata(proto.Message):
+    r"""Metadata for a specific parameter.
+    Attributes:
+        name (str):
+            Required. The name of the parameter.
+        label (str):
+            Required.
The label to display for the + parameter. + help_text (str): + Required. The help text to display for the + parameter. + is_optional (bool): + Optional. Whether the parameter is optional. + Defaults to false. + regexes (Sequence[str]): + Optional. Regexes that the parameter must + match. + param_type (google.cloud.dataflow_v1beta3.types.ParameterType): + Optional. The type of the parameter. + Used for selecting input picker. + custom_metadata (Sequence[google.cloud.dataflow_v1beta3.types.ParameterMetadata.CustomMetadataEntry]): + Optional. Additional metadata for describing + this parameter. + """ + + name = proto.Field(proto.STRING, number=1,) + label = proto.Field(proto.STRING, number=2,) + help_text = proto.Field(proto.STRING, number=3,) + is_optional = proto.Field(proto.BOOL, number=4,) + regexes = proto.RepeatedField(proto.STRING, number=5,) + param_type = proto.Field(proto.ENUM, number=6, enum="ParameterType",) + custom_metadata = proto.MapField(proto.STRING, proto.STRING, number=7,) + + +class TemplateMetadata(proto.Message): + r"""Metadata describing a template. + Attributes: + name (str): + Required. The name of the template. + description (str): + Optional. A description of the template. + parameters (Sequence[google.cloud.dataflow_v1beta3.types.ParameterMetadata]): + The parameters for the template. + """ + + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + parameters = proto.RepeatedField( + proto.MESSAGE, number=3, message="ParameterMetadata", + ) + + +class SDKInfo(proto.Message): + r"""SDK Information. + Attributes: + language (google.cloud.dataflow_v1beta3.types.SDKInfo.Language): + Required. The SDK Language. + version (str): + Optional. The SDK version. + """ + + class Language(proto.Enum): + r"""SDK Language.""" + UNKNOWN = 0 + JAVA = 1 + PYTHON = 2 + + language = proto.Field(proto.ENUM, number=1, enum=Language,) + version = proto.Field(proto.STRING, number=2,) + + +class RuntimeMetadata(proto.Message): + r"""RuntimeMetadata describing a runtime environment. + Attributes: + sdk_info (google.cloud.dataflow_v1beta3.types.SDKInfo): + SDK Info for the template. + parameters (Sequence[google.cloud.dataflow_v1beta3.types.ParameterMetadata]): + The parameters for the template. + """ + + sdk_info = proto.Field(proto.MESSAGE, number=1, message="SDKInfo",) + parameters = proto.RepeatedField( + proto.MESSAGE, number=2, message="ParameterMetadata", + ) + + +class CreateJobFromTemplateRequest(proto.Message): + r"""A request to create a Cloud Dataflow job from a template. + Attributes: + project_id (str): + Required. The ID of the Cloud Platform + project that the job belongs to. + job_name (str): + Required. The job name to use for the created + job. + gcs_path (str): + Required. A Cloud Storage path to the template from which to + create the job. Must be a valid Cloud Storage URL, beginning + with ``gs://``. + parameters (Sequence[google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest.ParametersEntry]): + The runtime parameters to pass to the job. + environment (google.cloud.dataflow_v1beta3.types.RuntimeEnvironment): + The runtime environment for the job. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + to which to direct the request. 
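+
+    A minimal usage sketch, assuming the generated
+    ``TemplatesServiceClient`` (project, bucket, and parameter values
+    are hypothetical)::
+
+        from google.cloud import dataflow_v1beta3
+
+        client = dataflow_v1beta3.TemplatesServiceClient()
+        job = client.create_job_from_template(
+            request=dataflow_v1beta3.CreateJobFromTemplateRequest(
+                project_id="my-project",             # hypothetical
+                job_name="wordcount-from-template",  # hypothetical
+                gcs_path="gs://my-bucket/templates/my-template",  # hypothetical
+                parameters={"inputFile": "gs://my-bucket/input.txt"},
+                location="us-central1",
+            )
+        )
+        print(job.id)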
+ """ + + project_id = proto.Field(proto.STRING, number=1,) + job_name = proto.Field(proto.STRING, number=4,) + gcs_path = proto.Field(proto.STRING, number=2, oneof="template",) + parameters = proto.MapField(proto.STRING, proto.STRING, number=3,) + environment = proto.Field(proto.MESSAGE, number=5, message="RuntimeEnvironment",) + location = proto.Field(proto.STRING, number=6,) + + +class GetTemplateRequest(proto.Message): + r"""A request to retrieve a Cloud Dataflow job template. + Attributes: + project_id (str): + Required. The ID of the Cloud Platform + project that the job belongs to. + gcs_path (str): + Required. A Cloud Storage path to the + template from which to create the job. + Must be valid Cloud Storage URL, beginning with + 'gs://'. + view (google.cloud.dataflow_v1beta3.types.GetTemplateRequest.TemplateView): + The view to retrieve. Defaults to METADATA_ONLY. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + to which to direct the request. + """ + + class TemplateView(proto.Enum): + r"""The various views of a template that may be retrieved.""" + METADATA_ONLY = 0 + + project_id = proto.Field(proto.STRING, number=1,) + gcs_path = proto.Field(proto.STRING, number=2, oneof="template",) + view = proto.Field(proto.ENUM, number=3, enum=TemplateView,) + location = proto.Field(proto.STRING, number=4,) + + +class GetTemplateResponse(proto.Message): + r"""The response to a GetTemplate request. + Attributes: + status (google.rpc.status_pb2.Status): + The status of the get template request. Any problems with + the request will be indicated in the error_details. + metadata (google.cloud.dataflow_v1beta3.types.TemplateMetadata): + The template metadata describing the template + name, available parameters, etc. + template_type (google.cloud.dataflow_v1beta3.types.GetTemplateResponse.TemplateType): + Template Type. + runtime_metadata (google.cloud.dataflow_v1beta3.types.RuntimeMetadata): + Describes the runtime metadata with SDKInfo + and available parameters. + """ + + class TemplateType(proto.Enum): + r"""Template Type.""" + UNKNOWN = 0 + LEGACY = 1 + FLEX = 2 + + status = proto.Field(proto.MESSAGE, number=1, message=status_pb2.Status,) + metadata = proto.Field(proto.MESSAGE, number=2, message="TemplateMetadata",) + template_type = proto.Field(proto.ENUM, number=3, enum=TemplateType,) + runtime_metadata = proto.Field(proto.MESSAGE, number=4, message="RuntimeMetadata",) + + +class LaunchTemplateParameters(proto.Message): + r"""Parameters to provide to the template being launched. + Attributes: + job_name (str): + Required. The job name to use for the created + job. + parameters (Sequence[google.cloud.dataflow_v1beta3.types.LaunchTemplateParameters.ParametersEntry]): + The runtime parameters to pass to the job. + environment (google.cloud.dataflow_v1beta3.types.RuntimeEnvironment): + The runtime environment for the job. + update (bool): + If set, replace the existing pipeline with + the name specified by jobName with this + pipeline, preserving state. + transform_name_mapping (Sequence[google.cloud.dataflow_v1beta3.types.LaunchTemplateParameters.TransformNameMappingEntry]): + Only applicable when updating a pipeline. Map + of transform name prefixes of the job to be + replaced to the corresponding name prefixes of + the new job. 
+ """ + + job_name = proto.Field(proto.STRING, number=1,) + parameters = proto.MapField(proto.STRING, proto.STRING, number=2,) + environment = proto.Field(proto.MESSAGE, number=3, message="RuntimeEnvironment",) + update = proto.Field(proto.BOOL, number=4,) + transform_name_mapping = proto.MapField(proto.STRING, proto.STRING, number=5,) + + +class LaunchTemplateRequest(proto.Message): + r"""A request to launch a template. + Attributes: + project_id (str): + Required. The ID of the Cloud Platform + project that the job belongs to. + validate_only (bool): + If true, the request is validated but not + actually executed. Defaults to false. + gcs_path (str): + A Cloud Storage path to the template from + which to create the job. + Must be valid Cloud Storage URL, beginning with + 'gs://'. + dynamic_template (google.cloud.dataflow_v1beta3.types.DynamicTemplateLaunchParams): + Params for launching a dynamic template. + launch_parameters (google.cloud.dataflow_v1beta3.types.LaunchTemplateParameters): + The parameters of the template to launch. + This should be part of the body of the POST + request. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + to which to direct the request. + """ + + project_id = proto.Field(proto.STRING, number=1,) + validate_only = proto.Field(proto.BOOL, number=2,) + gcs_path = proto.Field(proto.STRING, number=3, oneof="template",) + dynamic_template = proto.Field( + proto.MESSAGE, + number=6, + oneof="template", + message="DynamicTemplateLaunchParams", + ) + launch_parameters = proto.Field( + proto.MESSAGE, number=4, message="LaunchTemplateParameters", + ) + location = proto.Field(proto.STRING, number=5,) + + +class LaunchTemplateResponse(proto.Message): + r"""Response to the request to launch a template. + Attributes: + job (google.cloud.dataflow_v1beta3.types.Job): + The job that was launched, if the request was + not a dry run and the job was successfully + launched. + """ + + job = proto.Field(proto.MESSAGE, number=1, message=jobs.Job,) + + +class InvalidTemplateParameters(proto.Message): + r"""Used in the error_details field of a google.rpc.Status message, this + indicates problems with the template parameter. + + Attributes: + parameter_violations (Sequence[google.cloud.dataflow_v1beta3.types.InvalidTemplateParameters.ParameterViolation]): + Describes all parameter violations in a + template request. + """ + + class ParameterViolation(proto.Message): + r"""A specific template-parameter violation. + Attributes: + parameter (str): + The parameter that failed to validate. + description (str): + A description of why the parameter failed to + validate. + """ + + parameter = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + + parameter_violations = proto.RepeatedField( + proto.MESSAGE, number=1, message=ParameterViolation, + ) + + +class DynamicTemplateLaunchParams(proto.Message): + r"""Params which should be passed when launching a dynamic + template. + + Attributes: + gcs_path (str): + Path to dynamic template spec file on Cloud + Storage. The file must be a Json serialized + DynamicTemplateFieSpec object. + staging_location (str): + Cloud Storage path for staging dependencies. Must be a valid + Cloud Storage URL, beginning with ``gs://``. 
+ """ + + gcs_path = proto.Field(proto.STRING, number=1,) + staging_location = proto.Field(proto.STRING, number=2,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataflow-client/setup.py b/packages/google-cloud-dataflow-client/setup.py index 97a91c37cb72..1419e9edc31a 100644 --- a/packages/google-cloud-dataflow-client/setup.py +++ b/packages/google-cloud-dataflow-client/setup.py @@ -71,9 +71,9 @@ ], platforms="Posix; MacOS X; Windows", packages=packages, - python_requires=">=3.6", + python_requires=">=3.6", namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, zip_safe=False, -) \ No newline at end of file +) diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py index c79c9449993f..b3eab78001df 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py @@ -31,19 +31,19 @@ from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.dataflow_v1beta3.services.flex_templates_service import ( +from google.cloud.dataflow_v1beta3.services.flex_templates_service import ( FlexTemplatesServiceAsyncClient, ) -from google.dataflow_v1beta3.services.flex_templates_service import ( +from google.cloud.dataflow_v1beta3.services.flex_templates_service import ( FlexTemplatesServiceClient, ) -from google.dataflow_v1beta3.services.flex_templates_service import transports -from google.dataflow_v1beta3.services.flex_templates_service.transports.base import ( +from google.cloud.dataflow_v1beta3.services.flex_templates_service import transports +from google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.base import ( _GOOGLE_AUTH_VERSION, ) -from google.dataflow_v1beta3.types import environment -from google.dataflow_v1beta3.types import jobs -from google.dataflow_v1beta3.types import templates +from google.cloud.dataflow_v1beta3.types import environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates from google.oauth2 import service_account import google.auth @@ -482,7 +482,7 @@ def test_flex_templates_service_client_client_options_credentials_file( def test_flex_templates_service_client_client_options_from_dict(): with mock.patch( - "google.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceGrpcTransport.__init__" + "google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceGrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None client = FlexTemplatesServiceClient( @@ -673,7 +673,7 @@ def test_flex_templates_service_base_transport_error(): def test_flex_templates_service_base_transport(): # Instantiate the base transport. 
with mock.patch( - "google.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport.__init__" + "google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport.__init__" ) as Transport: Transport.return_value = None transport = transports.FlexTemplatesServiceTransport( @@ -694,7 +694,7 @@ def test_flex_templates_service_base_transport_with_credentials_file(): with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -720,7 +720,7 @@ def test_flex_templates_service_base_transport_with_credentials_file_old_google_ with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -742,7 +742,7 @@ def test_flex_templates_service_base_transport_with_credentials_file_old_google_ def test_flex_templates_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py index ec51e7a3e8b5..ecdc5adfb16b 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py @@ -31,16 +31,16 @@ from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.dataflow_v1beta3.services.jobs_v1_beta3 import JobsV1Beta3AsyncClient -from google.dataflow_v1beta3.services.jobs_v1_beta3 import JobsV1Beta3Client -from google.dataflow_v1beta3.services.jobs_v1_beta3 import pagers -from google.dataflow_v1beta3.services.jobs_v1_beta3 import transports -from google.dataflow_v1beta3.services.jobs_v1_beta3.transports.base import ( +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import JobsV1Beta3AsyncClient +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import JobsV1Beta3Client +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import transports +from 
google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.base import ( _GOOGLE_AUTH_VERSION, ) -from google.dataflow_v1beta3.types import environment -from google.dataflow_v1beta3.types import jobs -from google.dataflow_v1beta3.types import snapshots +from google.cloud.dataflow_v1beta3.types import environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import snapshots from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -447,7 +447,7 @@ def test_jobs_v1_beta3_client_client_options_credentials_file( def test_jobs_v1_beta3_client_client_options_from_dict(): with mock.patch( - "google.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3GrpcTransport.__init__" + "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3GrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None client = JobsV1Beta3Client(client_options={"api_endpoint": "squid.clam.whelk"}) @@ -1521,7 +1521,7 @@ def test_jobs_v1_beta3_base_transport_error(): def test_jobs_v1_beta3_base_transport(): # Instantiate the base transport. with mock.patch( - "google.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport.__init__" + "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport.__init__" ) as Transport: Transport.return_value = None transport = transports.JobsV1Beta3Transport( @@ -1550,7 +1550,7 @@ def test_jobs_v1_beta3_base_transport_with_credentials_file(): with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -1576,7 +1576,7 @@ def test_jobs_v1_beta3_base_transport_with_credentials_file_old_google_auth(): with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -1598,7 +1598,7 @@ def test_jobs_v1_beta3_base_transport_with_credentials_file_old_google_auth(): def test_jobs_v1_beta3_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py index 268f4ed3c618..fc41c3593afa 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py @@ -31,16 +31,18 @@ from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.dataflow_v1beta3.services.messages_v1_beta3 import ( +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import ( MessagesV1Beta3AsyncClient, ) -from google.dataflow_v1beta3.services.messages_v1_beta3 import MessagesV1Beta3Client -from google.dataflow_v1beta3.services.messages_v1_beta3 import pagers -from google.dataflow_v1beta3.services.messages_v1_beta3 import transports -from google.dataflow_v1beta3.services.messages_v1_beta3.transports.base import ( +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import ( + MessagesV1Beta3Client, +) +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import transports +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.base import ( _GOOGLE_AUTH_VERSION, ) -from google.dataflow_v1beta3.types import messages +from google.cloud.dataflow_v1beta3.types import messages from google.oauth2 import service_account from google.protobuf import timestamp_pb2 # type: ignore import google.auth @@ -467,7 +469,7 @@ def test_messages_v1_beta3_client_client_options_credentials_file( def test_messages_v1_beta3_client_client_options_from_dict(): with mock.patch( - "google.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3GrpcTransport.__init__" + "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3GrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None client = MessagesV1Beta3Client( @@ -807,7 +809,7 @@ def test_messages_v1_beta3_base_transport_error(): def test_messages_v1_beta3_base_transport(): # Instantiate the base transport. 
with mock.patch( - "google.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport.__init__" + "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport.__init__" ) as Transport: Transport.return_value = None transport = transports.MessagesV1Beta3Transport( @@ -828,7 +830,7 @@ def test_messages_v1_beta3_base_transport_with_credentials_file(): with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -854,7 +856,7 @@ def test_messages_v1_beta3_base_transport_with_credentials_file_old_google_auth( with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -876,7 +878,7 @@ def test_messages_v1_beta3_base_transport_with_credentials_file_old_google_auth( def test_messages_v1_beta3_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py index 04bbc80727c5..7e089ce4a39e 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py @@ -31,14 +31,16 @@ from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.dataflow_v1beta3.services.metrics_v1_beta3 import MetricsV1Beta3AsyncClient -from google.dataflow_v1beta3.services.metrics_v1_beta3 import MetricsV1Beta3Client -from google.dataflow_v1beta3.services.metrics_v1_beta3 import pagers -from google.dataflow_v1beta3.services.metrics_v1_beta3 import transports -from google.dataflow_v1beta3.services.metrics_v1_beta3.transports.base import ( +from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import ( + MetricsV1Beta3AsyncClient, +) +from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import MetricsV1Beta3Client +from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import transports +from 
google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.base import ( _GOOGLE_AUTH_VERSION, ) -from google.dataflow_v1beta3.types import metrics +from google.cloud.dataflow_v1beta3.types import metrics from google.oauth2 import service_account from google.protobuf import timestamp_pb2 # type: ignore import google.auth @@ -455,7 +457,7 @@ def test_metrics_v1_beta3_client_client_options_credentials_file( def test_metrics_v1_beta3_client_client_options_from_dict(): with mock.patch( - "google.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3GrpcTransport.__init__" + "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3GrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None client = MetricsV1Beta3Client( @@ -1101,7 +1103,7 @@ def test_metrics_v1_beta3_base_transport_error(): def test_metrics_v1_beta3_base_transport(): # Instantiate the base transport. with mock.patch( - "google.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport.__init__" + "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport.__init__" ) as Transport: Transport.return_value = None transport = transports.MetricsV1Beta3Transport( @@ -1126,7 +1128,7 @@ def test_metrics_v1_beta3_base_transport_with_credentials_file(): with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -1152,7 +1154,7 @@ def test_metrics_v1_beta3_base_transport_with_credentials_file_old_google_auth() with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -1174,7 +1176,7 @@ def test_metrics_v1_beta3_base_transport_with_credentials_file_old_google_auth() def test_metrics_v1_beta3_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py index 0617acb7b71e..ed46088aa5a8 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py @@ -31,15 +31,17 @@ from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.dataflow_v1beta3.services.snapshots_v1_beta3 import ( +from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 import ( SnapshotsV1Beta3AsyncClient, ) -from google.dataflow_v1beta3.services.snapshots_v1_beta3 import SnapshotsV1Beta3Client -from google.dataflow_v1beta3.services.snapshots_v1_beta3 import transports -from google.dataflow_v1beta3.services.snapshots_v1_beta3.transports.base import ( +from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 import ( + SnapshotsV1Beta3Client, +) +from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 import transports +from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.base import ( _GOOGLE_AUTH_VERSION, ) -from google.dataflow_v1beta3.types import snapshots +from google.cloud.dataflow_v1beta3.types import snapshots from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -468,7 +470,7 @@ def test_snapshots_v1_beta3_client_client_options_credentials_file( def test_snapshots_v1_beta3_client_client_options_from_dict(): with mock.patch( - "google.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3GrpcTransport.__init__" + "google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3GrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None client = SnapshotsV1Beta3Client( @@ -839,7 +841,7 @@ def test_snapshots_v1_beta3_base_transport_error(): def test_snapshots_v1_beta3_base_transport(): # Instantiate the base transport. 
with mock.patch( - "google.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport.__init__" + "google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport.__init__" ) as Transport: Transport.return_value = None transport = transports.SnapshotsV1Beta3Transport( @@ -864,7 +866,7 @@ def test_snapshots_v1_beta3_base_transport_with_credentials_file(): with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -890,7 +892,7 @@ def test_snapshots_v1_beta3_base_transport_with_credentials_file_old_google_auth with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -912,7 +914,7 @@ def test_snapshots_v1_beta3_base_transport_with_credentials_file_old_google_auth def test_snapshots_v1_beta3_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py index dfb3b1e7c752..0680cb46b679 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py @@ -31,17 +31,19 @@ from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.dataflow_v1beta3.services.templates_service import ( +from google.cloud.dataflow_v1beta3.services.templates_service import ( TemplatesServiceAsyncClient, ) -from google.dataflow_v1beta3.services.templates_service import TemplatesServiceClient -from google.dataflow_v1beta3.services.templates_service import transports -from google.dataflow_v1beta3.services.templates_service.transports.base import ( +from google.cloud.dataflow_v1beta3.services.templates_service import ( + TemplatesServiceClient, +) +from google.cloud.dataflow_v1beta3.services.templates_service import transports +from google.cloud.dataflow_v1beta3.services.templates_service.transports.base import ( _GOOGLE_AUTH_VERSION, ) -from google.dataflow_v1beta3.types import environment -from google.dataflow_v1beta3.types 
import jobs -from google.dataflow_v1beta3.types import templates +from google.cloud.dataflow_v1beta3.types import environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates from google.oauth2 import service_account from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -470,7 +472,7 @@ def test_templates_service_client_client_options_credentials_file( def test_templates_service_client_client_options_from_dict(): with mock.patch( - "google.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceGrpcTransport.__init__" + "google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceGrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None client = TemplatesServiceClient( @@ -881,7 +883,7 @@ def test_templates_service_base_transport_error(): def test_templates_service_base_transport(): # Instantiate the base transport. with mock.patch( - "google.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport.__init__" + "google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport.__init__" ) as Transport: Transport.return_value = None transport = transports.TemplatesServiceTransport( @@ -906,7 +908,7 @@ def test_templates_service_base_transport_with_credentials_file(): with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -932,7 +934,7 @@ def test_templates_service_base_transport_with_credentials_file_old_google_auth( with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -954,7 +956,7 @@ def test_templates_service_base_transport_with_credentials_file_old_google_auth( def test_templates_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport._prep_wrapped_messages" + "google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None)
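
With the corrected ``google.cloud.dataflow_v1beta3`` import paths used
throughout this patch, launching a job from a classic template via the
TemplatesService goes roughly as in the following sketch. The project ID and
bucket are hypothetical placeholders, and ``launch_template`` is assumed to be
the standard GAPIC method name generated for the LaunchTemplate RPC; the
message fields match the types defined in templates.py above.

    from google.cloud.dataflow_v1beta3.services.templates_service import (
        TemplatesServiceClient,
    )
    from google.cloud.dataflow_v1beta3.types import templates

    def launch_wordcount_template() -> None:
        # The client resolves Application Default Credentials at construction.
        client = TemplatesServiceClient()

        # RuntimeEnvironment mirrors the message documented above; all
        # fields are optional and these values are illustrative only.
        environment = templates.RuntimeEnvironment(
            temp_location="gs://example-bucket/temp",  # hypothetical bucket
            max_workers=3,
        )

        request = templates.LaunchTemplateRequest(
            project_id="example-project",  # hypothetical project ID
            location="us-central1",
            # gcs_path and dynamic_template form a oneof; set exactly one.
            gcs_path="gs://dataflow-templates/latest/Word_Count",
            launch_parameters=templates.LaunchTemplateParameters(
                job_name="wordcount-from-template",
                parameters={
                    "inputFile": "gs://dataflow-samples/shakespeare/kinglear.txt",
                    "output": "gs://example-bucket/wordcount/output",
                },
                environment=environment,
            ),
        )

        response = client.launch_template(request=request)
        # LaunchTemplateResponse.job is left unset for validate_only requests.
        if response.job.id:
            print("Launched Dataflow job:", response.job.id)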
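
The template metadata messages defined in this patch (TemplateMetadata,
ParameterMetadata) can be inspected before launching. A short sketch, again
with placeholder names and assuming ``get_template`` is the generated method
for the GetTemplate RPC:

    from google.cloud.dataflow_v1beta3.services.templates_service import (
        TemplatesServiceClient,
    )
    from google.cloud.dataflow_v1beta3.types import templates

    client = TemplatesServiceClient()
    response = client.get_template(
        request=templates.GetTemplateRequest(
            project_id="example-project",  # hypothetical project ID
            location="us-central1",
            gcs_path="gs://dataflow-templates/latest/Word_Count",
            # view defaults to TemplateView.METADATA_ONLY
        )
    )
    # Each ParameterMetadata entry carries the name, help text, and
    # optionality documented in the message definition above.
    for param in response.metadata.parameters:
        kind = "optional" if param.is_optional else "required"
        print(f"{param.name} ({kind}): {param.help_text}")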