From be0ccc488dac22128be317ca40337d6b93af0906 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Mar 2022 13:08:53 -0800 Subject: [PATCH] feat: add PredictRequestResponseLoggingConfig to Endpoint in aiplatform v1 endpoint.proto (#1072) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add PredictRequestResponseLoggingConfig to Endpoint in aiplatform v1 endpoint.proto PiperOrigin-RevId: 433794371 Source-Link: https://github.com/googleapis/googleapis/commit/e0f0642a1dc4f365f79a6186c1a4976ae82aa7b1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/3afc3ec18e795c859231a2c0f8ef172a65056e0d Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiM2FmYzNlYzE4ZTc5NWM4NTkyMzFhMmMwZjhlZjE3MmE2NTA1NmUwZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com> --- google/cloud/aiplatform_v1/__init__.py | 2 + .../services/migration_service/client.py | 22 +++++----- google/cloud/aiplatform_v1/types/__init__.py | 2 + google/cloud/aiplatform_v1/types/endpoint.py | 41 ++++++++++++++++++- .../aiplatform_v1/types/model_monitoring.py | 2 +- .../aiplatform_v1/test_endpoint_service.py | 1 + .../aiplatform_v1/test_migration_service.py | 28 ++++++------- 7 files changed, 71 insertions(+), 27 deletions(-) diff --git a/google/cloud/aiplatform_v1/__init__.py b/google/cloud/aiplatform_v1/__init__.py index a0a75d7fd8..f66f2e43aa 100644 --- a/google/cloud/aiplatform_v1/__init__.py +++ b/google/cloud/aiplatform_v1/__init__.py @@ -93,6 +93,7 @@ from .types.encryption_spec import EncryptionSpec from .types.endpoint import DeployedModel from .types.endpoint import Endpoint +from .types.endpoint import PredictRequestResponseLoggingConfig from .types.endpoint import PrivateEndpoints from .types.endpoint_service import CreateEndpointOperationMetadata from 
.types.endpoint_service import CreateEndpointRequest @@ -831,6 +832,7 @@ "Port", "PredefinedSplit", "PredictRequest", + "PredictRequestResponseLoggingConfig", "PredictResponse", "PredictSchemata", "PredictionServiceClient", diff --git a/google/cloud/aiplatform_v1/services/migration_service/client.py b/google/cloud/aiplatform_v1/services/migration_service/client.py index 2b0e890e7b..353992d9f1 100644 --- a/google/cloud/aiplatform_v1/services/migration_service/client.py +++ b/google/cloud/aiplatform_v1/services/migration_service/client.py @@ -199,32 +199,32 @@ def parse_dataset_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def dataset_path(project: str, location: str, dataset: str,) -> str: + def dataset_path(project: str, dataset: str,) -> str: """Returns a fully-qualified dataset string.""" - return "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, location=location, dataset=dataset, + return "projects/{project}/datasets/{dataset}".format( + project=project, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: """Parses a dataset path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", - path, - ) + m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def dataset_path(project: str, dataset: str,) -> str: + def dataset_path(project: str, location: str, dataset: str,) -> str: """Returns a fully-qualified dataset string.""" - return "projects/{project}/datasets/{dataset}".format( - project=project, dataset=dataset, + return "projects/{project}/locations/{location}/datasets/{dataset}".format( + project=project, location=location, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: """Parses a dataset path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path) + m = re.match( + 
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod diff --git a/google/cloud/aiplatform_v1/types/__init__.py b/google/cloud/aiplatform_v1/types/__init__.py index 85140a3f52..5cd69f3dba 100644 --- a/google/cloud/aiplatform_v1/types/__init__.py +++ b/google/cloud/aiplatform_v1/types/__init__.py @@ -65,6 +65,7 @@ from .endpoint import ( DeployedModel, Endpoint, + PredictRequestResponseLoggingConfig, PrivateEndpoints, ) from .endpoint_service import ( @@ -546,6 +547,7 @@ "EncryptionSpec", "DeployedModel", "Endpoint", + "PredictRequestResponseLoggingConfig", "PrivateEndpoints", "CreateEndpointOperationMetadata", "CreateEndpointRequest", diff --git a/google/cloud/aiplatform_v1/types/endpoint.py b/google/cloud/aiplatform_v1/types/endpoint.py index e99e8d60c2..a60af7e620 100644 --- a/google/cloud/aiplatform_v1/types/endpoint.py +++ b/google/cloud/aiplatform_v1/types/endpoint.py @@ -17,13 +17,19 @@ from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec from google.cloud.aiplatform_v1.types import explanation +from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types import machine_resources from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( package="google.cloud.aiplatform.v1", - manifest={"Endpoint", "DeployedModel", "PrivateEndpoints",}, + manifest={ + "Endpoint", + "DeployedModel", + "PrivateEndpoints", + "PredictRequestResponseLoggingConfig", + }, ) @@ -113,6 +119,9 @@ class Endpoint(proto.Message): associated with this Endpoint if monitoring is enabled by [CreateModelDeploymentMonitoringJob][]. Format: ``projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}`` + predict_request_response_logging_config (google.cloud.aiplatform_v1.types.PredictRequestResponseLoggingConfig): + Configures the request-response logging for + online prediction. 
""" name = proto.Field(proto.STRING, number=1,) @@ -132,6 +141,9 @@ class Endpoint(proto.Message): network = proto.Field(proto.STRING, number=13,) enable_private_service_connect = proto.Field(proto.BOOL, number=17,) model_deployment_monitoring_job = proto.Field(proto.STRING, number=14,) + predict_request_response_logging_config = proto.Field( + proto.MESSAGE, number=18, message="PredictRequestResponseLoggingConfig", + ) class DeployedModel(proto.Message): @@ -286,4 +298,31 @@ class PrivateEndpoints(proto.Message): service_attachment = proto.Field(proto.STRING, number=4,) +class PredictRequestResponseLoggingConfig(proto.Message): + r"""Configuration for logging request-response to a BigQuery + table. + + Attributes: + enabled (bool): + If logging is enabled or not. + sampling_rate (float): + Percentage of requests to be logged, expressed as a fraction + in range(0,1]. + bigquery_destination (google.cloud.aiplatform_v1.types.BigQueryDestination): + BigQuery table for logging. If only given project, a new + dataset will be created with name + ``logging_<endpoint-display-name>_<endpoint-id>`` where <endpoint-display-name> will + be made BigQuery-dataset-name compatible (e.g. most special + characters will become underscores). 
If no table name is + given, a new table will be created with name + ``request_response_logging`` + """ + + enabled = proto.Field(proto.BOOL, number=1,) + sampling_rate = proto.Field(proto.DOUBLE, number=2,) + bigquery_destination = proto.Field( + proto.MESSAGE, number=3, message=io.BigQueryDestination, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform_v1/types/model_monitoring.py b/google/cloud/aiplatform_v1/types/model_monitoring.py index 336129e5b4..f70e605a9b 100644 --- a/google/cloud/aiplatform_v1/types/model_monitoring.py +++ b/google/cloud/aiplatform_v1/types/model_monitoring.py @@ -30,7 +30,7 @@ class ModelMonitoringObjectiveConfig(proto.Message): - r"""Next ID: 6 + r"""Next ID: 7 Attributes: training_dataset (google.cloud.aiplatform_v1.types.ModelMonitoringObjectiveConfig.TrainingDataset): diff --git a/tests/unit/gapic/aiplatform_v1/test_endpoint_service.py b/tests/unit/gapic/aiplatform_v1/test_endpoint_service.py index af13945cf1..5fb471867f 100644 --- a/tests/unit/gapic/aiplatform_v1/test_endpoint_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_endpoint_service.py @@ -48,6 +48,7 @@ from google.cloud.aiplatform_v1.types import endpoint_service from google.cloud.aiplatform_v1.types import explanation from google.cloud.aiplatform_v1.types import explanation_metadata +from google.cloud.aiplatform_v1.types import io from google.cloud.aiplatform_v1.types import machine_resources from google.cloud.aiplatform_v1.types import operation as gca_operation from google.longrunning import operations_pb2 diff --git a/tests/unit/gapic/aiplatform_v1/test_migration_service.py b/tests/unit/gapic/aiplatform_v1/test_migration_service.py index 6b4b74ac4c..3a0eb25e66 100644 --- a/tests/unit/gapic/aiplatform_v1/test_migration_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_migration_service.py @@ -1817,20 +1817,18 @@ def test_parse_dataset_path(): def test_dataset_path(): project = "squid" - location = "clam" - dataset = 
"whelk" - expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, location=location, dataset=dataset, + dataset = "clam" + expected = "projects/{project}/datasets/{dataset}".format( + project=project, dataset=dataset, ) - actual = MigrationServiceClient.dataset_path(project, location, dataset) + actual = MigrationServiceClient.dataset_path(project, dataset) assert expected == actual def test_parse_dataset_path(): expected = { - "project": "octopus", - "location": "oyster", - "dataset": "nudibranch", + "project": "whelk", + "dataset": "octopus", } path = MigrationServiceClient.dataset_path(**expected) @@ -1840,18 +1838,20 @@ def test_parse_dataset_path(): def test_dataset_path(): - project = "cuttlefish" - dataset = "mussel" - expected = "projects/{project}/datasets/{dataset}".format( - project=project, dataset=dataset, + project = "oyster" + location = "nudibranch" + dataset = "cuttlefish" + expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( + project=project, location=location, dataset=dataset, ) - actual = MigrationServiceClient.dataset_path(project, dataset) + actual = MigrationServiceClient.dataset_path(project, location, dataset) assert expected == actual def test_parse_dataset_path(): expected = { - "project": "winkle", + "project": "mussel", + "location": "winkle", "dataset": "nautilus", } path = MigrationServiceClient.dataset_path(**expected)