Skip to content

Commit

Permalink
feat: add PredictRequestResponseLoggingConfig to Endpoint in aiplatform v1 endpoint.proto (#1072)
Browse files Browse the repository at this point in the history

* feat: add PredictRequestResponseLoggingConfig to Endpoint in aiplatform v1 endpoint.proto

PiperOrigin-RevId: 433794371

Source-Link: googleapis/googleapis@e0f0642

Source-Link: googleapis/googleapis-gen@3afc3ec
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiM2FmYzNlYzE4ZTc5NWM4NTkyMzFhMmMwZjhlZjE3MmE2NTA1NmUwZCJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
  • Loading branch information
gcf-owl-bot[bot] and gcf-owl-bot[bot] authored Mar 10, 2022
1 parent f10a1d4 commit be0ccc4
Show file tree
Hide file tree
Showing 7 changed files with 71 additions and 27 deletions.
2 changes: 2 additions & 0 deletions google/cloud/aiplatform_v1/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@
from .types.encryption_spec import EncryptionSpec
from .types.endpoint import DeployedModel
from .types.endpoint import Endpoint
from .types.endpoint import PredictRequestResponseLoggingConfig
from .types.endpoint import PrivateEndpoints
from .types.endpoint_service import CreateEndpointOperationMetadata
from .types.endpoint_service import CreateEndpointRequest
Expand Down Expand Up @@ -831,6 +832,7 @@
"Port",
"PredefinedSplit",
"PredictRequest",
"PredictRequestResponseLoggingConfig",
"PredictResponse",
"PredictSchemata",
"PredictionServiceClient",
Expand Down
22 changes: 11 additions & 11 deletions google/cloud/aiplatform_v1/services/migration_service/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,32 +199,32 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
return m.groupdict() if m else {}

@staticmethod
def dataset_path(project: str, location: str, dataset: str,) -> str:
def dataset_path(project: str, dataset: str,) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project, location=location, dataset=dataset,
return "projects/{project}/datasets/{dataset}".format(
project=project, dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
return m.groupdict() if m else {}

@staticmethod
def dataset_path(project: str, dataset: str,) -> str:
def dataset_path(project: str, location: str, dataset: str,) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/datasets/{dataset}".format(
project=project, dataset=dataset,
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project, location=location, dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
return m.groupdict() if m else {}

@staticmethod
Expand Down
2 changes: 2 additions & 0 deletions google/cloud/aiplatform_v1/types/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@
from .endpoint import (
DeployedModel,
Endpoint,
PredictRequestResponseLoggingConfig,
PrivateEndpoints,
)
from .endpoint_service import (
Expand Down Expand Up @@ -546,6 +547,7 @@
"EncryptionSpec",
"DeployedModel",
"Endpoint",
"PredictRequestResponseLoggingConfig",
"PrivateEndpoints",
"CreateEndpointOperationMetadata",
"CreateEndpointRequest",
Expand Down
41 changes: 40 additions & 1 deletion google/cloud/aiplatform_v1/types/endpoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,19 @@

from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec
from google.cloud.aiplatform_v1.types import explanation
from google.cloud.aiplatform_v1.types import io
from google.cloud.aiplatform_v1.types import machine_resources
from google.protobuf import timestamp_pb2 # type: ignore


__protobuf__ = proto.module(
package="google.cloud.aiplatform.v1",
manifest={"Endpoint", "DeployedModel", "PrivateEndpoints",},
manifest={
"Endpoint",
"DeployedModel",
"PrivateEndpoints",
"PredictRequestResponseLoggingConfig",
},
)


Expand Down Expand Up @@ -113,6 +119,9 @@ class Endpoint(proto.Message):
associated with this Endpoint if monitoring is enabled by
[CreateModelDeploymentMonitoringJob][]. Format:
``projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}``
predict_request_response_logging_config (google.cloud.aiplatform_v1.types.PredictRequestResponseLoggingConfig):
Configures the request-response logging for
online prediction.
"""

name = proto.Field(proto.STRING, number=1,)
Expand All @@ -132,6 +141,9 @@ class Endpoint(proto.Message):
network = proto.Field(proto.STRING, number=13,)
enable_private_service_connect = proto.Field(proto.BOOL, number=17,)
model_deployment_monitoring_job = proto.Field(proto.STRING, number=14,)
predict_request_response_logging_config = proto.Field(
proto.MESSAGE, number=18, message="PredictRequestResponseLoggingConfig",
)


class DeployedModel(proto.Message):
Expand Down Expand Up @@ -286,4 +298,31 @@ class PrivateEndpoints(proto.Message):
service_attachment = proto.Field(proto.STRING, number=4,)


class PredictRequestResponseLoggingConfig(proto.Message):
    r"""Configuration for logging request-response to a BigQuery
    table.

    Attributes:
        enabled (bool):
            If logging is enabled or not.
        sampling_rate (float):
            Percentage of requests to be logged, expressed as a
            fraction in the range (0, 1].
        bigquery_destination (google.cloud.aiplatform_v1.types.BigQueryDestination):
            BigQuery table for logging. If only a project is given, a
            new dataset will be created with name
            ``logging_<endpoint-display-name>_<endpoint-id>``, where the
            endpoint display name will be made BigQuery-dataset-name
            compatible (e.g. most special characters will become
            underscores). If no table name is given, a new table will be
            created with name ``request_response_logging``.
    """

    # Proto field numbers (1-3) mirror PredictRequestResponseLoggingConfig in
    # aiplatform v1 endpoint.proto; they must never be renumbered.
    enabled = proto.Field(proto.BOOL, number=1,)
    sampling_rate = proto.Field(proto.DOUBLE, number=2,)
    # Nested message type; `io` is the aiplatform_v1.types.io module imported
    # at the top of this file.
    bigquery_destination = proto.Field(
        proto.MESSAGE, number=3, message=io.BigQueryDestination,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
2 changes: 1 addition & 1 deletion google/cloud/aiplatform_v1/types/model_monitoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@


class ModelMonitoringObjectiveConfig(proto.Message):
r"""Next ID: 6
r"""Next ID: 7
Attributes:
training_dataset (google.cloud.aiplatform_v1.types.ModelMonitoringObjectiveConfig.TrainingDataset):
Expand Down
1 change: 1 addition & 0 deletions tests/unit/gapic/aiplatform_v1/test_endpoint_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
from google.cloud.aiplatform_v1.types import endpoint_service
from google.cloud.aiplatform_v1.types import explanation
from google.cloud.aiplatform_v1.types import explanation_metadata
from google.cloud.aiplatform_v1.types import io
from google.cloud.aiplatform_v1.types import machine_resources
from google.cloud.aiplatform_v1.types import operation as gca_operation
from google.longrunning import operations_pb2
Expand Down
28 changes: 14 additions & 14 deletions tests/unit/gapic/aiplatform_v1/test_migration_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -1817,20 +1817,18 @@ def test_parse_dataset_path():

def test_dataset_path():
project = "squid"
location = "clam"
dataset = "whelk"
expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project, location=location, dataset=dataset,
dataset = "clam"
expected = "projects/{project}/datasets/{dataset}".format(
project=project, dataset=dataset,
)
actual = MigrationServiceClient.dataset_path(project, location, dataset)
actual = MigrationServiceClient.dataset_path(project, dataset)
assert expected == actual


def test_parse_dataset_path():
expected = {
"project": "octopus",
"location": "oyster",
"dataset": "nudibranch",
"project": "whelk",
"dataset": "octopus",
}
path = MigrationServiceClient.dataset_path(**expected)

Expand All @@ -1840,18 +1838,20 @@ def test_parse_dataset_path():


def test_dataset_path():
project = "cuttlefish"
dataset = "mussel"
expected = "projects/{project}/datasets/{dataset}".format(
project=project, dataset=dataset,
project = "oyster"
location = "nudibranch"
dataset = "cuttlefish"
expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project, location=location, dataset=dataset,
)
actual = MigrationServiceClient.dataset_path(project, dataset)
actual = MigrationServiceClient.dataset_path(project, location, dataset)
assert expected == actual


def test_parse_dataset_path():
expected = {
"project": "winkle",
"project": "mussel",
"location": "winkle",
"dataset": "nautilus",
}
path = MigrationServiceClient.dataset_path(**expected)
Expand Down

0 comments on commit be0ccc4

Please sign in to comment.