diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py
index f33ba416ad..09f8260b0c 100644
--- a/google/cloud/aiplatform/jobs.py
+++ b/google/cloud/aiplatform/jobs.py
@@ -56,6 +56,10 @@
 from google.cloud.aiplatform.utils import source_utils
 from google.cloud.aiplatform.utils import worker_spec_utils
+from google.cloud.aiplatform_v1.types import (
+    batch_prediction_job as batch_prediction_job_v1,
+)
+from google.cloud.aiplatform_v1.types import custom_job as custom_job_v1
 
 _LOGGER = base.Logger(__name__)
@@ -331,7 +335,7 @@ def __init__(
     @property
     def output_info(
         self,
-    ) -> Optional[aiplatform.gapic.BatchPredictionJob.OutputInfo]:
+    ) -> Optional[batch_prediction_job_v1.BatchPredictionJob.OutputInfo]:
         """Information describing the output of this job, including
         output location into which prediction output is written.
@@ -1121,7 +1125,7 @@ def __init__(
         self,
         # TODO(b/223262536): Make display_name parameter fully optional in next major release
         display_name: str,
-        worker_pool_specs: Union[List[Dict], List[aiplatform.gapic.WorkerPoolSpec]],
+        worker_pool_specs: Union[List[Dict], List[custom_job_v1.WorkerPoolSpec]],
         base_output_dir: Optional[str] = None,
         project: Optional[str] = None,
         location: Optional[str] = None,
diff --git a/google/cloud/aiplatform/metadata/metadata.py b/google/cloud/aiplatform/metadata/metadata.py
index d103a79733..92c484f34f 100644
--- a/google/cloud/aiplatform/metadata/metadata.py
+++ b/google/cloud/aiplatform/metadata/metadata.py
@@ -22,7 +22,6 @@
 from google.protobuf import timestamp_pb2
 
 from google.cloud.aiplatform import base
-from google.cloud.aiplatform import gapic
 from google.cloud.aiplatform import pipeline_jobs
 from google.cloud.aiplatform.compat.types import execution as gca_execution
 from google.cloud.aiplatform.metadata import constants
@@ -32,6 +31,8 @@
 from google.cloud.aiplatform.metadata import experiment_run_resource
 from google.cloud.aiplatform.tensorboard import tensorboard_resource
 
+from google.cloud.aiplatform_v1.types import execution as execution_v1
+
 _LOGGER = base.Logger(__name__)
@@ -302,7 +303,9 @@ def start_run(
         if tensorboard:
             self._experiment_run.assign_backing_tensorboard(tensorboard=tensorboard)
 
-            self._experiment_run.update_state(state=gapic.Execution.State.RUNNING)
+            self._experiment_run.update_state(
+                state=execution_v1.Execution.State.RUNNING
+            )
 
         else:
             self._experiment_run = experiment_run_resource.ExperimentRun.create(
@@ -311,7 +314,10 @@
 
         return self._experiment_run
 
-    def end_run(self, state: gapic.Execution.State = gapic.Execution.State.COMPLETE):
+    def end_run(
+        self,
+        state: execution_v1.Execution.State = execution_v1.Execution.State.COMPLETE,
+    ):
         """Ends the the current experiment run.
diff --git a/google/cloud/aiplatform/models.py b/google/cloud/aiplatform/models.py
index 8ee0f400f1..db0093a04f 100644
--- a/google/cloud/aiplatform/models.py
+++ b/google/cloud/aiplatform/models.py
@@ -66,6 +66,8 @@
     prediction as prediction_constants,
 )
 
+from google.cloud.aiplatform_v1.types import model as model_v1
+
 from google.protobuf import field_mask_pb2, timestamp_pb2
 from google.protobuf import json_format
@@ -2423,7 +2425,7 @@ def supported_export_formats(
     @property
     def supported_deployment_resources_types(
         self,
-    ) -> List[aiplatform.gapic.Model.DeploymentResourcesType]:
+    ) -> List[model_v1.Model.DeploymentResourcesType]:
         """List of deployment resource types accepted for this Model.
 
         When this Model is deployed, its prediction resources are described by
@@ -2479,7 +2481,7 @@ def supported_output_storage_formats(self) -> List[str]:
         return list(self._gca_resource.supported_output_storage_formats)
 
     @property
-    def predict_schemata(self) -> Optional[aiplatform.gapic.PredictSchemata]:
+    def predict_schemata(self) -> Optional[model_v1.PredictSchemata]:
         """The schemata that describe formats of the Model's predictions and
         explanations, if available."""
         self._assert_gca_resource_is_available()
@@ -2512,7 +2514,7 @@ def training_job(self) -> Optional["aiplatform.training_jobs._TrainingJob"]:
         )
 
     @property
-    def container_spec(self) -> Optional[aiplatform.gapic.ModelContainerSpec]:
+    def container_spec(self) -> Optional[model_v1.ModelContainerSpec]:
         """The specification of the container that is to be used when deploying
         this Model. Not present for AutoML Models."""
         self._assert_gca_resource_is_available()