diff --git a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_helpers.py b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_helpers.py index e2427981585b..1fbd4479282a 100644 --- a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_helpers.py +++ b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_helpers.py @@ -187,3 +187,17 @@ def convert_to_sub_feedback(feedback): if feedback.feedback_type == "Period": return PeriodFeedback._from_generated(feedback) # type: ignore raise HttpResponseError("Invalid feedback type returned in the response.") + +def convert_datetime(date_time): + # type: (Union[str, datetime.datetime]) -> datetime.datetime + if isinstance(date_time, datetime.datetime): + return date_time + if isinstance(date_time, six.string_types): + try: + return datetime.datetime.strptime(date_time, "%Y-%m-%d") + except ValueError: + try: + return datetime.datetime.strptime(date_time, "%Y-%m-%dT%H:%M:%SZ") + except ValueError: + return datetime.datetime.strptime(date_time, "%Y-%m-%d %H:%M:%S") + raise TypeError("Bad datetime type") diff --git a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_metrics_advisor_administration_client.py b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_metrics_advisor_administration_client.py index cc45242fd448..ba0df0f7ecfd 100644 --- a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_metrics_advisor_administration_client.py +++ b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_metrics_advisor_administration_client.py @@ -58,7 +58,8 @@ construct_alert_config_dict, construct_detection_config_dict, construct_hook_dict, - construct_data_feed_dict + construct_data_feed_dict, + convert_datetime ) from .models._models import ( DataFeed, @@ -556,8 +557,8 @@ def get_data_feed_ingestion_progress( def refresh_data_feed_ingestion( self, data_feed_id, # type: str - start_time, # type: datetime.datetime - end_time, # type: datetime.datetime + start_time, # type: Union[str, datetime.datetime] + end_time, # type: Union[str, datetime.datetime] **kwargs # type: Any ): # type: (...) -> None @@ -566,9 +567,9 @@ def refresh_data_feed_ingestion( :param data_feed_id: The data feed unique id. :type data_feed_id: str :param start_time: The start point of time range to refresh data ingestion. - :type start_time: ~datetime.datetime + :type start_time: Union[str, ~datetime.datetime] :param end_time: The end point of time range to refresh data ingestion. - :type end_time: ~datetime.datetime + :type end_time: Union[str, ~datetime.datetime] :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -582,11 +583,13 @@ def refresh_data_feed_ingestion( :dedent: 4 :caption: Refresh data feed ingestion over a period of time """ + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) self._client.reset_data_feed_ingestion_status( data_feed_id, body=_IngestionProgressResetOptions( - start_time=start_time, - end_time=end_time + start_time=converted_start_time, + end_time=converted_end_time ), **kwargs ) @@ -1171,8 +1174,8 @@ def list_metric_anomaly_detection_configurations( def list_data_feed_ingestion_status( self, data_feed_id, # type: str - start_time, # type: datetime.datetime - end_time, # type: datetime.datetime + start_time, # type: Union[str, datetime.datetime] + end_time, # type: Union[str, datetime.datetime] **kwargs # type: Any ): # type: (...) 
-> ItemPaged[DataFeedIngestionStatus] @@ -1180,9 +1183,9 @@ def list_data_feed_ingestion_status( :param str data_feed_id: The data feed unique id. :param start_time: Required. the start point of time range to query data ingestion status. - :type start_time: ~datetime.datetime + :type start_time: Union[str, ~datetime.datetime] :param end_time: Required. the end point of time range to query data ingestion status. - :type end_time: ~datetime.datetime + :type end_time: Union[str, ~datetime.datetime] :keyword int skip: :return: Pageable of DataFeedIngestionStatus :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.DataFeedIngestionStatus] @@ -1199,12 +1202,14 @@ def list_data_feed_ingestion_status( """ skip = kwargs.pop("skip", None) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) return self._client.get_data_feed_ingestion_status( # type: ignore data_feed_id=data_feed_id, body=_IngestionStatusQueryOptions( - start_time=start_time, - end_time=end_time + start_time=converted_start_time, + end_time=converted_end_time ), skip=skip, **kwargs diff --git a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_metrics_advisor_client.py b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_metrics_advisor_client.py index 513fbc5b8696..ea336e5ebb2d 100644 --- a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_metrics_advisor_client.py +++ b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/_metrics_advisor_client.py @@ -7,7 +7,6 @@ # pylint: disable=protected-access from typing import List, Union, Dict, Any, cast, TYPE_CHECKING -import datetime # pylint:disable=unused-import from azure.core.tracing.decorator import distributed_trace from ._metrics_advisor_key_credential import MetricsAdvisorKeyCredential @@ -27,7 +26,7 @@ FeedbackDimensionFilter, ) from ._generated import AzureCognitiveServiceMetricsAdvisorRESTAPIOpenAPIV2 -from ._helpers import convert_to_sub_feedback +from ._helpers import convert_to_sub_feedback, convert_datetime from .models._models import ( Incident, Anomaly, @@ -38,6 +37,7 @@ from ._version import SDK_MONIKER if TYPE_CHECKING: + import datetime from ._generated.models import ( SeriesResult, EnrichmentStatus, @@ -175,8 +175,8 @@ def list_feedbacks(self, metric_id, **kwargs): :keyword feedback_type: filter feedbacks by type. Possible values include: "Anomaly", "ChangePoint", "Period", "Comment". :paramtype feedback_type: str or ~azure.ai.metricsadvisor.models.FeedbackType - :keyword ~datetime.datetime start_time: start time filter under chosen time mode. - :keyword ~datetime.datetime end_time: end time filter under chosen time mode. + :keyword Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :keyword Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :keyword time_mode: time mode to filter feedback. Possible values include: "MetricTimestamp", "FeedbackCreatedTime". 
:paramtype time_mode: str or ~azure.ai.metricsadvisor.models.FeedbackQueryTimeMode @@ -203,13 +203,15 @@ def list_feedbacks(self, metric_id, **kwargs): feedback_type = kwargs.pop('feedback_type', None) start_time = kwargs.pop('start_time', None) end_time = kwargs.pop('end_time', None) + converted_start_time = convert_datetime(start_time) if start_time else None + converted_end_time = convert_datetime(end_time) if end_time else None time_mode = kwargs.pop('time_mode', None) feedback_filter = MetricFeedbackFilter( metric_id=metric_id, dimension_filter=dimension_filter, feedback_type=feedback_type, - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, time_mode=time_mode, ) @@ -249,8 +251,8 @@ def list_incident_root_causes(self, detection_configuration_id, incident_id, **k def list_metric_enriched_series_data( self, detection_configuration_id, # type: str series, # type: Union[List[SeriesIdentity], List[Dict[str, str]]] - start_time, # type: datetime.datetime - end_time, # type: datetime.datetime + start_time, # type: Union[str, datetime.datetime] + end_time, # type: Union[str, datetime.datetime] **kwargs # type: Any ): # type: (...) -> ItemPaged[SeriesResult] @@ -259,8 +261,8 @@ def list_metric_enriched_series_data( :param str detection_configuration_id: anomaly alerting configuration unique id. :param series: List of dimensions specified for series. :type series: ~azure.ai.metricsadvisor.models.SeriesIdentity or list[dict[str, str]] - :param ~datetime.datetime start_time: start time filter under chosen time mode. - :param ~datetime.datetime end_time: end time filter under chosen time mode. + :param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :return: Pageable of SeriesResult :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.SeriesResult] :raises ~azure.core.exceptions.HttpResponseError: @@ -273,9 +275,11 @@ def list_metric_enriched_series_data( ] or series series_list = cast(List[SeriesIdentity], series_list) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) detection_series_query = DetectionSeriesQuery( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, series=series_list ) @@ -292,8 +296,8 @@ def list_alerts_for_alert_configuration(self, alert_configuration_id, start_time :param alert_configuration_id: anomaly alert configuration unique id. :type alert_configuration_id: str - :param ~datetime.datetime start_time: start time. - :param ~datetime.datetime end_time: end time. + :param Union[str, ~datetime.datetime] start_time: start time. + :param Union[str, ~datetime.datetime] end_time: end time. :param time_mode: time mode. Possible values include: "AnomalyTime", "CreatedTime", "ModifiedTime". 
:type time_mode: str or ~azure.ai.metricsadvisor.models.TimeMode @@ -313,10 +317,12 @@ def list_alerts_for_alert_configuration(self, alert_configuration_id, start_time """ skip = kwargs.pop('skip', None) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) alerting_result_query = AlertingResultQuery( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, time_mode=time_mode, ) @@ -369,8 +375,8 @@ def list_anomalies_for_detection_configuration(self, detection_configuration_id, :param detection_configuration_id: anomaly detection configuration unique id. :type detection_configuration_id: str - :param ~datetime.datetime start_time: start time filter under chosen time mode. - :param ~datetime.datetime end_time: end time filter under chosen time mode. + :param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :keyword int skip: :keyword filter: :paramtype filter: ~azure.ai.metricsadvisor.models.DetectionAnomalyFilterCondition @@ -381,9 +387,11 @@ def list_anomalies_for_detection_configuration(self, detection_configuration_id, skip = kwargs.pop('skip', None) filter_condition = kwargs.pop('filter', None) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) detection_anomaly_result_query = DetectionAnomalyResultQuery( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, filter=filter_condition, ) @@ -402,15 +410,15 @@ def list_dimension_values_for_detection_configuration( end_time, **kwargs ): - # type: (str, str, datetime.datetime, datetime.datetime, Any) -> ItemPaged[str] + # type: (str, str, Union[str, datetime.datetime], Union[str, datetime.datetime], Any) -> ItemPaged[str] """Query dimension values of anomalies. :param detection_configuration_id: anomaly detection configuration unique id. :type detection_configuration_id: str :param str dimension_name: dimension to query. - :param ~datetime.datetime start_time: start time filter under chosen time mode. - :param ~datetime.datetime end_time: end time filter under chosen time mode. + :param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :keyword int skip: :keyword str dimension_name: The dimension name to query. 
:paramtype dimension_filter: ~azure.ai.metricsadvisor.models.DimensionGroupIdentity @@ -421,9 +429,11 @@ def list_dimension_values_for_detection_configuration( skip = kwargs.pop('skip', None) dimension_filter = kwargs.pop('dimension_filter', None) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) anomaly_dimension_query = AnomalyDimensionQuery( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, dimension_name=dimension_name, dimension_filter=dimension_filter, ) @@ -461,14 +471,14 @@ def list_incidents_for_alert(self, alert_configuration_id, alert_id, **kwargs): @distributed_trace def list_incidents_for_detection_configuration(self, detection_configuration_id, start_time, end_time, **kwargs): - # type: (str, datetime.datetime, datetime.datetime, Any) -> ItemPaged[Incident] + # type: (str, Union[str, datetime.datetime], Union[str, datetime.datetime], Any) -> ItemPaged[Incident] """Query incidents under a specific alert. :param detection_configuration_id: anomaly detection configuration unique id. :type detection_configuration_id: str - :param ~datetime.datetime start_time: start time filter under chosen time mode. - :param ~datetime.datetime end_time: end time filter under chosen time mode. + :param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :keyword filter: :paramtype filter: ~azure.ai.metricsadvisor.models.DetectionIncidentFilterCondition :return: Incidents under a specific alert. @@ -477,10 +487,12 @@ def list_incidents_for_detection_configuration(self, detection_configuration_id, """ filter_condition = kwargs.pop('filter', None) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) detection_incident_result_query = DetectionIncidentResultQuery( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, filter=filter_condition, ) @@ -523,15 +535,21 @@ def list_metric_dimension_values(self, metric_id, dimension_name, **kwargs): **kwargs) @distributed_trace - def list_metrics_series_data(self, metric_id, start_time, end_time, series_to_filter, **kwargs): - # type: (str, datetime.datetime, datetime.datetime, List[Dict[str, str]], Any) -> ItemPaged[MetricSeriesData] + def list_metrics_series_data(self, + metric_id, # type: str + start_time, # type: Union[str, datetime.datetime] + end_time, # type: Union[str, datetime.datetime] + series_to_filter, # type: List[Dict[str, str]] + **kwargs # type: Any + ): + # type: (...) -> ItemPaged[MetricSeriesData] """Get time series data from metric. :param metric_id: metric unique id. :type metric_id: str - :param ~datetime.datetime start_time: start time filter under chosen time mode. - :param ~datetime.datetime end_time: end time filter under chosen time mode. + :param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :param series_to_filter: query specific series. :type series_to_filter: list[dict[str, str]] :return: Time series data from metric. 
@@ -539,9 +557,12 @@ def list_metrics_series_data(self, metric_id, start_time, end_time, series_to_fi :raises ~azure.core.exceptions.HttpResponseError: """ + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) + metric_data_query_options = MetricDataQueryOptions( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, series=series_to_filter, ) @@ -588,14 +609,14 @@ def list_metric_series_definitions(self, metric_id, active_since, **kwargs): @distributed_trace def list_metric_enrichment_status(self, metric_id, start_time, end_time, **kwargs): - # type: (str, datetime.datetime, datetime.datetime, Any) -> ItemPaged[EnrichmentStatus] + # type: (str, Union[str, datetime.datetime], Union[str, datetime.datetime], Any) -> ItemPaged[EnrichmentStatus] """Query anomaly detection status. :param metric_id: filter feedbacks by metric id. :type metric_id: str - :param ~datetime.datetime start_time: start time filter under chosen time mode. - :param ~datetime.datetime end_time: end time filter under chosen time mode. + :param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :keyword int skip: :return: Anomaly detection status. :rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.EnrichmentStatus] @@ -603,9 +624,11 @@ def list_metric_enrichment_status(self, metric_id, start_time, end_time, **kwarg """ skip = kwargs.pop('skip', None) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) enrichment_status_query_option = EnrichmentStatusQueryOption( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, ) return self._client.get_enrichment_status_by_metric( # type: ignore diff --git a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/aio/_metrics_advisor_administration_client_async.py b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/aio/_metrics_advisor_administration_client_async.py index 45522d755e6c..ef2bd8c7a398 100644 --- a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/aio/_metrics_advisor_administration_client_async.py +++ b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/aio/_metrics_advisor_administration_client_async.py @@ -34,7 +34,8 @@ construct_alert_config_dict, construct_detection_config_dict, construct_hook_dict, - construct_data_feed_dict + construct_data_feed_dict, + convert_datetime ) from ..models import ( DataFeed, @@ -466,8 +467,8 @@ async def get_data_feed_ingestion_progress( async def refresh_data_feed_ingestion( self, data_feed_id: str, - start_time: datetime.datetime, - end_time: datetime.datetime, + start_time: Union[str, datetime.datetime], + end_time: Union[str, datetime.datetime], **kwargs: Any ) -> None: """Refreshes data ingestion by data feed to backfill data. @@ -475,9 +476,9 @@ async def refresh_data_feed_ingestion( :param data_feed_id: The data feed unique id. :type data_feed_id: str :param start_time: The start point of time range to refresh data ingestion. - :type start_time: ~datetime.datetime + :type start_time: Union[str, ~datetime.datetime] :param end_time: The end point of time range to refresh data ingestion. 
- :type end_time: ~datetime.datetime + :type end_time: Union[str, ~datetime.datetime] :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: @@ -491,11 +492,13 @@ async def refresh_data_feed_ingestion( :dedent: 4 :caption: Refresh data feed ingestion over a period of time """ + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) await self._client.reset_data_feed_ingestion_status( data_feed_id, body=_IngestionProgressResetOptions( - start_time=start_time, - end_time=end_time + start_time=converted_start_time, + end_time=converted_end_time ), **kwargs ) @@ -1072,8 +1075,8 @@ def list_metric_anomaly_detection_configurations( def list_data_feed_ingestion_status( self, data_feed_id, # type: str - start_time, # type: datetime.datetime - end_time, # type: datetime.datetime + start_time, # type: Union[str, datetime.datetime] + end_time, # type: Union[str, datetime.datetime] **kwargs # type: Any ): # type: (...) -> AsyncItemPaged[DataFeedIngestionStatus] @@ -1081,9 +1084,9 @@ def list_data_feed_ingestion_status( :param str data_feed_id: The data feed unique id. :param start_time: Required. the start point of time range to query data ingestion status. - :type start_time: ~datetime.datetime + :type start_time: Union[str, ~datetime.datetime] :param end_time: Required. the end point of time range to query data ingestion status. - :type end_time: ~datetime.datetime + :type end_time: Union[str, ~datetime.datetime] :keyword int skip: :return: Pageable of DataFeedIngestionStatus :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.DataFeedIngestionStatus] @@ -1100,12 +1103,14 @@ def list_data_feed_ingestion_status( """ skip = kwargs.pop("skip", None) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) return self._client.get_data_feed_ingestion_status( # type: ignore data_feed_id=data_feed_id, body=_IngestionStatusQueryOptions( - start_time=start_time, - end_time=end_time + start_time=converted_start_time, + end_time=converted_end_time ), skip=skip, **kwargs diff --git a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/aio/_metrics_advisor_client_async.py b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/aio/_metrics_advisor_client_async.py index 4c1acbc8b831..85e89f632926 100644 --- a/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/aio/_metrics_advisor_client_async.py +++ b/sdk/metricsadvisor/azure-ai-metricsadvisor/azure/ai/metricsadvisor/aio/_metrics_advisor_client_async.py @@ -29,7 +29,7 @@ FeedbackDimensionFilter, ) from .._generated.aio import AzureCognitiveServiceMetricsAdvisorRESTAPIOpenAPIV2 -from .._helpers import convert_to_sub_feedback +from .._helpers import convert_to_sub_feedback, convert_datetime from ..models._models import ( Incident, Anomaly, @@ -179,8 +179,8 @@ def list_feedbacks( :keyword feedback_type: filter feedbacks by type. Possible values include: "Anomaly", "ChangePoint", "Period", "Comment". :paramtype feedback_type: str or ~azure.ai.metricsadvisor.models.FeedbackType - :keyword ~datetime.datetime start_time: start time filter under chosen time mode. - :keyword ~datetime.datetime end_time: end time filter under chosen time mode. + :keyword Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :keyword Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :keyword time_mode: time mode to filter feedback. 
Possible values include: "MetricTimestamp", "FeedbackCreatedTime". :paramtype time_mode: str or ~azure.ai.metricsadvisor.models.FeedbackQueryTimeMode @@ -207,13 +207,15 @@ def list_feedbacks( feedback_type = kwargs.pop('feedback_type', None) start_time = kwargs.pop('start_time', None) end_time = kwargs.pop('end_time', None) + converted_start_time = convert_datetime(start_time) if start_time else None + converted_end_time = convert_datetime(end_time) if end_time else None time_mode = kwargs.pop('time_mode', None) feedback_filter = MetricFeedbackFilter( metric_id=metric_id, dimension_filter=dimension_filter, feedback_type=feedback_type, - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, time_mode=time_mode, ) @@ -253,8 +255,8 @@ def list_incident_root_causes(self, detection_configuration_id, incident_id, **k def list_metric_enriched_series_data( self, detection_configuration_id, # type: str series, # type: Union[List[SeriesIdentity], List[Dict[str, str]]] - start_time, # type: datetime.datetime - end_time, # type: datetime.datetime + start_time, # type: Union[str, datetime.datetime] + end_time, # type: Union[str, datetime.datetime] **kwargs # type: Any ): # type: (...) -> AsyncItemPaged[SeriesResult] @@ -263,8 +265,8 @@ def list_metric_enriched_series_data( :param str detection_configuration_id: anomaly alerting configuration unique id. :param series: List of dimensions specified for series. :type series: ~azure.ai.metricsadvisor.models.SeriesIdentity or list[dict[str, str]] - :param ~datetime.datetime start_time: start time filter under chosen time mode. - :param ~datetime.datetime end_time: end time filter under chosen time mode. + :param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :return: Pageable of SeriesResult :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.SeriesResult] :raises ~azure.core.exceptions.HttpResponseError: @@ -277,9 +279,11 @@ def list_metric_enriched_series_data( ] or series series_list = cast(List[SeriesIdentity], series_list) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) detection_series_query = DetectionSeriesQuery( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, series=series_list ) @@ -289,14 +293,20 @@ def list_metric_enriched_series_data( **kwargs) @distributed_trace - def list_alerts_for_alert_configuration(self, alert_configuration_id, start_time, end_time, time_mode, **kwargs): - # type: (str, datetime.datetime, datetime.datetime, Union[str, TimeMode], Any) -> AsyncItemPaged[Alert] + def list_alerts_for_alert_configuration(self, + alert_configuration_id, # type: str + start_time, # type: Union[str, datetime.datetime] + end_time, # type: Union[str, datetime.datetime] + time_mode, # type: Union[str, TimeMode] + **kwargs # type: Any + ): + # type: (...) -> AsyncItemPaged[Alert] """Query alerts under anomaly alert configuration. :param alert_configuration_id: anomaly alert configuration unique id. :type alert_configuration_id: str - :param ~datetime.datetime start_time: start time. - :param ~datetime.datetime end_time: end time. + :param Union[str, ~datetime.datetime] start_time: start time. + :param Union[str, ~datetime.datetime] end_time: end time. :param time_mode: time mode. 
Possible values include: "AnomalyTime", "CreatedTime", "ModifiedTime". :type time_mode: str or ~azure.ai.metricsadvisor.models.TimeMode @@ -316,10 +326,12 @@ def list_alerts_for_alert_configuration(self, alert_configuration_id, start_time """ skip = kwargs.pop('skip', None) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) alerting_result_query = AlertingResultQuery( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, time_mode=time_mode, ) @@ -366,14 +378,14 @@ def list_anomalies_for_alert(self, alert_configuration_id, alert_id, **kwargs): @distributed_trace def list_anomalies_for_detection_configuration(self, detection_configuration_id, start_time, end_time, **kwargs): - # type: (str, datetime.datetime, datetime.datetime, Any) -> AsyncItemPaged[Anomaly] + # type: (str, Union[str, datetime.datetime], Union[str, datetime.datetime], Any) -> AsyncItemPaged[Anomaly] """Query anomalies under anomaly detection configuration. :param detection_configuration_id: anomaly detection configuration unique id. :type detection_configuration_id: str - :param ~datetime.datetime start_time: start time filter under chosen time mode. - :param ~datetime.datetime end_time: end time filter under chosen time mode. + :param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :keyword int skip: :keyword filter: :paramtype filter: ~azure.ai.metricsadvisor.models.DetectionAnomalyFilterCondition @@ -384,9 +396,11 @@ def list_anomalies_for_detection_configuration(self, detection_configuration_id, skip = kwargs.pop('skip', None) filter_condition = kwargs.pop('filter', None) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) detection_anomaly_result_query = DetectionAnomalyResultQuery( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, filter=filter_condition, ) @@ -405,15 +419,15 @@ def list_dimension_values_for_detection_configuration( end_time, **kwargs ): - # type: (str, str, datetime.datetime, datetime.datetime, Any) -> AsyncItemPaged[str] + # type: (str, str, Union[str, datetime.datetime], Union[str, datetime.datetime], Any) -> AsyncItemPaged[str] """Query dimension values of anomalies. :param detection_configuration_id: anomaly detection configuration unique id. :type detection_configuration_id: str :param str dimension_name: dimension to query. - :param ~datetime.datetime start_time: start time filter under chosen time mode. - :param ~datetime.datetime end_time: end time filter under chosen time mode. + :param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :keyword int skip: :keyword str dimension_name: The dimension name to query. 
:paramtype dimension_filter: ~azure.ai.metricsadvisor.models.DimensionGroupIdentity @@ -424,9 +438,11 @@ def list_dimension_values_for_detection_configuration( skip = kwargs.pop('skip', None) dimension_filter = kwargs.pop('dimension_filter', None) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) anomaly_dimension_query = AnomalyDimensionQuery( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, dimension_name=dimension_name, dimension_filter=dimension_filter, ) @@ -464,14 +480,14 @@ def list_incidents_for_alert(self, alert_configuration_id, alert_id, **kwargs): @distributed_trace def list_incidents_for_detection_configuration(self, detection_configuration_id, start_time, end_time, **kwargs): - # type: (str, datetime.datetime, datetime.datetime, Any) -> AsyncItemPaged[Incident] + # type: (str, Union[str, datetime.datetime], Union[str, datetime.datetime], Any) -> AsyncItemPaged[Incident] """Query incidents under a specific alert. :param detection_configuration_id: anomaly detection configuration unique id. :type detection_configuration_id: str - :param ~datetime.datetime start_time: start time filter under chosen time mode. - :param ~datetime.datetime end_time: end time filter under chosen time mode. + :param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :keyword filter: :paramtype filter: ~azure.ai.metricsadvisor.models.DetectionIncidentFilterCondition :return: Incidents under a specific alert. @@ -480,10 +496,12 @@ def list_incidents_for_detection_configuration(self, detection_configuration_id, """ filter_condition = kwargs.pop('filter', None) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) detection_incident_result_query = DetectionIncidentResultQuery( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, filter=filter_condition, ) @@ -528,8 +546,8 @@ def list_metric_dimension_values(self, metric_id, dimension_name, **kwargs): @distributed_trace def list_metrics_series_data( self, metric_id, # type: str - start_time, # type: datetime.datetime - end_time, # type: datetime.datetime + start_time, # type: Union[str, datetime.datetime] + end_time, # type: Union[str, datetime.datetime] series_to_filter, # type: List[Dict[str, str]] **kwargs # type: Any ): @@ -539,8 +557,8 @@ def list_metrics_series_data( :param metric_id: metric unique id. :type metric_id: str - :param ~datetime.datetime start_time: start time filter under chosen time mode. - :param ~datetime.datetime end_time: end time filter under chosen time mode. + :param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :param series_to_filter: query specific series. :type series_to_filter: list[dict[str, str]] :return: Time series data from metric. 
@@ -548,9 +566,12 @@ def list_metrics_series_data( :raises ~azure.core.exceptions.HttpResponseError: """ + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) + metric_data_query_options = MetricDataQueryOptions( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, series=series_to_filter, ) @@ -596,15 +617,20 @@ def list_metric_series_definitions(self, metric_id, active_since, **kwargs): **kwargs) @distributed_trace - def list_metric_enrichment_status(self, metric_id, start_time, end_time, **kwargs): - # type: (str, datetime.datetime, datetime.datetime, Any) -> AsyncItemPaged[EnrichmentStatus] + def list_metric_enrichment_status(self, + metric_id, # type: str + start_time, # type: Union[str, datetime.datetime] + end_time, # type: Union[str, datetime.datetime] + **kwargs # type: Any + ): + # type: (...) -> AsyncItemPaged[EnrichmentStatus] """Query anomaly detection status. :param metric_id: filter feedbacks by metric id. :type metric_id: str - :param ~datetime.datetime start_time: start time filter under chosen time mode. - :param ~datetime.datetime end_time: end time filter under chosen time mode. + :param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode. + :param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode. :keyword int skip: :return: Anomaly detection status. :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.metricsadvisor.models.EnrichmentStatus] @@ -612,9 +638,11 @@ def list_metric_enrichment_status(self, metric_id, start_time, end_time, **kwarg """ skip = kwargs.pop('skip', None) + converted_start_time = convert_datetime(start_time) + converted_end_time = convert_datetime(end_time) enrichment_status_query_option = EnrichmentStatusQueryOption( - start_time=start_time, - end_time=end_time, + start_time=converted_start_time, + end_time=converted_end_time, ) return self._client.get_enrichment_status_by_metric( # type: ignore diff --git a/sdk/metricsadvisor/azure-ai-metricsadvisor/tests/test_helper_methods.py b/sdk/metricsadvisor/azure-ai-metricsadvisor/tests/test_helper_methods.py new file mode 100644 index 000000000000..37e879a3adcf --- /dev/null +++ b/sdk/metricsadvisor/azure-ai-metricsadvisor/tests/test_helper_methods.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import datetime +import pytest +from azure.ai.metricsadvisor._helpers import convert_datetime + + +def test_convert_datetime(): + input = "2000-01-01 00:00:00" + date_time = convert_datetime(input) + assert date_time == datetime.datetime(2000, 1, 1) + + input = "2000-01-01T00:00:00Z" + date_time = convert_datetime(input) + assert date_time == datetime.datetime(2000, 1, 1) + + input = "2000-01-01" + date_time = convert_datetime(input) + assert date_time == datetime.datetime(2000, 1, 1) + + input = datetime.datetime(2000, 1, 1) + date_time = convert_datetime(input) + assert date_time == datetime.datetime(2000, 1, 1) + + with pytest.raises(TypeError): + input = ("2000-01-01 00:00:00", "2000-01-01 00:00:00") + convert_datetime(input) \ No newline at end of file
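The new convert_datetime helper in _helpers.py passes datetime.datetime values through unchanged and tries three string layouts in order: "%Y-%m-%d", "%Y-%m-%dT%H:%M:%SZ", then "%Y-%m-%d %H:%M:%S". A minimal sketch of calling it directly, mirroring the formats exercised by the new unit test:

import datetime

from azure.ai.metricsadvisor._helpers import convert_datetime

# datetime objects are returned as-is; strings are parsed with the first matching layout.
assert convert_datetime(datetime.datetime(2020, 9, 9)) == datetime.datetime(2020, 9, 9)
assert convert_datetime("2020-09-09") == datetime.datetime(2020, 9, 9)
assert convert_datetime("2020-09-09T00:00:00Z") == datetime.datetime(2020, 9, 9)
assert convert_datetime("2020-09-09 00:00:00") == datetime.datetime(2020, 9, 9)

# Anything that is neither a datetime nor a string (for example a tuple) raises TypeError,
# which is what the new test asserts with pytest.raises(TypeError).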
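Because the query models (AlertingResultQuery, DetectionAnomalyResultQuery, MetricDataQueryOptions, and the rest) now receive the converted values, callers of the sync and async clients can supply either strings or datetime objects for start_time/end_time. A minimal sketch, assuming the package's public MetricsAdvisorClient and MetricsAdvisorKeyCredential constructors; the endpoint, keys, and configuration id below are placeholders:

from azure.ai.metricsadvisor import MetricsAdvisorClient, MetricsAdvisorKeyCredential

# Placeholder endpoint and keys; assumes the standard key-credential construction for this package.
client = MetricsAdvisorClient(
    "https://<resource-name>.cognitiveservices.azure.com",
    MetricsAdvisorKeyCredential("<subscription_key>", "<api_key>"),
)

# start_time/end_time may now be strings; the client converts them with convert_datetime
# before building AlertingResultQuery.
alerts = client.list_alerts_for_alert_configuration(
    "<anomaly_alert_configuration_id>",
    start_time="2020-09-01",
    end_time="2020-09-09T00:00:00Z",
    time_mode="AnomalyTime",
)
for alert in alerts:
    print(alert)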
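The administration client changes follow the same pattern for the ingestion APIs. Another minimal sketch, again with placeholder endpoint, keys, and data feed id, and assuming the public MetricsAdvisorAdministrationClient constructor:

from azure.ai.metricsadvisor import (
    MetricsAdvisorAdministrationClient,
    MetricsAdvisorKeyCredential,
)

# Placeholder values only.
admin_client = MetricsAdvisorAdministrationClient(
    "https://<resource-name>.cognitiveservices.azure.com",
    MetricsAdvisorKeyCredential("<subscription_key>", "<api_key>"),
)

# Both ingestion methods accept string or datetime boundaries after this change.
admin_client.refresh_data_feed_ingestion(
    "<data_feed_id>",
    start_time="2020-09-01T00:00:00Z",
    end_time="2020-09-09T00:00:00Z",
)

for status in admin_client.list_data_feed_ingestion_status(
    "<data_feed_id>",
    start_time="2020-09-01",
    end_time="2020-09-09",
):
    print(status)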