Ma accept str for datetime #14517

Merged: 15 commits, Oct 16, 2020
@@ -187,3 +187,17 @@ def convert_to_sub_feedback(feedback):
if feedback.feedback_type == "Period":
return PeriodFeedback._from_generated(feedback) # type: ignore
raise HttpResponseError("Invalid feedback type returned in the response.")

def convert_datetime(date_time):
# type: (Union[str, datetime.datetime]) -> datetime.datetime
if isinstance(date_time, datetime.datetime):
return date_time
if isinstance(date_time, six.string_types):
try:
return datetime.datetime.strptime(date_time, "%Y-%m-%d")
except ValueError:
try:
return datetime.datetime.strptime(date_time, "%Y-%m-%dT%H:%M:%SZ")
except ValueError:
return datetime.datetime.strptime(date_time, "%Y-%m-%d %H:%M:%S")
raise TypeError("Bad datetime type")
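For reference, the new helper accepts either a datetime.datetime or a string in one of three formats: "%Y-%m-%d", "%Y-%m-%dT%H:%M:%SZ", or "%Y-%m-%d %H:%M:%S". Below is a minimal standalone sketch of the same behavior; it is not part of the PR and the name parse_datetime is illustrative only.

```python
import datetime

def parse_datetime(date_time):
    """Illustrative mirror of the convert_datetime helper added above."""
    if isinstance(date_time, datetime.datetime):
        return date_time  # already a datetime, pass through unchanged
    if isinstance(date_time, str):
        # Try the three formats the helper supports, in the same order.
        for fmt in ("%Y-%m-%d", "%Y-%m-%dT%H:%M:%SZ", "%Y-%m-%d %H:%M:%S"):
            try:
                return datetime.datetime.strptime(date_time, fmt)
            except ValueError:
                continue
        # The PR's helper lets the final strptime's ValueError propagate here.
        raise ValueError("Unsupported datetime string: %r" % date_time)
    raise TypeError("Bad datetime type")

print(parse_datetime("2020-10-16"))            # 2020-10-16 00:00:00
print(parse_datetime("2020-10-16T08:30:00Z"))  # 2020-10-16 08:30:00
print(parse_datetime("2020-10-16 08:30:00"))   # 2020-10-16 08:30:00
```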
@@ -58,7 +58,8 @@
construct_alert_config_dict,
construct_detection_config_dict,
construct_hook_dict,
construct_data_feed_dict
construct_data_feed_dict,
convert_datetime
)
from .models._models import (
DataFeed,
@@ -556,8 +557,8 @@ def get_data_feed_ingestion_progress(
def refresh_data_feed_ingestion(
self,
data_feed_id, # type: str
start_time, # type: datetime.datetime
end_time, # type: datetime.datetime
start_time, # type: Union[str, datetime.datetime]
end_time, # type: Union[str, datetime.datetime]
**kwargs # type: Any
):
# type: (...) -> None
@@ -566,9 +567,9 @@ def refresh_data_feed_ingestion(
:param data_feed_id: The data feed unique id.
:type data_feed_id: str
:param start_time: The start point of time range to refresh data ingestion.
:type start_time: ~datetime.datetime
:type start_time: Union[str, ~datetime.datetime]
:param end_time: The end point of time range to refresh data ingestion.
:type end_time: ~datetime.datetime
:type end_time: Union[str, ~datetime.datetime]
:return: None
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
@@ -582,11 +583,13 @@ def refresh_data_feed_ingestion(
:dedent: 4
:caption: Refresh data feed ingestion over a period of time
"""
converted_start_time = convert_datetime(start_time)
converted_end_time = convert_datetime(end_time)
self._client.reset_data_feed_ingestion_status(
data_feed_id,
body=_IngestionProgressResetOptions(
start_time=start_time,
end_time=end_time
start_time=converted_start_time,
end_time=converted_end_time
),
**kwargs
)
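With this change the administration client accepts plain strings for the ingestion window. A usage sketch follows; the endpoint, keys, and data feed id are placeholders, not values from this PR.

```python
from azure.ai.metricsadvisor import (
    MetricsAdvisorAdministrationClient,
    MetricsAdvisorKeyCredential,
)

# Placeholder endpoint, keys, and data feed id -- illustration only.
client = MetricsAdvisorAdministrationClient(
    "https://<resource-name>.cognitiveservices.azure.com/",
    MetricsAdvisorKeyCredential("<subscription_key>", "<api_key>"),
)

# Strings are now accepted; previously these arguments had to be datetime.datetime.
client.refresh_data_feed_ingestion(
    "<data_feed_id>",
    start_time="2020-09-29",
    end_time="2020-09-30T00:00:00Z",
)
```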
@@ -1171,18 +1174,18 @@ def list_metric_anomaly_detection_configurations(
def list_data_feed_ingestion_status(
self,
data_feed_id, # type: str
start_time, # type: datetime.datetime
end_time, # type: datetime.datetime
start_time, # type: Union[str, datetime.datetime]
end_time, # type: Union[str, datetime.datetime]
**kwargs # type: Any
):
# type: (...) -> ItemPaged[DataFeedIngestionStatus]
"""Get data ingestion status by data feed.

:param str data_feed_id: The data feed unique id.
:param start_time: Required. the start point of time range to query data ingestion status.
:type start_time: ~datetime.datetime
:type start_time: Union[str, ~datetime.datetime]
:param end_time: Required. the end point of time range to query data ingestion status.
:type end_time: ~datetime.datetime
:type end_time: Union[str, ~datetime.datetime]
:keyword int skip:
:return: Pageable of DataFeedIngestionStatus
:rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.DataFeedIngestionStatus]
@@ -1199,12 +1202,14 @@ def list_data_feed_ingestion_status(
"""

skip = kwargs.pop("skip", None)
converted_start_time = convert_datetime(start_time)
converted_end_time = convert_datetime(end_time)

return self._client.get_data_feed_ingestion_status( # type: ignore
data_feed_id=data_feed_id,
body=_IngestionStatusQueryOptions(
start_time=start_time,
end_time=end_time
start_time=converted_start_time,
end_time=converted_end_time
),
skip=skip,
**kwargs
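The paged ingestion-status listing accepts the same string forms. Another sketch with placeholder values:

```python
from azure.ai.metricsadvisor import (
    MetricsAdvisorAdministrationClient,
    MetricsAdvisorKeyCredential,
)

# Placeholder endpoint, keys, and data feed id -- illustration only.
client = MetricsAdvisorAdministrationClient(
    "https://<resource-name>.cognitiveservices.azure.com/",
    MetricsAdvisorKeyCredential("<subscription_key>", "<api_key>"),
)

# Both the date-only and the space-separated forms parse via convert_datetime.
ingestion_status = client.list_data_feed_ingestion_status(
    "<data_feed_id>",
    start_time="2020-09-01",
    end_time="2020-10-01 00:00:00",
)
for status in ingestion_status:
    print(status.timestamp, status.status)
```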
@@ -7,7 +7,6 @@
# pylint: disable=protected-access

from typing import List, Union, Dict, Any, cast, TYPE_CHECKING
import datetime # pylint:disable=unused-import

from azure.core.tracing.decorator import distributed_trace
from ._metrics_advisor_key_credential import MetricsAdvisorKeyCredential
@@ -27,7 +26,7 @@
FeedbackDimensionFilter,
)
from ._generated import AzureCognitiveServiceMetricsAdvisorRESTAPIOpenAPIV2
from ._helpers import convert_to_sub_feedback
from ._helpers import convert_to_sub_feedback, convert_datetime
from .models._models import (
Incident,
Anomaly,
@@ -38,6 +37,7 @@
from ._version import SDK_MONIKER

if TYPE_CHECKING:
import datetime
from ._generated.models import (
SeriesResult,
EnrichmentStatus,
@@ -175,8 +175,8 @@ def list_feedbacks(self, metric_id, **kwargs):
:keyword feedback_type: filter feedbacks by type. Possible values include: "Anomaly",
"ChangePoint", "Period", "Comment".
:paramtype feedback_type: str or ~azure.ai.metricsadvisor.models.FeedbackType
:keyword ~datetime.datetime start_time: start time filter under chosen time mode.
:keyword ~datetime.datetime end_time: end time filter under chosen time mode.
:keyword Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode.
:keyword Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode.
:keyword time_mode: time mode to filter feedback. Possible values include: "MetricTimestamp",
"FeedbackCreatedTime".
:paramtype time_mode: str or ~azure.ai.metricsadvisor.models.FeedbackQueryTimeMode
@@ -203,13 +203,15 @@ def list_feedbacks(self, metric_id, **kwargs):
feedback_type = kwargs.pop('feedback_type', None)
start_time = kwargs.pop('start_time', None)
end_time = kwargs.pop('end_time', None)
converted_start_time = convert_datetime(start_time) if start_time else None
converted_end_time = convert_datetime(end_time) if end_time else None
time_mode = kwargs.pop('time_mode', None)
feedback_filter = MetricFeedbackFilter(
metric_id=metric_id,
dimension_filter=dimension_filter,
feedback_type=feedback_type,
start_time=start_time,
end_time=end_time,
start_time=converted_start_time,
end_time=converted_end_time,
time_mode=time_mode,
)

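On the query client, start_time and end_time are optional keywords for feedback listing and now also accept strings, so they are only converted when supplied. A sketch with placeholder endpoint, keys, and metric id:

```python
from azure.ai.metricsadvisor import MetricsAdvisorClient, MetricsAdvisorKeyCredential

# Placeholder endpoint, keys, and metric id -- illustration only.
client = MetricsAdvisorClient(
    "https://<resource-name>.cognitiveservices.azure.com/",
    MetricsAdvisorKeyCredential("<subscription_key>", "<api_key>"),
)

feedbacks = client.list_feedbacks(
    "<metric_id>",
    start_time="2020-09-01",
    end_time="2020-10-01",
    time_mode="FeedbackCreatedTime",
)
for feedback in feedbacks:
    print(feedback)
```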
@@ -249,8 +251,8 @@ def list_incident_root_causes(self, detection_configuration_id, incident_id, **k
def list_metric_enriched_series_data(
self, detection_configuration_id, # type: str
series, # type: Union[List[SeriesIdentity], List[Dict[str, str]]]
start_time, # type: datetime.datetime
end_time, # type: datetime.datetime
start_time, # type: Union[str, datetime.datetime]
end_time, # type: Union[str, datetime.datetime]
**kwargs # type: Any
):
# type: (...) -> ItemPaged[SeriesResult]
@@ -259,8 +261,8 @@ def list_metric_enriched_series_data(
:param str detection_configuration_id: anomaly alerting configuration unique id.
:param series: List of dimensions specified for series.
:type series: ~azure.ai.metricsadvisor.models.SeriesIdentity or list[dict[str, str]]
:param ~datetime.datetime start_time: start time filter under chosen time mode.
:param ~datetime.datetime end_time: end time filter under chosen time mode.
:param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode.
:param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode.
:return: Pageable of SeriesResult
:rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.SeriesResult]
:raises ~azure.core.exceptions.HttpResponseError:
@@ -273,9 +275,11 @@ def list_metric_enriched_series_data(
] or series

series_list = cast(List[SeriesIdentity], series_list)
converted_start_time = convert_datetime(start_time)
converted_end_time = convert_datetime(end_time)
detection_series_query = DetectionSeriesQuery(
start_time=start_time,
end_time=end_time,
start_time=converted_start_time,
end_time=converted_end_time,
series=series_list
)

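The enriched series query takes a list of dimension dictionaries plus the time window; a sketch in which the dimension names and values are hypothetical:

```python
from azure.ai.metricsadvisor import MetricsAdvisorClient, MetricsAdvisorKeyCredential

# Placeholder endpoint, keys, and configuration id -- illustration only.
client = MetricsAdvisorClient(
    "https://<resource-name>.cognitiveservices.azure.com/",
    MetricsAdvisorKeyCredential("<subscription_key>", "<api_key>"),
)

# Hypothetical dimension combination; the time window may be strings or datetimes.
results = client.list_metric_enriched_series_data(
    "<detection_configuration_id>",
    series=[{"city": "Los Angeles", "category": "Homemade"}],
    start_time="2020-09-01T00:00:00Z",
    end_time="2020-10-01T00:00:00Z",
)
for series_result in results:
    print(series_result)
```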
@@ -292,8 +296,8 @@ def list_alerts_for_alert_configuration(self, alert_configuration_id, start_time

:param alert_configuration_id: anomaly alert configuration unique id.
:type alert_configuration_id: str
:param ~datetime.datetime start_time: start time.
:param ~datetime.datetime end_time: end time.
:param Union[str, ~datetime.datetime] start_time: start time.
:param Union[str, ~datetime.datetime] end_time: end time.
:param time_mode: time mode. Possible values include: "AnomalyTime", "CreatedTime",
"ModifiedTime".
:type time_mode: str or ~azure.ai.metricsadvisor.models.TimeMode
@@ -313,10 +317,12 @@ def list_alerts_for_alert_configuration(self, alert_configuration_id, start_time
"""

skip = kwargs.pop('skip', None)
converted_start_time = convert_datetime(start_time)
converted_end_time = convert_datetime(end_time)

alerting_result_query = AlertingResultQuery(
start_time=start_time,
end_time=end_time,
start_time=converted_start_time,
end_time=converted_end_time,
time_mode=time_mode,
)

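Alert listing additionally takes a time_mode; the start and end values can now be strings as well. Sketch with placeholder ids:

```python
from azure.ai.metricsadvisor import MetricsAdvisorClient, MetricsAdvisorKeyCredential

# Placeholder endpoint, keys, and alert configuration id -- illustration only.
client = MetricsAdvisorClient(
    "https://<resource-name>.cognitiveservices.azure.com/",
    MetricsAdvisorKeyCredential("<subscription_key>", "<api_key>"),
)

alerts = client.list_alerts_for_alert_configuration(
    "<alert_configuration_id>",
    start_time="2020-09-01",   # date-only string
    end_time="2020-10-01",
    time_mode="AnomalyTime",
)
for alert in alerts:
    print(alert)
```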
@@ -369,8 +375,8 @@ def list_anomalies_for_detection_configuration(self, detection_configuration_id,

:param detection_configuration_id: anomaly detection configuration unique id.
:type detection_configuration_id: str
:param ~datetime.datetime start_time: start time filter under chosen time mode.
:param ~datetime.datetime end_time: end time filter under chosen time mode.
:param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode.
:param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode.
:keyword int skip:
:keyword filter:
:paramtype filter: ~azure.ai.metricsadvisor.models.DetectionAnomalyFilterCondition
@@ -381,9 +387,11 @@ def list_anomalies_for_detection_configuration(self, detection_configuration_id,

skip = kwargs.pop('skip', None)
filter_condition = kwargs.pop('filter', None)
converted_start_time = convert_datetime(start_time)
converted_end_time = convert_datetime(end_time)
detection_anomaly_result_query = DetectionAnomalyResultQuery(
start_time=start_time,
end_time=end_time,
start_time=converted_start_time,
end_time=converted_end_time,
filter=filter_condition,
)

@@ -402,15 +410,15 @@ def list_dimension_values_for_detection_configuration(
end_time,
**kwargs
):
# type: (str, str, datetime.datetime, datetime.datetime, Any) -> ItemPaged[str]
# type: (str, str, Union[str, datetime.datetime], Union[str, datetime.datetime], Any) -> ItemPaged[str]

"""Query dimension values of anomalies.

:param detection_configuration_id: anomaly detection configuration unique id.
:type detection_configuration_id: str
:param str dimension_name: dimension to query.
:param ~datetime.datetime start_time: start time filter under chosen time mode.
:param ~datetime.datetime end_time: end time filter under chosen time mode.
:param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode.
:param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode.
:keyword int skip:
:keyword str dimension_name: The dimension name to query.
:paramtype dimension_filter: ~azure.ai.metricsadvisor.models.DimensionGroupIdentity
@@ -421,9 +429,11 @@ def list_dimension_values_for_detection_configuration(

skip = kwargs.pop('skip', None)
dimension_filter = kwargs.pop('dimension_filter', None)
converted_start_time = convert_datetime(start_time)
converted_end_time = convert_datetime(end_time)
anomaly_dimension_query = AnomalyDimensionQuery(
start_time=start_time,
end_time=end_time,
start_time=converted_start_time,
end_time=converted_end_time,
dimension_name=dimension_name,
dimension_filter=dimension_filter,
)
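The dimension-value query follows the same pattern; in the sketch below the queried dimension name is hypothetical:

```python
from azure.ai.metricsadvisor import MetricsAdvisorClient, MetricsAdvisorKeyCredential

# Placeholder endpoint, keys, and configuration id -- illustration only.
client = MetricsAdvisorClient(
    "https://<resource-name>.cognitiveservices.azure.com/",
    MetricsAdvisorKeyCredential("<subscription_key>", "<api_key>"),
)

dimension_values = client.list_dimension_values_for_detection_configuration(
    "<detection_configuration_id>",
    "city",                    # hypothetical dimension to query
    start_time="2020-09-01",
    end_time="2020-10-01",
)
for value in dimension_values:
    print(value)
```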
@@ -461,14 +471,14 @@ def list_incidents_for_alert(self, alert_configuration_id, alert_id, **kwargs):

@distributed_trace
def list_incidents_for_detection_configuration(self, detection_configuration_id, start_time, end_time, **kwargs):
# type: (str, datetime.datetime, datetime.datetime, Any) -> ItemPaged[Incident]
# type: (str, Union[str, datetime.datetime], Union[str, datetime.datetime], Any) -> ItemPaged[Incident]

"""Query incidents under a specific alert.

:param detection_configuration_id: anomaly detection configuration unique id.
:type detection_configuration_id: str
:param ~datetime.datetime start_time: start time filter under chosen time mode.
:param ~datetime.datetime end_time: end time filter under chosen time mode.
:param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode.
:param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode.
:keyword filter:
:paramtype filter: ~azure.ai.metricsadvisor.models.DetectionIncidentFilterCondition
:return: Incidents under a specific alert.
@@ -477,10 +487,12 @@ def list_incidents_for_detection_configuration(self, detection_configuration_id,
"""

filter_condition = kwargs.pop('filter', None)
converted_start_time = convert_datetime(start_time)
converted_end_time = convert_datetime(end_time)

detection_incident_result_query = DetectionIncidentResultQuery(
start_time=start_time,
end_time=end_time,
start_time=converted_start_time,
end_time=converted_end_time,
filter=filter_condition,
)

@@ -523,25 +535,34 @@ def list_metric_dimension_values(self, metric_id, dimension_name, **kwargs):
**kwargs)

@distributed_trace
def list_metrics_series_data(self, metric_id, start_time, end_time, series_to_filter, **kwargs):
# type: (str, datetime.datetime, datetime.datetime, List[Dict[str, str]], Any) -> ItemPaged[MetricSeriesData]
def list_metrics_series_data(self,
metric_id, # type: str
start_time, # type: Union[str, datetime.datetime]
end_time, # type: Union[str, datetime.datetime]
series_to_filter, # type: List[Dict[str, str]]
**kwargs # type: Any
):
# type: (...) -> ItemPaged[MetricSeriesData]

"""Get time series data from metric.

:param metric_id: metric unique id.
:type metric_id: str
:param ~datetime.datetime start_time: start time filter under chosen time mode.
:param ~datetime.datetime end_time: end time filter under chosen time mode.
:param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode.
:param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode.
:param series_to_filter: query specific series.
:type series_to_filter: list[dict[str, str]]
:return: Time series data from metric.
:rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.MetricSeriesData]
:raises ~azure.core.exceptions.HttpResponseError:
"""

converted_start_time = convert_datetime(start_time)
converted_end_time = convert_datetime(end_time)

metric_data_query_options = MetricDataQueryOptions(
start_time=start_time,
end_time=end_time,
start_time=converted_start_time,
end_time=converted_end_time,
series=series_to_filter,
)

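Raw series data takes series_to_filter as a list of dimension dictionaries; again the dimensions shown are hypothetical:

```python
from azure.ai.metricsadvisor import MetricsAdvisorClient, MetricsAdvisorKeyCredential

# Placeholder endpoint, keys, and metric id -- illustration only.
client = MetricsAdvisorClient(
    "https://<resource-name>.cognitiveservices.azure.com/",
    MetricsAdvisorKeyCredential("<subscription_key>", "<api_key>"),
)

series_data = client.list_metrics_series_data(
    "<metric_id>",
    start_time="2020-09-01 00:00:00",
    end_time="2020-10-01 00:00:00",
    series_to_filter=[{"city": "Los Angeles", "category": "Homemade"}],
)
for data in series_data:
    print(data)
```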
@@ -588,24 +609,26 @@ def list_metric_series_definitions(self, metric_id, active_since, **kwargs):

@distributed_trace
def list_metric_enrichment_status(self, metric_id, start_time, end_time, **kwargs):
# type: (str, datetime.datetime, datetime.datetime, Any) -> ItemPaged[EnrichmentStatus]
# type: (str, Union[str, datetime.datetime], Union[str, datetime.datetime], Any) -> ItemPaged[EnrichmentStatus]

"""Query anomaly detection status.

:param metric_id: filter feedbacks by metric id.
:type metric_id: str
:param ~datetime.datetime start_time: start time filter under chosen time mode.
:param ~datetime.datetime end_time: end time filter under chosen time mode.
:param Union[str, ~datetime.datetime] start_time: start time filter under chosen time mode.
:param Union[str, ~datetime.datetime] end_time: end time filter under chosen time mode.
:keyword int skip:
:return: Anomaly detection status.
:rtype: ~azure.core.paging.ItemPaged[~azure.ai.metricsadvisor.models.EnrichmentStatus]
:raises ~azure.core.exceptions.HttpResponseError:
"""

skip = kwargs.pop('skip', None)
converted_start_time = convert_datetime(start_time)
converted_end_time = convert_datetime(end_time)
enrichment_status_query_option = EnrichmentStatusQueryOption(
start_time=start_time,
end_time=end_time,
start_time=converted_start_time,
end_time=converted_end_time,
)

return self._client.get_enrichment_status_by_metric( # type: ignore