diff --git a/keep-ui/app/dashboard/EditGridItemModal.tsx b/keep-ui/app/dashboard/EditGridItemModal.tsx index 79e9b9ae1..35dfe709e 100644 --- a/keep-ui/app/dashboard/EditGridItemModal.tsx +++ b/keep-ui/app/dashboard/EditGridItemModal.tsx @@ -14,7 +14,7 @@ const EditGridItemModal: React.FC = ({ isOpen, onClose, const [thresholds, setThresholds] = useState([]); useEffect(() => { - if (item) { + if (item?.thresholds) { setThresholds(item.thresholds); } }, [item]); diff --git a/keep-ui/app/dashboard/GridItem.tsx b/keep-ui/app/dashboard/GridItem.tsx index a1ef8a5ea..6c85e2c43 100644 --- a/keep-ui/app/dashboard/GridItem.tsx +++ b/keep-ui/app/dashboard/GridItem.tsx @@ -1,7 +1,7 @@ import React, { useState } from "react"; -import { Card } from "@tremor/react"; +import { AreaChart, Card } from "@tremor/react"; import MenuButton from "./MenuButton"; -import { WidgetData } from "./types"; +import {WidgetData, WidgetType} from "./types"; import AlertQuality from "@/app/alerts/alert-quality-table"; import { useSearchParams } from "next/navigation"; @@ -55,12 +55,14 @@ const GridItem: React.FC = ({ } const getColor = () => { let color = "#000000"; + if (item.widgetType === WidgetType.PRESET && item.thresholds && item.preset) { for (let i = item.thresholds.length - 1; i >= 0; i--) { if (item.preset && item.preset.alerts_count >= item.thresholds[i].value) { color = item.thresholds[i].color; break; } } + } return color; }; @@ -80,11 +82,13 @@ const GridItem: React.FC = ({ }; return ( - +
{/* For table view we need to interact with the table filter and pagination, so we are dragging the widget here */} @@ -111,16 +115,40 @@ const GridItem: React.FC = ({
</div>
)} -
- -
- -
- ); -}; + { item.metric && ( +
+
+ + `${Intl.NumberFormat().format(number).toString()}` + } + startEndOnly + connectNulls + showLegend={false} + showTooltip={true} + xAxisLabel="Timestamp" + /> +
+
+ + )} + + +
+ +
+ +
+ ); + }; -export default GridItem; + export default GridItem; diff --git a/keep-ui/app/dashboard/GridLayout.tsx b/keep-ui/app/dashboard/GridLayout.tsx index 647fc3f3b..d78df13a3 100644 --- a/keep-ui/app/dashboard/GridLayout.tsx +++ b/keep-ui/app/dashboard/GridLayout.tsx @@ -4,6 +4,7 @@ import GridItemContainer from "./GridItemContainer"; import { LayoutItem, WidgetData } from "./types"; import "react-grid-layout/css/styles.css"; import { Preset } from "app/alerts/models"; +import {MetricsWidget} from "@/utils/hooks/useDashboardMetricWidgets"; const ResponsiveGridLayout = WidthProvider(Responsive); @@ -15,6 +16,7 @@ interface GridLayoutProps { onDelete: (id: string) => void; presets: Preset[]; onSave: (updateItem: WidgetData) => void; + metrics: MetricsWidget[]; } const GridLayout: React.FC = ({ @@ -25,6 +27,7 @@ const GridLayout: React.FC = ({ onDelete, onSave, presets, + metrics }) => { const layouts = { lg: layout }; @@ -52,14 +55,18 @@ const GridLayout: React.FC = ({ draggableHandle=".grid-item__widget" > {data.map((item) => { - //Fixing the static hardcode db value. + //Updating the static hardcode db value. if (item.preset) { const preset = presets?.find((p) => p?.id === item?.preset?.id); item.preset = { ...item.preset, alerts_count: preset?.alerts_count ?? 0, }; - + } else if (item.metric) { + const metric = metrics?.find(m => m?.id === item?.metric?.id); + if (metric) { + item.metric = {...metric} + } } return (
diff --git a/keep-ui/app/dashboard/WidgetModal.tsx b/keep-ui/app/dashboard/WidgetModal.tsx index a264c869a..ff331379f 100644 --- a/keep-ui/app/dashboard/WidgetModal.tsx +++ b/keep-ui/app/dashboard/WidgetModal.tsx @@ -1,32 +1,30 @@ -import React, { useState, useEffect, ChangeEvent, FormEvent } from "react"; +import React, {ChangeEvent, useEffect, useState} from "react"; import Modal from "@/components/ui/Modal"; -import { Button, Subtitle, TextInput, Select, SelectItem, Icon } from "@tremor/react"; -import { Trashcan } from "components/icons"; -import { Threshold, WidgetData, GenericsMertics } from "./types"; -import { Preset } from "app/alerts/models"; -import { useForm, Controller, get } from "react-hook-form"; +import {Button, Icon, Select, SelectItem, Subtitle, TextInput} from "@tremor/react"; +import {Trashcan} from "components/icons"; +import {GenericsMetrics, Threshold, WidgetData, WidgetType} from "./types"; +import {Preset} from "app/alerts/models"; +import {Controller, get, useForm, useWatch} from "react-hook-form"; +import {MetricsWidget} from "@/utils/hooks/useDashboardMetricWidgets"; interface WidgetForm { widgetName: string; selectedPreset: string; thresholds: Threshold[]; - selectedWidgetType: string; + widgetType: WidgetType; + selectedMetricWidget: string; selectedGenericMetrics: string; } interface WidgetModalProps { isOpen: boolean; onClose: () => void; - onAddWidget: ( - preset: Preset | null, - thresholds: Threshold[], - name: string, - widgetType?: string, - genericMetrics?: GenericsMertics | null, + onAddWidget: (name: string, widgetType: WidgetType, preset?: Preset, thresholds?: Threshold[], metric?: MetricsWidget, genericMetrics?: GenericsMetrics, ) => void; onEditWidget: (updatedWidget: WidgetData) => void; presets: Preset[]; editingItem?: WidgetData | null; + metricWidgets: MetricsWidget[]; } const GENERIC_METRICS = [ @@ -35,85 +33,100 @@ const GENERIC_METRICS = [ label: "Alert Quality", widgetType: "table", meta: { - defaultFilters: {"fields":"severity"}, + defaultFilters: {"fields": "severity"}, }, }, -] as GenericsMertics[]; +] as GenericsMetrics[]; -const WidgetModal: React.FC = ({ isOpen, onClose, onAddWidget, onEditWidget, presets, editingItem }) => { +const WidgetModal: React.FC = ({ + isOpen, + onClose, + onAddWidget, + onEditWidget, + presets, + editingItem, + metricWidgets + }) => { const [thresholds, setThresholds] = useState([ - { value: 0, color: '#22c55e' }, // Green - { value: 20, color: '#ef4444' } // Red + {value: 0, color: '#22c55e'}, // Green + {value: 20, color: '#ef4444'} // Red ]); - const { control, handleSubmit, setValue, formState: { errors }, reset } = useForm({ + const {control, handleSubmit, setValue, formState: {errors}, reset} = useForm({ defaultValues: { widgetName: '', selectedPreset: '', thresholds: thresholds, - selectedWidgetType: '', + widgetType: WidgetType.PRESET, selectedGenericMetrics: '' } }); - const [currentWidgetType, setCurrentWidgetType] = useState(''); + const widgetType = useWatch({ + control, + name: 'widgetType', + }); useEffect(() => { if (editingItem) { setValue("widgetName", editingItem.name); + setValue('widgetType', editingItem.widgetType); + + if (editingItem.thresholds) { + setThresholds(editingItem.thresholds); + } setValue("selectedPreset", editingItem?.preset?.id ?? ""); - setValue("selectedWidgetType", editingItem?.widgetType ?? ""); + setValue('selectedMetricWidget', editingItem?.metric?.id ?? ""); setValue("selectedGenericMetrics", editingItem?.genericMetrics?.key ?? 
""); - setThresholds(editingItem.thresholds); } else { reset({ widgetName: '', selectedPreset: '', + selectedMetricWidget: '', + selectedGenericMetrics: '', thresholds: thresholds, - selectedWidgetType: "", + widgetType: WidgetType.PRESET, }); } }, [editingItem, setValue, reset]); const handleThresholdChange = (index: number, field: 'value' | 'color', e: ChangeEvent) => { const value = field === 'value' ? e.target.value : e.target.value; - const updatedThresholds = thresholds.map((t, i) => i === index ? { ...t, [field]: value } : t); + const updatedThresholds = thresholds.map((t, i) => i === index ? {...t, [field]: value} : t); setThresholds(updatedThresholds); }; const handleThresholdBlur = () => { setThresholds(prevThresholds => { return prevThresholds - .map(t => ({ - ...t, - value: parseInt(t.value.toString(), 10) || 0 - })) - .sort((a, b) => a.value - b.value); + .map(t => ({ + ...t, + value: parseInt(t.value.toString(), 10) || 0 + })) + .sort((a, b) => a.value - b.value); }); }; const handleAddThreshold = () => { const maxThreshold = Math.max(...thresholds.map(t => t.value), 0); - setThresholds([...thresholds, { value: maxThreshold + 10, color: '#000000' }]); + setThresholds([...thresholds, {value: maxThreshold + 10, color: '#000000'}]); }; const handleRemoveThreshold = (index: number) => { setThresholds(thresholds.filter((_, i) => i !== index)); }; - const deepClone = (obj: GenericsMertics|undefined) => { - if(!obj){ - return null; - } - try{ - return JSON.parse(JSON.stringify(obj)) as GenericsMertics; - }catch(e){ - return null; + const deepClone = (obj: GenericsMetrics | undefined) => { + if (!obj) { + return obj; } + return JSON.parse(JSON.stringify(obj)) as GenericsMetrics; + }; const onSubmit = (data: WidgetForm) => { - const preset = presets.find(p => p.id === data.selectedPreset) ?? 
null; + const preset = presets.find(p => p.id === data.selectedPreset); + const metric = metricWidgets.find(p => p.id === data.selectedMetricWidget); if (preset || data.selectedGenericMetrics) { const formattedThresholds = thresholds.map(t => ({ ...t, @@ -124,31 +137,51 @@ const WidgetModal: React.FC = ({ isOpen, onClose, onAddWidget, let updatedWidget: WidgetData = { ...editingItem, name: data.widgetName, - widgetType: data.selectedWidgetType || "preset", //backwards compatibility + widgetType: data.widgetType || WidgetType.PRESET, // backwards compatibility preset, thresholds: formattedThresholds, - genericMetrics: editingItem.genericMetrics || null, + genericMetrics: editingItem.genericMetrics, }; onEditWidget(updatedWidget); } else { onAddWidget( - preset, - formattedThresholds, - data.widgetName, - data.selectedWidgetType, - deepClone(GENERIC_METRICS.find((g) => g.key === data.selectedGenericMetrics)) + data.widgetName, + data.widgetType, + preset, + formattedThresholds, + undefined, + deepClone(GENERIC_METRICS.find((g) => g.key === data.selectedGenericMetrics)) ); // cleanup form setThresholds([ - { value: 0, color: '#22c55e' }, // Green - { value: 20, color: '#ef4444' } // Red + {value: 0, color: '#22c55e'}, // Green + {value: 20, color: '#ef4444'} // Red ]); reset({ widgetName: '', selectedPreset: '', thresholds: thresholds, selectedGenericMetrics: '', - selectedWidgetType: '', + widgetType: WidgetType.PRESET, + }); + } + onClose(); + } + if (metric) { + if (editingItem) { + const updatedWidget: WidgetData = { + ...editingItem, + name: data.widgetName, + widgetType: data.widgetType, + }; + onEditWidget(updatedWidget); + } else { + onAddWidget(data.widgetName, data.widgetType, undefined, undefined, metric, undefined); + reset({ + widgetName: '', + selectedPreset: '', + widgetType: WidgetType.PRESET, + thresholds: thresholds, }); } onClose(); @@ -156,135 +189,156 @@ const WidgetModal: React.FC = ({ isOpen, onClose, onAddWidget, }; return ( - -
-
- Widget Name - ( - - )} - /> -
-
- Widget Type - { - setCurrentWidgetType(field.value); - return - }} - /> -
- {currentWidgetType === 'preset' ? ( - <> -
- Preset - ( - - )} - /> -
-
-
- Thresholds - -
-
- {thresholds.map((threshold, index) => ( -
- handleThresholdChange(index, 'value', e)} - onBlur={handleThresholdBlur} - placeholder="Threshold value" - required - /> - handleThresholdChange(index, 'color', e)} - className="w-10 h-10 p-1 border" - required - /> - {thresholds.length > 1 && ( - + + +
+ Widget Name + ( + )} -
- ))} + />
-
- - ): currentWidgetType === 'generic_metrics' && <>
- Generic Metrics - ( - - )} - /> + Widget Type + { + return + }} + />
- } - - - + {widgetType === WidgetType.PRESET ? ( + <> +
+ Preset + ( + + )} + /> +
+
+
+ Thresholds + +
+
+ {thresholds.map((threshold, index) => ( +
+ handleThresholdChange(index, 'value', e)} + onBlur={handleThresholdBlur} + placeholder="Threshold value" + required + /> + handleThresholdChange(index, 'color', e)} + className="w-10 h-10 p-1 border" + required + /> + {thresholds.length > 1 && ( + + )} +
+ ))} +
+
+ + ) : widgetType === WidgetType.GENERICS_METRICS ? <> +
+ Generic Metrics + ( + + )} + /> +
+ : +
+ Widget + ( + + )} + /> +
} + + + ); }; diff --git a/keep-ui/app/dashboard/[id]/dashboard.tsx b/keep-ui/app/dashboard/[id]/dashboard.tsx index fef94ac8c..77b060624 100644 --- a/keep-ui/app/dashboard/[id]/dashboard.tsx +++ b/keep-ui/app/dashboard/[id]/dashboard.tsx @@ -1,20 +1,20 @@ "use client"; -import { useParams } from "next/navigation"; -import { useState, useEffect, ChangeEvent } from "react"; +import {useParams} from "next/navigation"; +import {ChangeEvent, useEffect, useState} from "react"; import GridLayout from "../GridLayout"; -import { usePresets } from "utils/hooks/usePresets"; import WidgetModal from "../WidgetModal"; -import { Button, Card, TextInput, Subtitle, Icon } from "@tremor/react"; -import { LayoutItem, WidgetData, Threshold, GenericsMertics } from "../types"; -import { Preset } from "app/alerts/models"; -import { FiSave, FiEdit2 } from "react-icons/fi"; -import { useSession } from "next-auth/react"; -import { useDashboards } from "utils/hooks/useDashboards"; -import { useApiUrl } from "utils/hooks/useConfig"; +import {Button, Card, Icon, Subtitle, TextInput} from "@tremor/react"; +import {GenericsMetrics, LayoutItem, Threshold, WidgetData, WidgetType} from "../types"; +import {Preset} from "app/alerts/models"; +import {FiEdit2, FiSave} from "react-icons/fi"; +import {useSession} from "next-auth/react"; +import {useDashboards} from "utils/hooks/useDashboards"; +import {useApiUrl} from "utils/hooks/useConfig"; import "./../styles.css"; -import { toast } from "react-toastify"; -import { GenericFilters } from "@/components/filters/GenericFilters"; -import { useDashboardPreset } from "utils/hooks/useDashboardPresets"; +import {toast} from "react-toastify"; +import {GenericFilters} from "@/components/filters/GenericFilters"; +import {useDashboardPreset} from "utils/hooks/useDashboardPresets"; +import {MetricsWidget, useDashboardMetricWidgets} from '@/utils/hooks/useDashboardMetricWidgets'; const DASHBOARD_FILTERS = [ { @@ -33,6 +33,7 @@ const DashboardPage = () => { const [isModalOpen, setIsModalOpen] = useState(false); const [layout, setLayout] = useState([]); const [widgetData, setWidgetData] = useState([]); + const {widgets: allMetricWidgets} = useDashboardMetricWidgets(true); const [editingItem, setEditingItem] = useState(null); const [dashboardName, setDashboardName] = useState(decodeURIComponent(id)); const [isEditingName, setIsEditingName] = useState(false); @@ -58,21 +59,20 @@ const DashboardPage = () => { const closeModal = () => setIsModalOpen(false); const handleAddWidget = ( - preset: Preset | null, - thresholds: Threshold[], - name: string, - widgetType?: string, - genericMetrics?: GenericsMertics | null + name: string, widgetType: WidgetType, preset?: Preset , + thresholds?: Threshold[], + metric?: MetricsWidget, + genericMetrics?: GenericsMetrics ) => { const uniqueId = `w-${Date.now()}`; const newItem: LayoutItem = { i: uniqueId, x: (layout.length % 12) * 2, y: Math.floor(layout.length / 12) * 2, - w: genericMetrics ? 12 : 3, - h: genericMetrics ? 20 : 3, - minW: genericMetrics ? 10 : 2, - minH: genericMetrics ? 15 : 2, + w: widgetType === WidgetType.GENERICS_METRICS ? 12 : widgetType === WidgetType.METRIC ? 6 : 3, + h: widgetType === WidgetType.GENERICS_METRICS ? 20 : widgetType === WidgetType.METRIC ? 8 : 3, + minW: widgetType === WidgetType.GENERICS_METRICS ? 10 : 2, + minH: widgetType === WidgetType.GENERICS_METRICS ? 15 : widgetType === WidgetType.METRIC ? 
7 : 3, static: false, }; const newWidget: WidgetData = { @@ -80,8 +80,9 @@ const DashboardPage = () => { thresholds, preset, name, - widgetType: widgetType || "preset", - genericMetrics: genericMetrics || null, + widgetType, + genericMetrics, + metric }; setLayout((prevLayout) => [...prevLayout, newItem]); setWidgetData((prevData) => [...prevData, newWidget]); @@ -89,6 +90,7 @@ const DashboardPage = () => { const handleEditWidget = (id: string, update?: WidgetData) => { let itemToEdit = widgetData.find((d) => d.i === id) || null; + console.log(itemToEdit, update) if (itemToEdit && update) { setEditingItem({ ...itemToEdit, ...update }); } else { @@ -226,6 +228,7 @@ const DashboardPage = () => { onDelete={handleDeleteWidget} onSave={handleSaveEdit} presets={allPresets} + metrics={allMetricWidgets} /> )} @@ -236,6 +239,7 @@ const DashboardPage = () => { onEditWidget={handleSaveEdit} presets={allPresets} editingItem={editingItem} + metricWidgets={allMetricWidgets} />
); diff --git a/keep-ui/app/dashboard/types.tsx b/keep-ui/app/dashboard/types.tsx index 05ca0cd95..be80ac842 100644 --- a/keep-ui/app/dashboard/types.tsx +++ b/keep-ui/app/dashboard/types.tsx @@ -1,35 +1,44 @@ -import { Preset } from "app/alerts/models"; +import {Preset} from "app/alerts/models"; +import {MetricsWidget} from "@/utils/hooks/useDashboardMetricWidgets"; + export interface LayoutItem { - i: string; - x: number; - y: number; - w: number; - h: number; - minW?: number; - minH?: number; - static: boolean; - } + i: string; + x: number; + y: number; + w: number; + h: number; + minW?: number; + minH?: number; + static: boolean; +} - export interface GenericsMertics { - key: string; - label: string; - widgetType: "table" | "chart"; - meta: { - defaultFilters: { - [key: string]: string|string[]; - }, - } +export interface GenericsMetrics { + key: string; + label: string; + widgetType: "table" | "chart"; + meta: { + defaultFilters: { + [key: string]: string | string[]; + }, } +} - export interface WidgetData extends LayoutItem { - thresholds: Threshold[]; - preset: Preset | null; - name: string; - widgetType?:string; - genericMetrics?: GenericsMertics| null; - } +export enum WidgetType { + PRESET = 'PRESET', + METRIC = 'METRIC', + GENERICS_METRICS = 'GENERICS_METRICS' +} - export interface Threshold { - value: number; - color: string; - } +export interface WidgetData extends LayoutItem { + thresholds?: Threshold[]; + preset?: Preset; + name: string; + widgetType: WidgetType; + genericMetrics?: GenericsMetrics; + metric?: MetricsWidget; +} + +export interface Threshold { + value: number; + color: string; +} diff --git a/keep-ui/utils/hooks/useDashboardMetricWidgets.ts b/keep-ui/utils/hooks/useDashboardMetricWidgets.ts new file mode 100644 index 000000000..d7d24ecc8 --- /dev/null +++ b/keep-ui/utils/hooks/useDashboardMetricWidgets.ts @@ -0,0 +1,66 @@ + import {useSession} from "next-auth/react"; + import { useApiUrl } from "./useConfig"; +import useSWR from "swr"; +import {fetcher} from "@/utils/fetcher"; +import { usePathname, useSearchParams } from "next/navigation"; + +export interface MetricsWidget { + id: string; + name: string; + data: DistributionData[]; +} + +interface DistributionData { + hour: string; + number: number +} + +interface DashboardDistributionData { + mttr: DistributionData[]; + ipd: DistributionData[]; + apd: DistributionData[]; + wpd: DistributionData[]; + +} + +export const useDashboardMetricWidgets = (useFilters?: boolean) => { + const {data: session} = useSession(); + const apiUrl = useApiUrl(); + const searchParams = useSearchParams(); + const filters = searchParams?.toString(); + + const {data, error, mutate} = useSWR( + session ? `${apiUrl}/dashboard/metric-widgets${ + useFilters && filters ? 
`?${filters}` : "" + }` : null, + (url: string) => fetcher(url, session!.accessToken) + ) + console.log(filters) + + let widgets: MetricsWidget[] = [] + if (data) { + widgets = [ + { + id: "mttr", + name: "MTTR", + data: data.mttr + }, + { + id: "apd", + "name": "Alerts/Day", + data: data.apd + }, + { + id: "ipd", + name: "Incidents/Day", + data: data.ipd + }, + { + id: "wpd", + name: "Workflows/Day", + data: data.wpd + } + ]; + } + return {widgets}; +} \ No newline at end of file diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 503e0e018..1b18a0c47 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -44,6 +44,7 @@ from keep.api.models.db.tenant import * # pylint: disable=unused-wildcard-import from keep.api.models.db.topology import * # pylint: disable=unused-wildcard-import from keep.api.models.db.workflow import * # pylint: disable=unused-wildcard-import +from keep.api.models.time_stamp import TimeStampFilter logger = logging.getLogger(__name__) @@ -1016,7 +1017,11 @@ def get_enrichment_with_session(session, tenant_id, fingerprint, refresh=False): def get_alerts_with_filters( - tenant_id, provider_id=None, filters=None, time_delta=1, with_incidents=False, + tenant_id, + provider_id=None, + filters=None, + time_delta=1, + with_incidents=False, ) -> list[Alert]: with Session(engine) as session: # Create the query @@ -1190,7 +1195,7 @@ def get_last_alerts( Returns: List[Alert]: A list of Alert objects including the first time the alert was triggered. """ - with (Session(engine) as session): + with Session(engine) as session: # Subquery that selects the max and min timestamp for each fingerprint. subquery = ( session.query( @@ -1254,7 +1259,10 @@ def get_last_alerts( query = query.add_columns(AlertToIncident.incident_id.label("incident")) query = query.outerjoin( AlertToIncident, - and_(AlertToIncident.alert_id == Alert.id, AlertToIncident.deleted_at == NULL_FOR_DELETED_AT), + and_( + AlertToIncident.alert_id == Alert.id, + AlertToIncident.deleted_at == NULL_FOR_DELETED_AT, + ), ) if provider_id: @@ -1718,7 +1726,7 @@ def get_rule_distribution(tenant_id, minute=False): .join(AlertToIncident, Incident.id == AlertToIncident.incident_id) .filter( AlertToIncident.deleted_at == NULL_FOR_DELETED_AT, - AlertToIncident.timestamp >= seven_days_ago + AlertToIncident.timestamp >= seven_days_ago, ) .filter(Rule.tenant_id == tenant_id) # Filter by tenant_id .group_by( @@ -2092,12 +2100,47 @@ def get_linked_providers(tenant_id: str) -> List[Tuple[str, str, datetime]]: return providers -def get_provider_distribution(tenant_id: str) -> dict: - """Returns hits per hour and the last alert timestamp for each provider, limited to the last 24 hours.""" +def get_provider_distribution( + tenant_id: str, + aggregate_all: bool = False, + timestamp_filter: TimeStampFilter = None, +) -> ( + list[dict[str, int | Any]] + | dict[str, dict[str, datetime | list[dict[str, int]] | Any]] +): + """ + Calculate the distribution of incidents created over time for a specific tenant. + + Args: + tenant_id (str): ID of the tenant whose incidents are being queried. + timestamp_filter (TimeStampFilter, optional): Filter to specify the time range. + - lower_timestamp (datetime): Start of the time range. + - upper_timestamp (datetime): End of the time range. + + Returns: + List[dict]: A list of dictionaries representing the hourly distribution of incidents. + Each dictionary contains: + - 'timestamp' (str): Timestamp of the hour in "YYYY-MM-DD HH:00" format. 
+ - 'number' (int): Number of incidents created in that hour. + + Notes: + - If no timestamp_filter is provided, defaults to the last 24 hours. + - Supports MySQL, PostgreSQL, and SQLite for timestamp formatting. + """ with Session(engine) as session: twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24) time_format = "%Y-%m-%d %H" + filters = [Alert.tenant_id == tenant_id] + + if timestamp_filter: + if timestamp_filter.lower_timestamp: + filters.append(Alert.timestamp >= timestamp_filter.lower_timestamp) + if timestamp_filter.upper_timestamp: + filters.append(Alert.timestamp <= timestamp_filter.upper_timestamp) + else: + filters.append(Alert.timestamp >= twenty_four_hours_ago) + if session.bind.dialect.name == "mysql": timestamp_format = func.date_format(Alert.timestamp, time_format) elif session.bind.dialect.name == "postgresql": @@ -2107,62 +2150,339 @@ def get_provider_distribution(tenant_id: str) -> dict: elif session.bind.dialect.name == "sqlite": timestamp_format = func.strftime(time_format, Alert.timestamp) - # Adjusted query to include max timestamp + if aggregate_all: + # Query for combined alert distribution across all providers + query = ( + session.query( + timestamp_format.label("time"), func.count().label("hits") + ) + .filter(*filters) + .group_by("time") + .order_by("time") + ) + + results = query.all() + + results = {str(time): hits for time, hits in results} + + # Create a complete list of timestamps within the specified range + distribution = [] + current_time = timestamp_filter.lower_timestamp.replace( + minute=0, second=0, microsecond=0 + ) + while current_time <= timestamp_filter.upper_timestamp: + timestamp_str = current_time.strftime(time_format) + distribution.append( + { + "timestamp": timestamp_str + ":00", + "number": results.get(timestamp_str, 0), + } + ) + current_time += timedelta(hours=1) + return distribution + + else: + # Query for alert distribution grouped by provider + query = ( + session.query( + Alert.provider_id, + Alert.provider_type, + timestamp_format.label("time"), + func.count().label("hits"), + func.max(Alert.timestamp).label("last_alert_timestamp"), + ) + .filter(*filters) + .group_by(Alert.provider_id, Alert.provider_type, "time") + .order_by(Alert.provider_id, Alert.provider_type, "time") + ) + + results = query.all() + + provider_distribution = {} + + for provider_id, provider_type, time, hits, last_alert_timestamp in results: + provider_key = f"{provider_id}_{provider_type}" + last_alert_timestamp = ( + datetime.fromisoformat(last_alert_timestamp) + if isinstance(last_alert_timestamp, str) + else last_alert_timestamp + ) + + if provider_key not in provider_distribution: + provider_distribution[provider_key] = { + "provider_id": provider_id, + "provider_type": provider_type, + "alert_last_24_hours": [ + {"hour": i, "number": 0} for i in range(24) + ], + "last_alert_received": last_alert_timestamp, + } + else: + provider_distribution[provider_key]["last_alert_received"] = max( + provider_distribution[provider_key]["last_alert_received"], + last_alert_timestamp, + ) + + time = datetime.strptime(time, time_format) + index = int((time - twenty_four_hours_ago).total_seconds() // 3600) + + if 0 <= index < 24: + provider_distribution[provider_key]["alert_last_24_hours"][index][ + "number" + ] += hits + + return provider_distribution + + +def get_combined_workflow_execution_distribution( + tenant_id: str, timestamp_filter: TimeStampFilter = None +): + """ + Calculate the distribution of WorkflowExecutions started over time, combined 
across all workflows for a specific tenant. + + Args: + tenant_id (str): ID of the tenant whose workflow executions are being analyzed. + timestamp_filter (TimeStampFilter, optional): Filter to specify the time range. + - lower_timestamp (datetime): Start of the time range. + - upper_timestamp (datetime): End of the time range. + + Returns: + List[dict]: A list of dictionaries representing the hourly distribution of workflow executions. + Each dictionary contains: + - 'timestamp' (str): Timestamp of the hour in "YYYY-MM-DD HH:00" format. + - 'number' (int): Number of workflow executions started in that hour. + + Notes: + - If no timestamp_filter is provided, defaults to the last 24 hours. + - Supports MySQL, PostgreSQL, and SQLite for timestamp formatting. + """ + with Session(engine) as session: + twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24) + time_format = "%Y-%m-%d %H" + + filters = [WorkflowExecution.tenant_id == tenant_id] + + if timestamp_filter: + if timestamp_filter.lower_timestamp: + filters.append( + WorkflowExecution.started >= timestamp_filter.lower_timestamp + ) + if timestamp_filter.upper_timestamp: + filters.append( + WorkflowExecution.started <= timestamp_filter.upper_timestamp + ) + else: + filters.append(WorkflowExecution.started >= twenty_four_hours_ago) + + # Database-specific timestamp formatting + if session.bind.dialect.name == "mysql": + timestamp_format = func.date_format(WorkflowExecution.started, time_format) + elif session.bind.dialect.name == "postgresql": + timestamp_format = func.to_char(WorkflowExecution.started, "YYYY-MM-DD HH") + elif session.bind.dialect.name == "sqlite": + timestamp_format = func.strftime(time_format, WorkflowExecution.started) + + # Query for combined execution count across all workflows query = ( session.query( - Alert.provider_id, - Alert.provider_type, timestamp_format.label("time"), - func.count().label("hits"), - func.max(Alert.timestamp).label( - "last_alert_timestamp" - ), # Include max timestamp - ) - .filter( - Alert.tenant_id == tenant_id, - Alert.timestamp >= twenty_four_hours_ago, + func.count().label("executions"), ) - .group_by(Alert.provider_id, Alert.provider_type, "time") - .order_by(Alert.provider_id, Alert.provider_type, "time") + .filter(*filters) + .group_by("time") + .order_by("time") ) - results = query.all() + results = {str(time): executions for time, executions in query.all()} + + distribution = [] + current_time = timestamp_filter.lower_timestamp.replace( + minute=0, second=0, microsecond=0 + ) + while current_time <= timestamp_filter.upper_timestamp: + timestamp_str = current_time.strftime(time_format) + distribution.append( + { + "timestamp": timestamp_str + ":00", + "number": results.get(timestamp_str, 0), + } + ) + current_time += timedelta(hours=1) + + return distribution + + +def get_incidents_created_distribution( + tenant_id: str, timestamp_filter: TimeStampFilter = None +): + """ + Calculate the distribution of incidents created over time for a specific tenant. + + Args: + tenant_id (str): ID of the tenant whose incidents are being queried. + timestamp_filter (TimeStampFilter, optional): Filter to specify the time range. + - lower_timestamp (datetime): Start of the time range. + - upper_timestamp (datetime): End of the time range. + + Returns: + List[dict]: A list of dictionaries representing the hourly distribution of incidents. + Each dictionary contains: + - 'timestamp' (str): Timestamp of the hour in "YYYY-MM-DD HH:00" format. 
+ - 'number' (int): Number of incidents created in that hour. + + Notes: + - If no timestamp_filter is provided, defaults to the last 24 hours. + - Supports MySQL, PostgreSQL, and SQLite for timestamp formatting. + """ + with Session(engine) as session: + twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24) + time_format = "%Y-%m-%d %H" + + filters = [Incident.tenant_id == tenant_id] - provider_distribution = {} + if timestamp_filter: + if timestamp_filter.lower_timestamp: + filters.append( + Incident.creation_time >= timestamp_filter.lower_timestamp + ) + if timestamp_filter.upper_timestamp: + filters.append( + Incident.creation_time <= timestamp_filter.upper_timestamp + ) + else: + filters.append(Incident.creation_time >= twenty_four_hours_ago) + + # Database-specific timestamp formatting + if session.bind.dialect.name == "mysql": + timestamp_format = func.date_format(Incident.creation_time, time_format) + elif session.bind.dialect.name == "postgresql": + timestamp_format = func.to_char(Incident.creation_time, "YYYY-MM-DD HH") + elif session.bind.dialect.name == "sqlite": + timestamp_format = func.strftime(time_format, Incident.creation_time) - for provider_id, provider_type, time, hits, last_alert_timestamp in results: - provider_key = f"{provider_id}_{provider_type}" - last_alert_timestamp = ( - datetime.fromisoformat(last_alert_timestamp) - if isinstance(last_alert_timestamp, str) - else last_alert_timestamp + query = ( + session.query( + timestamp_format.label("time"), func.count().label("incidents") ) + .filter(*filters) + .group_by("time") + .order_by("time") + ) - if provider_key not in provider_distribution: - provider_distribution[provider_key] = { - "provider_id": provider_id, - "provider_type": provider_type, - "alert_last_24_hours": [ - {"hour": i, "number": 0} for i in range(24) - ], - "last_alert_received": last_alert_timestamp, # Initialize with the first seen timestamp + results = {str(time): incidents for time, incidents in query.all()} + + distribution = [] + current_time = timestamp_filter.lower_timestamp.replace( + minute=0, second=0, microsecond=0 + ) + while current_time <= timestamp_filter.upper_timestamp: + timestamp_str = current_time.strftime(time_format) + distribution.append( + { + "timestamp": timestamp_str + ":00", + "number": results.get(timestamp_str, 0), } - else: - # Update the last alert timestamp if the current one is more recent - provider_distribution[provider_key]["last_alert_received"] = max( - provider_distribution[provider_key]["last_alert_received"], - last_alert_timestamp, + ) + current_time += timedelta(hours=1) + + return distribution + + +def calc_incidents_mttr(tenant_id: str, timestamp_filter: TimeStampFilter = None): + """ + Calculate the Mean Time to Resolve (MTTR) for incidents over time for a specific tenant. + + Args: + tenant_id (str): ID of the tenant whose incidents are being analyzed. + timestamp_filter (TimeStampFilter, optional): Filter to specify the time range. + - lower_timestamp (datetime): Start of the time range. + - upper_timestamp (datetime): End of the time range. + + Returns: + List[dict]: A list of dictionaries representing the hourly MTTR of incidents. + Each dictionary contains: + - 'timestamp' (str): Timestamp of the hour in "YYYY-MM-DD HH:00" format. + - 'mttr' (float): Mean Time to Resolve incidents in that hour (in hours). + + Notes: + - If no timestamp_filter is provided, defaults to the last 24 hours. + - Only includes resolved incidents. 
+ - Supports MySQL, PostgreSQL, and SQLite for timestamp formatting. + """ + with Session(engine) as session: + twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24) + time_format = "%Y-%m-%d %H" + + filters = [ + Incident.tenant_id == tenant_id, + Incident.status == IncidentStatus.RESOLVED.value, + ] + if timestamp_filter: + if timestamp_filter.lower_timestamp: + filters.append( + Incident.creation_time >= timestamp_filter.lower_timestamp + ) + if timestamp_filter.upper_timestamp: + filters.append( + Incident.creation_time <= timestamp_filter.upper_timestamp ) + else: + filters.append(Incident.creation_time >= twenty_four_hours_ago) - time = datetime.strptime(time, time_format) - index = int((time - twenty_four_hours_ago).total_seconds() // 3600) + # Database-specific timestamp formatting + if session.bind.dialect.name == "mysql": + timestamp_format = func.date_format(Incident.creation_time, time_format) + elif session.bind.dialect.name == "postgresql": + timestamp_format = func.to_char(Incident.creation_time, "YYYY-MM-DD HH") + elif session.bind.dialect.name == "sqlite": + timestamp_format = func.strftime(time_format, Incident.creation_time) - if 0 <= index < 24: - provider_distribution[provider_key]["alert_last_24_hours"][index][ - "number" - ] += hits + query = ( + session.query( + timestamp_format.label("time"), + Incident.start_time, + Incident.end_time, + func.count().label("incidents"), + ) + .filter(*filters) + .group_by("time", Incident.start_time, Incident.end_time) + .order_by("time") + ) + results = {} + for time, start_time, end_time, incidents in query.all(): + if start_time and end_time: + resolution_time = ( + end_time - start_time + ).total_seconds() / 3600 # in hours + time_str = str(time) + if time_str not in results: + results[time_str] = {"number": 0, "mttr": 0} + + results[time_str]["number"] += incidents + results[time_str]["mttr"] += resolution_time * incidents + + distribution = [] + current_time = timestamp_filter.lower_timestamp.replace( + minute=0, second=0, microsecond=0 + ) + while current_time <= timestamp_filter.upper_timestamp: + timestamp_str = current_time.strftime(time_format) + if timestamp_str in results and results[timestamp_str]["number"] > 0: + avg_mttr = ( + results[timestamp_str]["mttr"] / results[timestamp_str]["number"] + ) + else: + avg_mttr = 0 - return provider_distribution + distribution.append( + { + "timestamp": timestamp_str + ":00", + "mttr": avg_mttr, + } + ) + current_time += timedelta(hours=1) + + return distribution def get_presets( @@ -2901,11 +3221,14 @@ def get_incident_alerts_and_links_by_incident_id( return query.all(), total_count + def get_incident_alerts_by_incident_id(*args, **kwargs) -> tuple[List[Alert], int]: """ Unpacking (List[(Alert, AlertToIncident)], int) to (List[Alert], int). 
""" - alerts_and_links, total_alerts = get_incident_alerts_and_links_by_incident_id(*args, **kwargs) + alerts_and_links, total_alerts = get_incident_alerts_and_links_by_incident_id( + *args, **kwargs + ) alerts = [alert_and_link[0] for alert_and_link in alerts_and_links] return alerts, total_alerts @@ -2931,21 +3254,20 @@ def get_future_incidents_by_incident_id( def get_all_same_alert_ids( - tenant_id: str, - alert_ids: List[str | UUID], - session: Optional[Session] = None + tenant_id: str, alert_ids: List[str | UUID], session: Optional[Session] = None ): with existed_or_new_session(session) as session: - fingerprints_subquery = session.query(Alert.fingerprint).where( - Alert.tenant_id == tenant_id, - col(Alert.id).in_(alert_ids) - ).subquery() + fingerprints_subquery = ( + session.query(Alert.fingerprint) + .where(Alert.tenant_id == tenant_id, col(Alert.id).in_(alert_ids)) + .subquery() + ) query = session.scalars( - select(Alert.id) - .where( - Alert.tenant_id == tenant_id, - col(Alert.fingerprint).in_(fingerprints_subquery) - )) + select(Alert.id).where( + Alert.tenant_id == tenant_id, + col(Alert.fingerprint).in_(fingerprints_subquery), + ) + ) return query.all() @@ -3023,7 +3345,9 @@ def add_alerts_to_incident_by_incident_id( if not incident: return None - return add_alerts_to_incident(tenant_id, incident, alert_ids, is_created_by_ai, session) + return add_alerts_to_incident( + tenant_id, incident, alert_ids, is_created_by_ai, session + ) def add_alerts_to_incident( @@ -3067,7 +3391,9 @@ def add_alerts_to_incident( ) new_alert_ids = [ - alert_id for alert_id in all_alert_ids if alert_id not in existing_alert_ids + alert_id + for alert_id in all_alert_ids + if alert_id not in existing_alert_ids ] if not new_alert_ids: @@ -3076,10 +3402,12 @@ def add_alerts_to_incident( alerts_data_for_incident = get_alerts_data_for_incident(new_alert_ids, existing_fingerprints, session) incident.sources = list( - set(incident.sources if incident.sources else []) | set(alerts_data_for_incident["sources"]) + set(incident.sources if incident.sources else []) + | set(alerts_data_for_incident["sources"]) ) incident.affected_services = list( - set(incident.affected_services if incident.affected_services else []) | set(alerts_data_for_incident["services"]) + set(incident.affected_services if incident.affected_services else []) + | set(alerts_data_for_incident["services"]) ) # If incident has alerts already, use the max severity between existing and new alerts, otherwise use the new alerts max severity incident.severity = max(incident.severity, alerts_data_for_incident["max_severity"].order) if incident.alerts_count else alerts_data_for_incident["max_severity"].order @@ -3087,7 +3415,10 @@ def add_alerts_to_incident( alert_to_incident_entries = [ AlertToIncident( - alert_id=alert_id, incident_id=incident.id, tenant_id=tenant_id, is_created_by_ai=is_created_by_ai + alert_id=alert_id, + incident_id=incident.id, + tenant_id=tenant_id, + is_created_by_ai=is_created_by_ai, ) for alert_id in new_alert_ids ] @@ -3187,9 +3518,12 @@ def remove_alerts_to_incident_by_incident_id( AlertToIncident.tenant_id == tenant_id, AlertToIncident.incident_id == incident.id, col(AlertToIncident.alert_id).in_(all_alert_ids), - ).update({ - "deleted_at": datetime.now(datetime.now().astimezone().tzinfo), - }) + ) + .update( + { + "deleted_at": datetime.now(datetime.now().astimezone().tzinfo), + } + ) ) session.commit() @@ -3639,7 +3973,10 @@ def get_alerts_fields(tenant_id: str) -> List[AlertField]: def change_incident_status_by_id( - 
tenant_id: str, incident_id: UUID | str, status: IncidentStatus + tenant_id: str, + incident_id: UUID | str, + status: IncidentStatus, + end_time: datetime | None = None, ) -> bool: with Session(engine) as session: stmt = ( @@ -3648,7 +3985,10 @@ def change_incident_status_by_id( Incident.tenant_id == tenant_id, Incident.id == incident_id, ) - .values(status=status.value) + .values( + status=status.value, + end_time=end_time, + ) ) updated = session.execute(stmt) session.commit() @@ -3701,7 +4041,7 @@ def get_workflow_executions_for_incident_or_alert( .join(AlertToIncident, Alert.id == AlertToIncident.alert_id) .where( AlertToIncident.deleted_at == NULL_FOR_DELETED_AT, - AlertToIncident.incident_id == incident_id + AlertToIncident.incident_id == incident_id, ) ) @@ -3811,37 +4151,35 @@ def is_edge_incident_alert_resolved( AlertEnrichment, Alert.fingerprint == AlertEnrichment.alert_fingerprint ) .join(AlertToIncident, AlertToIncident.alert_id == Alert.id) - .where( - AlertToIncident.incident_id == incident.id - ) + .where(AlertToIncident.incident_id == incident.id) .group_by(Alert.fingerprint) .having(func.max(Alert.timestamp)) .order_by(direction(Alert.timestamp)) ).first() - - return ( - enriched_status == AlertStatus.RESOLVED.value or - (enriched_status is None and status == AlertStatus.RESOLVED.value) + + return enriched_status == AlertStatus.RESOLVED.value or ( + enriched_status is None and status == AlertStatus.RESOLVED.value ) + def get_alerts_metrics_by_provider( tenant_id: str, - start_date: Optional[datetime] = None, + start_date: Optional[datetime] = None, end_date: Optional[datetime] = None, - fields: Optional[List[str]] = [] + fields: Optional[List[str]] = [], ) -> Dict[str, Dict[str, Any]]: - + dynamic_field_sums = [ func.sum( case( [ ( - func.json_extract(Alert.event, f'$.{field}').isnot(None) & - (func.json_extract(Alert.event, f'$.{field}') != False), - 1 + func.json_extract(Alert.event, f"$.{field}").isnot(None) + & (func.json_extract(Alert.event, f"$.{field}") != False), + 1, ) - ], - else_=0 + ], + else_=0, ) ).label(f"{field}_count") for field in fields @@ -3853,8 +4191,10 @@ def get_alerts_metrics_by_provider( Alert.provider_type, Alert.provider_id, func.count(Alert.id).label("total_alerts"), - func.sum(case([(AlertToIncident.alert_id.isnot(None), 1)], else_=0)).label("correlated_alerts"), - *dynamic_field_sums + func.sum( + case([(AlertToIncident.alert_id.isnot(None), 1)], else_=0) + ).label("correlated_alerts"), + *dynamic_field_sums, ) .outerjoin(AlertToIncident, Alert.id == AlertToIncident.alert_id) .filter( @@ -3865,18 +4205,19 @@ def get_alerts_metrics_by_provider( # Add timestamp filter only if both start_date and end_date are provided if start_date and end_date: query = query.filter( - Alert.timestamp >= start_date, - Alert.timestamp <= end_date + Alert.timestamp >= start_date, Alert.timestamp <= end_date ) results = query.group_by(Alert.provider_id, Alert.provider_type).all() - + return { f"{row.provider_id}_{row.provider_type}": { "total_alerts": row.total_alerts, "correlated_alerts": row.correlated_alerts, "provider_type": row.provider_type, - **{f"{field}_count": getattr(row, f"{field}_count") for field in fields} # Add field-specific counts + **{ + f"{field}_count": getattr(row, f"{field}_count") for field in fields + }, # Add field-specific counts } for row in results } diff --git a/keep/api/models/time_stamp.py b/keep/api/models/time_stamp.py index 67c910538..afe9b0c13 100644 --- a/keep/api/models/time_stamp.py +++ b/keep/api/models/time_stamp.py @@ 
-1,10 +1,27 @@ +import json from typing import Optional + +from fastapi import Query, HTTPException from pydantic import BaseModel, Field from datetime import datetime + + class TimeStampFilter(BaseModel): - lower_timestamp: Optional[datetime] = Field(None, alias='start') - upper_timestamp: Optional[datetime] = Field(None, alias='end') + lower_timestamp: Optional[datetime] = Field(None, alias="start") + upper_timestamp: Optional[datetime] = Field(None, alias="end") class Config: allow_population_by_field_name = True - \ No newline at end of file + + +# Function to handle the time_stamp query parameter and parse it +def _get_time_stamp_filter(time_stamp: Optional[str] = Query(None)) -> TimeStampFilter: + if time_stamp: + try: + # Parse the JSON string + time_stamp_dict = json.loads(time_stamp) + # Return the TimeStampFilter object, Pydantic will map 'from' -> lower_timestamp and 'to' -> upper_timestamp + return TimeStampFilter(**time_stamp_dict) + except (json.JSONDecodeError, TypeError): + raise HTTPException(status_code=400, detail="Invalid time_stamp format") + return TimeStampFilter() diff --git a/keep/api/routes/dashboard.py b/keep/api/routes/dashboard.py index 32e437947..cfcb61d4a 100644 --- a/keep/api/routes/dashboard.py +++ b/keep/api/routes/dashboard.py @@ -1,16 +1,23 @@ import json import logging import os -from datetime import datetime +from datetime import datetime, timedelta from typing import Dict, List, Optional from fastapi import APIRouter, Depends, HTTPException from pydantic import BaseModel -from keep.api.core.db import create_dashboard as create_dashboard_db +from keep.api.core.db import ( + create_dashboard as create_dashboard_db, + get_provider_distribution, + get_incidents_created_distribution, + get_combined_workflow_execution_distribution, + calc_incidents_mttr, +) from keep.api.core.db import delete_dashboard as delete_dashboard_db from keep.api.core.db import get_dashboards as get_dashboards_db from keep.api.core.db import update_dashboard as update_dashboard_db +from keep.api.models.time_stamp import TimeStampFilter, _get_time_stamp_filter from keep.identitymanager.authenticatedentity import AuthenticatedEntity from keep.identitymanager.identitymanagerfactory import IdentityManagerFactory @@ -136,3 +143,40 @@ def delete_dashboard( if not dashboard: raise HTTPException(status_code=404, detail="Dashboard not found") return {"ok": True} + + +@router.get("/metric-widgets") +def get_metric_widgets( + time_stamp: TimeStampFilter = Depends(_get_time_stamp_filter), + mttr: bool = True, + apd: bool = True, + ipd: bool = True, + wpd: bool = True, + authenticated_entity: AuthenticatedEntity = Depends( + IdentityManagerFactory.get_auth_verifier(["read:dashboards"]) + ), +): + data = {} + tenant_id = authenticated_entity.tenant_id + if not time_stamp.lower_timestamp or not time_stamp.upper_timestamp: + time_stamp = TimeStampFilter( + upper_timestamp=datetime.utcnow(), + lower_timestamp=datetime.utcnow() - timedelta(hours=24), + ) + if apd: + data["apd"] = get_provider_distribution( + tenant_id=tenant_id, aggregate_all=True, timestamp_filter=time_stamp + ) + if ipd: + data["ipd"] = get_incidents_created_distribution( + tenant_id=tenant_id, timestamp_filter=time_stamp + ) + if wpd: + data["wpd"] = get_combined_workflow_execution_distribution( + tenant_id=tenant_id, timestamp_filter=time_stamp + ) + if mttr: + data["mttr"] = calc_incidents_mttr( + tenant_id=tenant_id, timestamp_filter=time_stamp + ) + return data diff --git a/keep/api/routes/incidents.py 
b/keep/api/routes/incidents.py index ed87d105a..f46c855c2 100644 --- a/keep/api/routes/incidents.py +++ b/keep/api/routes/incidents.py @@ -569,7 +569,9 @@ async def add_alerts_to_incident( if not incident: raise HTTPException(status_code=404, detail="Incident not found") - add_alerts_to_incident_by_incident_id(tenant_id, incident_id, alert_ids, is_created_by_ai) + add_alerts_to_incident_by_incident_id( + tenant_id, incident_id, alert_ids, is_created_by_ai + ) try: logger.info("Pushing enriched alert to elasticsearch") elastic_client = ElasticClient(tenant_id) @@ -762,7 +764,10 @@ def change_incident_status( # We need to do something only if status really changed if not change.status == incident.status: - result = change_incident_status_by_id(tenant_id, incident_id, change.status) + end_time = datetime.utcnow() if change.status == IncidentStatus.RESOLVED else None + result = change_incident_status_by_id( + tenant_id, incident_id, change.status, end_time + ) if not result: raise HTTPException( status_code=500, detail="Error changing incident status" @@ -778,7 +783,7 @@ def change_incident_status( ), authenticated_entity=authenticated_entity, ) - + incident.end_time = end_time incident.status = change.status new_incident_dto = IncidentDto.from_db_incident(incident) diff --git a/keep/api/routes/preset.py b/keep/api/routes/preset.py index cfa701681..a4f87cc08 100644 --- a/keep/api/routes/preset.py +++ b/keep/api/routes/preset.py @@ -1,16 +1,13 @@ -import json import logging import os import uuid from datetime import datetime -from typing import Optional from fastapi import ( APIRouter, BackgroundTasks, Depends, HTTPException, - Query, Request, Response, ) @@ -34,7 +31,7 @@ Tag, TagDto, ) -from keep.api.models.time_stamp import TimeStampFilter +from keep.api.models.time_stamp import TimeStampFilter, _get_time_stamp_filter from keep.api.tasks.process_event_task import process_event from keep.api.tasks.process_topology_task import process_topology from keep.contextmanager.contextmanager import ContextManager @@ -177,19 +174,6 @@ def pull_data_from_providers( ) -# Function to handle the time_stamp query parameter and parse it -def _get_time_stamp_filter(time_stamp: Optional[str] = Query(None)) -> TimeStampFilter: - if time_stamp: - try: - # Parse the JSON string - time_stamp_dict = json.loads(time_stamp) - # Return the TimeStampFilter object, Pydantic will map 'from' -> lower_timestamp and 'to' -> upper_timestamp - return TimeStampFilter(**time_stamp_dict) - except (json.JSONDecodeError, TypeError): - raise HTTPException(status_code=400, detail="Invalid time_stamp format") - return TimeStampFilter() - - @router.get( "", description="Get all presets for tenant",
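For reference, a minimal frontend consumption sketch of the pieces added above: it assumes the `timestamp`/`number` field names emitted by the new `GET /dashboard/metric-widgets` endpoint and the `mttr`/`apd`/`ipd`/`wpd` ids assembled by `useDashboardMetricWidgets`; the component name and styling are illustrative, not part of this PR.

```tsx
import React from "react";
import { AreaChart, Card } from "@tremor/react";
import { useDashboardMetricWidgets } from "@/utils/hooks/useDashboardMetricWidgets";

// Renders the "Alerts/Day" distribution as a Tremor area chart, the same way
// GridItem does for metric widgets. Passing `true` forwards the dashboard's
// time-range filter to the metric-widgets endpoint.
const AlertsPerDayPreview: React.FC = () => {
  const { widgets } = useDashboardMetricWidgets(true);
  const apd = widgets.find((w) => w.id === "apd");
  if (!apd) return null;

  return (
    <Card>
      <AreaChart
        className="h-48"
        data={apd.data}
        index="timestamp"        // hourly bucket label from the backend payload
        categories={["number"]}  // count of alerts received in that bucket
        showLegend={false}
        connectNulls
        xAxisLabel="Timestamp"
      />
    </Card>
  );
};

export default AlertsPerDayPreview;
```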