Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feat/7077 | Related metrics api #7149

Open
wants to merge 36 commits into
base: feat/7080
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
36 commits
Select commit Hold shift + click to select a range
f6ac729
feat(summary view): added metric details apis | 7082
aniketio-ctrl Feb 11, 2025
9385029
feat(summary view): added metric details apis | 7082
aniketio-ctrl Feb 11, 2025
0ca886e
feat(summary view): added list metric details apis | 7084
aniketio-ctrl Feb 11, 2025
bab1399
feat(summary view): added tree map metric details apis | 7087
aniketio-ctrl Feb 11, 2025
9676b7e
feat(summary view): added list metric details apis | 7084
aniketio-ctrl Feb 12, 2025
969d6b0
feat(summary): updated contribution queries for metric metadata| 7082
aniketio-ctrl Feb 13, 2025
2a53b95
Merge branch 'feat/7080' of github.com:SigNoz/signoz into feat/7082
aniketio-ctrl Feb 13, 2025
51794ba
Merge branch 'feat/7082' of github.com:SigNoz/signoz into feat/7084
aniketio-ctrl Feb 13, 2025
d4bdcb1
feat(summary): added which table to use functions| 7084
aniketio-ctrl Feb 13, 2025
03d9c62
feat(summary): added clickhouse settings| 7084
aniketio-ctrl Feb 16, 2025
bba49c1
Merge branch 'feat/7084' of github.com:SigNoz/signoz into feat/7087
aniketio-ctrl Feb 16, 2025
3e43a96
feat(summary): added clickhouse settings| 7087
aniketio-ctrl Feb 16, 2025
7394c06
feat(summary): added clickhouse queries| 7082
aniketio-ctrl Feb 16, 2025
43a3122
feat(explorer): added clickhouse queries| 7077
aniketio-ctrl Feb 16, 2025
8149bb5
feat(explorer): added clickhouse queries| 7077
aniketio-ctrl Feb 17, 2025
d83daa6
feat(summary): removed cardinality from metadata | 7082
aniketio-ctrl Feb 17, 2025
a41d413
Merge branch 'feat/7082' of github.com:SigNoz/signoz into feat/7084
aniketio-ctrl Feb 17, 2025
08b9e9b
feat(summary): updated list metrics api into two parts| 7084
aniketio-ctrl Feb 17, 2025
4590195
feat(summary): added default values for list api| 7084
aniketio-ctrl Feb 17, 2025
e7269bb
Merge branch 'feat/7084' of github.com:SigNoz/signoz into feat/7087
aniketio-ctrl Feb 17, 2025
ea4c7ac
feat(summary): updated tree map samples query into two parts| 7087
aniketio-ctrl Feb 18, 2025
c1f86b1
feat(summary): updated tree map samples query into two parts| 7087
aniketio-ctrl Feb 18, 2025
bc61850
Merge branch 'feat/7087' of github.com:SigNoz/signoz into feat/7077_1
aniketio-ctrl Feb 18, 2025
c5459f3
feat(explorer): updated related metrics query| 7077
aniketio-ctrl Feb 18, 2025
e2ccc5c
feat(explorer): added clickhouse max threads settings| 7077
aniketio-ctrl Feb 18, 2025
4809dc0
Merge branch 'feat/7080' of github.com:SigNoz/signoz into feat/7077_1
aniketio-ctrl Feb 18, 2025
d91e7c1
feat(explorer): added clickhouse max threads settings| 7077
aniketio-ctrl Feb 18, 2025
843d9e1
feat(explorer): added clickhouse max threads settings| 7077
aniketio-ctrl Feb 18, 2025
baebb13
feat(explorer): added clickhouse max threads settings| 7077
aniketio-ctrl Feb 18, 2025
a70cf32
feat(explorer): added query range with related metrics api| 7077
aniketio-ctrl Feb 19, 2025
211b0b9
feat(explorer): added distributed ts table and query builder| 7077
aniketio-ctrl Feb 20, 2025
c2bb82f
Merge branch 'feat/7077_1' of github.com:SigNoz/signoz into feat/7077_1
aniketio-ctrl Feb 20, 2025
e87753a
Merge branch 'feat/7080' into feat/7077_1
aniketio-ctrl Feb 20, 2025
bded71e
feat(explorer): improved clickhouse queries
aniketio-ctrl Feb 22, 2025
f094fb3
Merge branch 'feat/7077_1' of github.com:SigNoz/signoz into feat/7077_1
aniketio-ctrl Feb 22, 2025
ead25be
feat(explorer): improved clickhouse queries
aniketio-ctrl Feb 22, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
feat(explorer): added distributed ts table and query builder| 7077
  • Loading branch information
aniketio-ctrl committed Feb 20, 2025
commit 211b0b9259514bdd129323fe79decb27c7a3b6cf
142 changes: 112 additions & 30 deletions pkg/query-service/app/clickhouseReader/reader.go
Original file line number Diff line number Diff line change
Expand Up @@ -6141,21 +6141,25 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
return &heatmap, nil
}

func (r *ClickHouseReader) GetRelatedMetrics(ctx context.Context, target string, start, end int64) (map[string]metrics_explorer.RelatedMetricsScore, *model.ApiError) {
func (r *ClickHouseReader) GetRelatedMetrics(ctx context.Context, req *metrics_explorer.RelatedMetricsRequest) (map[string]metrics_explorer.RelatedMetricsScore, *model.ApiError) {
// First query: Compute name similarity and get candidate metric names.
start, end, tsTable, _ := utils.WhichTSTableToUse(req.Start, req.End)
query := fmt.Sprintf(`
SELECT
metric_name,
any(type) as type,
any(temporality) as temporality,
any(is_monotonic) as monotonic,
1 - (levenshteinDistance(?, metric_name) / greatest(NULLIF(length(?), 0), NULLIF(length(metric_name), 0))) AS name_similarity
FROM %s.%s
WHERE metric_name != ?
AND unix_milli BETWEEN ? AND ?
GROUP BY metric_name
ORDER BY name_similarity DESC
LIMIT 30;
`, signozMetricDBName, signozTSTableNameV41Week)
`, signozMetricDBName, tsTable)

rows, err := r.db.Query(ctx, query, target, target, target, start, end)
rows, err := r.db.Query(ctx, query, req.CurrentMetricName, req.CurrentMetricName, req.CurrentMetricName, start, end)
if err != nil {
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
Expand All @@ -6166,11 +6170,17 @@ func (r *ClickHouseReader) GetRelatedMetrics(ctx context.Context, target string,
for rows.Next() {
var metric string
var sim float64
if err := rows.Scan(&metric, &sim); err != nil {
var metricType v3.MetricType
var temporality v3.Temporality
var isMonotonic bool
if err := rows.Scan(&metric, &metricType, &temporality, &isMonotonic, &sim); err != nil {
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
result[metric] = metrics_explorer.RelatedMetricsScore{
NameSimilarity: sim,
MetricType: metricType,
Temporality: temporality,
IsMonotonic: isMonotonic,
}
metricNames = append(metricNames, metric)
}
Expand All @@ -6179,55 +6189,104 @@ func (r *ClickHouseReader) GetRelatedMetrics(ctx context.Context, target string,
return result, nil
}

// --- STEP 1: Get the extracted labels for the target metric ---
extractedLabelsQuery := fmt.Sprintf(`
SELECT
kv.1 AS label_key,
kv.2 AS label_value
FROM %s.%s
ARRAY JOIN JSONExtractKeysAndValuesRaw(labels) AS kv
WHERE metric_name = ?
AND unix_milli BETWEEN ? AND ?
AND NOT startsWith(kv.1, '__')
`, signozMetricDBName, signozTSTableNameV41Week)
SELECT
kv.1 AS label_key,
arraySlice(
arrayDistinct(
groupArray(replaceRegexpAll(kv.2, '^"(.*)"$', '\\1'))
),
1,
10
) AS label_values
FROM %s.%s
ARRAY JOIN JSONExtractKeysAndValuesRaw(labels) AS kv
WHERE metric_name = ?
AND NOT startsWith(kv.1, '__')
AND unix_milli between ? and ?
GROUP BY label_key
LIMIT 50
`, signozMetricDBName, tsTable)

var targetKeys []string
var targetValues []string
rows, err = r.db.Query(ctx, extractedLabelsQuery, target, start, end)
rows, err = r.db.Query(ctx, extractedLabelsQuery, req.CurrentMetricName, start, end)
if err != nil {
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
defer rows.Close()
for rows.Next() {
var key, value string
var key string
var value []string
if err := rows.Scan(&key, &value); err != nil {
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
targetKeys = append(targetKeys, key)
targetValues = append(targetValues, value)
targetValues = append(targetValues, value...)
}

targetKeysList := "'" + strings.Join(targetKeys, "', '") + "'"
targetValuesList := "'" + strings.Join(targetValues, "', '") + "'"

// --- STEP 2: Get labels for candidate metrics ---
var priorityList []string
for _, f := range req.Filters.Items {
if f.Operator == v3.FilterOperatorEqual { //currently only supporting for equal
priorityList = append(priorityList, fmt.Sprintf("tuple('%s', '%s')", f.Key.Key, f.Value))
}
}
priorityListString := strings.Join(priorityList, ", ")

// Get labels for candidate metrics.
// We sample roughly 10% of rows via "rand() % 100 < 10".
candidateLabelsQuery := fmt.Sprintf(`
WITH
WITH
arrayDistinct([%s]) AS filter_keys,
arrayDistinct([%s]) AS filter_values
arrayDistinct([%s]) AS filter_values,
[%s] AS priority_pairs_input,
%d AS priority_multiplier
SELECT
metric_name,
SUM(arrayExists(kv ->
has(filter_keys, kv.1) AND has(filter_values, kv.2),
JSONExtractKeysAndValues(labels, 'String')
))::UInt64 AS total_matches
any(type) as type,
any(temporality) as temporality,
any(is_monotonic) as monotonic,
SUM(
arraySum(
kv -> if(has(filter_keys, kv.1) AND has(filter_values, kv.2), 1, 0),
JSONExtractKeysAndValues(labels, 'String')
)
)::UInt64 AS raw_match_count,
SUM(
arraySum(
kv ->
if(
arrayExists(pr -> pr.1 = kv.1 AND pr.2 = kv.2, priority_pairs_input),
priority_multiplier,
0
),
JSONExtractKeysAndValues(labels, 'String')
)
)::UInt64 AS weighted_match_count,
toJSONString(
arrayDistinct(
arrayFlatten(
groupArray(
arrayFilter(
kv -> arrayExists(pr -> pr.1 = kv.1 AND pr.2 = kv.2, priority_pairs_input),
JSONExtractKeysAndValues(labels, 'String')
)
)
)
)
) AS priority_pairs

FROM %s.%s
WHERE rand() %% 100 < 10
AND unix_milli BETWEEN ? AND ?
AND unix_milli between ? and ?
GROUP BY metric_name
ORDER BY total_matches DESC
ORDER BY weighted_match_count DESC, raw_match_count DESC
LIMIT 30
`, targetKeysList, targetValuesList, signozMetricDBName, signozTSTableNameV41Week)
`, targetKeysList, targetValuesList, priorityListString, 2,
signozMetricDBName, tsTable)

rows, err = r.db.Query(ctx, candidateLabelsQuery, start, end)
if err != nil {
Expand All @@ -6238,12 +6297,33 @@ LIMIT 30
attributeMap := make(map[string]uint64)
for rows.Next() {
var metric string
var matches uint64
if err := rows.Scan(&metric, &matches); err != nil {
var metricType v3.MetricType
var temporality v3.Temporality
var isMonotonic bool
var weightedMatchCount, rawMatchCount uint64
var priorityPairsJSON string // Scan into a string

if err := rows.Scan(&metric, &metricType, &temporality, &isMonotonic, &rawMatchCount, &weightedMatchCount, &priorityPairsJSON); err != nil {
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
attributeMap[metric] = weightedMatchCount + (rawMatchCount)/10
var priorityPairs [][]string
if err := json.Unmarshal([]byte(priorityPairsJSON), &priorityPairs); err != nil {
priorityPairs = [][]string{}
}
if _, ok := result[metric]; ok {
attributeMap[metric] = matches
result[metric] = metrics_explorer.RelatedMetricsScore{
NameSimilarity: result[metric].NameSimilarity,
Filters: priorityPairs,
MetricType: metricType,
Temporality: temporality,
IsMonotonic: isMonotonic,
}
} else {
result[metric] = metrics_explorer.RelatedMetricsScore{
Filters: priorityPairs,
MetricType: metricType,
}
}
}

Expand All @@ -6255,6 +6335,8 @@ LIMIT 30
result[metric] = metrics_explorer.RelatedMetricsScore{
NameSimilarity: result[metric].NameSimilarity,
AttributeSimilarity: data,
Filters: result[metric].Filters,
MetricType: result[metric].MetricType,
}
}
}
Expand Down
2 changes: 1 addition & 1 deletion pkg/query-service/app/http_handler.go
Original file line number Diff line number Diff line change
Expand Up @@ -629,7 +629,7 @@ func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *AuthMiddlewar
Methods(http.MethodPost)
router.HandleFunc("/api/v1/metrics/related",
am.ViewAccess(ah.GetRelatedMetrics)).
Methods(http.MethodGet)
Methods(http.MethodPost)
}

func Intersection(a, b []int) (c []int) {
Expand Down
13 changes: 2 additions & 11 deletions pkg/query-service/app/metricsexplorer/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -71,17 +71,8 @@ func ParseTreeMapMetricsParams(r *http.Request) (*metrics_explorer.TreeMapMetric

func ParseRelatedMetricsParams(r *http.Request) (*metrics_explorer.RelatedMetricsRequest, *model.ApiError) {
var relatedMetricParams metrics_explorer.RelatedMetricsRequest
relatedMetricParams.CurrentMetricName = r.URL.Query().Get("currentMetricName")
start, err := strconv.ParseInt(r.URL.Query().Get("start"), 10, 64)
if err != nil {
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("start param should be of type int64: %v", err)}
}
end, err := strconv.ParseInt(r.URL.Query().Get("end"), 10, 64)
if err != nil {
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("end param should be of type int64: %v", err)}
if err := json.NewDecoder(r.Body).Decode(&relatedMetricParams); err != nil {
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("cannot parse the request body: %v", err)}
}
relatedMetricParams.Start = start
relatedMetricParams.End = end

return &relatedMetricParams, nil
}
67 changes: 63 additions & 4 deletions pkg/query-service/app/metricsexplorer/summary.go
Original file line number Diff line number Diff line change
Expand Up @@ -212,16 +212,14 @@ func (receiver *SummaryService) GetMetricsTreemap(ctx context.Context, params *m
func (receiver *SummaryService) GetRelatedMetrics(ctx context.Context, params *metrics_explorer.RelatedMetricsRequest) (*metrics_explorer.RelatedMetricsResponse, *model.ApiError) {
var relatedMetricsResponse metrics_explorer.RelatedMetricsResponse

relatedMetricsMap, err := receiver.reader.GetRelatedMetrics(ctx, params.CurrentMetricName, params.Start, params.End)
relatedMetricsMap, err := receiver.reader.GetRelatedMetrics(ctx, params)
if err != nil {
return nil, &model.ApiError{Typ: "Error", Err: err}
}

// Combine the name and attribute similarity scores using weights.
// Here we're using 0.5 for each, but adjust as needed.
finalScores := make(map[string]float64)
for metric, scores := range relatedMetricsMap {
finalScores[metric] = scores.NameSimilarity*0.5 + scores.AttributeSimilarity*0.5
finalScores[metric] = scores.NameSimilarity*0.7 + scores.AttributeSimilarity*0.3
}

type metricScore struct {
Expand Down Expand Up @@ -269,10 +267,71 @@ func (receiver *SummaryService) GetRelatedMetrics(ctx context.Context, params *m
relatedMetric := metrics_explorer.RelatedMetrics{
Name: ms.Name,
Dashboards: dashboardsList,
Query: getQueryRangeForRelateMetricsList(ms.Name, relatedMetricsMap[ms.Name]),
}

relatedMetricsResponse.RelatedMetrics = append(relatedMetricsResponse.RelatedMetrics, relatedMetric)
}

return &relatedMetricsResponse, nil
}

// getQueryRangeForRelateMetricsList builds a default v3.BuilderQuery for a
// related metric so the caller can chart it directly. It converts the matched
// priority label pairs into equality filters and picks default time/space
// aggregations based on the metric's type, temporality and monotonicity.
func getQueryRangeForRelateMetricsList(metricName string, scores metrics_explorer.RelatedMetricsScore) *v3.BuilderQuery {
	var filterItems []v3.FilterItem
	for _, pair := range scores.Filters {
		if len(pair) < 2 {
			continue // Skip malformed pairs; a valid entry is [key, value].
		}
		filterItems = append(filterItems, v3.FilterItem{
			Key: v3.AttributeKey{
				Key:      pair[0], // Default attribute type; refined elsewhere if needed.
				IsColumn: false,
				IsJSON:   false,
			},
			Value:    pair[1],
			Operator: v3.FilterOperatorEqual, // Only "=" pairs are produced upstream.
		})
	}

	// Combine all matched label pairs with AND; leave Filters nil when none.
	var filters *v3.FilterSet
	if len(filterItems) > 0 {
		filters = &v3.FilterSet{
			Operator: "AND",
			Items:    filterItems,
		}
	}

	// QueryName and Expression both carry the metric name for a plain
	// (non-formula) builder query.
	query := v3.BuilderQuery{
		QueryName:  metricName,
		DataSource: v3.DataSourceMetrics,
		Expression: metricName,
		Filters:    filters,
	}

	// A non-monotonic cumulative Sum behaves like a gauge (an up/down
	// counter), so chart it with gauge aggregations. Use the typed
	// constants rather than raw string literals so this stays in sync
	// with the v3.MetricType definitions used in the switch below.
	if scores.MetricType == v3.MetricTypeSum && !scores.IsMonotonic && scores.Temporality == v3.Cumulative {
		scores.MetricType = v3.MetricTypeGauge
	}

	switch scores.MetricType {
	case v3.MetricTypeGauge:
		query.TimeAggregation = v3.TimeAggregationAvg
		query.SpaceAggregation = v3.SpaceAggregationAvg
	case v3.MetricTypeSum:
		query.TimeAggregation = v3.TimeAggregationRate
		query.SpaceAggregation = v3.SpaceAggregationSum
	case v3.MetricTypeHistogram:
		// Histograms get a p95 space aggregation; time aggregation is
		// intentionally left unset here.
		query.SpaceAggregation = v3.SpaceAggregationPercentile95
	}

	query.AggregateAttribute = v3.AttributeKey{
		Key: metricName,
		// NOTE(review): casting a MetricType into AttributeKeyType looks
		// suspicious — confirm the Type field is meant to carry the metric
		// type here.
		Type: v3.AttributeKeyType(scores.MetricType),
	}

	query.StepInterval = 60 // seconds

	return &query
}
2 changes: 2 additions & 0 deletions pkg/query-service/app/summary.go
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,8 @@ func (aH *APIHandler) GetTreeMap(w http.ResponseWriter, r *http.Request) {
}

func (aH *APIHandler) GetRelatedMetrics(w http.ResponseWriter, r *http.Request) {
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
params, apiError := explorer.ParseRelatedMetricsParams(r)
if apiError != nil {
Expand Down
2 changes: 1 addition & 1 deletion pkg/query-service/interfaces/interface.go
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ type Reader interface {

GetMetricsTimeSeriesPercentage(ctx context.Context, request *metrics_explorer.TreeMapMetricsRequest) (*[]metrics_explorer.TreeMapResponseItem, *model.ApiError)
GetMetricsSamplesPercentage(ctx context.Context, req *metrics_explorer.TreeMapMetricsRequest) (*[]metrics_explorer.TreeMapResponseItem, *model.ApiError)
GetRelatedMetrics(ctx context.Context, target string, start, end int64) (map[string]metrics_explorer.RelatedMetricsScore, *model.ApiError)
GetRelatedMetrics(ctx context.Context, req *metrics_explorer.RelatedMetricsRequest) (map[string]metrics_explorer.RelatedMetricsScore, *model.ApiError)
}

type Querier interface {
Expand Down
Loading