From 40181881d53caad2614a2bb33db554bb05687c38 Mon Sep 17 00:00:00 2001
From: jcountsNR
Date: Fri, 16 Aug 2024 10:19:11 -0700
Subject: [PATCH] chore: Fix account ids

---
 .../ext-databricks-cluster/dashboard.json | 28 +++++++++----------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/entity-types/ext-databricks-cluster/dashboard.json b/entity-types/ext-databricks-cluster/dashboard.json
index 363995536..3c40aa7c3 100644
--- a/entity-types/ext-databricks-cluster/dashboard.json
+++ b/entity-types/ext-databricks-cluster/dashboard.json
@@ -28,7 +28,7 @@
             },
             "nrqlQueries": [
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT average(spark.app.stage.executor.runTime) WHERE spark.app.stage.executor.runTime IS NOT NULL TIMESERIES"
               }
             ],
@@ -70,7 +70,7 @@
             },
             "nrqlQueries": [
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT average(spark.app.stage.executor.cpuTime) / 1000000 WHERE spark.app.stage.executor.cpuTime IS NOT NULL TIMESERIES"
               }
             ],
@@ -112,7 +112,7 @@
             },
             "nrqlQueries": [
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT average(spark.app.stage.jvmGcTime) WHERE spark.app.stage.jvmGcTime IS NOT NULL TIMESERIES"
               }
             ],
@@ -154,7 +154,7 @@
             },
             "nrqlQueries": [
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT average(spark.app.stage.executor.deserializeTime) WHERE spark.app.stage.executor.deserializeTime IS NOT NULL TIMESERIES"
               }
             ],
@@ -196,7 +196,7 @@
             },
             "nrqlQueries": [
              {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT average(spark.app.stage.executor.deserializeCpuTime) / 1000000 WHERE spark.app.stage.executor.deserializeCpuTime IS NOT NULL TIMESERIES"
               }
             ],
@@ -238,7 +238,7 @@
             },
             "nrqlQueries": [
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT average(spark.app.stage.resultSerializationTime) WHERE spark.app.stage.resultSerializationTime IS NOT NULL TIMESERIES"
               }
             ],
@@ -280,11 +280,11 @@
             },
             "nrqlQueries": [
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT latest(spark.app.stage.inputBytes) AS 'Bytes In' WHERE spark.app.stage.inputBytes IS NOT NULL TIMESERIES"
               },
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT latest(spark.app.stage.outputBytes) AS 'Bytes Out' WHERE spark.app.stage.outputBytes IS NOT NULL TIMESERIES"
               }
             ],
@@ -323,11 +323,11 @@
             },
             "nrqlQueries": [
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT latest(spark.app.stage.inputRecords) AS 'Records In' WHERE spark.app.stage.inputRecords IS NOT NULL TIMESERIES"
               },
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT latest(spark.app.stage.outputRecords) AS 'Records Out' WHERE spark.app.stage.outputRecords IS NOT NULL TIMESERIES"
               }
             ],
@@ -366,11 +366,11 @@
             },
             "nrqlQueries": [
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT latest(spark.app.stage.shuffle.readBytes) AS 'Bytes Written' WHERE spark.app.stage.shuffle.readBytes IS NOT NULL TIMESERIES"
               },
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT latest(spark.app.stage.shuffle.writeBytes) AS 'Bytes Out' WHERE spark.app.stage.shuffle.writeBytes IS NOT NULL TIMESERIES"
               }
             ],
@@ -409,11 +409,11 @@
             },
             "nrqlQueries": [
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT latest(spark.app.stage.shuffle.readRecords) AS 'Records Read' WHERE spark.app.stage.shuffle.readRecords IS NOT NULL TIMESERIES"
               },
               {
-                "accountIds": [],
+                "accountId": 0,
                 "query": "FROM Metric SELECT latest(spark.app.stage.shuffle.writeRecords) AS 'Records Written' WHERE spark.app.stage.shuffle.writeRecords IS NOT NULL TIMESERIES"
               }
             ],