Skip to content

Commit 7a3b2da

Browse files
committed
Add initial clusterinstance analysis script
1 parent 42e7ef3 commit 7a3b2da

File tree

1 file changed

+212
-0
lines changed

1 file changed

+212
-0
lines changed
Lines changed: 212 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,212 @@
1+
#!/usr/bin/env python3
2+
#
3+
# Analyze ClusterInstance data on a hub cluster to determine count/min/avg/max/50p/95p/99p timings
4+
#
5+
# Copyright 2024 Red Hat
6+
#
7+
# Licensed under the Apache License, Version 2.0 (the "License");
8+
# you may not use this file except in compliance with the License.
9+
# You may obtain a copy of the License at
10+
#
11+
# http://www.apache.org/licenses/LICENSE-2.0
12+
#
13+
# Unless required by applicable law or agreed to in writing, software
14+
# distributed under the License is distributed on an "AS IS" BASIS,
15+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16+
# See the License for the specific language governing permissions and
17+
# limitations under the License.
18+
19+
import argparse
20+
from datetime import datetime
21+
import json
22+
from utils.command import command
23+
from utils.output import log_write
24+
import logging
25+
import numpy as np
26+
import os
27+
import sys
28+
import time
29+
30+
31+
# Module-level logging configuration: INFO to stdout, with thread name included
# (other scripts in this suite are multithreaded and share this format).
logging.basicConfig(level=logging.INFO, format="%(asctime)s : %(levelname)s : %(threadName)s : %(message)s")
logger = logging.getLogger("acm-deploy-load")
# Emit timestamps in UTC so log lines align with cluster-side (kube) timestamps.
logging.Formatter.converter = time.gmtime
34+
35+
36+
def _parse_ts(raw_ts):
  """Parse a Kubernetes RFC3339 timestamp (e.g. 2024-01-02T03:04:05Z) into a datetime."""
  return datetime.strptime(raw_ts, "%Y-%m-%dT%H:%M:%SZ")


def _duration_seconds(end_ts, start_ts):
  """Seconds between two condition timestamps, or 0 if either is missing ("").

  Condition timestamps default to "" when the condition was absent or not True;
  guarding here prevents a TypeError on `"" - datetime` (the original code only
  guarded some of the duration computations).
  """
  if end_ts == "" or start_ts == "":
    return 0
  return (end_ts - start_ts).total_seconds()


def _report_duration_stats(stats_file, title, durations):
  """Write count/min/avg/50p/95p/99p/max stats for a list of durations (seconds).

  Factored out of main() — the two stats sections were duplicated copy-paste.
  All values are 0 when the list is empty.
  """
  stats_count = len(durations)
  stats_min = 0
  stats_avg = 0
  stats_50p = 0
  stats_95p = 0
  stats_99p = 0
  stats_max = 0
  if stats_count > 0:
    stats_min = np.min(durations)
    stats_avg = round(np.mean(durations), 1)
    stats_50p = round(np.percentile(durations, 50), 1)
    stats_95p = round(np.percentile(durations, 95), 1)
    stats_99p = round(np.percentile(durations, 99), 1)
    stats_max = np.max(durations)
  log_write(stats_file, title)
  log_write(stats_file, "Count: {}".format(stats_count))
  log_write(stats_file, "Min: {}".format(stats_min))
  log_write(stats_file, "Average: {}".format(stats_avg))
  log_write(stats_file, "50 percentile: {}".format(stats_50p))
  log_write(stats_file, "95 percentile: {}".format(stats_95p))
  log_write(stats_file, "99 percentile: {}".format(stats_99p))
  log_write(stats_file, "Max: {}".format(stats_max))


def main():
  """Analyze ClusterInstance CRs on a hub cluster.

  Collects `oc get clusterinstances -A -o json` (or re-reads a previously
  stored raw json file with --offline-process), writes a per-CR timing CSV,
  and writes aggregate duration statistics (count/min/avg/50p/95p/99p/max)
  to a .stats file in the results directory.

  Returns None (exit status 0); exits 1 on oc failure or missing offline data.
  """
  start_time = time.time()

  parser = argparse.ArgumentParser(
      description="Analyze ClusterInstance data",
      prog="analyze-clusterinstances.py", formatter_class=argparse.ArgumentDefaultsHelpFormatter)
  parser.add_argument("-o", "--offline-process", action="store_true", default=False,
                      help="Uses previously stored raw data")
  parser.add_argument("-r", "--raw-data-file", type=str, default="",
                      help="Set raw json data file for offline processing. Empty finds last file")
  parser.add_argument("results_directory", type=str, help="The location to place analyzed data")
  cliargs = parser.parse_args()

  logger.info("Analyze clusterinstances")
  ts = datetime.now().strftime("%Y%m%d-%H%M%S")

  raw_data_file = "{}/clusterinstances-{}.json".format(cliargs.results_directory, ts)
  if cliargs.offline_process:
    if cliargs.raw_data_file == "":
      # Detect the most recent raw data file (lexicographic sort works because
      # file names embed a YYYYmmdd-HHMMSS timestamp)
      dir_scan = sorted([ f.path for f in os.scandir(cliargs.results_directory)
                          if f.is_file() and "clusterinstances" in f.path and "json" in f.path ])
      if len(dir_scan) == 0:
        logger.error("No previous offline file found. Exiting")
        sys.exit(1)
      raw_data_file = dir_scan[-1]
    else:
      raw_data_file = cliargs.raw_data_file
    logger.info("Reading raw data from: {}".format(raw_data_file))
  else:
    logger.info("Storing raw data file at: {}".format(raw_data_file))

  ci_csv_file = "{}/clusterinstances-{}.csv".format(cliargs.results_directory, ts)
  ci_stats_file = "{}/clusterinstances-{}.stats".format(cliargs.results_directory, ts)

  if not cliargs.offline_process:
    # Snapshot the cluster data to disk first so the run can be re-analyzed offline
    oc_cmd = ["oc", "get", "clusterinstances", "-A", "-o", "json"]
    rc, output = command(oc_cmd, False, retries=3, no_log=True)
    if rc != 0:
      logger.error("analyze-clusterinstances, oc get clusterinstances rc: {}".format(rc))
      sys.exit(1)
    with open(raw_data_file, "w") as ci_data_file:
      ci_data_file.write(output)
  with open(raw_data_file, "r") as ci_file_data:
    ci_data = json.load(ci_file_data)

  logger.info("Writing CSV: {}".format(ci_csv_file))
  with open(ci_csv_file, "w") as csv_file:
    csv_file.write("name,status,creationTimestamp,ClusterInstanceValidated.lastTransitionTime,"
                   "RenderedTemplates.lastTransitionTime,RenderedTemplatesValidated.lastTransitionTime,"
                   "RenderedTemplatesApplied.lastTransitionTime,Provisioned.lastTransitionTime,"
                   "ci_ct_iv_duration,ci_iv_rt_duration,ci_rt_rtv_duration,ci_rtv_rta_duration,"
                   "ci_rta_p_duration,total_duration\n")

  ci_instancevalidated_durations = []
  ci_provisioned_durations = []
  for item in ci_data["items"]:
    ci_name = item["metadata"]["name"]
    ci_status = "unknown"
    ci_creationTimestamp = _parse_ts(item["metadata"]["creationTimestamp"])
    # Condition timestamps stay "" when the condition is absent or not True
    ci_instancevalidated_ts = ""
    ci_renderedtemplates_ts = ""
    ci_renderedtemplatesvalidated_ts = ""
    ci_renderedtemplatesapplied_ts = ""
    ci_provisioned_ts = ""

    if "status" in item and "conditions" in item["status"]:
      for condition in item["status"]["conditions"]:
        if "type" in condition and "status" in condition:
          if condition["type"] == "ClusterInstanceValidated" and condition["status"] == "True":
            ci_instancevalidated_ts = _parse_ts(condition["lastTransitionTime"])
          elif condition["type"] == "RenderedTemplates" and condition["status"] == "True":
            ci_renderedtemplates_ts = _parse_ts(condition["lastTransitionTime"])
          elif condition["type"] == "RenderedTemplatesValidated" and condition["status"] == "True":
            ci_renderedtemplatesvalidated_ts = _parse_ts(condition["lastTransitionTime"])
          elif condition["type"] == "RenderedTemplatesApplied" and condition["status"] == "True":
            ci_renderedtemplatesapplied_ts = _parse_ts(condition["lastTransitionTime"])
          elif condition["type"] == "Provisioned" and condition["status"] == "True":
            ci_provisioned_ts = _parse_ts(condition["lastTransitionTime"])
            ci_status = "Provisioned"
        else:
          # Bug fix: original referenced undefined `ici_name` (NameError) and
          # labeled the message "ICI" — both copy-paste remnants
          logger.warning("CI: {}, 'type' or 'status' missing in condition: {}".format(ci_name, condition))
    else:
      # Bug fix: original message said "imageclusterinstall object"; this script
      # processes clusterinstance objects
      logger.warning("status or conditions not found in clusterinstance object: {}".format(item))

    logger.info("{}, {}, {}, {}, {}, {}, {}, {}".format(
        ci_name, ci_status, ci_creationTimestamp, ci_instancevalidated_ts, ci_renderedtemplates_ts,
        ci_renderedtemplatesvalidated_ts, ci_renderedtemplatesapplied_ts, ci_provisioned_ts))

    # Every duration is guarded against missing timestamps (0 when a stage is
    # absent); the original only guarded the later stages and crashed when the
    # InstanceValidated or RenderedTemplates conditions were missing.
    ci_ct_iv_duration = _duration_seconds(ci_instancevalidated_ts, ci_creationTimestamp)
    ci_iv_rt_duration = _duration_seconds(ci_renderedtemplates_ts, ci_instancevalidated_ts)
    ci_rt_rtv_duration = _duration_seconds(ci_renderedtemplatesvalidated_ts, ci_renderedtemplates_ts)
    ci_rtv_rta_duration = _duration_seconds(ci_renderedtemplatesapplied_ts, ci_renderedtemplatesvalidated_ts)
    ci_rta_p_duration = _duration_seconds(ci_provisioned_ts, ci_renderedtemplatesapplied_ts)
    total_duration = _duration_seconds(ci_provisioned_ts, ci_creationTimestamp)

    ci_instancevalidated_durations.append(ci_ct_iv_duration)
    if ci_status == "Provisioned":
      ci_provisioned_durations.append(total_duration)

    with open(ci_csv_file, "a") as csv_file:
      csv_file.write(
          "{},{},{},{},{},{},{},{},{},{},{},{},{},{}\n".format(ci_name, ci_status,
          ci_creationTimestamp, ci_instancevalidated_ts, ci_renderedtemplates_ts,
          ci_renderedtemplatesvalidated_ts, ci_renderedtemplatesapplied_ts, ci_provisioned_ts,
          ci_ct_iv_duration, ci_iv_rt_duration, ci_rt_rtv_duration, ci_rtv_rta_duration,
          ci_rta_p_duration, total_duration))

  logger.info("Writing Stats: {}".format(ci_stats_file))

  with open(ci_stats_file, "w") as stats_file:
    _report_duration_stats(
        stats_file,
        "Stats on ClusterInstances CRs with CreationTimeStamp until InstanceValidated Timestamp",
        ci_instancevalidated_durations)
    _report_duration_stats(
        stats_file,
        "Total Duration Stats only on ClusterInstances CRs in Provisioned",
        ci_provisioned_durations)

  end_time = time.time()
  logger.info("Took {}s".format(round(end_time - start_time, 1)))
210+
211+
# Script entry point. main() returns None, so sys.exit(None) exits with status 0;
# error paths inside main() call sys.exit(1) directly.
if __name__ == "__main__":
  sys.exit(main())

0 commit comments

Comments
 (0)