Calculate Idle Time for All KeyTypes and Display Statistics #64

Open · wants to merge 1 commit into master
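
For context: the change asks Redis for OBJECT IDLETIME (the number of seconds since a key was last read or written) alongside TYPE, ENCODING and TTL, and folds the per-key values into Min/Max/Avg columns in every report. A minimal standalone sketch of the idea, assuming redis-py and a local Redis instance; the key pattern and printed layout here are illustrative, not the exact rma code:

    import statistics
    import redis

    r = redis.StrictRedis(host="localhost", port=6379, db=0)

    idle_times = []
    for key in r.scan_iter(match="*", count=1000):
        # OBJECT IDLETIME is approximate and only available with LRU or
        # noeviction maxmemory policies; under an LFU policy the call errors.
        idle_times.append(r.object("IDLETIME", key))

    if idle_times:
        print("idleTime Min:", min(idle_times))
        print("idleTime Max:", max(idle_times))
        print("idleTime Avg:", statistics.mean(idle_times))
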
15 changes: 12 additions & 3 deletions rma/rule/Hash.py
@@ -18,6 +18,7 @@ def __init__(self, info, redis):
self.values = []
self.encoding = info["encoding"]
self.ttl = info["ttl"]
self.idleTime = info["idleTime"]

for key, value in redis.hscan_iter(key_name, '*'):
self.keys.append(key)
@@ -66,7 +67,7 @@ class HashAggregator(object):
def __init__(self, all_obj, total):
self.total_elements = total

g00, g0, g1, g2, g3, v1, v2, ttl = tee(all_obj, 8)
g00, g0, g1, g2, g3, v1, v2, ttl, idleTime = tee(all_obj, 9)

self.encoding = pref_encoding([obj.encoding for obj in g00], redis_encoding_id_to_str)
self.system = sum(obj.system for obj in g0)
@@ -87,6 +88,10 @@ def __init__(self, all_obj, total):
self.ttlMin = min(ttls)
self.ttlMax = max(ttls)
self.ttlAvg = statistics.mean( ttls ) if len(ttls) > 1 else min(ttls)
idleTimes = [obj.idleTime for obj in idleTime]
self.idleTimeMin = min(idleTimes)
self.idleTimeMax = max(idleTimes)
self.idleTimeAvg = statistics.mean( idleTimes ) if len(idleTimes) > 1 else min(idleTimes)

def __enter__(self):
return self
@@ -106,7 +111,7 @@ def __init__(self, redis):
def analyze(self, keys, total=0):
key_stat = {
'headers': ['Match', "Count", "Avg field count", "Key mem", "Real", "Ratio", "Value mem", "Real", "Ratio",
"System", "Encoding", "Total mem", "Total aligned", "TTL Min", "TTL Max", "TTL Avg."],
"System", "Encoding", "Total mem", "Total aligned", "TTL Min", "TTL Max", "TTL Avg","idleTime Min", "idleTime Max", "idleTime Avg."],
'data': []
}

@@ -135,13 +140,17 @@ def analyze(self, keys, total=0):
agg.ttlMin,
agg.ttlMax,
agg.ttlAvg,
agg.idleTimeMin,
agg.idleTimeMax,
agg.idleTimeAvg,

]

key_stat['data'].append(stat_entry)

key_stat['data'].sort(key=lambda x: x[12], reverse=True)
key_stat['data'].append(
make_total_row(key_stat['data'], ['Total:', sum, 0, sum, sum, 0, sum, sum, 0, sum, '', sum, sum, min, max, math.nan]))
make_total_row(key_stat['data'], ['Total:', sum, 0, sum, sum, 0, sum, sum, 0, sum, '', sum, sum, min, max, math.nan, min, max, math.nan]))

progress.close()

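The aggregator above consumes one shared stream of per-key objects, so every extra statistic needs its own branch from itertools.tee (hence tee(all_obj, 8) becoming tee(all_obj, 9)). A toy sketch of that pattern, with a hypothetical Entry tuple standing in for the rule objects:

    import statistics
    from collections import namedtuple
    from itertools import tee

    Entry = namedtuple("Entry", ["ttl", "idleTime"])
    entries = iter([Entry(-1, 120), Entry(3600, 5), Entry(600, 40)])

    # Each tee branch can be consumed exactly once, like ttl/idleTime above.
    ttl_iter, idle_iter = tee(entries, 2)

    ttls = [e.ttl for e in ttl_iter]
    idle_times = [e.idleTime for e in idle_iter]
    print(min(idle_times), max(idle_times), statistics.mean(idle_times))
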
17 changes: 11 additions & 6 deletions rma/rule/KeyString.py
@@ -6,9 +6,10 @@
import math

class StringEntry(object):
def __init__(self, value="", ttl=-1):
def __init__(self, value="", ttl=-1, idleTime=-1):
self.encoding = get_string_encoding(value)
self.ttl = ttl
self.idleTime = idleTime
self.useful_bytes = len(value)
self.free_bytes = 0
self.aligned = size_of_aligned_string(value, encoding=self.encoding)
@@ -36,7 +37,7 @@ def analyze(self, keys, total=0):
:return:
"""
key_stat = {
'headers': ['Match', "Count", "Useful", "Real", "Ratio", "Encoding", "Min", "Max", "Avg", "TTL Min", "TTL Max", "TTL Avg."],
'headers': ['Match', "Count", "Useful", "Real", "Ratio", "Encoding", "Min", "Max", "Avg", "TTL Min", "TTL Max", "TTL Avg.","idleTime Min", "idleTime Max", "idleTime Avg"],
'data': []
}

@@ -46,8 +47,8 @@
leave=False)

for pattern, data in keys.items():
used_bytes_iter, aligned_iter, encoding_iter, ttl_iter = tee(
progress_iterator((StringEntry(value=x["name"], ttl=x["ttl"]) for x in data), progress), 4)
used_bytes_iter, aligned_iter, encoding_iter, ttl_iter, idle_time_iter = tee(
progress_iterator((StringEntry(value=x["name"], ttl=x["ttl"], idleTime=x["idleTime"]) for x in data), progress), 5)

total_elements = len(data)
if total_elements == 0:
@@ -70,15 +71,19 @@
min_ttl = min(ttls)
max_ttl = max(ttls)
avg_ttl = statistics.mean(ttls) if len(ttls) > 1 else min(ttls)
idle_times = [obj.idleTime for obj in idle_time_iter]
min_idle_time = min(idle_times)
max_idle_time = max(idle_times)
avg_idle_time = statistics.mean(idle_times) if len(idle_times) > 1 else min(idle_times)

stat_entry = [
pattern, total_elements, used_user, aligned, aligned / used_user, prefered_encoding,
min_value, max(max_iter), avg, min_ttl, max_ttl, avg_ttl
min_value, max(max_iter), avg, min_ttl, max_ttl, avg_ttl, min_idle_time, max_idle_time, avg_idle_time
]
key_stat['data'].append(stat_entry)

key_stat['data'].sort(key=lambda x: x[1], reverse=True)
key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, sum, 0, '', 0, 0, 0, min, max, math.nan]))
key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, sum, 0, '', 0, 0, 0, min, max, math.nan, min, max, math.nan]))

progress.close()

16 changes: 12 additions & 4 deletions rma/rule/List.py
@@ -15,6 +15,7 @@ def __init__(self, info, redis):
key_name = info["name"]
self.encoding = info['encoding']
self.ttl = info['ttl']
self.idleTime = info["idleTime"]

self.values = redis.lrange(key_name, 0, -1)
self.count = len(self.values)
@@ -46,8 +47,8 @@ class ListAggregator(object):
def __init__(self, all_obj, total):
self.total_elements = total

encode_iter, sys_iter, avg_iter, stdev_iter, min_iter, max_iter, value_used_iter, value_align_iter, ttl_iter = \
tee(all_obj, 9)
encode_iter, sys_iter, avg_iter, stdev_iter, min_iter, max_iter, value_used_iter, value_align_iter, ttl_iter, idle_time_iter = \
tee(all_obj, 10)

self.encoding = pref_encoding([obj.encoding for obj in encode_iter], redis_encoding_id_to_str)
self.system = sum(obj.system for obj in sys_iter)
@@ -75,6 +76,10 @@ def __init__(self, all_obj, total):
self.ttlMin = min(ttls)
self.ttlMax = max(ttls)
self.ttlAvg = statistics.mean( ttls ) if len(ttls) > 1 else min(ttls)
idleTimes = [obj.idleTime for obj in idle_time_iter]
self.idleTimeMin = min(idleTimes)
self.idleTimeMax = max(idleTimes)
self.idleTimeAvg = statistics.mean(idleTimes) if len(idleTimes) > 1 else min(idleTimes)

def __enter__(self):
return self
@@ -93,7 +98,7 @@ def __init__(self, redis):

def analyze(self, keys, total=0):
key_stat = {
'headers': ['Match', "Count", "Avg Count", "Min Count", "Max Count", "Stdev Count", "Value mem", "Real", "Ratio", "System", "Encoding", "Total", 'TTL Min', 'TTL Max', 'TTL Avg'],
'headers': ['Match', "Count", "Avg Count", "Min Count", "Max Count", "Stdev Count", "Value mem", "Real", "Ratio", "System", "Encoding", "Total", 'TTL Min', 'TTL Max', 'TTL Avg',"idleTime Min", "idleTime Max", "idleTime Avg"],
'data': []
}

@@ -121,13 +126,16 @@ def analyze(self, keys, total=0):
agg.ttlMin,
agg.ttlMax,
agg.ttlAvg,
agg.idleTimeMin,
agg.idleTimeMax,
agg.idleTimeAvg,
]

key_stat['data'].append(stat_entry)
progress.update()

key_stat['data'].sort(key=lambda x: x[8], reverse=True)
key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, 0, 0, 0, 0, sum, sum, 0, sum, '', sum, min, max, math.nan]))
key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, 0, 0, 0, 0, sum, sum, 0, sum, '', sum, min, max, math.nan, min, max, math.nan]))

progress.close()

14 changes: 11 additions & 3 deletions rma/rule/Set.py
@@ -18,6 +18,7 @@ def __init__(self, info, redis):
self.values = [v for v in redis.sscan_iter(key_name, '*', 1000)]
self.encoding = info["encoding"]
self.ttl = info["ttl"]
self.idleTime = info["idleTime"]
self.count = len(self.values)

if self.encoding == REDIS_ENCODING_ID_HASHTABLE:
@@ -39,7 +40,7 @@ class SetAggregator(object):
def __init__(self, all_obj, total):
self.total_elements = total

g00, g0, g3, v1, v2, v3, ttl = tee(all_obj, 7)
g00, g0, g3, v1, v2, v3, ttl, idleTime = tee(all_obj, 8)

self.encoding = pref_encoding([obj.encoding for obj in g00], redis_encoding_id_to_str)
self.system = sum(obj.system for obj in g0)
Expand All @@ -59,6 +60,10 @@ def __init__(self, all_obj, total):
self.ttlMin = min(ttls)
self.ttlMax = max(ttls)
self.ttlAvg = statistics.mean( ttls ) if len(ttls) > 1 else min(ttls)
idleTimes = [obj.idleTime for obj in idleTime]
self.idleTimeMin = min(idleTimes)
self.idleTimeMax = max(idleTimes)
self.idleTimeAvg = statistics.mean(idleTimes) if len(idleTimes) > 1 else min(idleTimes)

def __enter__(self):
return self
@@ -77,7 +82,7 @@ def __init__(self, redis):

def analyze(self, keys, total=0):
key_stat = {
'headers': ['Match', "Count", "Avg Count", "Value mem", "Real", "Ratio", "System*", "Encoding", "Total", "TTL Min", "TTL Max", "TTL Avg."],
'headers': ['Match', "Count", "Avg Count", "Value mem", "Real", "Ratio", "System*", "Encoding", "Total", "TTL Min", "TTL Max", "TTL Avg.","idleTime Min", "idleTime Max", "idleTime Avg."],
'data': []
}

Expand All @@ -102,12 +107,15 @@ def analyze(self, keys, total=0):
agg.ttlMin,
agg.ttlMax,
agg.ttlAvg,
agg.idleTimeMin,
agg.idleTimeMax,
agg.idleTimeAvg,
]

key_stat['data'].append(stat_entry)

key_stat['data'].sort(key=lambda x: x[8], reverse=True)
key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, 0, sum, sum, 0, sum, '', sum, min, max, math.nan]))
key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, 0, sum, sum, 0, sum, '', sum, min, max, math.nan, min, max, math.nan]))

progress.close()

13 changes: 11 additions & 2 deletions rma/rule/ValueString.py
@@ -33,6 +33,7 @@ def __init__(self, redis, info, use_debug=True):
key_name = info["name"]
self.encoding = info["encoding"]
self.ttl = info["ttl"]
self.idleTime = info["idleTime"]
self.logger = logging.getLogger(__name__)

if self.encoding == REDIS_ENCODING_ID_INT:
@@ -65,7 +66,7 @@ def __init__(self, redis):

def analyze(self, keys, total=0):
key_stat = {
'headers': ['Match', "Count", "Useful", "Free", "Real", "Ratio", "Encoding", "Min", "Max", "Avg", "TTL Min", "TTL Max", "TTL Avg"],
'headers': ['Match', "Count", "Useful", "Free", "Real", "Ratio", "Encoding", "Min", "Max", "Avg", "TTL Min", "TTL Max", "TTL Avg","idleTime Min", "idleTime Max", "idleTime Avg"],
'data': []
}

@@ -81,6 +82,7 @@
aligned_bytes = []
encodings = []
ttl = []
idleTime = []

for key_info in progress_iterator(data, progress):
try:
@@ -90,6 +92,7 @@
aligned_bytes.append(stat.aligned)
encodings.append(stat.encoding)
ttl.append(stat.ttl)
idleTime.append(stat.idleTime)
except RedisError as e:
# This code works in real time, so a key may be deleted and this code may fail
error_string = repr(e)
@@ -111,6 +114,9 @@
min_ttl = min(ttl) if len(ttl) >= 1 else -1
max_ttl = max(ttl) if len(ttl) >= 1 else -1
mean_ttl = statistics.mean(ttl) if len(ttl) > 1 else min_ttl
min_idle_time = min(idleTime) if len(idleTime) >= 1 else -1
max_idle_time = max(idleTime) if len(idleTime) >= 1 else -1
mean_idle_time = statistics.mean(idleTime) if len(idleTime) > 1 else min_idle_time

stat_entry = [
pattern,
Expand All @@ -126,11 +132,14 @@ def analyze(self, keys, total=0):
min_ttl,
max_ttl,
mean_ttl,
min_idle_time,
max_idle_time,
mean_idle_time,
]
key_stat['data'].append(stat_entry)

key_stat['data'].sort(key=lambda e: e[1], reverse=True)
key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, 0, sum, 0, '', 0, 0, 0, min, max, math.nan]))
key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, 0, sum, 0, '', 0, 0, 0, min, max, math.nan, min, max, math.nan]))

progress.close()

11 changes: 7 additions & 4 deletions rma/scanner.py
@@ -32,7 +32,8 @@ def __init__(self, redis, match="*", accepted_types=None):
local type = redis.call("TYPE", KEYS[i])
local encoding = redis.call("OBJECT", "ENCODING",KEYS[i])
local ttl = redis.call("TTL", KEYS[i])
ret[i] = {type["ok"], encoding, ttl}
local idleTime = redis.call("OBJECT", "IDLETIME",KEYS[i])
ret[i] = {type["ok"], encoding, ttl, idleTime}
end
return cmsgpack.pack(ret)
""")
@@ -76,7 +77,8 @@ def resolve_with_pipe(self, ret):
pipe.type(key)
pipe.object('ENCODING', key)
pipe.ttl(key)
key_with_types = [{'type': x, 'encoding': y, 'ttl': z} for x, y, z in chunker(pipe.execute(), 3)]
pipe.object('IDLETIME', key)
key_with_types = [{'type': x, 'encoding': y, 'ttl': z, 'idleTime': i} for x, y, z, i in chunker(pipe.execute(), 4)]
return key_with_types

def scan(self, limit=1000):
@@ -86,7 +88,7 @@ def scan(self, limit=1000):
total = 0
for key_tuple in self.batch_scan():
key_info, key_name = key_tuple
key_type, key_encoding, key_ttl = key_info
key_type, key_encoding, key_ttl, key_idle_time = key_info
if not key_name:
self.logger.warning(
'\r\nWarning! Scan iterator return key with empty name `` and type %s', key_type)
@@ -98,7 +100,8 @@
'name': key_name.decode("utf-8", "replace"),
'type': to_id,
'encoding': redis_encoding_str_to_id(key_encoding),
'ttl': key_ttl
'ttl': key_ttl,
'idleTime': key_idle_time
}
yield key_info_obj

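For the pipeline path, resolve_with_pipe now issues one extra OBJECT IDLETIME call per key and regroups the flat reply in chunks of four. A self-contained sketch of that round trip, assuming redis-py; chunker is re-implemented here for illustration since rma ships its own helper:

    import redis

    def chunker(seq, size):
        # Split a flat reply list into consecutive fixed-size slices.
        return (seq[pos:pos + size] for pos in range(0, len(seq), size))

    r = redis.StrictRedis(host="localhost", port=6379, db=0)
    keys = list(r.scan_iter(count=1000))

    pipe = r.pipeline(transaction=False)
    for key in keys:
        pipe.type(key)
        pipe.object("ENCODING", key)
        pipe.ttl(key)
        pipe.object("IDLETIME", key)

    meta = [
        {"type": t, "encoding": e, "ttl": ttl, "idleTime": idle}
        for t, e, ttl, idle in chunker(pipe.execute(), 4)
    ]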