From 3c8f4a28e8885829db36c4168be6c2b7e3fd5c95 Mon Sep 17 00:00:00 2001
From: Gwyn
Date: Sun, 24 Oct 2021 09:35:13 +0800
Subject: [PATCH] WIP

---
 .gitignore        |  1 +
 data_functions.py | 32 +++++++++++++++++++++++++------
 main.py           | 49 +++++++++++++++++++++++++++++++++++++----------
 3 files changed, 66 insertions(+), 16 deletions(-)

diff --git a/.gitignore b/.gitignore
index 7e05d62..644d6df 100644
--- a/.gitignore
+++ b/.gitignore
@@ -130,6 +130,7 @@ dmypy.json
 
 ### Working files
 *.csv
+*.json
 
 ### Personal Stuff
 csv/
diff --git a/data_functions.py b/data_functions.py
index bf7e914..9f93d5b 100644
--- a/data_functions.py
+++ b/data_functions.py
@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import datetime, timedelta
 
 def process_header(header):
     header = header.lower()
@@ -16,12 +16,14 @@
     return header
 
 
-def process_detail(header, detail):
-
+def process_dates(header, detail):
     if header in ('tracking_start', 'tracking_end', 'alarm_scheduled'):
-        detail = datetime.strptime(detail, '%d. %m. %Y %H:%M').strftime('%Y-%m-%d %H:%M')
+        datetime_value = datetime.strptime(detail, '%d. %m. %Y %H:%M')
 
-    return detail
+    if header == 'id':
+        datetime_value = datetime.fromtimestamp(int(detail)/1000)
+
+    return datetime_value
 
 def process_event(event):
     event_parts = event.split('-')
@@ -42,4 +44,22 @@
         'event_value': event_value
     }
 
-    return event_dict
\ No newline at end of file
+    return event_dict
+
+def process_actigraphy(time, value, start_time):
+    act_time_part = datetime.strptime(time, '%H:%M').time()
+    start_time_part = start_time.time()
+    start_time_date = start_time.date()
+    next_day_date = start_time_date + timedelta(days = 1)
+
+    if act_time_part > start_time_part:
+        act_datetime = datetime.combine(start_time_date, act_time_part)
+    else:
+        act_datetime = datetime.combine(next_day_date, act_time_part)
+
+    act_dict = {
+        'actigraphic_time': act_datetime.strftime('%Y-%m-%d %H:%M'),
+        'actigraphic_value': value
+    }
+
+    return act_dict
\ No newline at end of file
diff --git a/main.py b/main.py
index 99246b1..be334a4 100644
--- a/main.py
+++ b/main.py
@@ -1,4 +1,5 @@
 import csv, json, data_functions as df
+from datetime import datetime
 
 # Step 1: Open CSV and read
 # Step 2: Select first row
@@ -28,7 +29,7 @@
 # IF field = Snore: ?
 # IF field = Noise: ?
 # IF field = Cycles: ?
-# IF field = DeepSleep: ? Is this a percent?
+# IF field = DeepSleep: ? Is this a percent?l
 # IF field = LenAdjust: ?
 # IF field = Geo: ?
 # IF field = digit-type: actigraphy, learn about it and how to display
@@ -54,7 +55,6 @@
 
 def conversion(csv_file):
     first_pass = []
-    json_array = []
 
     # read CSV file
     with open(csv_file, encoding='utf-8') as csvf:
@@ -80,25 +80,54 @@
             dictionary = dict(zip_it)
             first_pass.append(dictionary)
 
-    i = 0
+    json_array = []
 
     for record in first_pass:
+        events = []
+        actigraphies = []
+
         for key in record:
+            headers = []
+            details = []
             val = record[key]
-            #print("{}: {}".format(key, val))
             header = df.process_header(key)
-            print(header)
+
+            if header in ('id', 'tracking_start', 'tracking_end', 'alarm_scheduled'):
+                datetime_value = df.process_dates(header, val)
+
+                if header == 'id':
+                    id = datetime_value
+                else:
+                    val = datetime.strftime(datetime_value, '%Y-%m-%d %H:%M')
 
             if header.startswith('event'):
                 event = df.process_event(val)
-                print(event)
-
-                #field = df.process_detail(header, val)
-                #print(field)
-                #break
+                header = 'events'
+                events.append(event)
+                val = {header: events}
+
+            elif header[0].isdigit():
+                actigraphy = df.process_actigraphy(header, val, id)
+                header = 'actigraphy'
+                actigraphies.append(actigraphy)
+                val = {header: actigraphies}
 
+            headers.append(header)
+            details.append(val)
+
+        zip_it = zip(headers, details)
+        json_array.append(dict(zip_it))
+        break
 
 
+    result = [json.dumps(record) for record in json_array]
+
+    # convert Python json_array to JSON String and write to file
+    with open(r'sleep-export.json', 'w', encoding='utf-8') as jsonf:
+        for d in result:
+            jsonf.write(''.join(d))
+            jsonf.write('\n')
+
 
 csv_file = r'sleep-export.csv'
 conversion(csv_file)