Richer activity title with more sport types and locations (#733)
* show activity name with more detailed type

* Added a flag for whether to turn on rich activity titles
NaturezzZ authored Nov 24, 2024
1 parent 8e1f868 commit fb4290a
Showing 11 changed files with 216 additions and 34 deletions.
40 changes: 37 additions & 3 deletions run_page/garmin_sync.py
@@ -108,6 +108,13 @@ async def get_activities(self, start, limit):
url = url + "&activityType=running"
return await self.fetch_data(url)

async def get_activity_summary(self, activity_id):
"""
Fetch activity summary
"""
url = f"{self.modern_url}/activity-service/activity/{activity_id}"
return await self.fetch_data(url)

async def download_activity(self, activity_id, file_type="gpx"):
url = f"{self.modern_url}/download-service/export/{file_type}/activity/{activity_id}"
if file_type == "fit":
@@ -287,6 +294,16 @@ async def download_new_activities(
to_generate_garmin_ids = list(set(activity_ids) - set(downloaded_ids))
print(f"{len(to_generate_garmin_ids)} new activities to be downloaded")

to_generate_garmin_id2title = {}
for id in to_generate_garmin_ids:
try:
activity_summary = await client.get_activity_summary(id)
activity_title = activity_summary.get("activityName", "")
to_generate_garmin_id2title[id] = activity_title
except Exception as e:
print(f"Failed to get activity summary {id}: {str(e)}")
continue

start_time = time.time()
await gather_with_concurrency(
10,
@@ -298,7 +315,7 @@ async def download_new_activities(
print(f"Download finished. Elapsed {time.time()-start_time} seconds")

await client.req.aclose()
return to_generate_garmin_ids
return to_generate_garmin_ids, to_generate_garmin_id2title


if __name__ == "__main__":
@@ -350,6 +367,14 @@ async def download_new_activities(
os.mkdir(folder)
downloaded_ids = get_downloaded_ids(folder)

if file_type == "fit":
gpx_folder = FOLDER_DICT["gpx"]
if not os.path.exists(gpx_folder):
os.mkdir(gpx_folder)
downloaded_gpx_ids = get_downloaded_ids(gpx_folder)
# merge downloaded_ids:list
downloaded_ids = list(set(downloaded_ids + downloaded_gpx_ids))

loop = asyncio.get_event_loop()
future = asyncio.ensure_future(
download_new_activities(
@@ -362,7 +387,16 @@
)
)
loop.run_until_complete(future)
new_ids, id2title = future.result()
# fit may contain gpx(maybe upload by user)
if file_type == "fit":
make_activities_file(SQL_FILE, FOLDER_DICT["gpx"], JSON_FILE, file_suffix="gpx")
make_activities_file(SQL_FILE, folder, JSON_FILE, file_suffix=file_type)
make_activities_file(
SQL_FILE,
FOLDER_DICT["gpx"],
JSON_FILE,
file_suffix="gpx",
activity_title_dict=id2title,
)
make_activities_file(
SQL_FILE, folder, JSON_FILE, file_suffix=file_type, activity_title_dict=id2title
)
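
A note on the new return value: download_new_activities now hands back a (new_ids, id2title) pair, where id2title maps each downloaded Garmin activity id to the "activityName" fetched via get_activity_summary. A minimal calling sketch (illustrative, not part of this diff), assuming run_page is on the import path and with download_args standing in for the arguments each sync script already builds:

import asyncio

from garmin_sync import download_new_activities


def run_download(*download_args):
    """Run download_new_activities and unpack its two-element result."""
    loop = asyncio.get_event_loop()
    future = asyncio.ensure_future(download_new_activities(*download_args))
    loop.run_until_complete(future)
    new_ids, id2title = future.result()  # id2title: Garmin activity id -> title
    return new_ids, id2title
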
10 changes: 7 additions & 3 deletions run_page/garmin_sync_cn_global.py
@@ -64,7 +64,7 @@
)
)
loop.run_until_complete(future)
new_ids = future.result()
new_ids, id2title = future.result()

to_upload_files = []
for i in new_ids:
@@ -89,5 +89,9 @@

# Step 2:
# Generate track from fit/gpx file
make_activities_file(SQL_FILE, GPX_FOLDER, JSON_FILE, file_suffix="gpx")
make_activities_file(SQL_FILE, FIT_FOLDER, JSON_FILE, file_suffix="fit")
make_activities_file(
SQL_FILE, GPX_FOLDER, JSON_FILE, file_suffix="gpx", activity_title_dict=id2title
)
make_activities_file(
SQL_FILE, FIT_FOLDER, JSON_FILE, file_suffix="fit", activity_title_dict=id2title
)
2 changes: 1 addition & 1 deletion run_page/garmin_to_strava_sync.py
@@ -65,7 +65,7 @@
)
)
loop.run_until_complete(future)
new_ids = future.result()
new_ids, id2title = future.result()
print(f"To upload to strava {len(new_ids)} files")
index = 1
for i in new_ids:
7 changes: 5 additions & 2 deletions run_page/generator/__init__.py
@@ -76,9 +76,11 @@ def sync(self, force):
sys.stdout.flush()
self.session.commit()

def sync_from_data_dir(self, data_dir, file_suffix="gpx"):
def sync_from_data_dir(self, data_dir, file_suffix="gpx", activity_title_dict={}):
loader = track_loader.TrackLoader()
tracks = loader.load_tracks(data_dir, file_suffix=file_suffix)
tracks = loader.load_tracks(
data_dir, file_suffix=file_suffix, activity_title_dict=activity_title_dict
)
print(f"load {len(tracks)} tracks")
if not tracks:
print("No tracks found.")
@@ -120,6 +122,7 @@ def sync_from_app(self, app_tracks):
self.session.commit()

def load(self):
# if sub_type is not in the db, just add an empty string to it
activities = (
self.session.query(Activity)
.filter(Activity.distance > 0.1)
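
Generator.sync_from_data_dir just forwards activity_title_dict to the track loader. An illustrative sketch of the richer call (the database path, folder and activity id are placeholders; Generator(sql_file) is the constructor already used by run_page/utils.py):

from generator import Generator

generator = Generator("data.db")  # placeholder SQLite path
generator.sync_from_data_dir(
    "GPX_OUT",  # folder of downloaded .gpx files, placeholder
    file_suffix="gpx",
    activity_title_dict={"1234567890": "Evening Trail Run"},  # Garmin id -> title
)
activities = generator.load()  # entries should now carry name, type and subtype
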
46 changes: 43 additions & 3 deletions run_page/generator/db.py
@@ -5,7 +5,16 @@

import geopy
from geopy.geocoders import Nominatim
from sqlalchemy import Column, Float, Integer, Interval, String, create_engine
from sqlalchemy import (
Column,
Float,
Integer,
Interval,
String,
create_engine,
inspect,
text,
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

@@ -29,6 +38,7 @@ def randomword():
"distance",
"moving_time",
"type",
"subtype",
"start_date",
"start_date_local",
"location_country",
@@ -47,6 +57,7 @@ class Activity(Base):
moving_time = Column(Interval)
elapsed_time = Column(Interval)
type = Column(String)
subtype = Column(String)
start_date = Column(String)
start_date_local = Column(String)
location_country = Column(String)
@@ -106,6 +117,7 @@ def update_or_create_activity(session, run_activity):
moving_time=run_activity.moving_time,
elapsed_time=run_activity.elapsed_time,
type=run_activity.type,
subtype=run_activity.subtype,
start_date=run_activity.start_date,
start_date_local=run_activity.start_date_local,
location_country=location_country,
@@ -123,6 +135,7 @@ def update_or_create_activity(session, run_activity):
activity.moving_time = run_activity.moving_time
activity.elapsed_time = run_activity.elapsed_time
activity.type = run_activity.type
activity.subtype = run_activity.subtype
activity.average_heartrate = run_activity.average_heartrate
activity.average_speed = float(run_activity.average_speed)
activity.summary_polyline = (
@@ -135,10 +148,37 @@ def update_or_create_activity(session, run_activity):
return created


def add_missing_columns(engine, model):
inspector = inspect(engine)
table_name = model.__tablename__
columns = {col["name"] for col in inspector.get_columns(table_name)}
missing_columns = []

for column in model.__table__.columns:
if column.name not in columns:
missing_columns.append(column)
if missing_columns:
with engine.connect() as conn:
for column in missing_columns:
column_type = str(column.type)
conn.execute(
text(
f"ALTER TABLE {table_name} ADD COLUMN {column.name} {column_type}"
)
)


def init_db(db_path):
engine = create_engine(
f"sqlite:///{db_path}", connect_args={"check_same_thread": False}
)
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)
return session()

# check missing columns
add_missing_columns(engine, Activity)

sm = sessionmaker(bind=engine)
session = sm()
# apply the changes
session.commit()
return session
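
init_db now calls add_missing_columns before returning a session, so a database created before this commit gets the new subtype column through ALTER TABLE rather than a manual migration. A small verification sketch, assuming the Activity model's table name is activities and using a placeholder database path:

import sqlite3

from generator.db import init_db

session = init_db("data.db")  # existing database created before this change
session.close()

with sqlite3.connect("data.db") as conn:
    columns = [row[1] for row in conn.execute("PRAGMA table_info(activities)")]
print("subtype" in columns)  # expected to be True once add_missing_columns has run
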
19 changes: 12 additions & 7 deletions run_page/gpxtrackposter/track.py
@@ -41,6 +41,7 @@ def __init__(self):
self.file_names = []
self.polylines = []
self.polyline_str = ""
self.track_name = None
self.start_time = None
self.end_time = None
self.start_time_local = None
@@ -52,6 +53,7 @@ def __init__(self):
self.run_id = 0
self.start_latlng = []
self.type = "Run"
self.subtype = None # for fit file
self.device = ""

def load_gpx(self, file_name):
@@ -190,6 +192,8 @@ def _load_gpx_data(self, gpx):
polyline_container = []
heart_rate_list = []
for t in gpx.tracks:
if self.track_name is None:
self.track_name = t.name
for s in t.segments:
try:
extensions = [
@@ -246,7 +250,11 @@ def _load_fit_data(self, fit: dict):
self.average_heartrate = (
message["avg_heart_rate"] if "avg_heart_rate" in message else None
)
self.type = message["sport"].lower()
if message["sport"].lower() == "running":
self.type = "Run"
else:
self.type = message["sport"].lower()
self.subtype = message["sub_sport"] if "sub_sport" in message else None

# moving_dict
self.moving_dict["distance"] = message["total_distance"]
@@ -333,12 +341,9 @@ def _get_moving_data(gpx):
def to_namedtuple(self, run_from="gpx"):
d = {
"id": self.run_id,
"name": (
f"run from {run_from} by {self.device}"
if self.device
else f"run from {run_from}"
), # maybe change later
"type": "Run", # Run for now only support run for now maybe change later
"name": (self.track_name if self.track_name else ""), # maybe change later
"type": self.type,
"subtype": (self.subtype if self.subtype else ""),
"start_date": self.start_time.strftime("%Y-%m-%d %H:%M:%S"),
"end": self.end_time.strftime("%Y-%m-%d %H:%M:%S"),
"start_date_local": self.start_time_local.strftime("%Y-%m-%d %H:%M:%S"),
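
Track now remembers the GPX <name> element as track_name, keeps the FIT sport as the activity type (normalizing "running" to "Run"), and stores sub_sport as subtype; to_namedtuple() exposes all three. An illustrative sketch with a placeholder GPX path:

from gpxtrackposter.track import Track

track = Track()
track.load_gpx("GPX_OUT/1234567890.gpx")  # placeholder path
t = track.to_namedtuple(run_from="gpx")
# name is taken from the GPX <name> element instead of "run from gpx ...";
# for FIT files, type/subtype would come from the sport/sub_sport fields.
print(t.name, t.type, t.subtype)
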
25 changes: 18 additions & 7 deletions run_page/gpxtrackposter/track_loader.py
@@ -24,24 +24,33 @@
log = logging.getLogger(__name__)


def load_gpx_file(file_name):
def load_gpx_file(file_name, activity_title_dict={}):
"""Load an individual GPX file as a track by using Track.load_gpx()"""
t = Track()
t.load_gpx(file_name)
file_id = os.path.basename(file_name).split(".")[0]
if activity_title_dict:
t.track_name = activity_title_dict.get(file_id, t.track_name)
return t


def load_tcx_file(file_name):
def load_tcx_file(file_name, activity_title_dict={}):
"""Load an individual TCX file as a track by using Track.load_tcx()"""
t = Track()
t.load_tcx(file_name)
file_id = os.path.basename(file_name).split(".")[0]
if activity_title_dict:
t.track_name = activity_title_dict.get(file_id, t.track_name)
return t


def load_fit_file(file_name):
def load_fit_file(file_name, activity_title_dict={}):
"""Load an individual FIT file as a track by using Track.load_fit()"""
t = Track()
t.load_fit(file_name)
file_id = os.path.basename(file_name).split(".")[0]
if activity_title_dict:
t.track_name = activity_title_dict.get(file_id, t.track_name)
return t


@@ -66,15 +75,17 @@ def __init__(self):
"fit": load_fit_file,
}

def load_tracks(self, data_dir, file_suffix="gpx"):
def load_tracks(self, data_dir, file_suffix="gpx", activity_title_dict={}):
"""Load tracks data_dir and return as a List of tracks"""
file_names = [x for x in self._list_data_files(data_dir, file_suffix)]
print(f"{file_suffix.upper()} files: {len(file_names)}")

tracks = []

loaded_tracks = self._load_data_tracks(
file_names, self.load_func_dict.get(file_suffix, load_gpx_file)
file_names,
self.load_func_dict.get(file_suffix, load_gpx_file),
activity_title_dict,
)

tracks.extend(loaded_tracks.values())
@@ -146,14 +157,14 @@ def _merge_tracks(tracks):
return merged_tracks

@staticmethod
def _load_data_tracks(file_names, load_func=load_gpx_file):
def _load_data_tracks(file_names, load_func=load_gpx_file, activity_title_dict={}):
"""
TODO refactor with _load_tcx_tracks
"""
tracks = {}
with concurrent.futures.ProcessPoolExecutor() as executor:
future_to_file_name = {
executor.submit(load_func, file_name): file_name
executor.submit(load_func, file_name, activity_title_dict): file_name
for file_name in file_names
}
for future in concurrent.futures.as_completed(future_to_file_name):
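
The title map is keyed by the file name stem, which for files downloaded by garmin_sync.py is the Garmin activity id, so tracks are renamed as they are loaded (including inside the ProcessPoolExecutor workers). An illustrative sketch with a placeholder folder and id:

from gpxtrackposter.track_loader import TrackLoader

loader = TrackLoader()
tracks = loader.load_tracks(
    "GPX_OUT",  # placeholder folder of .gpx files
    file_suffix="gpx",
    activity_title_dict={"1234567890": "Sunday Long Run"},  # file stem -> title
)
for track in tracks:
    print(track.file_names, track.track_name)
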
1 change: 0 additions & 1 deletion run_page/keep_to_strava_sync.py
@@ -23,7 +23,6 @@


def run_keep_sync(email, password, keep_sports_data_api, with_download_gpx=False):

if not os.path.exists(KEEP2STRAVA_BK_PATH):
file = open(KEEP2STRAVA_BK_PATH, "w")
file.close()
8 changes: 6 additions & 2 deletions run_page/utils.py
@@ -48,9 +48,13 @@ def to_date(ts):
raise ValueError(f"cannot parse timestamp {ts} into date with fmts: {ts_fmts}")


def make_activities_file(sql_file, data_dir, json_file, file_suffix="gpx"):
def make_activities_file(
sql_file, data_dir, json_file, file_suffix="gpx", activity_title_dict={}
):
generator = Generator(sql_file)
generator.sync_from_data_dir(data_dir, file_suffix=file_suffix)
generator.sync_from_data_dir(
data_dir, file_suffix=file_suffix, activity_title_dict=activity_title_dict
)
activities_list = generator.load()
with open(json_file, "w") as f:
json.dump(activities_list, f)
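
make_activities_file is the entry point the sync scripts call, so passing activity_title_dict here is enough to carry the Garmin titles through to the generated JSON. An end-to-end sketch with placeholder paths; id2title is what download_new_activities now returns as its second element:

from utils import make_activities_file

id2title = {"1234567890": "Lunch Swim"}  # placeholder; normally returned by download_new_activities
make_activities_file(
    "data.db",  # SQL_FILE
    "GPX_OUT",  # data_dir of downloaded .gpx files
    "activities.json",  # JSON_FILE output path
    file_suffix="gpx",
    activity_title_dict=id2title,
)
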