Merge pull request #13 from mmann1123/5minute
1 minute refresh timer
mmann1123 authored May 30, 2024
2 parents c1827d1 + a314d55 commit b806eca
Showing 1 changed file with 116 additions and 103 deletions.
219 changes: 116 additions & 103 deletions osm_leaderboard/dash_app.py
@@ -1,18 +1,17 @@
# %%

import base64
import io
import dash
from dash import dcc, html, dash_table
from dash.dependencies import Input, Output, State
import yaml
import requests
from multiprocessing import Pool, freeze_support
import pandas as pd
import geopandas as gpd
import webbrowser
from threading import Timer
import os
import signal
from flask import Flask
import platform
import socket
@@ -41,7 +40,7 @@
dcc.Upload(
id="upload-data",
children=html.Div(
["Drag and Drop or ", html.A("Select \n config.yaml file")]
["Drag and Drop or ", html.A("Select a config.yaml file")]
),
style={
"width": "100%",
@@ -64,6 +63,50 @@
dash_table.DataTable(id="table"),
],
),
dcc.Interval(
id="interval-component",
interval=60 * 1000, # 1 minute
n_intervals=0,
max_intervals=60,
),
dcc.Store(
id="stored-data"
), # Hidden storage for persisting data across callbacks
html.Div(
[
html.A(
html.Img(
src="https://github.com/mmann1123/OSM_LeaderBoard/blob/main/video/gw.png?raw=true",
style={"height": "70px", "margin-right": "10px"},
),
href="https://geography.columbian.gwu.edu/",
),
html.A(
html.Img(
src="https://github.com/mmann1123/OSM_LeaderBoard/blob/main/video/pygis.png?raw=true",
style={"height": "70px", "margin-right": "10px"},
),
href="https://pygis.io",
),
html.A(
html.Img(
src="https://github.com/mmann1123/OSM_LeaderBoard/blob/main/video/youthmappers.webp?raw=true",
style={"height": "70px"},
),
href="https://www.youthmappers.org/",
),
html.Br(),
html.Br(),
html.Br(),
html.A(
html.Img(
src="https://zenodo.org/badge/DOI/10.5281/zenodo.11387666.svg"
),
href="https://doi.org/10.5281/zenodo.11387666",
),
],
style={"textAlign": "center", "padding": "20px"},
),
]
)
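For reference, the Interval settings above imply roughly an hour of automatic refreshes. A quick back-of-the-envelope check (not part of the diff; the values are copied from the component above):

interval_ms = 60 * 1000   # one tick per minute, matching the "1 minute refresh timer" in the PR title
max_intervals = 60        # the Interval stops firing after 60 ticks
print(max_intervals * interval_ms / 60_000)  # 60.0 minutes, i.e. about one hour of live updates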

@@ -75,10 +118,6 @@ def convert_to_iso8601(date_str):
return None
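The body of convert_to_iso8601 is collapsed in this view. As a sketch only (not the project's implementation), such a helper typically turns a plain config date like "2024-05-01" into the ISO 8601 timestamp that date-based OpenStreetMap queries expect; the helper name and input date format below are assumptions:

from datetime import datetime

def convert_to_iso8601_sketch(date_str):
    # hypothetical stand-in; assumes dates arrive as "YYYY-MM-DD"
    if not date_str:
        return None
    try:
        return datetime.strptime(date_str, "%Y-%m-%d").strftime("%Y-%m-%dT%H:%M:%SZ")
    except ValueError:
        return None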


# Logging application start
logging.info("Starting application setup.")


# Function to fetch node count for a username
def fetch_node_count(username, newer_date, bbox):
logging.debug(
@@ -115,118 +154,93 @@ def fetch_node_count(username, newer_date, bbox):
return username, "N/A"
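Most of fetch_node_count is collapsed above. As a rough sketch, assuming it issues a standard Overpass API request with user and newer filters (the endpoint, query text, and function name here are assumptions, not code from this commit):

import requests

OVERPASS_URL = "https://overpass-api.de/api/interpreter"  # assumed public endpoint

def fetch_node_count_sketch(username, newer_date, bbox):
    # bbox arrives as "min_lat,min_lon,max_lat,max_lon", which already matches
    # the (south, west, north, east) order an Overpass bbox filter expects
    newer = f'(newer:"{newer_date}")' if newer_date else ""
    query = f'[out:json];node({bbox})(user:"{username}"){newer};out count;'
    resp = requests.post(OVERPASS_URL, data={"data": query}, timeout=60)
    resp.raise_for_status()
    # "out count;" returns one synthetic element whose tags hold the totals as strings
    return username, resp.json()["elements"][0]["tags"]["nodes"]

The real function returns "N/A" on failure (visible above), so a sketch like this would wrap the request in a try/except in practice.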


# Callback to handle the file upload and initialize data
@app.callback(
Output("stored-data", "data"),
Input("upload-data", "contents"),
State("upload-data", "filename"),
)
def handle_upload(contents, filename):
if contents:
content_type, content_string = contents.split(",")
decoded = base64.b64decode(content_string)
try:
if "yaml" in name:
# Assume that the user uploaded a yaml file
if "yaml" in filename:
config = yaml.safe_load(io.StringIO(decoded.decode("utf-8")))
return config # Store the entire configuration
except Exception as e:
logging.error("Failed to process uploaded file:", exc_info=True)
return None
return None
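handle_upload stores the parsed YAML as-is, so the uploaded config.yaml only needs the keys that update_data reads back out below: bbox, usernames, and optionally newer_than_date. A hypothetical example (coordinates and usernames are made up), parsed the same way the callback does:

import yaml

sample_config = """
bbox: "38.88,-77.05,38.92,-77.00"  # min_lat, min_lon, max_lat, max_lon
usernames:
  - mapper_one
  - mapper_two
newer_than_date: "2024-05-01"
"""
config = yaml.safe_load(sample_config)
print(config["usernames"])  # ['mapper_one', 'mapper_two']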


# Callback to update data based on the interval
@app.callback(
[
Output("output-data-upload", "children"),
Output("map", "srcDoc"),
Output("table", "columns"),
Output("table", "data"),
],
[Input("interval-component", "n_intervals"), Input("stored-data", "data")],
)
def update_data(n_intervals, stored_data):
if stored_data:
bbox = stored_data["bbox"]
usernames = stored_data["usernames"]
newer_date = convert_to_iso8601(stored_data.get("newer_than_date"))

with Pool(processes=len(usernames)) as pool:
results = pool.starmap(
fetch_node_count,
[(username, newer_date, bbox) for username in usernames],
)

user_node_counts = {username: count for username, count in results}
df = pd.DataFrame(user_node_counts.items(), columns=["Username", "Node_Count"])
df.sort_values(by="Node_Count", ascending=False, inplace=True)

# Generate a simple GeoDataFrame with a bounding box
min_lat, min_lon, max_lat, max_lon = map(float, bbox.split(","))
bbox_gpd = gpd.GeoDataFrame.from_features(
[
{
"type": "Feature",
"properties": {},
"geometry": {
"type": "Polygon",
"coordinates": [
[
[min_lon, min_lat],
[min_lon, max_lat],
[max_lon, max_lat],
[max_lon, min_lat],
[min_lon, min_lat],
]
],
},
}
],
crs="EPSG:4326",
)

map_obj = bbox_gpd.explore(style_kwds={"fillColor": "blue", "color": "black"})
map_src = map_obj.get_root().render()

return (
f"Remaining updates: {60-n_intervals}",
map_src,
[{"name": i, "id": i} for i in df.columns],
df.to_dict("records"),
)
return dash.no_update, dash.no_update, dash.no_update, dash.no_update


# Define a function to stop the server
def stop_server():
if platform.system() == "Windows":
os.kill(os.getpid(), signal.SIGTERM)
else:
os.kill(os.getpid(), signal.SIGINT)


def open_browser(port):
webbrowser.open_new(f"http://127.0.0.1:{port}/")


def main():
print('This app brought to you by https://pygis.io')
logging.info("Executing main block.")
# Ensure compatibility with Windows exe for threading
if platform.system() == "Windows":
@@ -252,7 +266,6 @@ def main():

if __name__ == "__main__":
main()

# %%
# build the app with pyinstaller
# pyinstaller --onefile --name leaderboard_linux dash_app.py
