Commit
Fix #296 & pdr-web#284: get_predictoor_info.py refactor & lite module for serving pdr metrics (#300)

* pdr-web-issue-284 part 1

* Threading in every 5 mins and serving with the endpoint

* issue-296

* reformat issues

* reformat issue

* mypy issues

* Pylint issues

* inconsistent return type issue

* Black and pylint issue

* function naming and routename issues

* calculation based on slot data

* black lint issue

* debug prints are deleted

* Issue296: Type definition fixes, function descriptions are added

* black and pylint fixes

* Issue296: Tests are added

* issue296: missing enforce_types are added
kdetry authored Nov 9, 2023
1 parent c37a76d commit 95131b1
Showing 16 changed files with 1,479 additions and 257 deletions.
5 changes: 3 additions & 2 deletions .gitignore
@@ -167,6 +167,7 @@ cython_debug/
# predictoor dynamic modeling
out*.txt
csvs/

# pdr_backend accuracy output
pdr_backend/accuracy/output/*.json
# pm2 configs
scripts/pm2-config/*private*
3 changes: 3 additions & 0 deletions mypy.ini
@@ -61,3 +61,6 @@ ignore_missing_imports = True

[mypy-pdr_backend.predictoor.examples.*]
ignore_missing_imports = True

[mypy-flask.*]
ignore_missing_imports = True
153 changes: 153 additions & 0 deletions pdr_backend/accuracy/app.py
@@ -0,0 +1,153 @@
import threading
import json
from datetime import datetime, timedelta
from typing import Tuple
from enforce_typing import enforce_types
from flask import Flask, jsonify

from pdr_backend.util.subgraph_predictions import get_all_contract_ids_by_owner
from pdr_backend.util.subgraph_slot import calculate_statistics_for_all_assets
from pdr_backend.util.subgraph_predictions import fetch_contract_id_and_spe

app = Flask(__name__)
JSON_FILE_PATH = "pdr_backend/accuracy/output/accuracy_data.json"


@enforce_types
def calculate_timeframe_timestamps(contract_timeframe: str) -> Tuple[int, int]:
    """
    Calculates and returns a tuple of Unix timestamps for a start and end time
    based on a given contract timeframe. The start time is either 2 weeks or
    4 weeks in the past, depending on whether the contract timeframe is
    5 minutes or 1 hour, respectively. The end time is the current timestamp.

    Args:
        contract_timeframe (str): The contract timeframe, "5m" for 5 minutes
            or other string values for different timeframes.

    Returns:
        Tuple[int, int]: A tuple containing the start and end Unix timestamps.
    """
    end_ts = int(datetime.utcnow().timestamp())
    time_delta = (
        timedelta(weeks=2) if contract_timeframe == "5m" else timedelta(weeks=4)
    )
    start_ts = int((datetime.utcnow() - time_delta).timestamp())

    return start_ts, end_ts


@enforce_types
def save_statistics_to_file():
    """
    Periodically fetches and saves statistical data to a JSON file.

    This function runs an infinite loop that fetches contract statistics
    every 5 minutes. It uses prefetched contract addresses and timeframes
    to gather the statistics and save them to a file in JSON format.
    If the process encounters an exception, it prints an error message and
    continues after the next interval.

    The data includes statistics for each contract, grouped by the
    "seconds per epoch" value defined for each statistic type.
    """
    network_param = "mainnet"  # or "testnet", depending on your preference

    statistic_types = [
        {
            "alias": "5m",
            "seconds_per_epoch": 300,
        },
        {
            "alias": "1h",
            "seconds_per_epoch": 3600,
        },
    ]

    contract_addresses = get_all_contract_ids_by_owner(
        "0x4ac2e51f9b1b0ca9e000dfe6032b24639b172703", network_param
    )

    contract_information = fetch_contract_id_and_spe(contract_addresses, network_param)

    while True:
        try:
            output = []

            for statistic_type in statistic_types:
                seconds_per_epoch = statistic_type["seconds_per_epoch"]
                contracts = list(
                    filter(
                        lambda item, spe=seconds_per_epoch: int(
                            item["seconds_per_epoch"]
                        )
                        == spe,
                        contract_information,
                    )
                )

                start_ts_param, end_ts_param = calculate_timeframe_timestamps(
                    statistic_type["alias"]
                )

                contract_ids = [contract["id"] for contract in contracts]

                # Get statistics for all contracts in this timeframe group
                statistics = calculate_statistics_for_all_assets(
                    contract_ids, start_ts_param, end_ts_param, network_param
                )

                output.append(
                    {
                        "alias": statistic_type["alias"],
                        "statistics": statistics,
                    }
                )

            with open(JSON_FILE_PATH, "w") as f:
                json.dump(output, f)

            print("Data saved to JSON")
        except Exception as e:
            print("Error:", e)

        threading.Event().wait(300)  # Wait for 5 minutes (300 seconds)


@enforce_types
@app.route("/statistics", methods=["GET"])
def serve_statistics_from_file():
    """
    Serves statistical data from a JSON file via a GET request.

    When a GET request is made to the "/statistics" route, this function
    reads the statistical data from the JSON file and returns it as a JSON
    response. If the file cannot be read or another error occurs, it
    returns a 500 Internal Server Error.
    """
    try:
        with open(JSON_FILE_PATH, "r") as f:
            data = json.load(f)
        return jsonify(data)
    except Exception as e:
        return jsonify({"error": "Internal Server Error", "message": str(e)}), 500


if __name__ == "__main__":
    # Start the thread that saves statistics data to a file every 5 minutes
    thread = threading.Thread(target=save_statistics_to_file)
    thread.start()

    app.run(debug=True)
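
With the app running, the new endpoint can be exercised from a short client script. A minimal sketch, assuming Flask's default development address (http://127.0.0.1:5000) and that the requests package is available:

import requests

# Fetch the cached statistics; the /statistics route simply replays
# the JSON file written by save_statistics_to_file().
resp = requests.get("http://127.0.0.1:5000/statistics", timeout=10)
resp.raise_for_status()

for entry in resp.json():
    # Each entry pairs a timeframe alias ("5m" or "1h") with its statistics.
    print(entry["alias"], entry["statistics"])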
Empty file.
23 changes: 23 additions & 0 deletions pdr_backend/models/prediction.py
@@ -0,0 +1,23 @@
class Prediction:
    # pylint: disable=too-many-instance-attributes
    def __init__(
        self,
        pair,
        timeframe,
        prediction,
        stake,
        trueval,
        timestamp,
        source,
        payout,
        user,
    ) -> None:
        self.pair = pair
        self.timeframe = timeframe
        self.prediction = prediction
        self.stake = stake
        self.trueval = trueval
        self.timestamp = timestamp
        self.source = source
        self.payout = payout
        self.user = user
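
For illustration, a hedged sketch of constructing a Prediction; every field value below is invented, and the keyword names follow the __init__ signature above:

from pdr_backend.models.prediction import Prediction

pred = Prediction(
    pair="BTC/USDT",       # trading pair
    timeframe="5m",        # prediction timeframe
    prediction=True,       # predicted direction (up)
    stake=10.0,            # amount staked
    trueval=True,          # realized outcome
    timestamp=1648576500,  # Unix timestamp of the slot
    source="binance",      # exchange the pair is quoted on
    payout=19.2,           # payout received
    user="0x123",          # predictoor address (placeholder)
)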
7 changes: 7 additions & 0 deletions pdr_backend/simulation/test/test_timeutil.py
@@ -9,6 +9,7 @@
    dt_to_ut,
    ut_to_dt,
    timestr_to_ut,
    ms_to_seconds,
)


@@ -53,3 +54,9 @@ def test_dt_to_ut_and_back():

    dt2 = ut_to_dt(ut)
    assert dt2 == dt


@enforce_types
def test_ms_to_seconds():
    seconds = ms_to_seconds(1648576500000)
    assert seconds == 1648576500
6 changes: 6 additions & 0 deletions pdr_backend/simulation/timeutil.py
@@ -61,3 +61,9 @@ def ut_to_dt(ut: int) -> datetime.datetime:
    assert ut2 == ut, (ut, ut2)

    return dt


@enforce_types
def ms_to_seconds(ms: int) -> int:
    """Convert milliseconds to seconds"""
    return int(ms / 1000)
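
A design note on the conversion: int(ms / 1000) goes through float division and truncates toward zero, whereas floor division rounds toward negative infinity; the two only diverge for negative inputs, which cannot occur for epoch-millisecond timestamps. A quick sketch:

assert int(1648576500000 / 1000) == 1648576500000 // 1000 == 1648576500
assert int(-1500 / 1000) == -1  # truncates toward zero
assert -1500 // 1000 == -2      # floors toward negative infinity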
36 changes: 36 additions & 0 deletions pdr_backend/util/csvs.py
@@ -0,0 +1,36 @@
import os
import csv


def write_prediction_csv(all_predictions, csv_output_dir):
    if not os.path.exists(csv_output_dir):
        os.makedirs(csv_output_dir)

    data = {}
    for prediction in all_predictions:
        key = (
            prediction.pair.replace("/", "-") + prediction.timeframe + prediction.source
        )
        if key not in data:
            data[key] = []
        data[key].append(prediction)

    for key, predictions in data.items():
        predictions.sort(key=lambda x: x.timestamp)
        filename = os.path.join(csv_output_dir, key + ".csv")
        with open(filename, "w", newline="") as file:
            writer = csv.writer(file)
            writer.writerow(
                ["Predicted Value", "True Value", "Timestamp", "Stake", "Payout"]
            )
            for prediction in predictions:
                writer.writerow(
                    [
                        prediction.prediction,
                        prediction.trueval,
                        prediction.timestamp,
                        prediction.stake,
                        prediction.payout,
                    ]
                )
        print(f"CSV file '{filename}' created successfully.")
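
To show how the new Prediction model and this writer fit together, a minimal sketch; the prediction values are invented for illustration:

from pdr_backend.models.prediction import Prediction
from pdr_backend.util.csvs import write_prediction_csv

predictions = [
    Prediction("BTC/USDT", "5m", True, 10.0, True, 1648576500, "binance", 19.2, "0x123"),
    Prediction("BTC/USDT", "5m", False, 5.0, True, 1648576800, "binance", 0.0, "0x123"),
]

# Groups by (pair, timeframe, source) and writes one file per group,
# here "./csvs/BTC-USDT5mbinance.csv", sorted by timestamp.
write_prediction_csv(predictions, "./csvs")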
20 changes: 20 additions & 0 deletions pdr_backend/util/networkutil.py
@@ -38,3 +38,23 @@ def send_encrypted_tx(
        gasCost,
        nonce,
    )


@enforce_types
def get_subgraph_url(network: str) -> str:
    """
    Returns the subgraph URL for the given network.

    Args:
        network (str): The network name ("mainnet" or "testnet").

    Returns:
        str: The subgraph URL for the specified network.
    """
    if network not in ["mainnet", "testnet"]:
        raise ValueError(
            "Invalid network. Acceptable values are 'mainnet' or 'testnet'."
        )

    # pylint: disable=line-too-long
    return f"https://v4.subgraph.sapphire-{network}.oceanprotocol.com/subgraphs/name/oceanprotocol/ocean-subgraph"
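
A small usage sketch of the new helper; the invalid-network call is included only to show the guard:

from pdr_backend.util.networkutil import get_subgraph_url

url = get_subgraph_url("mainnet")
print(url)  # ...sapphire-mainnet.../ocean-subgraph

try:
    get_subgraph_url("sapphire")  # anything but "mainnet"/"testnet"
except ValueError as e:
    print(e)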