Commit

Style: Black formatting
Akhil-Sharma30 committed Oct 14, 2024
1 parent bdd2487 commit 83a5b0e
Showing 8 changed files with 265 additions and 117 deletions.
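
Since every hunk in this commit comes from the same tool, a minimal sketch of what Black does may help when reading the diff. It is not part of the commit: it assumes the black package is installed and uses its documented format_str API; compute_stock_metrics and its argument names are made-up identifiers used only as input text.

# Sketch only: reformat a snippet with Black's Python API (pip install black).
import black

# A call that exceeds Black's default 88-character line limit.
source = (
    "value = compute_stock_metrics(ticker_symbol, closing_prices, trading_days, normalization_factor)\n"
)

# format_str applies the same rules as the `black` CLI; calls that are too long
# are split across lines to stay under the limit, as seen in the hunks below.
formatted = black.format_str(source, mode=black.FileMode())
print(formatted)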
220 changes: 151 additions & 69 deletions pystocktopus/GUI.py

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pystocktopus/__main__.py
@@ -1,3 +1,3 @@
from .GUI import main

main()
main()
1 change: 1 addition & 0 deletions pystocktopus/config.py
@@ -3,6 +3,7 @@
pystocktopus.
"""

from __future__ import annotations

import os
16 changes: 12 additions & 4 deletions pystocktopus/core.py
@@ -27,7 +27,7 @@ def ticker_data_collection(
timespan: str,
multiplier: int,
user_date: str,
days: int
days: int,
) -> list[float]:
"""Extracts stock data closing price from Polygon.io.
@@ -80,7 +80,11 @@ def ticker_data_collection(

# Store the close_list in the dictionary with ticker as the key
ticker_data[ticker] = close_list
logging.info("Collected %d data points for ticker: %s", len(close_list), ticker)
logging.info(
"Collected %d data points for ticker: %s",
len(close_list),
ticker,
)
else:
logging.warning("No results found for ticker: %s", ticker)

@@ -111,8 +115,12 @@ def CalculateDate(start_date_str: str, days_lag: int):
# Calculate the start_date by subtracting the days_lag from the end_date
start_date = end_date - datetime.timedelta(days=days_lag)

logging.info("Calculated start date: %s from end date: %s with a lag of %d days",
start_date.strftime("%Y-%m-%d"), start_date_str, days_lag)
logging.info(
"Calculated start date: %s from end date: %s with a lag of %d days",
start_date.strftime("%Y-%m-%d"),
start_date_str,
days_lag,
)

# Return the start_date as a string in the format "YYYY-MM-DD"
return start_date.strftime("%Y-%m-%d")
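
As an aside on the date arithmetic reformatted in the hunk above, here is a minimal standalone sketch of the end-date-minus-lag calculation that CalculateDate performs; it is an approximation written for illustration, not the library function itself.

# Sketch only: the date-lag logic from core.py, reproduced standalone.
import datetime


def start_date_with_lag(end_date_str: str, days_lag: int) -> str:
    """Return the date days_lag days before end_date_str, as YYYY-MM-DD."""
    end_date = datetime.datetime.strptime(end_date_str, "%Y-%m-%d")
    start_date = end_date - datetime.timedelta(days=days_lag)
    return start_date.strftime("%Y-%m-%d")


# Example with the values used in pystocktopus/main.py below.
print(start_date_with_lag("2023-10-01", 340))  # 2022-10-26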
38 changes: 25 additions & 13 deletions pystocktopus/main.py
@@ -8,31 +8,40 @@

# Set up logging configuration
logging.basicConfig(
filename="Demo_file_logs.log",
level=logging.INFO,
format="%(asctime)s - %(levelname)s - %(message)s"
filename="Demo_file_logs.log",
level=logging.INFO,
format="%(asctime)s - %(levelname)s - %(message)s",
)


def main():
try:
data = StockExtractor.ticker_data_collection(ticker_values=ticker_values, timespan=timespan, multiplier=multiplier, user_date=user_date,days=days)
data = StockExtractor.ticker_data_collection(
ticker_values=ticker_values,
timespan=timespan,
multiplier=multiplier,
user_date=user_date,
days=days,
)
logging.info(f"Stock data collected for tickers: {ticker_values}")
except Exception as e:
logging.error(f"Error collecting stock data: {str(e)}")

try:
CSVDataHandler.close_list_csv(data)
logging.info("CSV data list successfully closed and saved.")

result = CSVDataHandler.combine_data_csv(data_values=ticker_values, close_list=data)

result = CSVDataHandler.combine_data_csv(
data_values=ticker_values, close_list=data
)
logging.info(f"CSV data combined successfully: {result}")
except Exception as e:
logging.error(f"Error handling CSV data: {str(e)}")

try:
df = pd.read_csv("Your-CSV-FilePath")
logging.info("CSV file loaded successfully.")

# Store the second column name in a variable
second_column_name = df.columns[1]
logging.info(f"Second column name: {second_column_name}")
@@ -43,7 +52,7 @@ def main():
prediction = ModelStockData.create_fit_train_rnn(
csv_file="GUI_GENERATED_DATA.csv",
epochs=400,
stock_closing_price_column_name="Your-column-name-for-analysis-and-training"
stock_closing_price_column_name="Your-column-name-for-analysis-and-training",
)
logging.info(f"Model training and prediction complete: {prediction}")
except Exception as e:
@@ -52,19 +61,22 @@ def main():
try:
result = News.new_data_extract(ticker_values=Article, predict_date=predict)
logging.info(f"News data extracted for analysis: {result}")

ans = News.news_predict_analysis(result)
logging.info(f"News analysis result: {ans}")
except Exception as e:
logging.error(f"Error in news analysis: {str(e)}")

try:
News.create_csv_with_predictions(csv_filename="stock_data.csv", analysis_results=ans)
News.create_csv_with_predictions(
csv_filename="stock_data.csv", analysis_results=ans
)
logging.info("CSV file created with predictions.")
except Exception as e:
logging.error(f"Error creating CSV with predictions: {str(e)}")

if __name__=="__main__":

if __name__ == "__main__":
config.api_key = "your-polygon-api-key"
config.news_api = "your-news-api-key"

@@ -74,7 +86,7 @@ def main():
multiplier = 2
user_date = "2023-10-01"
predict = "2024-10-01"
days=340
days = 340

# Calling the func
main(ticker_values,Article,timespan,multiplier,user_date,predict,days)
main(ticker_values, Article, timespan, multiplier, user_date, predict, days)
19 changes: 14 additions & 5 deletions pystocktopus/news_analysis.py
@@ -20,6 +20,7 @@
level=logging.INFO, # Log level
)


class News:
"""Class for handling news data."""

@@ -64,7 +65,9 @@ def _combine_news_article(all_articles):
@staticmethod
def new_data_extract(ticker_values, predict_date, days: int = 10):
"""Extracts news articles for a given list of tickers and date range."""
logging.info(f"Extracting news data for tickers: {ticker_values} from {predict_date} for {days} days.")
logging.info(
f"Extracting news data for tickers: {ticker_values} from {predict_date} for {days} days."
)
load_dotenv()

try:
@@ -85,11 +88,13 @@ def new_data_extract(ticker_values, predict_date, days: int = 10):
from_param=start_date,
to=end_date,
language="en",
sort_by="relevancy"
sort_by="relevancy",
)

# Log the number of articles fetched
logging.info(f"Fetched {len(all_articles.get('articles', []))} articles for {ticker}.")
logging.info(
f"Fetched {len(all_articles.get('articles', []))} articles for {ticker}."
)

# Store the result in the dictionary
results_dict[ticker] = all_articles
@@ -128,8 +133,12 @@ def news_predict_analysis(data: dict[str, str]) -> dict[str, str]:
positive += 1

# Store the analysis results in the dictionary
analysis_results[ticker] = "NEGATIVE" if negative >= positive else "POSITIVE"
logging.info(f"Sentiment for {ticker}: {'NEGATIVE' if negative >= positive else 'POSITIVE'}")
analysis_results[ticker] = (
"NEGATIVE" if negative >= positive else "POSITIVE"
)
logging.info(
f"Sentiment for {ticker}: {'NEGATIVE' if negative >= positive else 'POSITIVE'}"
)

return analysis_results

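
To make the aggregation rule in news_predict_analysis easier to follow, here is a minimal standalone sketch of the same majority vote over per-article sentiment labels; the input dictionary and the aggregate_sentiment name are hypothetical stand-ins, not part of the project's API.

# Sketch only: majority vote over article sentiment labels, mirroring the
# NEGATIVE/POSITIVE rule shown in the hunk above.
def aggregate_sentiment(article_labels: dict[str, list[str]]) -> dict[str, str]:
    """Mark a ticker NEGATIVE when negative labels are at least as common."""
    analysis_results: dict[str, str] = {}
    for ticker, labels in article_labels.items():
        negative = sum(1 for label in labels if label == "NEGATIVE")
        positive = sum(1 for label in labels if label == "POSITIVE")
        analysis_results[ticker] = "NEGATIVE" if negative >= positive else "POSITIVE"
    return analysis_results


# Hypothetical pre-labelled headlines for one ticker.
print(aggregate_sentiment({"AAPL": ["POSITIVE", "NEGATIVE", "POSITIVE"]}))
# {'AAPL': 'POSITIVE'}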
42 changes: 29 additions & 13 deletions pystocktopus/stock_csv.py
@@ -33,7 +33,9 @@ def csv_data_reader(csv_file, csv_stock_column_name: str) -> list[str]:
data_values: list[str] = []

try:
logging.info(f"Attempting to read column '{csv_stock_column_name}' from CSV file: {csv_file}")
logging.info(
f"Attempting to read column '{csv_stock_column_name}' from CSV file: {csv_file}"
)
with open(csv_file) as file:
csv_reader = csv.reader(file)

@@ -51,15 +53,19 @@ def csv_data_reader(csv_file, csv_stock_column_name: str) -> list[str]:
# Check if the column was found
if data_column_index == -1:
raise ValueError(f"No {csv_stock_column_name} column found.")

logging.info(f"Column '{csv_stock_column_name}' found at index {data_column_index}.")

logging.info(
f"Column '{csv_stock_column_name}' found at index {data_column_index}."
)

# Read the data from the specified column
for row in csv_reader:
if data_column_index < len(row):
data_values.append(row[data_column_index])

logging.info(f"Successfully read {len(data_values)} rows from column '{csv_stock_column_name}'.")
logging.info(
f"Successfully read {len(data_values)} rows from column '{csv_stock_column_name}'."
)

except FileNotFoundError:
logging.error(f"File not found: {csv_file}")
@@ -88,7 +94,9 @@ def _getValue(closing_price: dict[float, float]):
last_value = value_list[-1]
last_values.append(last_value)

logging.info(f"Extracted {len(last_values)} values from closing price dictionary.")
logging.info(
f"Extracted {len(last_values)} values from closing price dictionary."
)
return last_values

@staticmethod
@@ -101,7 +109,7 @@ def combine_data_csv(
results: list[float] = []
for bought, closing_price in zip(data_values, data_extractor):
results.append(float(bought) * float(closing_price))

logging.info(f"Combined data into {len(results)} result values.")
return results

@@ -111,14 +119,18 @@ def update_csv(
) -> None:
"""Updates the CSV file with a new column for the calculated values."""
try:
logging.info(f"Updating CSV file: {csv_path} with new column '{new_column_name}'.")

logging.info(
f"Updating CSV file: {csv_path} with new column '{new_column_name}'."
)

df = pd.read_csv(csv_path)
df[new_column_name] = results

df.to_csv(csv_path, index=False)

logging.info(f"Successfully added '{new_column_name}' column with {len(results)} values to the CSV file.")
logging.info(
f"Successfully added '{new_column_name}' column with {len(results)} values to the CSV file."
)

except FileNotFoundError:
logging.error(f"CSV file not found: {csv_path}")
@@ -131,12 +143,12 @@ def update_csv(
def close_list_csv(
ticker_data: dict[str, list[float]],
closing_data_fieldname: list[str] | None = None,
csv_file_name="stock_data.csv"
csv_file_name="stock_data.csv",
) -> None:
"""Stores the closing list stock results in a CSV file."""
if closing_data_fieldname is None:
closing_data_fieldname = ["closing_stock_data"]

if not ticker_data:
logging.warning("Ticker data is empty. No CSV file will be created.")
return
@@ -145,7 +157,9 @@ def close_list_csv(
logging.info(f"Writing closing stock data to CSV file: {csv_file_name}")
with open(csv_file_name, "w", newline="") as csvfile:
fieldnames = ["Date"] + [
f"{field}_{ticker}" for ticker in ticker_data for field in closing_data_fieldname
f"{field}_{ticker}"
for ticker in ticker_data
for field in closing_data_fieldname
]
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
@@ -158,7 +172,9 @@ def close_list_csv(
data_row[f"{field}_{ticker}"] = close_list[i]
writer.writerow(data_row)

logging.info(f"CSV file '{csv_file_name}' created successfully with {num_rows} rows.")
logging.info(
f"CSV file '{csv_file_name}' created successfully with {num_rows} rows."
)
file_path = os.path.abspath(csv_file_name)
logging.info(f"CSV file saved at: {file_path}")

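
For clarity on the column layout that close_list_csv writes, here is a small standalone sketch of one closing-price column per ticker using csv.DictWriter; the ticker values and file name are illustrative, and the Date column is filled with a plain row index because the hunk above does not show how the real dates are populated.

# Sketch only: per-ticker closing-price columns written with csv.DictWriter,
# following the fieldname pattern f"{field}_{ticker}" used above.
import csv

ticker_data = {"AAPL": [189.4, 190.1], "MSFT": [410.2, 408.9]}  # hypothetical values
fieldnames = ["Date"] + [f"closing_stock_data_{ticker}" for ticker in ticker_data]

with open("closing_prices_example.csv", "w", newline="") as csvfile:
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()
    num_rows = min(len(values) for values in ticker_data.values())
    for i in range(num_rows):
        row = {"Date": i}
        for ticker, close_list in ticker_data.items():
            row[f"closing_stock_data_{ticker}"] = close_list[i]
        writer.writerow(row)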
