Morningstar Data #467

Closed
wants to merge 9 commits into from
14 changes: 14 additions & 0 deletions docs/source/readers/morningstar.rst
@@ -0,0 +1,14 @@
Morningstar
-----------

.. py:module:: pandas_datareader.mstar.daily

.. autoclass:: MorningstarDailyReader
:members:
:inherited-members:

.. py:module:: pandas_datareader.mstar.financials

.. autoclass:: MorningstarDailyReader
:members:
:inherited-members:
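
For orientation, the class documented by the first directive lives at the following import path (a minimal sketch, assuming the package from this branch is importable):

# Sketch: import path matching the py:module/autoclass directives above.
from pandas_datareader.mstar.daily import MorningstarDailyReader

help(MorningstarDailyReader)  # shows the same docstring Sphinx autodoc renders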
2 changes: 2 additions & 0 deletions docs/source/whatsnew/v0.6.0.txt
@@ -50,6 +50,8 @@ Enhancements
- A new data connector for data provided by the Bank of Canada was
introduced. (:issue:`440`)

- A new data connector for stock pricing data provided by Morningstar was
  introduced. (:issue:`467`)

.. _whatsnew_060.api_breaking:

Backwards incompatible API changes
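As a quick illustration of the new connector (a minimal sketch; the ticker and date range are arbitrary examples, and assume this branch is installed):

# Sketch: fetching daily prices through the top-level DataReader dispatch.
import pandas_datareader.data as web

df = web.DataReader("AAPL", "morningstar",
                    start="2017-01-02", end="2017-12-29")
print(df.head())  # OHLC rows indexed by (Symbol, Date)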
20 changes: 12 additions & 8 deletions pandas_datareader/__init__.py
@@ -1,11 +1,13 @@
from ._version import get_versions

from .data import (get_components_yahoo, get_data_famafrench, get_data_google,
get_data_yahoo, get_data_enigma, get_data_yahoo_actions,
get_quote_google, get_quote_yahoo, get_tops_iex,
get_last_iex, get_markets_iex, get_summary_iex,
get_records_iex, get_recent_iex, get_iex_symbols,
get_iex_book, DataReader, Options)
from .data import (DataReader, Options, get_components_yahoo,
get_dailysummary_iex, get_data_enigma, get_data_famafrench,
get_data_fred, get_data_google, get_data_moex,
get_data_morningstar, get_data_quandl, get_data_stooq,
get_data_yahoo, get_data_yahoo_actions, get_iex_book,
get_iex_symbols, get_last_iex, get_markets_iex,
get_nasdaq_symbols,
get_quote_google, get_quote_yahoo, get_recent_iex,
get_records_iex, get_summary_iex, get_tops_iex)

__version__ = get_versions()['version']
del get_versions
@@ -16,4 +18,6 @@
'get_iex_book', 'get_iex_symbols', 'get_last_iex',
'get_markets_iex', 'get_recent_iex', 'get_records_iex',
'get_summary_iex', 'get_tops_iex',
'DataReader', 'Options']
           'get_nasdaq_symbols', 'get_data_quandl', 'get_data_moex',
           'get_data_fred', 'get_dailysummary_iex', 'get_data_morningstar',
           'get_data_stooq', 'DataReader', 'Options']
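
For completeness, the convenience wrapper exported above can be imported from the package top level (a sketch; the symbols are illustrative):

# Sketch: using the get_data_morningstar wrapper added to __all__ above.
from pandas_datareader import get_data_morningstar

# symbols may be a single ticker string or an iterable of tickers.
df = get_data_morningstar(symbols=["AAPL", "MSFT"],
                          start="2017-01-02", end="2017-06-30")
print(df.index.names)  # ['Symbol', 'Date']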
19 changes: 15 additions & 4 deletions pandas_datareader/data.py
@@ -8,18 +8,19 @@
from pandas_datareader.edgar import EdgarIndexReader
from pandas_datareader.enigma import EnigmaReader
from pandas_datareader.eurostat import EurostatReader
from pandas_datareader.exceptions import ImmediateDeprecationError, \
DEP_ERROR_MSG
from pandas_datareader.exceptions import DEP_ERROR_MSG, \
ImmediateDeprecationError
from pandas_datareader.famafrench import FamaFrenchReader
from pandas_datareader.fred import FredReader
from pandas_datareader.google.daily import GoogleDailyReader
from pandas_datareader.google.options import Options as GoogleOptions
from pandas_datareader.google.quotes import GoogleQuotesReader
from pandas_datareader.iex.daily import IEXDailyReader
from pandas_datareader.iex.deep import Deep as IEXDeep
from pandas_datareader.iex.tops import LastReader as IEXLasts
from pandas_datareader.iex.tops import TopsReader as IEXTops
from pandas_datareader.iex.tops import LastReader as IEXLasts, \
TopsReader as IEXTops
from pandas_datareader.moex import MoexReader
from pandas_datareader.mstar.daily import MorningstarDailyReader
from pandas_datareader.nasdaq_trader import get_nasdaq_symbols
from pandas_datareader.oecd import OECDReader
from pandas_datareader.quandl import QuandlReader
Expand All @@ -38,6 +39,7 @@
'get_tops_iex', 'get_summary_iex', 'get_records_iex',
'get_recent_iex', 'get_markets_iex', 'get_last_iex',
'get_iex_symbols', 'get_iex_book', 'get_dailysummary_iex',
           'get_data_morningstar',
'get_data_stooq', 'DataReader']


@@ -97,6 +99,10 @@ def get_last_iex(*args, **kwargs):
return IEXLasts(*args, **kwargs).read()


def get_data_morningstar(*args, **kwargs):
return MorningstarDailyReader(*args, **kwargs).read()


def get_markets_iex(*args, **kwargs):
"""
Returns near-real time volume data across markets segregated by tape
@@ -359,6 +365,11 @@ def DataReader(name, data_source=None, start=None, end=None,
return MoexReader(symbols=name, start=start, end=end,
retry_count=retry_count, pause=pause,
session=session).read()
elif data_source == "morningstar":
return MorningstarDailyReader(symbols=name, start=start, end=end,
retry_count=retry_count, pause=pause,
session=session, interval="d").read()

else:
msg = "data_source=%r is not implemented" % data_source
raise NotImplementedError(msg)
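Because the new DataReader branch forwards retry_count, pause and session to the reader, callers can reuse a single HTTP session and tune retry behaviour (a minimal sketch; the values are illustrative):

# Sketch: passing a shared requests session and retry settings through DataReader.
import requests
import pandas_datareader.data as web

session = requests.Session()
df = web.DataReader("MSFT", "morningstar",
                    start="2017-01-02", end="2017-03-31",
                    retry_count=2, pause=0.1, session=session)
print(len(df))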
4 changes: 4 additions & 0 deletions pandas_datareader/mstar/__init__.py
@@ -0,0 +1,4 @@
"""
Written and developed by Daniel Temkin.
Please refer to LICENSE for ownership and reference information.
"""
193 changes: 193 additions & 0 deletions pandas_datareader/mstar/daily.py
@@ -0,0 +1,193 @@
import time
from datetime import datetime, timedelta
from warnings import warn

import requests
from pandas import DataFrame

from pandas_datareader._utils import (SymbolWarning, _sanitize_dates)


class MorningstarDailyReader(object):

def __init__(self, start=None, end=None, *args, **kwargs):
if end is None:
end = datetime.today().strftime("%Y-%m-%d")

self.start, self.end = _sanitize_dates(start, end)

self.retry_count = kwargs.get("retry_count", 3)
self.pause = kwargs.get("pause", 0.001)
self.timeout = kwargs.get("timeout", 30)
self.session = kwargs.get("session", requests.session())

self.incl_splits = kwargs.get("incl_splits", False)
self.incl_dividends = kwargs.get("incl_dividends", False)
self.incl_vol = kwargs.get("incl_volume", True)
self.currency = kwargs.get("currency", "usd")
self.interval = kwargs.get("interval", "d")

self.symbols = kwargs.get("symbols")

self._symbol_data_cache = []

def _url_params(self):
if self.interval not in ['d', 'wk', 'mo', 'm', 'w']:
raise ValueError("Invalid interval: valid values are 'd', 'wk' "
"and 'mo'. 'm' and 'w' have been implemented for "
"backward compatibility")
elif self.interval in ['m', 'mo']:
self.interval = 'm'
elif self.interval in ['w', 'wk']:
self.interval = 'w'

if self.currency != "usd":
            warn("Caution! There is no explicit check for a valid currency "
                 "code. If an error is encountered, consider changing this "
                 "value.")

p = {"range": "|".join(
[self.start.strftime("%Y-%m-%d"), self.end.strftime("%Y-%m-%d")]),
"f": self.interval, "curry": self.currency,
"dtype": "his", "showVol": "true",
"hasF": "true", "isD": "true", "isS": "true",
"ProdCode": "DIRECT"}

return p

def _check_dates(self, *dates):
if dates[0] > dates[1]:
raise ValueError("Invalid start & end date! Start date cannot "
"be later than end date.")
else:
return dates[0], dates[1]

def _dl_mult_symbols(self, symbols):
failed = []
symbol_data = []
for symbol in symbols:

params = self._url_params()
params.update({"ticker": symbol})
_baseurl = "http://globalquote.morningstar.com/globalcomponent/" \
"RealtimeHistoricalStockData.ashx"

try:
                resp = self.session.get(_baseurl, params=params,
                                        timeout=self.timeout)
except Exception:
if symbol not in failed:
if self.retry_count == 0:
warn("skipping symbol %s: number of retries "
"exceeded." % symbol)
else:
print("adding %s to retry list" % symbol)
failed.append(symbol)
else:
if resp.status_code == requests.codes.ok:
jsdata = self._restruct_json(symbol=symbol,
jsondata=resp.json())
symbol_data.extend(jsdata)
else:
raise Exception("Request Error!: %s : %s" % (
resp.status_code, resp.reason))

time.sleep(self.pause)

        if len(failed) > 0 and self.retry_count > 0:
            # Decrement before retrying so the recursion is bounded.
            self.retry_count -= 1
            self._dl_mult_symbols(symbols=failed)
        else:
            self.retry_count = 0

        if self.retry_count == 0 and len(failed) > 0:
            warn("The following symbols were excluded due to http "
                 "request errors: \n %s" % failed, SymbolWarning)

symbols_df = DataFrame(data=symbol_data)
dfx = symbols_df.set_index(["Symbol", "Date"])
return dfx

@staticmethod
def _convert_index2date(enddate, indexvals):
i = 0
while i < len(indexvals):
days = indexvals[len(indexvals) - 1] - indexvals[i]
d = enddate - timedelta(days=days)
i += 1
yield d.strftime("%Y-%m-%d")

#
# def _adjust_close_price(price, event_type, event_value): #noqa
# if event_type is "split":
# e, s = event_value.split(":")
# adj=(price * int(s))/e
# elif event_type is "dividend":
# adj = price - float(event_value)
# else:
# raise ValueError("Invalid event_type")
# return adj

def _restruct_json(self, symbol, jsondata):

divdata = jsondata["DividendData"]

pricedata = jsondata["PriceDataList"][0]["Datapoints"]
dateidx = jsondata["PriceDataList"][0]["DateIndexs"]
volumes = jsondata["VolumeList"]["Datapoints"]

date_ = self._convert_index2date(enddate=self.end, indexvals=dateidx)
barss = []
for p in range(len(pricedata)):
bar = pricedata[p]
d = next(date_)
bardict = {
"Symbol": symbol, "Date": d, "Open": bar[0], "High": bar[1],
"Low": bar[2],
"Close": bar[3]
}
if len(divdata) == 0:
pass
else:
events = []
for x in divdata:
delta = (datetime.strptime(x["Date"], "%Y-%m-%d") -
datetime.strptime(d, "%Y-%m-%d"))
if delta.days == 0:
events.append(x)
for e in events:
if (self.incl_dividends is True and
e["Type"].find("Div") > -1):
val = e["Desc"].replace(e["Type"], "")
bardict.update({"isDividend": val})
elif (self.incl_splits is True and
e["Type"].find("Split") > -1):
val = e["Desc"].replace(e["Type"], "")
bardict.update({"isSplit": val})
else:
pass
if self.incl_vol is True:
bardict.update({"Volume": int(volumes[p] * 1000000)})
else:
pass

barss.append(bardict)
return barss

    def read(self):
        if isinstance(self.symbols, str):
            symbols = [self.symbols]
        elif hasattr(self.symbols, "__iter__"):
            symbols = self.symbols
        else:
            raise TypeError(
                "symbols must be iterable or string and not type %s" %
                type(self.symbols))
        df = self._dl_mult_symbols(symbols=symbols)
        if len(df.Close.keys()) == 0:
            raise IndexError("None of the provided symbols were valid")
        return df
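
For direct use of the class, the keyword options read in __init__ above can be passed explicitly (a minimal sketch; the ticker and settings are illustrative):

# Sketch: driving MorningstarDailyReader directly with its keyword options.
from pandas_datareader.mstar.daily import MorningstarDailyReader

rdr = MorningstarDailyReader(start="2017-01-02", end="2017-12-29",
                             symbols="NFLX",        # string or iterable of tickers
                             interval="d",          # 'd', 'w'/'wk' or 'm'/'mo'
                             incl_volume=True,      # adds a Volume column
                             incl_dividends=True,   # adds isDividend where applicable
                             incl_splits=False,
                             currency="usd")
df = rdr.read()
print(df.columns.tolist())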