Commit: Adding API key parameters
saeedamen committed 6 Aug 2019 · 1 parent 83005fc · commit 30d4e7c
Showing 3 changed files with 86 additions and 38 deletions.
README.md (2 additions, 0 deletions)
@@ -84,6 +84,8 @@ In findatapy/examples you will find several demos

# Coding log

+* 06 Aug 2019
+    * Adding parameters to MarketDataRequest for user specified API keys (Quandl, FRED & Alphavantage)
* 23 Jul 2019
    * Changed some rolling calculations in Calculation class to work with newer pandas
* 12 Jul 2019
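As a quick illustration of the new parameters, a request with a user-specified Quandl key might look like the sketch below (an editorial example, not code from this commit; the ticker and key values are placeholders):

```python
from findatapy.market import Market, MarketDataGenerator, MarketDataRequest

market = Market(market_data_generator=MarketDataGenerator())

# pass your own vendor key on the request itself, instead of relying on
# the value stored in DataConstants
md_request = MarketDataRequest(start_date='year', category='fx',
                               data_source='quandl', tickers=['EURUSD'],
                               quandl_api_key='YOUR_QUANDL_KEY')

df = market.fetch_market(md_request)
```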
findatapy/market/datavendorweb.py (4 additions, 4 deletions)
@@ -117,7 +117,7 @@ def download_daily(self, market_data_request):

while(trials < 5):
try:
-                data_frame = Quandl.get(market_data_request.tickers, authtoken=DataConstants().quandl_api_key, trim_start=market_data_request.start_date,
+                data_frame = Quandl.get(market_data_request.tickers, authtoken=market_data_request.quandl_api_key, trim_start=market_data_request.start_date,
trim_end=market_data_request.finish_date)

break
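The surrounding loop retries flaky vendor calls up to five times; the only change in this hunk is that the auth token now comes off the request. A minimal sketch of that retry pattern (an assumed standalone helper, not findatapy code):

```python
def download_with_retries(fetch, max_trials=5):
    """Call fetch() until it succeeds or max_trials is exhausted."""
    last_error = None

    for _ in range(max_trials):
        try:
            return fetch()
        except Exception as e:  # vendor/network hiccups: try again
            last_error = e

    raise RuntimeError('download failed after %d trials' % max_trials) from last_error
```

Here `fetch` would wrap the `Quandl.get` call above, with `authtoken` taken from `market_data_request.quandl_api_key`.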
@@ -204,7 +204,7 @@ def download_daily(self, market_data_request):
for i in range(0, len(market_data_request.tickers)):
while (trials < 5):
try:
-                    fred = Fred(api_key=DataConstants().fred_api_key)
+                    fred = Fred(api_key=market_data_request.fred_api_key)

# acceptable fields: close, actual-release, release-date-time-full
if 'close' in market_data_request.fields and 'release-date-time-full' in market_data_request.fields:
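The FRED client is likewise built per request now. A minimal sketch of the underlying fredapi usage (the key value and series name are illustrative):

```python
from fredapi import Fred

API_KEY = 'YOUR_FRED_KEY'  # in findatapy this now comes from market_data_request.fred_api_key

fred = Fred(api_key=API_KEY)

# get_series returns a pandas Series indexed by observation date
sp500 = fred.get_series('SP500')
```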
@@ -1240,7 +1240,7 @@ def download_tick(self, market_data_request):
time_list = self.hour_range(market_data_request.start_date, market_data_request.finish_date)

do_retrieve_df = True # convert inside loop?
-        multi_threaded = False  # multithreading (can sometimes get errors but it's fine when retried)
+        multi_threaded = True  # multithreading (can sometimes get errors but it's fine when retried)

if multi_threaded:
# use threading (not process interface)
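The tick downloader splits the date range into hourly chunks and can fetch them in parallel; this commit switches `multi_threaded` back on. A rough sketch of that fan-out, where `fetch_hour` stands in for the per-chunk download (which the caller retries on error):

```python
from concurrent.futures import ThreadPoolExecutor

def fetch_all_hours(time_list, fetch_hour, max_workers=4):
    # threads rather than processes: the work is network-bound,
    # so the GIL is not the bottleneck here
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        return list(pool.map(fetch_hour, time_list))
```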
@@ -2219,7 +2219,7 @@ def load_ticker(self, market_data_request):
def download(self, market_data_request):
trials = 0

-        ts = TimeSeries(key=DataConstants().alpha_vantage_api_key, output_format='pandas', indexing_type='date')
+        ts = TimeSeries(key=market_data_request.alpha_vantage_api_key, output_format='pandas', indexing_type='date')

data_frame = None

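For reference, a minimal sketch of the alpha_vantage client constructed above, now keyed per request (key and symbol are placeholders):

```python
from alpha_vantage.timeseries import TimeSeries

API_KEY = 'YOUR_ALPHA_VANTAGE_KEY'  # now taken from market_data_request.alpha_vantage_api_key

ts = TimeSeries(key=API_KEY, output_format='pandas', indexing_type='date')

# get_daily returns a (DataFrame, metadata) pair
data_frame, meta = ts.get_daily(symbol='MSFT', outputsize='full')
```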
findatapy/market/marketdatarequest.py (80 additions, 34 deletions)
@@ -12,13 +12,17 @@
# See the License for the specific language governing permissions and limitations under the License.
#

+from findatapy.util.dataconstants import DataConstants
from findatapy.util.loggermanager import LoggerManager

from datetime import timedelta
import datetime

import copy

+data_constants = DataConstants()


class MarketDataRequest(object):
"""Provides parameters for requesting market data.
Expand Down Expand Up @@ -56,22 +60,27 @@ def generate_key(self):
"""
from findatapy.market.ioengine import SpeedCache

-        if self.freq == 'daily': ticker = None
-        else: ticker = self.tickers[0]
+        if self.freq == 'daily':
+            ticker = None
+        else:
+            ticker = self.tickers[0]

self.__category_key = self.create_category_key(self, ticker=ticker)

-        return SpeedCache().generate_key(self, ['logger', '_MarketDataRequest__abstract_curve', '_MarketDataRequest__cache_algo',
+        return SpeedCache().generate_key(self, ['logger', '_MarketDataRequest__abstract_curve',
+                                                '_MarketDataRequest__cache_algo',
'_MarketDataRequest__overrides'])

-    def __init__(self, data_source = None,
-                 start_date ='year', finish_date = datetime.datetime.utcnow(),
-                 tickers = None, category = None, freq_mult = 1, freq = "daily",
-                 gran_freq = None, cut = "NYC",
-                 fields = ['close'], cache_algo = "internet_load_return",
-                 vendor_tickers = None, vendor_fields = None,
-                 environment = "backtest", trade_side = 'trade', expiry_date = None, resample = None, resample_how = 'last',
-                 md_request = None, abstract_curve = None, overrides = {}
+    def __init__(self, data_source=None,
+                 start_date='year', finish_date=datetime.datetime.utcnow(),
+                 tickers=None, category=None, freq_mult=1, freq="daily",
+                 gran_freq=None, cut="NYC",
+                 fields=['close'], cache_algo="internet_load_return",
+                 vendor_tickers=None, vendor_fields=None,
+                 environment="backtest", trade_side='trade', expiry_date=None, resample=None, resample_how='last',
+                 md_request=None, abstract_curve=None, quandl_api_key=data_constants.quandl_api_key,
+                 fred_api_key=data_constants.fred_api_key, alpha_vantage_api_key=data_constants.alpha_vantage_api_key,
+                 overrides={}
):

# can deep copy MarketDataRequest (use a lock, so can be used with threading when downloading time series)
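The mangled attribute names passed to `SpeedCache().generate_key` above are a consequence of Python's name mangling: double-underscore attributes are stored under class-prefixed keys, so they have to be excluded under those names. A small demonstration:

```python
class Demo(object):
    def __init__(self):
        self.__cache_algo = 'internet_load_return'  # stored as _Demo__cache_algo

print(vars(Demo()))  # {'_Demo__cache_algo': 'internet_load_return'}
```

Excluding the logger, the abstract curve, the caching flag and the overrides means two requests for the same data hash to the same cache key.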
@@ -96,20 +105,25 @@ def __init__(self, data_source = None,

self.category = copy.deepcopy(md_request.category) # special predefined categories

-            self.cut = copy.deepcopy(md_request.cut) # closing time of the data (eg. NYC, LDN, TOK etc)
-            self.fields = copy.deepcopy(md_request.fields) # fields, eg. close, high, low, open
-            self.cache_algo = copy.deepcopy(md_request.cache_algo) # internet_load_return (cache_algo_return is for future use)
+            self.cut = copy.deepcopy(md_request.cut)  # closing time of the data (eg. NYC, LDN, TOK etc)
+            self.fields = copy.deepcopy(md_request.fields)  # fields, eg. close, high, low, open
+            self.cache_algo = copy.deepcopy(
+                md_request.cache_algo)  # internet_load_return (cache_algo_return is for future use)
self.vendor_tickers = copy.deepcopy(md_request.vendor_tickers) # define vendor tickers
-            self.vendor_fields = copy.deepcopy(md_request.vendor_fields) # define vendor fields
-            self.environment = copy.deepcopy(md_request.environment) # backtest environment only supported at present
+            self.vendor_fields = copy.deepcopy(md_request.vendor_fields)  # define vendor fields
+            self.environment = copy.deepcopy(
+                md_request.environment)  # backtest environment only supported at present
self.trade_side = copy.deepcopy(md_request.trade_side)
self.expiry_date = copy.deepcopy(md_request.expiry_date)
self.resample = copy.deepcopy(md_request.resample)
self.resample_how = copy.deepcopy(md_request.resample_how)
self.abstract_curve = copy.deepcopy(md_request.abstract_curve)
+            self.quandl_api_key = copy.deepcopy(md_request.quandl_api_key)
+            self.fred_api_key = copy.deepcopy(md_request.fred_api_key)
+            self.alpha_vantage_api_key = copy.deepcopy(md_request.alpha_vantage_api_key)
self.overrides = copy.deepcopy(md_request.overrides)

-            self.tickers = copy.deepcopy(md_request.tickers) # need this after category in case have wildcard
+            self.tickers = copy.deepcopy(md_request.tickers)  # need this after category in case have wildcard
else:
self.freq_mult = freq_mult

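The copy-construction branch above deep-copies every field onto the new object, now including the three API keys, so cloned requests keep their credentials. A short sketch (the key value is a placeholder):

```python
from findatapy.market import MarketDataRequest

base = MarketDataRequest(data_source='quandl', tickers=['EURUSD'],
                         quandl_api_key='YOUR_QUANDL_KEY')

clone = MarketDataRequest(md_request=base)  # copy-construct
assert clone.quandl_api_key == base.quandl_api_key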
@@ -122,20 +136,24 @@
self.data_source = data_source
self.start_date = start_date
self.finish_date = finish_date
-            self.category = category # special predefined categories
-
-            self.cut = cut # closing time of the data (eg. NYC, LDN, TOK etc)
-            self.fields = fields # fields, eg. close, high, low, open
-            self.cache_algo = cache_algo # internet_load_return (cache_algo_return is for future use)
-            self.vendor_tickers = vendor_tickers # define vendor tickers
-            self.vendor_fields = vendor_fields # define vendor fields
-            self.environment = environment # backtest environment only supported at present
+            self.category = category  # special predefined categories
+
+            self.cut = cut  # closing time of the data (eg. NYC, LDN, TOK etc)
+            self.fields = fields  # fields, eg. close, high, low, open
+            self.cache_algo = cache_algo  # internet_load_return (cache_algo_return is for future use)
+            self.vendor_tickers = vendor_tickers  # define vendor tickers
+            self.vendor_fields = vendor_fields  # define vendor fields
+            self.environment = environment  # backtest environment only supported at present
self.trade_side = trade_side
self.expiry_date = expiry_date
self.resample = resample
self.resample_how = resample_how
self.abstract_curve = abstract_curve

+            self.quandl_api_key = quandl_api_key
+            self.fred_api_key = fred_api_key
+            self.alpha_vantage_api_key = alpha_vantage_api_key
+
self.overrides = overrides

self.tickers = tickers
@@ -184,7 +202,8 @@ def data_source(self, data_source):

if not data_source in valid_data_source:
                LoggerManager().getLogger(__name__).warning(data_source + " is not a defined data source.")
-        except: pass
+        except:
+            pass

self.__data_source = data_source

@@ -218,7 +237,7 @@ def tickers(self, tickers):
if tick[-1] == "*" and tick[0] != "*":
start = "^"

-                tick = start + "(" + tick.replace('*','') + ")"
+                tick = start + "(" + tick.replace('*', '') + ")"

if config is None:
from findatapy.util import ConfigManager
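For context, the wildcard handling above turns a ticker pattern into a regular expression: a trailing `*` anchors the match at the start of the ticker name, while a leading `*` leaves it floating. Roughly:

```python
import re

tick = 'EUR*'
start = '^' if tick[-1] == '*' and tick[0] != '*' else ''
pattern = start + '(' + tick.replace('*', '') + ')'

print(pattern)                            # ^(EUR)
print(bool(re.match(pattern, 'EURUSD')))  # True
```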
@@ -287,7 +306,8 @@ def freq(self):
def freq(self, freq):
freq = freq.lower()

-        valid_freq = ['tick', 'second', 'minute', 'intraday', 'hourly', 'daily', 'weekly', 'monthly', 'quarterly', 'annually']
+        valid_freq = ['tick', 'second', 'minute', 'intraday', 'hourly', 'daily', 'weekly', 'monthly', 'quarterly',
+                      'annually']

if not freq in valid_freq:
LoggerManager().getLogger(__name__).warning(freq + " is not a defined frequency")
@@ -303,7 +323,8 @@ def gran_freq(self, gran_freq):
try:
gran_freq = gran_freq.lower()

-            valid_gran_freq = ['tick', 'second', 'minute', 'hourly', 'pseudodaily', 'daily', 'weekly', 'monthly', 'quarterly', 'annually']
+            valid_gran_freq = ['tick', 'second', 'minute', 'hourly', 'pseudodaily', 'daily', 'weekly', 'monthly',
+                               'quarterly', 'annually']

if not gran_freq in valid_gran_freq:
                LoggerManager().getLogger(__name__).warning(gran_freq + " is not a defined frequency")
@@ -314,7 +335,8 @@ def gran_freq(self, gran_freq):
self.__freq = 'tick'
else:
self.__freq = 'daily'
-        except: pass
+        except:
+            pass

self.__gran_freq = gran_freq

@@ -417,6 +439,7 @@ def date_parser(self, date):
date1 = pandas.Timestamp(date)

return date1
+
@property
def cache_algo(self):
return self.__cache_algo
@@ -427,7 +450,6 @@ def cache_algo(self, cache_algo):

valid_cache_algo = ['internet_load', 'internet_load_return', 'cache_algo', 'cache_algo_return']

-
if not cache_algo in valid_cache_algo:
LoggerManager().getLogger(__name__).warning(cache_algo + " is not a defined caching scheme")

@@ -441,7 +463,7 @@ def environment(self):
def environment(self, environment):
environment = environment.lower()

-        valid_environment= ['prod', 'backtest']
+        valid_environment = ['prod', 'backtest']

if not environment in valid_environment:
LoggerManager().getLogger(__name__).warning(environment + " is not a defined environment.")
@@ -462,7 +484,7 @@ def trade_side(self, trade_side):
LoggerManager().getLogger(__name__).warning(trade_side + " is not a defined trade side.")

self.__trade_side = trade_side

@property
def expiry_date(self):
return self.__expiry_date
@@ -484,6 +506,30 @@ def abstract_curve(self, abstract_curve):

self.__abstract_curve = abstract_curve

+    @property
+    def quandl_api_key(self):
+        return self.__quandl_api_key
+
+    @quandl_api_key.setter
+    def quandl_api_key(self, quandl_api_key):
+        self.__quandl_api_key = quandl_api_key
+
+    @property
+    def fred_api_key(self):
+        return self.__fred_api_key
+
+    @fred_api_key.setter
+    def fred_api_key(self, fred_api_key):
+        self.__fred_api_key = fred_api_key
+
+    @property
+    def alpha_vantage_api_key(self):
+        return self.__alpha_vantage_api_key
+
+    @alpha_vantage_api_key.setter
+    def alpha_vantage_api_key(self, alpha_vantage_api_key):
+        self.__alpha_vantage_api_key = alpha_vantage_api_key
+
@property
def overrides(self):
return self.__overrides
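Because the keys are exposed as plain properties, they can also be swapped after construction, for instance when cycling through several vendor accounts (values below are placeholders):

```python
from findatapy.market import MarketDataRequest

md_request = MarketDataRequest(data_source='fred', tickers=['US CPI'])
md_request.fred_api_key = 'MY_FRED_KEY'  # overrides the DataConstants default for this request
```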
@@ -513,4 +559,4 @@ def _flatten_list(self, list_of_lists):
# Otherwise call this function recursively
else:
result.extend(self._flatten_list(i))
-        return result
\ No newline at end of file
+        return result
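`_flatten_list` recursively flattens arbitrarily nested ticker lists. A standalone sketch of its behaviour:

```python
def flatten_list(list_of_lists):
    """Standalone version of the helper above."""
    result = []

    for i in list_of_lists:
        if isinstance(i, (list, tuple)):
            result.extend(flatten_list(i))  # recurse into nested lists
        else:
            result.append(i)

    return result

print(flatten_list([['EURUSD', ['GBPUSD']], 'USDJPY']))  # ['EURUSD', 'GBPUSD', 'USDJPY']
```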
