main.py
import api.crawl.yahoo as crawler
import api.transport.csv2db as csv2db
import api.genf as genf
import os
from gevent import monkey
import gevent
import shutil
import time
from datetime import date


# Convert a calendar date to a unix timestamp (seconds since the epoch).
def get_unix_time(yyyy, mm, dd):
    d = date(yyyy, mm, dd)
    unixtime = time.mktime(d.timetuple())
    return unixtime
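

# A minimal sketch, assuming the usual Yahoo Finance historical-data CSV endpoint, of how
# unix timestamps like the one above are typically plugged into a download URL. The helper
# name example_yahoo_csv_url is hypothetical; the project's CrawlYahooTkr.download_csv may
# build its request differently (newer Yahoo endpoints also require a cookie/crumb).
def example_yahoo_csv_url(tkr, start_unix, end_unix):
    # interval=1d -> one row per trading day; events=history -> OHLC, Adj Close, Volume
    return ('https://query1.finance.yahoo.com/v7/finance/download/{0}'
            '?period1={1}&period2={2}&interval=1d&events=history'
            .format(tkr, int(start_unix), int(end_unix)))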
# https://finance.yahoo.com/quote/IBM/
# Download historical-price CSVs. With no argument, every ticker listed in
# data/input/tkrlist_small.txt is fetched concurrently via gevent greenlets;
# with a ticker symbol, only that one is fetched.
def crawl(tkr=''):
    yahoo = crawler.CrawlYahooTkr()
    if tkr == '':
        rootdir = os.path.dirname(os.path.abspath(__file__))
        fname = os.path.join(rootdir, 'data', 'input', 'tkrlist_small.txt')
        content = []
        with open(fname) as f:
            content = [x.strip() for x in f.readlines()]
        # yahoo.download_csv('^GSPC')
        # yahoo.download_csv('AMZN')
        monkey.patch_all()  # patch blocking I/O so the greenlets below run cooperatively
        threads = [gevent.spawn(yahoo.download_csv, t) for t in content]
        gevent.joinall(threads)
    else:
        yahoo.download_csv(tkr)
# Bulk-load the downloaded CSV files under tkrhdir into the local PostgreSQL database.
def csv_to_db(tkrhdir):
    csv2db.copy2db(tkrhdir, 'postgres://tkrapi:tkrapi@127.0.0.1/tkrapi')
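

# Minimal sketch, under the assumption that copy2db walks a directory of per-ticker CSVs
# and bulk-loads each one with PostgreSQL's COPY; the real api.transport.csv2db code is not
# shown here, and the function and table names below are hypothetical.
def example_copy_one_csv(csv_path, table_name,
                         db_url='postgres://tkrapi:tkrapi@127.0.0.1/tkrapi'):
    import psycopg2
    conn = psycopg2.connect(db_url)
    with conn, conn.cursor() as cur, open(csv_path) as f:
        # CSV HEADER skips the Date,Open,High,Low,Close,Adj Close,Volume header row
        cur.copy_expert('COPY {0} FROM STDIN WITH CSV HEADER'.format(table_name), f)
    conn.close()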
# crawl()
# crawl('BAC')
# crawl('DIA')

# csv to database
# move the CSV files collected above into the PostgreSQL database
csv_to_db('D:\\dev\\workspace\\stock-analysis\\data\\tkrcsv')
# generate features
genf.genf('postgres://tkrapi:tkrapi@127.0.0.1/tkrapi')
# learn, predict
import api.kerastkr as kerastkr
# out_df = kerastkr.learn_predict_kerasnn('^GSPC', 6, '2017-08')
# print(out_df)
# out_df = kerastkr.load_predict_keraslinear()
# print(out_df)
'bye'