Initial commit
This commit is contained in:
115
stockdash_main.py
Normal file
115
stockdash_main.py
Normal file
@@ -0,0 +1,115 @@
|
||||
import os
|
||||
import pandas as pd
|
||||
import json
|
||||
from datetime import datetime
|
||||
import time
|
||||
|
||||
import stockdash_render
|
||||
import yfinance as yf
|
||||
|
||||
# --- API configuration -------------------------------------------------------
# SECURITY NOTE(review): the API key was hard-coded in the source.  It is now
# read from the FMP_API_KEY environment variable; the original literal is kept
# as the default so existing setups keep working, but it should be rotated.
API_KEY = os.environ.get('FMP_API_KEY', '44ced5e44c50543745b1d89fce8cd93a')
api_key = "?apikey=" + API_KEY
api_kpi_url = "https://financialmodelingprep.com/api/v3/key-metrics/"
api_batch_stock_price_url = "https://financialmodelingprep.com/api/v3/quote/"

# --- Local CSV cache layout --------------------------------------------------
data_dir = 'data'
file_symbol = os.path.join(data_dir, 'symbols.json')  # symbol universe (JSON list of records)
file_kpi = os.path.join(data_dir, 'comp_kpi.csv')     # per-company KPI snapshot
file_div = os.path.join(data_dir, 'comp_div.csv')     # dividend history
file_rec = os.path.join(data_dir, 'comp_rec.csv')     # analyst recommendations
file_his = os.path.join(data_dir, 'comp_his.csv')     # 5y daily price history

# Symbols to skip entirely when loading the symbol universe.
sym_exclude = []
# KPI columns forwarded to the dashboard renderer.
used_columns = ['symbol', 'shortName', 'sector', 'industry', 'country', 'marketCap', 'enterpriseValue', 'dividendRate',
                'trailingPE', 'forwardPE', 'enterpriseToEbitda', 'shortRatio']
||||
def load_symbols(path=None, exclude=None):
    """Return the ticker symbols listed on 'Nasdaq Global Select'.

    Parameters
    ----------
    path : str, optional
        JSON file containing a list of symbol records (dicts); defaults to
        the module-level ``file_symbol``.
    exclude : iterable of str, optional
        Symbols to skip; defaults to the module-level ``sym_exclude``.

    Returns
    -------
    list of str
        Symbols whose record has ``exchange == 'Nasdaq Global Select'`` and
        is not excluded, in file order.
    """
    if path is None:
        path = file_symbol
    # set() makes the exclusion test O(1) even for long exclusion lists.
    excluded = set(sym_exclude if exclude is None else exclude)
    with open(path) as json_file:
        data = json.load(json_file)
    # .get() covers records that lack an 'exchange' key entirely.
    return [sym['symbol'] for sym in data
            if sym.get('exchange') == 'Nasdaq Global Select'
            and sym['symbol'] not in excluded]
|
||||
|
||||
|
||||
def get_data(symbols):
    """Fetch KPI, dividend, recommendation and 5y price data for *symbols*.

    If all four cache CSVs exist they are loaded as-is; otherwise the data is
    queried through yfinance, post-processed and written to the cache.

    Parameters
    ----------
    symbols : list of str
        Ticker symbols to query.

    Returns
    -------
    (kpi_data, div_data, rec_data, his_data) : tuple of pandas.DataFrame
    """
    tickers = yf.Tickers(' '.join(symbols))

    if os.path.exists(file_kpi) and os.path.exists(file_div) and os.path.exists(file_rec) and os.path.exists(file_his):
        print("Found cached files. Loading cache...")
        kpi_data = pd.read_csv(file_kpi)
        print("%s KPIs loaded..." % len(kpi_data))
        div_data = pd.read_csv(file_div)
        print("%s Dividends loaded..." % len(div_data))
        rec_data = pd.read_csv(file_rec)
        print("%s Recommendations loaded..." % len(rec_data))
        his_data = pd.read_csv(file_his)
        print("%s Price History loaded..." % len(his_data))
    else:
        # Collect per-ticker frames in lists and concatenate once at the end:
        # DataFrame.append() was removed in pandas 2.0 and was O(n^2) anyway.
        kpi_rows, div_frames, rec_frames, his_frames = [], [], [], []

        # yfinance exposes Tickers.tickers as a dict (symbol -> Ticker) in
        # recent versions and as a list in older ones -- support both.
        ticker_objs = tickers.tickers
        if isinstance(ticker_objs, dict):
            ticker_objs = list(ticker_objs.values())

        for i, ticker in enumerate(ticker_objs, start=1):
            # Capture a label up front so the error path can name the symbol
            # even when ticker.info itself is what failed.
            label = getattr(ticker, 'ticker', '?')
            try:
                info = ticker.info
                print("%s/%s Querying data for ticker %s" % (i, len(symbols), info['symbol']))
                kpi_rows.append(info)

                div = ticker.dividends.to_frame().reset_index()
                div.insert(0, 'Symbol', info['symbol'])
                div_frames.append(div)

                rec = ticker.recommendations.reset_index()
                rec.insert(0, 'Symbol', info['symbol'])
                rec_frames.append(rec)

                his = ticker.history(period='5y').reset_index()
                his.insert(0, 'Symbol', info['symbol'])
                his_frames.append(his)
            except Exception:
                # BUG FIX: the original message had an unused %s placeholder
                # (and typos); now it actually reports which symbol failed.
                print("Error occurred when querying %s - skipping this entry" % label)
                continue

        kpi_data = pd.DataFrame(kpi_rows)
        div_data = pd.concat(div_frames, ignore_index=True) if div_frames else pd.DataFrame()
        rec_data = pd.concat(rec_frames, ignore_index=True) if rec_frames else pd.DataFrame()
        his_data = pd.concat(his_frames, ignore_index=True) if his_frames else pd.DataFrame()

        # Put the symbol column first for readability of the cached CSV.
        kpi_data = kpi_data[['symbol'] + [col for col in kpi_data.columns if col != 'symbol']]

        rec_data['Date'] = pd.to_datetime(rec_data['Date'])

        # BUG FIX: the moving averages must be computed per symbol.  A plain
        # rolling() over the concatenated frame bleeds the closing prices of
        # one company into the moving average of the next one.
        grouped_close = his_data.groupby('Symbol')['Close']
        his_data['priceMA50'] = grouped_close.transform(lambda s: s.rolling(window=50).mean())
        his_data['priceMA200'] = grouped_close.transform(lambda s: s.rolling(window=200).mean())
        his_data['diffMA50_200'] = his_data['priceMA50'] - his_data['priceMA200']

        kpi_data.to_csv(file_kpi)
        div_data.to_csv(file_div)
        rec_data.to_csv(file_rec)
        his_data.to_csv(file_his)

    return kpi_data, div_data, rec_data, his_data
|
||||
|
||||
|
||||
if __name__ == '__main__':
    start_time = time.time()
    print("----- Starting STOCKDASH @ %s -----" % datetime.fromtimestamp(start_time))
    used_symbols = load_symbols()

    print("%s symbols loaded from file" % len(used_symbols))
    # Only the first 100 symbols are fetched to keep runtime bounded.
    kpi_data, div_data, rec_data, his_data = get_data(used_symbols[:100])

    # Modify Recommendation Data: one-hot encode the 'To Grade' column, then
    # aggregate yearly per-symbol counts of positive/neutral/negative grades.
    rec_data_mod = pd.concat([rec_data, pd.get_dummies(rec_data['To Grade'], prefix='grade')], axis=1)
    rec_data_mod.drop(['To Grade', 'From Grade', 'Action'], axis=1, inplace=True)
    # Cached CSVs store dates as strings, so re-parse defensively.
    rec_data_mod['Date'] = pd.to_datetime(rec_data_mod['Date'])
    df2 = rec_data_mod.groupby([pd.Grouper(key='Date', freq='Y'), pd.Grouper('Symbol')]).agg(['sum'])

    # ROBUSTNESS FIX: only sum dummy columns that actually occur in the data.
    # Previously, a grade value absent from the fetched batch (e.g. no
    # 'Strong Buy' at all) made df2['grade_Strong Buy'] raise a KeyError.
    def _grade_total(frame, grades):
        # Sums the existing 'grade_<name>' columns; 0 when none are present.
        cols = ['grade_' + g for g in grades if 'grade_' + g in frame.columns]
        return sum((frame[c] for c in cols), 0)

    df2['Positive'] = _grade_total(df2, ['Buy', 'Outperform', 'Market Outperform',
                                         'Overweight', 'Positive', 'Strong Buy'])
    df2['Neutral'] = _grade_total(df2, ['Equal-Weight', 'Hold', 'Neutral'])
    df2['Negative'] = _grade_total(df2, ['Market Underperform', 'Reduce', 'Sell',
                                         'Underweight'])

    columns = ['Positive', 'Neutral', 'Negative']
    rec_data_mod = df2[columns]

    print("Data loaded after %ss" % (time.time()-start_time))
    stockdash_render.load_dash(kpi_data[used_columns], rec_data_mod, div_data, his_data)
|
||||
Reference in New Issue
Block a user