Implemented Loader; Implemented Render
assets/base.css (2717 lines changed)
File diff suppressed because one or more lines are too long
assets/dash-logo.png (new binary file, 1.4 KiB)
Binary file not shown.
requirements.txt (new file, 29 lines)
@@ -0,0 +1,29 @@
Brotli==1.0.9
certifi==2020.12.5
chardet==3.0.4
click==7.1.2
dash==1.18.1
dash-core-components==1.14.1
dash-html-components==1.1.1
dash-renderer==1.8.3
dash-table==4.11.1
Flask==1.1.2
Flask-Compress==1.8.0
future==0.18.2
idna==2.10
itsdangerous==1.1.0
Jinja2==2.11.2
lxml==4.6.2
MarkupSafe==1.1.1
multitasking==0.0.9
numpy==1.19.3
pandas==1.1.5
plotly==4.14.1
python-dateutil==2.8.1
pytz==2020.4
requests==2.25.0
retrying==1.3.3
six==1.15.0
urllib3==1.26.2
Werkzeug==1.0.1
yfinance==0.1.55
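The pins above are the Dash 1.x / Plotly 4.x generation of the stack. One gap worth noting: stockdash_loader.py below also imports sqlalchemy and connects through the mysql+pymysql driver, and neither package is pinned here, so they are assumed to be installed separately. A minimal sketch (not part of the commit) for checking that everything the loader and the app import actually resolves in the current environment:

# Sketch only: report the installed version of each package the project imports.
# sqlalchemy and pymysql are assumptions; the loader uses them but they are not pinned above.
import importlib

for name in ("dash", "dash_table", "dash_core_components", "plotly", "pandas",
             "numpy", "yfinance", "sqlalchemy", "pymysql"):
    try:
        module = importlib.import_module(name)
        print(name, getattr(module, "__version__", "unknown"))
    except ImportError as exc:
        print(name, "missing:", exc)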
stockdash_loader.py (new file, 247 lines)
@@ -0,0 +1,247 @@
import os, sys
import pandas as pd
import numpy as np
import sqlalchemy
import logging
import traceback
from timeit import default_timer as timer
from datetime import datetime, timedelta

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy.orm import sessionmaker
from sqlalchemy.types import VARCHAR

import yfinance as yf

API_KEY = '44ced5e44c50543745b1d89fce8cd93a'
api_key = "?apikey=" + API_KEY
api_kpi_url = "https://financialmodelingprep.com/api/v3/key-metrics/"
api_batch_stock_price_url = "https://financialmodelingprep.com/api/v3/quote/"

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(message)s",
    handlers=[
        logging.FileHandler("debug.log"),
        logging.StreamHandler()
    ]
)

Base = declarative_base()

data_dir = 'data'
file_symbol = os.path.join(data_dir, 'symbols.json')
db_symbol = "base_symbol"
db_kpi = "base_kpi"
db_div = "base_dividend"
db_rec = "base_recommendation"
db_his = "base_price_history"

sync_freq_kpi = 14
sync_freq_rec = 7
sync_freq_div = 21
sync_freq_his = 1

sym_exclude = []
engine = sqlalchemy.create_engine("mysql+pymysql://spcial:GOi1gA01@localhost:3306/app_stockdash?charset=utf8mb4")


def update_timestamp(symbol, schema):
    Session = sessionmaker(bind=engine)
    session = Session()
    curr_symbol = session.query(Symbols).filter(Symbols.symbol == symbol)
    if schema == db_kpi:
        curr_symbol.update({Symbols.last_updated_kpi: datetime.now()}, synchronize_session=False)
    elif schema == db_rec:
        curr_symbol.update({Symbols.last_updated_rec: datetime.now()}, synchronize_session=False)
    elif schema == db_div:
        curr_symbol.update({Symbols.last_updated_div: datetime.now()}, synchronize_session=False)
    elif schema == db_his:
        curr_symbol.update({Symbols.last_updated_his: datetime.now()}, synchronize_session=False)
    session.commit()


def update_loadable(symbol, loadable):
    Session = sessionmaker(bind=engine)
    session = Session()
    curr_symbol = session.query(Symbols).filter(Symbols.symbol == symbol)
    curr_symbol.update({Symbols.loadable: loadable}, synchronize_session=False)
    session.commit()


def load_to_db(df, table_name):
    start = timer()

    try:
        df = df.replace([np.inf, -np.inf], np.nan)
        df.to_sql(table_name, schema='app_stockdash', con=engine, if_exists='append', dtype={'symbol': VARCHAR(10)})
    except Exception as err:
        logging.warning(" <%s> Error occurred when loading data to DB. Error: \n%s" % (table_name, err))
        raise

    logging.info(" <load_to_db> completed in %f sec! " % (timer() - start))


def load_from_db(table_name, where=None, limit=None, orderby=None):
    start = timer()

    try:
        sql = "SELECT * FROM %s " % table_name
        if where is not None:
            sql = sql + "WHERE %s " % where
        if orderby is not None:
            sql = sql + "ORDER BY %s " % orderby
        if limit is not None:
            sql = sql + "LIMIT %i " % limit

        df = pd.read_sql_query(sql, engine)
    except sqlalchemy.exc.ProgrammingError as er:
        logging.warning(" <%s> Error occurred when querying data. Returning None. Error: \n%s" % (table_name, er))
        return None

    logging.info(" <load_from_db> completed in %f sec! " % (timer() - start))
    return df


def load_symbols():
    logging.info("Loading symbols based on file %s" % file_symbol)
    df_symbols = pd.read_json(file_symbol).drop(columns=['price'])
    logging.info("Retrieved %i symbols from file" % len(df_symbols))

    df_symbols = df_symbols[df_symbols['exchange'].isin(['Nasdaq Global Select', 'NASDAQ Global Market', 'NASDAQ Capital Market'])]
    logging.info("Using %i symbols after filtering" % len(df_symbols))

    df_symbols["initialized"] = datetime.now()
    df_symbols["last_updated_kpi"] = pd.Timestamp.min
    df_symbols["last_updated_div"] = pd.Timestamp.min
    df_symbols["last_updated_rec"] = pd.Timestamp.min
    df_symbols["last_updated_his"] = pd.Timestamp.min
    df_symbols["loadable"] = True

    df_existing_symbols = load_from_db(db_symbol)

    if df_existing_symbols is not None:
        logging.info("Retrieved %i symbols from DB" % len(df_existing_symbols))
        df_diff = pd.concat([df_symbols, df_existing_symbols]).drop_duplicates(subset=['symbol'], keep=False).set_index('symbol')
        logging.info("Loading %i new symbols into DB..." % len(df_diff))
        if len(df_diff) > 0:
            load_to_db(df_diff, db_symbol)
    else:
        logging.info("Could not retrieve any symbols from DB. Assuming the table does not exist. Creating table...")
        load_to_db(df_symbols, db_symbol)

    return df_symbols


def synch_data():
    logging.info("Syncing data. Loading available symbols from DB...")
    df_symbols = load_from_db(db_symbol, where='loadable = 1')
    num_symbols = len(df_symbols['symbol'])
    logging.info("Loaded %i symbols from DB." % num_symbols)
    final_kpi_columns = load_from_db(db_kpi, limit=1).columns

    i = 1
    for index, row in df_symbols.iterrows():
        try:
            symbol = row['symbol']
            yticker = yf.Ticker(symbol)

            logging.info("%s/%s Querying data for ticker %s" % (i, num_symbols, symbol))

            # KPI
            if row['last_updated_kpi'] < datetime.today() - timedelta(days=sync_freq_kpi):
                logging.info(" <%s> Last Updated above Threshold. Loading new KPI data for symbol into DB %s" % (db_kpi, symbol))
                ticker_dict = yticker.info

                for idx, val in ticker_dict.items():
                    if type(val) != list:
                        ticker_dict[idx] = [val]
                    else:
                        ticker_dict[idx] = [",".join(val)]

                kpi = pd.DataFrame.from_dict(ticker_dict).set_index('symbol')
                kpi["date"] = datetime.now()

                kpi = kpi[kpi.columns.intersection(final_kpi_columns)]

                load_to_db(kpi, db_kpi)
                update_timestamp(symbol, db_kpi)
            else:
                logging.info(" <%s> Data is up-to-date. Nothing to do." % db_kpi)

            # DIVIDENDS
            if row['last_updated_div'] < datetime.today() - timedelta(days=sync_freq_div):
                logging.info(" <%s> Last Updated above Threshold. Loading new DIVIDENDS data for symbol into DB %s" % (db_div, symbol))
                div = yticker.dividends.to_frame().reset_index()
                div.insert(0, 'symbol', symbol)
                div = div.set_index('symbol')
                load_to_db(div, db_div)
                update_timestamp(symbol, db_div)
            else:
                logging.info(" <%s> Data is up-to-date. Nothing to do." % db_div)

            # RECOMMENDATIONS
            if row['last_updated_rec'] < datetime.today() - timedelta(days=sync_freq_rec):
                logging.info(" <%s> Last Updated above Threshold. Loading new RECOMMENDATIONS data for symbol into DB %s" % (db_rec, symbol))
                rec = yticker.recommendations
                if rec is not None:
                    rec = rec.reset_index()
                    rec.insert(0, 'symbol', symbol)
                    rec = rec.set_index('symbol').drop_duplicates(subset=['Date', 'Firm'])
                    load_to_db(rec, db_rec)
                else:
                    logging.info(" <%s> No recommendation data found for %s" % (db_rec, symbol))
                update_timestamp(symbol, db_rec)
            else:
                logging.info(" <%s> Data is up-to-date. Nothing to do." % db_rec)

            # PRICE HISTORY
            if row['last_updated_his'] < datetime.today() - timedelta(days=sync_freq_his):
                if row['last_updated_his'].date() != pd.Timestamp.min.to_pydatetime().date():
                    delta = (row['last_updated_his'] + timedelta(days=1)).strftime("%Y-%m-%d")
                    his = yticker.history(start=delta)
                    logging.info(" <%s> Last Updated above Threshold. Loading new PRICE data for symbol into DB %s since %s" % (db_his, symbol, delta))
                else:
                    his = yticker.history(period="max")
                    logging.info(" <%s> Never loaded price data. Loading all available price data for symbol into DB %s " % (db_his, symbol))

                if his is not None:
                    his = his.reset_index()
                    his.insert(0, 'symbol', symbol)
                    his = his.set_index('symbol')
                    load_to_db(his, db_his)
                else:
                    logging.info(" <%s> No price history data found for %s" % (db_his, symbol))
                update_timestamp(symbol, db_his)
            else:
                logging.info(" <%s> Data is up-to-date. Nothing to do." % db_his)

            i += 1
        except Exception:
            logging.warning("%s/%s Error occurred - skipping this entry. Errormsg: \n%s" % (
                i, num_symbols, traceback.format_exc()))
            update_loadable(symbol, 0)
            i += 1
            continue


class Symbols(Base):
    __tablename__ = db_symbol

    index = Column(Integer, primary_key=True)
    symbol = Column(String)
    name = Column(String)
    exchange = Column(String)
    initialized = Column(String)
    last_updated_kpi = Column(DateTime)
    last_updated_div = Column(DateTime)
    last_updated_rec = Column(DateTime)
    last_updated_his = Column(DateTime)
    loadable = Column(Integer)


if __name__ == '__main__':
    load_symbols()
    synch_data()
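A minimal usage sketch (not part of the commit), assuming the app_stockdash MySQL schema from the connection string above exists and that load_symbols() and synch_data() have already populated the tables. It shows how load_from_db is meant to be called from other modules, which is exactly how the render code further down uses it:

# Sketch only: pull the five most recent price rows for one symbol through the
# same helper the dashboard uses; table and column names follow the loader above.
import stockdash_loader as sdl

prices = sdl.load_from_db(sdl.db_his, where="symbol = 'AAPL'",
                          orderby="Date DESC", limit=5)
if prices is None:
    print("base_price_history is not there yet: run load_symbols() and synch_data() first")
else:
    print(prices[['Date', 'Close', 'Volume']])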
@@ -1,115 +0,0 @@
import os
import pandas as pd
import json
from datetime import datetime
import time

import stockdash_render
import yfinance as yf

API_KEY = '44ced5e44c50543745b1d89fce8cd93a'
api_key = "?apikey=" + API_KEY
api_kpi_url = "https://financialmodelingprep.com/api/v3/key-metrics/"
api_batch_stock_price_url = "https://financialmodelingprep.com/api/v3/quote/"

data_dir = 'data'
file_symbol = os.path.join(data_dir, 'symbols.json')
file_kpi = os.path.join(data_dir, 'comp_kpi.csv')
file_div = os.path.join(data_dir, 'comp_div.csv')
file_rec = os.path.join(data_dir, 'comp_rec.csv')
file_his = os.path.join(data_dir, 'comp_his.csv')

sym_exclude = []
used_columns = ['symbol', 'shortName', 'sector', 'industry', 'country', 'marketCap', 'enterpriseValue', 'dividendRate',
                'trailingPE', 'forwardPE', 'enterpriseToEbitda', 'shortRatio']


def load_symbols():
    symbols = []
    with open(file_symbol) as json_file:
        data = json.load(json_file)
        for sym in data:
            if 'exchange' in sym and sym['exchange'] == 'Nasdaq Global Select' and sym['symbol'] not in sym_exclude:
                symbols.append(sym['symbol'])
    return symbols


def get_data(symbols):
    tickers = yf.Tickers(' '.join(symbols))

    if os.path.exists(file_kpi) and os.path.exists(file_div) and os.path.exists(file_rec) and os.path.exists(file_his):
        print("Found cached files. Loading cache...")
        kpi_data = pd.read_csv(file_kpi)
        print("%s KPIs loaded..." % len(kpi_data))
        div_data = pd.read_csv(file_div)
        print("%s Dividends loaded..." % len(div_data))
        rec_data = pd.read_csv(file_rec)
        print("%s Recommendations loaded..." % len(rec_data))
        his_data = pd.read_csv(file_his)
        print("%s Price History loaded..." % len(his_data))
    else:
        kpi_data, div_data, rec_data, his_data = pd.DataFrame(), pd.DataFrame(), pd.DataFrame(), pd.DataFrame()
        i = 1
        for ticker in tickers.tickers:
            try:
                info = ticker.info
                print("%s/%s Querying data for ticker %s" % (i, len(symbols), info['symbol']))
                kpi_data = kpi_data.append(info, ignore_index=True)

                div = ticker.dividends.to_frame().reset_index()
                div.insert(0, 'Symbol', info['symbol'])
                div_data = div_data.append(div, ignore_index=True)

                rec = ticker.recommendations.reset_index()
                rec.insert(0, 'Symbol', info['symbol'])
                rec_data = rec_data.append(rec, ignore_index=True)

                his = ticker.history(period='5y').reset_index()
                his.insert(0, 'Symbol', info['symbol'])
                his_data = his_data.append(his, ignore_index=True)
                i += 1
            except Exception:
                print("Error occured when quering %s - skipping this entry")
                continue

        kpi_data = kpi_data[['symbol'] + [col for col in kpi_data.columns if col != 'symbol']]

        rec_data['Date'] = pd.to_datetime(rec_data['Date'])

        his_data['priceMA50'] = his_data['Close'].rolling(window=50).mean()
        his_data['priceMA200'] = his_data['Close'].rolling(window=200).mean()
        his_data['diffMA50_200'] = his_data['priceMA50'] - his_data['priceMA200']

        kpi_data.to_csv(file_kpi)
        div_data.to_csv(file_div)
        rec_data.to_csv(file_rec)
        his_data.to_csv(file_his)

    return kpi_data, div_data, rec_data, his_data


if __name__ == '__main__':
    start_time = time.time()
    print("----- Starting STOCKDASH @ %s -----" % datetime.fromtimestamp(start_time))
    used_symbols = load_symbols()

    print("%s symbols loaded from file" % len(used_symbols))
    kpi_data, div_data, rec_data, his_data = get_data(used_symbols[:100])

    # Modify Recommendation Data
    rec_data_mod = pd.concat([rec_data, pd.get_dummies(rec_data['To Grade'], prefix='grade')], axis=1)
    rec_data_mod.drop(['To Grade', 'From Grade', 'Action'], axis=1, inplace=True)
    rec_data_mod['Date'] = pd.to_datetime(rec_data_mod['Date'])
    df2 = rec_data_mod.groupby([pd.Grouper(key='Date', freq='Y'), pd.Grouper('Symbol')]).agg(['sum'])

    df2['Positive'] = df2['grade_Buy'] + df2['grade_Outperform'] + df2['grade_Market Outperform'] + df2[
        'grade_Overweight'] + df2['grade_Positive'] + df2['grade_Strong Buy']
    df2['Neutral'] = df2['grade_Equal-Weight'] + df2['grade_Hold'] + df2['grade_Neutral']
    df2['Negative'] = df2['grade_Market Underperform'] + df2['grade_Reduce'] + df2['grade_Sell'] + df2[
        'grade_Underweight']

    columns = ['Positive', 'Neutral', 'Negative']
    rec_data_mod = df2[columns]

    print("Data loaded after %ss" % (time.time()-start_time))
    stockdash_render.load_dash(kpi_data[used_columns], rec_data_mod, div_data, his_data)
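The recommendation handling removed above (one-hot encoding the yfinance "To Grade" strings and summing them per year) is reintroduced later in this commit inside the recom-bar-chart callback. A standalone sketch of that transformation on a small, made-up frame:

# Sketch only: how 'To Grade' strings become yearly Positive/Neutral/Negative counts.
# The sample data is invented for illustration.
import pandas as pd

rec = pd.DataFrame({
    'Date': pd.to_datetime(['2020-03-01', '2020-06-15', '2020-11-20']),
    'Symbol': ['AAPL', 'AAPL', 'AAPL'],
    'To Grade': ['Buy', 'Hold', 'Sell'],
})

dummies = pd.get_dummies(rec['To Grade'], prefix='grade')            # grade_Buy, grade_Hold, grade_Sell
mod = pd.concat([rec.drop(columns=['To Grade']), dummies], axis=1)
yearly = mod.groupby([pd.Grouper(key='Date', freq='Y'), 'Symbol']).sum()

yearly['Positive'] = yearly.get('grade_Buy', 0)
yearly['Neutral'] = yearly.get('grade_Hold', 0)
yearly['Negative'] = yearly.get('grade_Sell', 0)
print(yearly[['Positive', 'Neutral', 'Negative']])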
@@ -1,6 +1,7 @@
 from datetime import datetime
 import time
+import flask
+import pandas as pd
 import dash_table
 import plotly.graph_objects as go
 import dash_core_components as dcc
@@ -8,167 +9,118 @@ import dash_html_components as html
 from dash import Dash
 from dash_table.Format import Format
 from dash.dependencies import Input, Output
+from plotly.subplots import make_subplots
+import stockdash_loader as sdl

-colorway = ["#9a58cc", '#FF4F00', '#375CB1', '#FF7400', '#FFF400', '#FF0056']
+colorway = ["lightslategray", '#FF4F00', '#375CB1', '#FF7400', '#FFF400', '#FF0056']
+used_columns = ['symbol', 'shortName', 'sector', 'industry', 'country', 'marketCap', 'enterpriseValue', 'dividendRate',
+                'trailingPE', 'forwardPE', 'enterpriseToEbitda', 'shortRatio']
 PAGE_SIZE = 20

+app = Dash(__name__)
+server = app.server

-def load_dash(comp_kpi, rec_data_mod, div_data, his_data):
-    start_time = time.time()
-    print(" -- Rendering STOCKDASH @ %s -----" % datetime.fromtimestamp(start_time))
-
-    app = Dash(__name__)
-    app.layout = html.Div(children=[
-        html.Div(className='row',
-                 children=[html.Div(className='three columns div-user-controls',
-                                    children=[
-                                        html.H2('STOCKDASH'),
-                                        html.P('''Visualising data with Plotly - Dash'''),
-                                        html.P('''Pick one or more KPIs from the dropdown below.'''),
-                                        html.Div(
-                                            className='div-for-dropdown',
-                                            children=[
-                                                dcc.Dropdown(id='stockselector',
-                                                             options=[{'label': i, 'value': i} for i in
-                                                                      comp_kpi._get_numeric_data().columns],
-                                                             multi=True,
-                                                             value=[comp_kpi._get_numeric_data().columns[0]],
-                                                             style={'backgroundColor': '#1E1E1E'},
-                                                             className='stockselector')
-                                            ],
-                                            style={'color': '#1E1E1E'})
-                                    ]),
-                           html.Div(className='nine columns div-for-charts bg-grey',
-                                    style={'padding': 0},
-                                    children=[
-                                        dash_table.DataTable(
-                                            id='company-kpi-data',
-                                            columns=
-                                            [{"name": i, "id": i, 'deletable': True, 'type': 'numeric',
-                                              'format': Format(group=',')} if i in comp_kpi._get_numeric_data().columns
-                                             else {"name": i, "id": i, 'deletable': True} for i in comp_kpi.columns],
-                                            style_as_list_view=True,
-                                            style_data_conditional=[{
-                                                'if': {'column_editable': False},
-                                                'backgroundColor': 'rgba(50, 50, 50, 0.5)',
-                                                'textAlign': 'left',
-                                                'color': 'white',
-                                                'padding': 7
-                                            }],
-                                            style_filter_conditional=[{
-                                                'if': {'column_editable': False},
-                                                'backgroundColor': 'rgba(40, 40, 40,0.5)',
-                                                'textAlign': 'left',
-                                                'color': 'white'
-                                            }],
-                                            style_header_conditional=[{
-                                                'if': {'column_editable': False},
-                                                'backgroundColor': 'rgba(30, 30, 30,0.5)',
-                                                'textAlign': 'left',
-                                                'fontWeight': 'bold',
-                                                'color': 'white'
-                                            }],
-                                            page_current=0,
-                                            page_size=PAGE_SIZE,
-                                            page_action='custom',
-
-                                            filter_action='custom',
-                                            filter_query='',
-                                            sort_action='custom',
-                                            sort_mode='multi',
-                                            sort_by=[]
-                                        ),
-                                        dcc.Graph(
-                                            id='bar-chart-marketcap',
-                                            className='bg-grey',
-                                            hoverData={'points': [{'x': 'AAPL'}]},
-                                            animate=True),
-                                        dcc.Graph(
-                                            id='timeseries-chart-price',
-                                            className='bg-grey',
-                                            config={'displayModeBar': False},
-                                            animate=False),
-                                        dcc.Graph(
-                                            id='recom-bar-chart',
-                                            className='bg-grey',
-                                            config={'displayModeBar': False},
-                                            animate=True)
-                                    ])
-                          ])
-    ])
-
+start_time = time.time()
+print("----- Starting STOCKDASH @ %s -----" % datetime.fromtimestamp(start_time))
+kpi_data = sdl.load_from_db(sdl.db_kpi, orderby="marketCap DESC")
+print("Data loaded after %ss" % (time.time()-start_time))
+comp_kpi = kpi_data[used_columns]
+
+app.layout = html.Div(children=[
+    html.Div(className='row',
+             children=[html.Div(className='three columns div-user-controls',
+                                children=[
+                                    html.H2('STOCKDASH'),
+                                    html.P('''Visualising data with Plotly - Dash'''),
+                                    html.P('''Pick one or more KPIs from the dropdown below.'''),
+                                    html.Div(
+                                        className='div-for-dropdown',
+                                        children=[
+                                            dcc.Dropdown(id='stockselector',
+                                                         options=[{'label': i, 'value': i} for i in
+                                                                  comp_kpi._get_numeric_data().columns],
+                                                         multi=True,
+                                                         value=[comp_kpi._get_numeric_data().columns[0]],
+                                                         style={'backgroundColor': '#1E1E1E'},
+                                                         className='stockselector')
+                                        ],
+                                        style={'color': '#1E1E1E'}),
+                                    html.P(id="total-stocks"),
+                                    dcc.Markdown(
+                                        children=[
+                                            "Source: [thiessen.io](https://www.thiessen.io)"
+                                        ])
+                                ]),
+                       html.Div(className='nine columns div-for-charts bg-grey',
+                                style={'padding': 0},
+                                children=[
+                                    dash_table.DataTable(
+                                        id='company-kpi-data',
+                                        columns=
+                                        [{"name": i, "id": i, 'deletable': True, 'type': 'numeric',
+                                          'format': Format(group=',')} if i in comp_kpi._get_numeric_data().columns
+                                         else {"name": i, "id": i, 'deletable': True} for i in comp_kpi.columns],
+                                        style_as_list_view=True,
+                                        style_data_conditional=[{
+                                            'if': {'column_editable': False},
+                                            'backgroundColor': 'rgba(50, 50, 50, 0.5)',
+                                            'textAlign': 'left',
+                                            'color': 'white',
+                                            'padding': 7
+                                        }],
+                                        style_filter_conditional=[{
+                                            'if': {'column_editable': False},
+                                            'backgroundColor': 'rgba(40, 40, 40,0.5)',
+                                            'textAlign': 'left',
+                                            'color': 'white'
+                                        }],
+                                        style_header_conditional=[{
+                                            'if': {'column_editable': False},
+                                            'backgroundColor': 'rgba(30, 30, 30,0.5)',
+                                            'textAlign': 'left',
+                                            'fontWeight': 'bold',
+                                            'color': 'white'
+                                        }],
+                                        page_current=0,
+                                        page_size=PAGE_SIZE,
+                                        page_action='custom',
+
+                                        filter_action='custom',
+                                        filter_query='',
+                                        sort_action='custom',
+                                        sort_mode='multi',
+                                        sort_by=[]
+                                    ),
+                                    dcc.Graph(
+                                        id='bar-chart-marketcap',
+                                        className='bg-grey',
+                                        hoverData={'points': [{'x': 'AAPL'}]},
+                                        animate=False),
+                                    dcc.Graph(
+                                        id='timeseries-chart-price',
+                                        className='bg-grey',
+                                        config={'displayModeBar': False},
+                                        animate=False),
+                                    dcc.Graph(
+                                        id='recom-bar-chart',
+                                        className='bg-grey',
+                                        config={'displayModeBar': False},
+                                        animate=False)
+                                ])
+                       ])
+])
+
 @app.callback(Output('bar-chart-marketcap', 'figure'),
               [Input('company-kpi-data', 'data'),
-               Input('stockselector', 'value')])
-def update_graph(data, selected_columns):
-    used_symbols = [x['symbol'] for x in data]
+               Input('stockselector', 'value'),
+               Input('bar-chart-marketcap', 'clickData')])
+def update_graph(data, selected_columns, clickData):
+    used_symbols = [x['symbol'] for x in data]

     figure = go.Figure(
         layout=go.Layout(
-            colorway=colorway,
-            template='plotly_dark',
-            paper_bgcolor='rgba(0, 0, 0, 0)',
-            plot_bgcolor='rgba(0, 0, 0, 0)',
-            margin={'b': 15},
-            hovermode='x',
-            autosize=True,
-            title={'text': 'Market Data', 'font': {'color': 'white'}, 'x': 0.5}
-        ))
-
-    val = dict()
-    val["xaxis"] = dict(domain=[0.15, 0.85])
-    for i, column in enumerate(selected_columns):
-        i += 1
-        figure.add_trace(go.Bar(name=column,
-                                x=used_symbols,
-                                y=[x[column] for x in data],
-                                yaxis='y' + str(i), offsetgroup=i))
-
-        val["yaxis%s" % i] = dict(
-            title=column,
-            titlefont=dict(color=colorway[i - 1]),
-            tickfont=dict(color=colorway[i - 1]),
-        )
-
-        if i == 2:
-            val["yaxis2"].update(dict(
-                anchor="x",
-                overlaying="y",
-                side="right"
-            ))
-        elif i == 3:
-            val["yaxis3"].update(dict(
-                anchor="free",
-                overlaying="y",
-                side="left",
-                position=0.05
-            ))
-        elif i == 4:
-            val["yaxis4"].update(dict(
-                anchor="free",
-                overlaying="y",
-                side="right",
-                position=0.95
-            ))
-
-    figure.update_layout(val)
-    figure.update_yaxes(
-        showgrid=True, zeroline=True, zerolinewidth=1, zerolinecolor='White',
-    )
-
-    return figure
-
-
-@app.callback(Output('recom-bar-chart', 'figure'),
-              [Input('company-kpi-data', 'data')])
-def update_graph(data):
-    used_symbols = [x['symbol'] for x in data]
-
-    df = rec_data_mod.loc['2020-12-31'].reset_index()
-    df_tmp = df.loc[df['Symbol'].isin(used_symbols)]
-
-    figure = go.Figure(layout=go.Layout(
             colorway=colorway,
             template='plotly_dark',
             paper_bgcolor='rgba(0, 0, 0, 0)',
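With page_action, filter_action, and sort_action all set to 'custom', the DataTable above does no paging, filtering, or sorting in the browser; it only exposes its current page_current/page_size, filter_query, and sort_by properties, and the update_table callback in the next hunk rebuilds the visible page from comp_kpi on the server. A rough sketch of the filter strings such a table emits (this is the syntax the split_filter_part helper further below expects; the exact strings can vary between Dash versions):

# Sketch only: example filter_query values sent by a DataTable with filter_action='custom'.
# They are parsed server-side by split_filter_part() / update_table() further down.
examples = [
    "{marketCap} ge 1000000000",                               # numeric comparison
    "{country} contains 'United States'",                      # substring match
    "{marketCap} ge 1000000000 && {sector} contains 'Tech'",   # conditions joined with ' && '
]
for query in examples:
    print(query.split(' && '))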
@@ -176,113 +128,281 @@ def load_dash(comp_kpi, rec_data_mod, div_data, his_data):
             margin={'b': 15},
             hovermode='x',
             autosize=True,
-            title={'text': 'Recommendation Data', 'font': {'color': 'white'}, 'x': 0.5},
-            barmode='stack'
+            title={'text': 'Market Data', 'font': {'color': 'white'}, 'x': 0.5}
         ))

-    figure.add_trace(go.Bar(x=used_symbols, y=df_tmp['Positive'].tolist(), name='Positive Outlook', marker_color='#41B3A3'))
-    figure.add_trace(go.Bar(x=used_symbols, y=df_tmp['Neutral'].tolist(), name='Neutral Outlook', marker_color='#E8A87C'))
-    figure.add_trace(go.Bar(x=used_symbols, y=df_tmp['Negative'].tolist(), name='Negative Outlook', marker_color='#E27D60'))
-
-    return figure
-
-
-@app.callback(Output('timeseries-chart-price', 'figure'),
-              [Input('bar-chart-marketcap', 'hoverData')])
-def update_graph(hoverData):
-    trace1 = []
-    columns = ['Close', 'priceMA50', 'priceMA200']
-    df_sub = his_data[his_data['Symbol'] == hoverData['points'][0]['x']]
-
-    for column in columns:
-        trace1.append(go.Scatter(x=df_sub['Date'],
-                                 y=df_sub[column],
-                                 mode='lines',
-                                 opacity=0.7,
-                                 name=hoverData['points'][0]['x'] + "-" + column,
-                                 textposition='bottom center'))
-
-    traces = [trace1]
-    data = [val for sublist in traces for val in sublist]
-    figure = {'data': data,
-              'layout': go.Layout(
-                  colorway=colorway,
-                  template='plotly_dark',
-                  paper_bgcolor='rgba(0, 0, 0, 0)',
-                  plot_bgcolor='rgba(0, 0, 0, 0)',
-                  margin={'b': 15},
-                  hovermode='x',
-                  autosize=True,
-
-                  title={'text': 'Stock Prices', 'font': {'color': 'white'}, 'x': 0.5},
-                  xaxis={'range': [df_sub['Date'].min(), df_sub['Date'].max()]},
-                  yaxis={'range': [0, df_sub['Close'].max() + df_sub['Close'].max() / 10]}
-              ),
-              }
-
+    val = dict()
+    val["xaxis"] = dict(domain=[0.15, 0.85])
+    for i, column in enumerate(selected_columns):
+        i += 1
+        figure.add_trace(go.Bar(name=column,
+                                x=used_symbols,
+                                y=[x[column] for x in data],
+                                marker_color=['lightslategray',] * len(data),
+                                yaxis='y' + str(i), offsetgroup=i))
+
+        val["yaxis%s" % i] = dict(
+            title=column,
+            titlefont=dict(color=colorway[i - 1]),
+            tickfont=dict(color=colorway[i - 1]),
+        )
+
+        if i == 2:
+            val["yaxis2"].update(dict(
+                anchor="x",
+                overlaying="y",
+                side="right"
+            ))
+        elif i == 3:
+            val["yaxis3"].update(dict(
+                anchor="free",
+                overlaying="y",
+                side="left",
+                position=0.10
+            ))
+        elif i == 4:
+            val["yaxis4"].update(dict(
+                anchor="free",
+                overlaying="y",
+                side="right",
+                position=0.90
+            ))
+
+    figure.update_layout(val)
+    figure.update_yaxes(
+        showgrid=True, zeroline=True, zerolinewidth=1, zerolinecolor='White',
+    )
+
+    if clickData is not None:
+        i = 0
+        for subFig in figure['data']:
+            color = [colorway[i],] * len(data)
+            color[clickData['points'][0]['pointNumber']] = 'crimson'
+            subFig['marker']['color'] = color
+            i = i + 1
+
     return figure

-def split_filter_part(filter_part):
-    operators = [['ge ', '>='],
-                 ['le ', '<='],
-                 ['lt ', '<'],
-                 ['gt ', '>'],
-                 ['ne ', '!='],
-                 ['eq ', '='],
-                 ['contains '],
-                 ['datestartswith ']]
-
-    for operator_type in operators:
-        for operator in operator_type:
-            if operator in filter_part:
-                name_part, value_part = filter_part.split(operator, 1)
-                name = name_part[name_part.find('{') + 1: name_part.rfind('}')]
-
-                value_part = value_part.strip()
-                v0 = value_part[0]
-
-                if v0 == value_part[-1] and v0 in ("'", '"', '`'):
-                    value = value_part[1: -1].replace('\\' + v0, v0)
-                else:
-                    try:
-                        value = float(value_part)
-                    except ValueError:
-                        value = value_part
-
-                return name, operator_type[0].strip(), value
-    return [None] * 3
-
-
-@app.callback(
-    Output('company-kpi-data', "data"),
-    Input('company-kpi-data', "page_current"),
-    Input('company-kpi-data', "page_size"),
-    Input('company-kpi-data', "sort_by"),
-    Input('company-kpi-data', 'filter_query'))
-def update_table(page_current, page_size, sort_by, filter):
-    filtering_expressions = filter.split(' && ')
-    dff = comp_kpi
-    for filter_part in filtering_expressions:
-        col_name, operator, filter_value = split_filter_part(filter_part)
-
-        if operator in ('eq', 'ne', 'lt', 'le', 'gt', 'ge'):
-            dff = dff.loc[getattr(dff[col_name], operator)(filter_value)]
-        elif operator == 'contains':
-            dff = dff.loc[dff[col_name].str.contains(filter_value)]
-        elif operator == 'datestartswith':
-            dff = dff.loc[dff[col_name].str.startswith(filter_value)]
-
-    if len(sort_by):
-        dff = dff.sort_values(
-            [col['column_id'] for col in sort_by],
-            ascending=[
-                col['direction'] == 'asc'
-                for col in sort_by
-            ],
-            inplace=False
-        )
-
-    page = page_current
-    size = page_size
-    return dff.iloc[page * size: (page + 1) * size].to_dict('records')
-
-
-print("Rendering loaded after %ss" % (time.time()-start_time))
-app.run_server(debug=True)
+@app.callback(Output("total-stocks", "children"),
+              [Input('company-kpi-data', 'data')])
+def update_total_stocks(data):
+    stocks_picked = len(comp_kpi)
+    return "Total Number of Stocks loaded: %s" % stocks_picked
+
+
+@app.callback(Output('recom-bar-chart', 'figure'),
+              [Input('company-kpi-data', 'data')])
+def update_graph(data):
+    used_symbols = [x['symbol'] for x in data]
+
+    # Modify Recommendation Data
+    where_clause = "symbol IN ('"+"','".join(used_symbols)+"')"
+    rec_data = sdl.load_from_db(sdl.db_rec, where=where_clause)
+    rec_data_mod = pd.concat([rec_data, pd.get_dummies(rec_data['To Grade'], prefix='grade')], axis=1)
+    rec_data_mod.drop(['To Grade', 'From Grade', 'Action'], axis=1, inplace=True)
+    rec_data_mod['Date'] = pd.to_datetime(rec_data_mod['Date'])
+    df2 = rec_data_mod.groupby([pd.Grouper(key='Date', freq='Y'), pd.Grouper('symbol')]).agg(['sum'])
+
+    df2['Positive'] = df2['grade_Buy'] + df2['grade_Outperform'] + df2['grade_Market Outperform'] + df2[
+        'grade_Overweight'] + df2['grade_Positive'] + df2['grade_Strong Buy']
+    df2['Neutral'] = df2['grade_Equal-Weight'] + df2['grade_Hold'] + df2['grade_Neutral']
+    df2['Negative'] = df2['grade_Market Underperform'] + df2['grade_Reduce'] + df2['grade_Sell'] + df2[
+        'grade_Underweight']
+
+    columns = ['Positive', 'Neutral', 'Negative']
+    rec_data_mod = df2[columns]
+
+    df = rec_data_mod.loc['2020-12-31'].reset_index()
+    df_tmp = df.loc[df['symbol'].isin(used_symbols)]
+
+    figure = go.Figure(layout=go.Layout(
+        colorway=colorway,
+        template='plotly_dark',
+        paper_bgcolor='rgba(0, 0, 0, 0)',
+        plot_bgcolor='rgba(0, 0, 0, 0)',
+        margin={'b': 15},
+        hovermode='x',
+        autosize=True,
+        title={'text': 'Recommendation Data', 'font': {'color': 'white'}, 'x': 0.5},
+        barmode='stack'
+    ))
+
+    figure.add_trace(
+        go.Bar(x=used_symbols, y=df_tmp['Positive'].tolist(), name='Positive Outlook', marker_color='#41B3A3'))
+    figure.add_trace(
+        go.Bar(x=used_symbols, y=df_tmp['Neutral'].tolist(), name='Neutral Outlook', marker_color='#E8A87C'))
+    figure.add_trace(
+        go.Bar(x=used_symbols, y=df_tmp['Negative'].tolist(), name='Negative Outlook', marker_color='#E27D60'))
+
+    return figure
+
+
+@app.callback(Output('timeseries-chart-price', 'figure'),
+              Input('bar-chart-marketcap', 'clickData'),
+              Input('company-kpi-data', 'data'))
+def update_graph(clickData, kpi_data):
+    if clickData is None:
+        used_symbol = kpi_data[0]['symbol']
+    else:
+        used_symbol = clickData['points'][0]['x']
+
+    where_clause = "symbol = '%s'" % used_symbol
+    his_data = sdl.load_from_db(sdl.db_his, where=where_clause, limit=1000, orderby="Date DESC")
+    where_clause = "symbol = '%s' and Date >= '%s'" % (used_symbol, his_data['Date'].min())
+    div_data = sdl.load_from_db(sdl.db_div, where=where_clause, orderby="Date DESC")
+
+    # Calculate rolling window
+    his_data['priceMA50'] = his_data['Close'].rolling(window=50, min_periods=1).mean()
+    std_dev = his_data['Close'].rolling(window=50, min_periods=1).std()
+
+    his_data['priceMA50_lstd'] = his_data['priceMA50'] - 2*std_dev
+    his_data['priceMA50_hstd'] = his_data['priceMA50'] + 2*std_dev
+    his_data['priceMA200'] = his_data['Close'].rolling(window=200, min_periods=1).mean()
+    his_data['diffMA50_200'] = his_data['priceMA50'] - his_data['priceMA200']
+
+    fig = make_subplots(rows=3, cols=1, row_heights=[0.7, 0.2, 0.1], shared_xaxes=True, vertical_spacing=0.07)
+
+    fig.add_trace(go.Candlestick(x=his_data['Date'], open=his_data['Open'], high=his_data['High'], low=his_data['Low'],
+                                 close=his_data['Close'], name=used_symbol), row=1, col=1)
+
+    columns = ['priceMA50', 'priceMA200']
+    for column in columns:
+        fig.add_trace(go.Scatter(x=his_data['Date'], y=his_data[column], mode='lines', opacity=0.7,
+                                 name=used_symbol + "-" + column, textposition='bottom center'),
+                      row=1, col=1)
+
+    fig.add_trace(go.Scatter(x=his_data['Date'], y=his_data['priceMA50_lstd'], mode='lines', opacity=0.7, line=dict(color='#ffdd70', width=1, dash='dash'),
+                             name=used_symbol + "-" + 'Lower Band', textposition='bottom center'),
+                  row=1, col=1)
+
+    fig.add_trace(go.Scatter(x=his_data['Date'], y=his_data['priceMA50_hstd'], mode='lines', opacity=0.7, line=dict(color='#ffdd70', width=1, dash='dash'),
+                             name=used_symbol + "-" + 'Higher Band', textposition='bottom center'),
+                  row=1, col=1)
+
+    fig.update_yaxes(showgrid=True, zeroline=False,
+                     showspikes=True, spikemode='across', spikesnap='cursor', showline=False, spikedash='solid')
+
+    fig.update_xaxes(showgrid=True, zeroline=False, rangeslider_visible=False, showticklabels=False,
+                     showspikes=True, spikemode='across', spikesnap='cursor', showline=False, spikedash='solid',
+                     rangebreaks=[
+                         dict(bounds=["sat", "mon"]),  # hide weekends
+                         # dict(values=["2015-12-25", "2016-01-01"])  # hide Christmas and New Year's
+                     ])
+
+    fig.update_layout(
+        colorway=colorway,
+        template='plotly_dark',
+        paper_bgcolor='rgba(0, 0, 0, 0)',
+        plot_bgcolor='rgba(0, 0, 0, 0)',
+        autosize=True,
+        height=800,
+        hovermode='x unified',
+        hoverlabel=dict(font=dict(color='black')),
+        title={'text': 'Stock Prices', 'font': {'color': 'white'}, 'x': 0.5},
+        xaxis={'range': [his_data['Date'].min(), his_data['Date'].max()],
+               'showticklabels': True,
+               'rangeselector_bgcolor': 'rgba(0, 22, 0, 0)',
+               'rangeselector': dict(
+                   buttons=list([
+                       dict(count=1,
+                            label="1m",
+                            step="month",
+                            stepmode="backward"),
+                       dict(count=6,
+                            label="6m",
+                            step="month",
+                            stepmode="backward"),
+                       dict(count=1,
+                            label="YTD",
+                            step="year",
+                            stepmode="todate"),
+                       dict(count=1,
+                            label="1y",
+                            step="year",
+                            stepmode="backward"),
+                       dict(step="all")
+                   ]))},
+        yaxis1={'autorange': True, 'fixedrange': False},
+        legend=dict(y=1, x=0),
+        dragmode='pan')
+
+    fig.add_trace(
+        go.Bar(x=his_data['Date'], y=his_data['Volume'], marker_color='#3399ff', name='Volume'),
+        row=2, col=1)
+
+    fig.add_trace(
+        go.Scatter(x=div_data['Date'], y=div_data['Dividends'], marker_color='#fae823', name='Dividends', line=dict(
+            shape='hv'
+        )),
+        row=3, col=1)
+
+    return fig
+
+
+def split_filter_part(filter_part):
+    operators = [['ge ', '>='],
+                 ['le ', '<='],
+                 ['lt ', '<'],
+                 ['gt ', '>'],
+                 ['ne ', '!='],
+                 ['eq ', '='],
+                 ['contains '],
+                 ['datestartswith ']]
+
+    for operator_type in operators:
+        for operator in operator_type:
+            if operator in filter_part:
+                name_part, value_part = filter_part.split(operator, 1)
+                name = name_part[name_part.find('{') + 1: name_part.rfind('}')]
+
+                value_part = value_part.strip()
+                v0 = value_part[0]
+
+                if v0 == value_part[-1] and v0 in ("'", '"', '`'):
+                    value = value_part[1: -1].replace('\\' + v0, v0)
+                else:
+                    try:
+                        value = float(value_part)
+                    except ValueError:
+                        value = value_part
+
+                return name, operator_type[0].strip(), value
+    return [None] * 3
+
+
+@app.callback(
+    Output('company-kpi-data', "data"),
+    Input('company-kpi-data', "page_current"),
+    Input('company-kpi-data', "page_size"),
+    Input('company-kpi-data', "sort_by"),
+    Input('company-kpi-data', 'filter_query'))
+def update_table(page_current, page_size, sort_by, filter):
+    filtering_expressions = filter.split(' && ')
+    dff = comp_kpi
+    for filter_part in filtering_expressions:
+        col_name, operator, filter_value = split_filter_part(filter_part)
+
+        if operator in ('eq', 'ne', 'lt', 'le', 'gt', 'ge'):
+            dff = dff.loc[getattr(dff[col_name], operator)(filter_value)]
+        elif operator == 'contains':
+            dff = dff.loc[dff[col_name].str.contains(filter_value)]
+        elif operator == 'datestartswith':
+            dff = dff.loc[dff[col_name].str.startswith(filter_value)]
+
+    if len(sort_by):
+        dff = dff.sort_values(
+            [col['column_id'] for col in sort_by],
+            ascending=[
+                col['direction'] == 'asc'
+                for col in sort_by
+            ],
+            inplace=False
+        )
+
+    page = page_current
+    size = page_size
+    return dff.iloc[page * size: (page + 1) * size].to_dict('records')
+
+
+app.run_server(debug=True, port=18051, threaded=True)
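The timeseries callback above derives all of its overlay lines from simple rolling statistics on the Close column: 50-day and 200-day moving averages plus a band of two rolling standard deviations around the 50-day mean. A standalone pandas sketch of that calculation on synthetic prices (illustration only, not part of the commit):

# Sketch only: the rolling-mean / two-sigma band behind the price chart overlays.
import numpy as np
import pandas as pd

close = pd.Series(100 + np.random.randn(300).cumsum(),
                  index=pd.bdate_range('2020-01-01', periods=300), name='Close')

ma50 = close.rolling(window=50, min_periods=1).mean()
std50 = close.rolling(window=50, min_periods=1).std()
ma200 = close.rolling(window=200, min_periods=1).mean()

bands = pd.DataFrame({
    'priceMA50': ma50,
    'priceMA50_lstd': ma50 - 2 * std50,   # lower band
    'priceMA50_hstd': ma50 + 2 * std50,   # upper band
    'priceMA200': ma200,
    'diffMA50_200': ma50 - ma200,
})
print(bands.tail())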