Commit 642c876: first commit
Parent(s): e69cc54

This view is limited to 50 files because it contains too many changes; the full change set is in the raw diff.
- .gitignore +1 -0
- .streamlit/config.toml +2 -0
- README.md +1 -12
- __pycache__/data_creator.cpython-39.pyc +0 -0
- __pycache__/persist.cpython-39.pyc +0 -0
- __pycache__/plot_creator.cpython-39.pyc +0 -0
- __pycache__/port_creator.cpython-39.pyc +0 -0
- __pycache__/risk_metrics.cpython-310.pyc +0 -0
- __pycache__/risk_metrics.cpython-39.pyc +0 -0
- aggrid_viewer.py +238 -0
- app.py +14 -0
- cbpro.ipynb +573 -0
- coincap.ipynb +0 -0
- cryptoTester.py +281 -0
- crypto_viewer.py +314 -0
- data_creator.py +150 -0
- frontpage.py +211 -0
- goofing.ipynb +0 -0
- histories.csv +0 -0
- images/background.png +0 -0
- images/cart.png +0 -0
- logos/ADA.png +0 -0
- logos/ATOM.png +0 -0
- logos/AVAX.png +0 -0
- logos/BNB.png +0 -0
- logos/BTC.png +0 -0
- logos/BUSD.png +0 -0
- logos/CRO.png +0 -0
- logos/DAI.png +0 -0
- logos/DOGE.png +0 -0
- logos/DOT.png +0 -0
- logos/ETH.png +0 -0
- logos/HEX.png +0 -0
- logos/LINK.png +0 -0
- logos/LTC.png +0 -0
- logos/LUNA.png +0 -0
- logos/MATIC.png +0 -0
- logos/NEAR.png +0 -0
- logos/SHIB.png +0 -0
- logos/SOL.png +0 -0
- logos/USDC.png +0 -0
- logos/USDT.png +0 -0
- logos/UST.png +0 -0
- logos/WBTC.png +0 -0
- logos/XRP.png +0 -0
- performance analysis.ipynb +1206 -0
- persist.py +23 -0
- plot_creator.py +154 -0
- port_creator.py +141 -0
- port_viewer.py +159 -0
.gitignore
ADDED
@@ -0,0 +1 @@
+.ipynb_checkpoints
.streamlit/config.toml
ADDED
@@ -0,0 +1,2 @@
+[theme]
+base="dark"
README.md
CHANGED
@@ -1,12 +1 @@
----
-title: CryptoTester
-emoji: 🐢
-colorFrom: purple
-colorTo: pink
-sdk: streamlit
-sdk_version: 1.9.0
-app_file: app.py
-pinned: false
----
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces#reference
+This is the README file for the crypto port project.
__pycache__/data_creator.cpython-39.pyc
ADDED
Binary file (5.31 kB).

__pycache__/persist.cpython-39.pyc
ADDED
Binary file (871 Bytes).

__pycache__/plot_creator.cpython-39.pyc
ADDED
Binary file (7.16 kB).

__pycache__/port_creator.cpython-39.pyc
ADDED
Binary file (5.05 kB).

__pycache__/risk_metrics.cpython-310.pyc
ADDED
Binary file (1.52 kB).

__pycache__/risk_metrics.cpython-39.pyc
ADDED
Binary file (1.64 kB).
aggrid_viewer.py
ADDED
@@ -0,0 +1,238 @@
+import streamlit as st
+import plotly.express as px
+from datetime import date, timedelta
+from data_creator import create_market_cap_dict, gen_rebased_df, ids2names_dict, names2ids_dict, create_assets, gen_symbols, create_histories_df, create_unix_dates, create_returns_df, create_rebased_df, date_range
+from plot_creator import get_pre_selected_idx, write_coins, write_coins_custom, write_bespoke_coins, create_comparison_df, load_images, gen_performance_ag_df, add_drawdown
+from port_creator import gen_all_returns, markowitz_weights_dict, uniform_weights_dict, ids_with_histories, uniform, create_port_rtns, markowitz_weights, create_weights_df
+from risk_metrics import max_drawdown
+from st_aggrid import AgGrid, GridOptionsBuilder
+from persist import persist, load_widget_state
+
+load_widget_state()
+
+st.markdown(
+    """
+    <style>
+
+    .css-1xsoh1l {
+      font-size: 0px;
+    }
+    .css-1xsoh1l{
+      color: rgb(120 190 33);
+    }
+    .css-jhf39w {
+      color: rgba(120, 190, 33, 1);
+    }
+    .css-jv3mmh {
+      background-color: rgb(120, 190, 33);
+    }
+    </style>
+    """,
+    unsafe_allow_html = True
+)
+
+# load start and end dates for investment analysis
+lookback_years = 5 # max date range for backtest will be: lookback_years - 1
+start_date = date.today() - timedelta(365)
+end_date = date.today()
+
+if 'start_date' not in st.session_state:
+    st.session_state.start_date = start_date
+    st.session_state.end_date = end_date
+
+if 'max_coins' not in st.session_state:
+    st.session_state.max_coins = 10
+
+if 'start_id' not in st.session_state:
+    st.session_state.start_id = 1
+
+# Pull down histories from coincap, and create dataframes for historic prices,
+# returns and rebased cumulative price; histories_df, returns_df, and
+# rebased_df, respectively.
+assets_json = create_assets(total_coins=50)
+symbols, names, coin_ids = gen_symbols(assets_json)
+ids2symbols = ids2names_dict(coin_ids, symbols)
+ids2names_dict=ids2names_dict(coin_ids, names)
+names2ids_dict = names2ids_dict(names, coin_ids)
+market_cap_dict = create_market_cap_dict(assets_json)
+start_unix, end_unix = create_unix_dates(today=date.today(), lookback_years=lookback_years)
+histories_df = create_histories_df(coin_ids, start_unix, end_unix)
+
+# Create list of coin ids with full hisoties over the backtest period
+ids_with_histories = ids_with_histories(histories_df,
+    st.session_state.start_date, st.session_state.end_date)
+names_with_histories = list(map(ids2names_dict.get, ids_with_histories))
+
+
+def change_date_range():
+    st.session_state.start_date = st.session_state.myslider[0]
+    st.session_state.end_date = st.session_state.myslider[1]
+
+# calculate weghts for the uniform and markowitz pfs
+uniform_weights_dict = uniform_weights_dict(ids_with_histories[:st.session_state.max_coins])
+#markowitz_weights_dict = markowitz_weights_dict(histories_df,
+#    st.session_state.start_date ,ids_with_histories[:max_coins], analysis_days=365)
+strategy_dict = {'Uniform': uniform_weights_dict}#, 'Markowitz':markowitz_weights_dict}
+
+if "strategy_dict" not in st.session_state:
+    st.session_state.strategy_dict=strategy_dict
+
+if 'selected_assets' not in st.session_state:
+    st.session_state.selected_assets = ["Uniform"]
+
+with st.sidebar:
+    st.subheader("Portfolio weights viewer")
+    portfolio_type = st.selectbox(
+        'Select portfolio strategy',
+        ['Create your own'] + (list(st.session_state.strategy_dict.keys())),
+        index = st.session_state.start_id
+    )
+
+
+if st.checkbox("Explain this"):
+    st.subheader("What's this all about then, eh?")
+    st.write('''
+    The app allows you to construct your own portfolios of crypto currencies and view their
+    historic performance alongside the performance of individual crypto
+    currencies over an investment period of your choosing.
+
+    To view the assets and weights comprising a particular portfolio select the
+    portfolio of interest in the 'Select portfolio strategy' dropdown (a uniform
+    portfolio for the top ten largest coins has been automatically created for you
+    to start with).
+
+    To create your own portfolio:
+
+    1. Select 'Create your own' in the 'select portfolio strategy' dropdown;
+    2. Select the maximum number of coins in your portfolio;
+    3. Select the relative weights for each of these assets;
+    4. Choose a name for your portfolio and click add portfolio;
+    5. Click update viewer;
+
+    You can sort and filter the performance metrics table on each of the columns.
+
+    To add an asset to the performance chart, select the corresponding select box.
+    ''')
+
+# Add select slider to allow
+date_list = date_range(end_date,lookback_years-1)
+start_port_date, end_port_date = st.select_slider(
+    'Select backtest date range',
+    key="myslider",
+    options=date_list,
+    #value=(date.today() - timedelta(365), date.today()),
+    value = (st.session_state.start_date, st.session_state.end_date),
+    on_change=change_date_range
+)
+
+
+# Move the definition of strategy_dict to about the potfolio_type selectbox
+# This will require that you define max_coins in session state,a dn the
+# have the max_coins number_input update the max coins session state.
+# = 10 and let it be
+
+
+# calculate returns for the portfolios and add to it the rebased df for assets
+# with hisories. This is the new returns_df
+rebased_df = gen_rebased_df(histories_df, ids_with_histories,
+    st.session_state.start_date, st.session_state.end_date)
+
+all_returns_df = gen_all_returns(rebased_df, ids_with_histories,st.session_state.strategy_dict)
+
+def write_something():
+    st.write("")
+
+def rerun_aggrid():
+    st.session_state.selected_indexes = selected_indexes
+    st.session_state.performance_ag_df = gen_performance_ag_df(all_returns_df, market_cap_dict,
+        st.session_state.strategy_dict)
+    st.header('ran')
+
+if portfolio_type == 'Create your own':
+    with st.sidebar:
+        st.session_state.max_coins = st.number_input(
+            "Maximum number of coins in portfolio",
+            min_value=1,
+            max_value=20,
+            value=10,
+            help='''
+            Coins will be added to your "investment set" in order of largest market cap.
+
+            The "investment set" is the group of assets from which your portfolio is
+            constructed. Depending on the portfolio strategy you choose, not all of the
+            assets in your investment set will be included in your portfolio.
+
+            '''
+        )
+        st.markdown("Bespoke portfolio weights (relative):" , unsafe_allow_html=False)
+        bespoke_weights = write_coins_custom(names_with_histories[:st.session_state.max_coins])
+        #bespoke_weights = write_bespoke_coins(names_with_histories[:st.session_state.max_coins])
+        bespoke_cols = st.columns(2)
+        bespoke_cols[0].write(" ")
+        bespoke_cols[0].write(" ")
+        add_bespoke = bespoke_cols[0].button("Add portfolio", key='bespoke_button')
+        bespoke_name = bespoke_cols[1].text_input("Choose portfolio name")
+        if add_bespoke:
+            if bespoke_name=="" or bespoke_name in all_returns_df.columns:
+                st.warning("Please give your portfolio a unique name")
+            else:
+                beskpoke_weights_dict={}
+                for i, wt in enumerate(bespoke_weights):
+                    beskpoke_weights_dict[coin_ids[i]] = wt
+                st.session_state.strategy_dict[bespoke_name] = beskpoke_weights_dict
+                st.session_state.start_id = len(st.session_state.strategy_dict)
+                #st.session_state.selected_assets.append(bespoke_name)
+                st.success("Porfolio added, update viewer to see results")
+        #st.button('Update viewer', on_click = change_date_range)
+        st.button('Update viewer', on_click = rerun_aggrid)
+        #st.button('Update viewer', on_click = st.experimental_rerun())
+        #st.write(st.session_state.strategy_dict)
+else:
+    non_zero_coins = [key for key in st.session_state.strategy_dict[portfolio_type].keys() if st.session_state.strategy_dict[portfolio_type][key]>0]
+    with st.sidebar:
+        st.markdown(portfolio_type + " portfolio weights (%):" , unsafe_allow_html=False)
+        write_coins(non_zero_coins, st.session_state.strategy_dict[portfolio_type], ids2names_dict)
+
+st.session_state.performance_ag_df = gen_performance_ag_df(all_returns_df, market_cap_dict,
+    st.session_state.strategy_dict)
+
+if 'selected_assets' not in st.session_state:
+    st.session_state.selected_assets = ["Uniform"]
+
+selected_indexes = []
+for asset in st.session_state.selected_assets:
+    try:
+        selected_indexes.append(list(st.session_state.performance_ag_df['Asset']).index(asset))
+    except:
+        pass
+
+if 'selected_indexes' not in st.session_state:
+    st.session_state.selected_indexes = selected_indexes
+
+gb = GridOptionsBuilder.from_dataframe(st.session_state.performance_ag_df)
+gb.configure_selection('multiple', use_checkbox=True,
+    pre_selected_rows = st.session_state.selected_indexes)
+gridOptions = gb.build()
+
+st.subheader("Performance metrics")
+grid_response = AgGrid(st.session_state.performance_ag_df, gridOptions=gridOptions,
+    data_return_mode = 'FILTERED', allow_unsafe_jscode=True, height = 200,
+    update_mode='MODEL_CHANGED', key = persist("aggrid")) # MANUAL SELECTION_CHANGED MODEL_CHANGED, VALUE_CHANGED
+
+selected_assets = []
+for row in grid_response['selected_rows']:
+    selected_assets.append(row['Asset'])
+st.session_state.selected_assets = selected_assets
+
+
+
+#selected_indexes = []
+#for asset in st.session_state.selected_assets:
+#    selected_indexes.append(list(performance_ag_df['Asset']).index(asset))
+
+chart_df = create_comparison_df(all_returns_df, st.session_state.selected_assets)
+
+fig = px.line(chart_df, x=chart_df.index, y='Value (USD)', color='Asset')
+
+st.subheader("Performance chart")
+st.write(fig)
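port_creator.py is listed in this commit but its diff is not rendered above, so the uniform_weights_dict helper imported here is not shown. As a rough, hypothetical sketch (not the committed implementation), a uniform strategy that gives every coin in the investment set an equal share could look like this:

def uniform_weights_dict(coin_ids):
    # Hypothetical sketch only: equal weight for every coin in the investment
    # set, so the weights sum to 1. The committed port_creator.py may differ.
    n = len(coin_ids)
    return {coin_id: 1.0 / n for coin_id in coin_ids}

Called with ids_with_histories[:st.session_state.max_coins] as above, this would spread the investment evenly over the largest coins that have full price histories for the chosen date range.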
app.py
ADDED
@@ -0,0 +1,14 @@
+import streamlit as st
+from streamlit_custom_slider import st_custom_slider
+from streamlit_custom_slider import st_range_slider
+
+v_custom = st_custom_slider('Hello world', 0, 100, 50, key="slider1")
+st.write(v_custom)
+
+# Add a range slider
+v_custom_range = st_range_slider('Hello world', 0, 100, (20, 60), key="slider2")
+st.write(v_custom_range)
+
+cols = st.columns(2)
+with cols[0]:
+    st_range_slider('Hello world', 0, 100, (20, 60), key="slider3")
cbpro.ipynb
ADDED
@@ -0,0 +1,573 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "167330a4-9cfe-4e11-a3e5-9b788e710b07",
+   "metadata": {},
+   "source": [
+    "!conda activate cbpro"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "d5b44fe7-a66c-4708-9d70-9df144e000bb",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import requests"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "d665ee3b-259c-4733-af10-d7dc4e91bbcd",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import cbpro"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "b0d24761-fcca-4526-ad2c-4fadaaab3320",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Collecting pandas\n",
+      "  Downloading pandas-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl (11.1 MB)\n",
+      "\u001b[K |████████████████████████████████| 11.1 MB 3.0 MB/s eta 0:00:01\n",
+      "\u001b[?25hRequirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.9/site-packages (from pandas) (2022.1)\n",
+      "Collecting numpy>=1.18.5\n",
+      "  Downloading numpy-1.22.3-cp39-cp39-macosx_10_14_x86_64.whl (17.6 MB)\n",
+      "\u001b[K |████████████████████████████████| 17.6 MB 25.2 MB/s eta 0:00:01\n",
+      "\u001b[?25hRequirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.9/site-packages (from pandas) (2.8.2)\n",
+      "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.9/site-packages (from python-dateutil>=2.8.1->pandas) (1.10.0)\n",
+      "Installing collected packages: numpy, pandas\n",
+      "Successfully installed numpy-1.22.3 pandas-1.4.2\n",
+      "\u001b[33mWARNING: You are using pip version 21.1.1; however, version 22.0.4 is available.\n",
+      "You should consider upgrading via the '/usr/local/opt/python@3.9/bin/python3.9 -m pip install --upgrade pip' command.\u001b[0m\n"
+     ]
+    }
+   ],
+   "source": [
+    "#!pip3 install pandas"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "id": "6d1cec56-82d3-4915-b271-765bbe767bc2",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "                                468         469        470         471  \\\n",
+       "id                          XRP-BTC     XRP-USD   GNT-USDC     XRP-GBP   \n",
+       "base_currency                   XRP         XRP        GNT         XRP   \n",
+       "quote_currency                  BTC         USD       USDC         GBP   \n",
+       "base_min_size                     1           1          1           1   \n",
+       "base_max_size                500000      500000     490000      500000   \n",
+       "quote_increment          0.00000001      0.0001   0.000001      0.0001   \n",
+       "base_increment                    1    0.000001          1    0.000001   \n",
+       "display_name                XRP/BTC     XRP/USD   GNT/USDC     XRP/GBP   \n",
+       "min_market_funds              0.001          10          1          10   \n",
+       "max_market_funds                 30      100000     200000      100000   \n",
+       "margin_enabled                False       False      False       False   \n",
+       "fx_stablecoin                 False       False      False       False   \n",
+       "max_slippage_percentage  0.10000000  0.10000000 0.03000000  0.10000000   \n",
+       "post_only                     False       False      False       False   \n",
+       "limit_only                    False       False      False       False   \n",
+       "cancel_only                   False       False      False       False   \n",
+       "trading_disabled               True        True       True        True   \n",
+       "status                     delisted    delisted   delisted    delisted   \n",
+       "status_message                                                           \n",
+       "auction_mode                  False       False      False       False   \n",
+       "\n",
+       "                                472  \n",
+       "id                          XRP-EUR  \n",
+       "base_currency                   XRP  \n",
+       "quote_currency                  EUR  \n",
+       "base_min_size                     1  \n",
+       "base_max_size                500000  \n",
+       "quote_increment              0.0001  \n",
+       "base_increment             0.000001  \n",
+       "display_name                XRP/EUR  \n",
+       "min_market_funds                 10  \n",
+       "max_market_funds             100000  \n",
+       "margin_enabled                False  \n",
+       "fx_stablecoin                 False  \n",
+       "max_slippage_percentage  0.10000000  \n",
+       "post_only                     False  \n",
+       "limit_only                    False  \n",
+       "cancel_only                   False  \n",
+       "trading_disabled               True  \n",
+       "status                     delisted  \n",
+       "status_message                       \n",
+       "auction_mode                  False  "
+      ]
+     },
+     "execution_count": 5,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "import cbpro\n",
+    "import pandas as pd\n",
+    "c = cbpro.PublicClient()\n",
+    "\n",
+    "data = pd.DataFrame(c.get_products())\n",
+    "data.tail().T"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "id": "6092a61b-2f80-414a-87ae-8ca248a9ac82",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "{'bids': [['38240.18', '0.00193719', 1]],\n",
+       " 'asks': [['38242.79', '0.62685639', 2]],\n",
+       " 'sequence': 36891016647,\n",
+       " 'auction_mode': False,\n",
+       " 'auction': None}"
+      ]
+     },
+     "execution_count": 6,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "c.get_product_order_book('BTC-USD')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "id": "5e076bd8-498a-4570-8682-2f890db06a4c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "historical = pd.DataFrame(c.get_product_historic_rates(product_id='ETH-USD', granularity=3600*24))\n",
+    "historical.columns= [\"Date\",\"Open\",\"High\",\"Low\",\"Close\",\"Volume\"]\n",
+    "historical['Date'] = pd.to_datetime(historical['Date'], unit='s')\n",
+    "historical.set_index('Date', inplace=True)\n",
+    "historical.sort_values(by='Date', ascending=True, inplace=True)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "id": "bf77ca3a-93d9-48c0-b29e-3c5c7cf93f59",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "(300, 5)"
+      ]
+     },
+     "execution_count": 18,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "historical.shape"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "id": "561ea595-3858-40f3-9803-d6d07d114105",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "               Open     High      Low    Close         Volume\n",
+       "Date                                                         \n",
+       "2021-07-05  2160.00  2324.44  2323.27  2197.34  156103.936222\n",
+       "2021-07-06  2195.02  2350.00  2197.34  2321.84  165693.467204\n",
+       "2021-07-07  2294.16  2411.19  2322.20  2316.82  159702.044084\n",
+       "2021-07-08  2084.00  2325.46  2317.64  2116.95  223830.003713\n",
+       "2021-07-09  2050.00  2189.28  2116.30  2147.38  190883.326179\n",
+       "...             ...      ...      ...      ...            ...\n",
+       "2022-04-26  2766.77  3038.60  3007.24  2809.26  191167.431854\n",
+       "2022-04-27  2794.23  2920.00  2809.50  2889.78  172782.717618\n",
+       "2022-04-28  2853.83  2980.36  2889.78  2935.91  167061.012721\n",
+       "2022-04-29  2775.51  2946.68  2935.97  2816.94  204389.174778\n",
+       "2022-04-30  2768.64  2842.08  2816.77  2791.04   70115.714811\n",
+       "\n",
+       "[300 rows x 5 columns]"
+      ]
+     },
+     "execution_count": 19,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "historical"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c340ff84-d379-42e3-b791-e9076bcd439e",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.5"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
coincap.ipynb
ADDED
The diff for this file is too large to render.
cryptoTester.py
ADDED
@@ -0,0 +1,281 @@
+import streamlit as st
+import plotly.express as px
+from datetime import date, timedelta
+from data_creator import create_market_cap_dict, gen_rebased_df, ids2names_dict, names2ids_dict, create_assets, gen_symbols, create_histories_df, create_unix_dates, create_returns_df, create_rebased_df, date_range
+from plot_creator import write_coins, write_coins_custom, create_chart_df, load_images, gen_performance_df, add_drawdown
+from port_creator import gen_all_returns, markowitz_weights_dict, uniform_weights_dict, ids_with_histories, uniform, create_port_rtns, markowitz_weights, create_weights_df
+from risk_metrics import max_drawdown
+
+
+st.markdown(
+    """
+    <style>
+
+    .css-1xsoh1l {
+      font-size: 0px;
+    }
+    .css-1xsoh1l{
+      color: rgb(120 190 33);
+    }
+    .css-jhf39w {
+      color: rgba(120, 190, 33, 1);
+    }
+    .css-jv3mmh {
+      background-color: rgb(120, 190, 33);
+    }
+    </style>
+    """,
+    unsafe_allow_html = True
+)
+
+# load start and end dates for investment analysis
+lookback_years = 5 # max date range for backtest will be: lookback_years - 1
+start_date = date.today() - timedelta(365)
+end_date = date.today()
+
+if 'start_date' not in st.session_state:
+    st.session_state.start_date = start_date
+    st.session_state.end_date = end_date
+
+if 'max_coins' not in st.session_state:
+    st.session_state.max_coins = 8
+
+if 'start_id' not in st.session_state:
+    st.session_state.start_id = 1 # this is the id of the selected portfolio.
+
+# Pull down histories from coincap, and create dataframes for historic prices,
+# returns and rebased cumulative price; histories_df, returns_df, and
+# rebased_df, respectively.
+assets_json = create_assets(total_coins=50)
+symbols, names, coin_ids = gen_symbols(assets_json)
+ids2symbols = ids2names_dict(coin_ids, symbols)
+ids2names_dict=ids2names_dict(coin_ids, names)
+names2ids_dict = names2ids_dict(names, coin_ids)
+market_cap_dict = create_market_cap_dict(assets_json)
+start_unix, end_unix = create_unix_dates(today=date.today(), lookback_years=lookback_years)
+histories_df = create_histories_df(coin_ids, start_unix, end_unix)
+
+# Create list of coin ids with full hisoties over the backtest period
+ids_with_histories = ids_with_histories(histories_df,
+    st.session_state.start_date, st.session_state.end_date)
+names_with_histories = list(map(ids2names_dict.get, ids_with_histories))
+
+def change_date_range():
+    st.session_state.start_date = st.session_state.myslider[0]
+    st.session_state.end_date = st.session_state.myslider[1]
+
+st.write('''
+N.B. This app is in Beta, it will be buggy and some of the calculations may be
+erroneous. **It is deeeeeefinitely not investment advice**.
+''', unsafe_allow_html = True)
+with st.expander("Explain this app 🤔"):
+    st.subheader("What's this all about then, eh?")
+    st.write('''
+
+    This app allows you to graph the historic performance of a portfolio of
+    your choosing against an individual coin of your choosing. You can
+    compare the historic performance of all available assets (portfolios and coins)
+    in the overview table at the bottom of the page.
+
+    The 'Portfolio vs coin' chart displays the historic performance of the selected
+    portfolio and coin for the selected date range, rebased to $1 at the start
+    date.
+
+    To switch focus between the selected portfolio and coin, use the radio buttons
+    above the graph. For the asset under focus the "maximum drawdown" is drawn
+    on the graph (this is the maximum loss the asset suffered over the selected
+    period). A high level view of the performance of the asset under focus is also
+    given inside the 'Asset performance' expander.
+
+    The 'Overview of performance section' sets out performance metrics for all of
+    the portfolios and coins over the selected investment period. The table can
+    be sorted based on the metric of interest.
+
+    There are two pre-defined portfolio construction strategies, but you can also
+    define your own allocation using the 'Create your own' option in the 'Select
+    portfolio strategy' dropdown box in the sidebar.
+
+    Pre-defined portfolios:
+    - Uniform - An equal propotion of your initial investment is allocated to each coin in the 'investment set' (i.e. the total number of coins available for investment).
+    - Markowitz - Your initial investment is allocated to each coin to achieve the portfolio with the highest sharpe ratio in the 365 day period prior to the investment start date. For some date ranges and investment sets, the optimiser used in the background of this app cannot find an optimal solution in which case the Markowitz weights will not be updated and you will see a warning at the top of the screen. I have used the default settings in the PyPortfolioOpt python package to construct this portfolio.
+
+    To select how many coins you would like in your investment set use the
+    'Maximum number of coins in portfolio' inside the 'Portfolio construction
+    settings' expander.
+
+    To adjust the date range for the portfolio backtest using the slider
+    widget.
+
+    To create your own portfolio:
+
+    1. Select 'Create your own' in the 'Select portfolio strategy' dropdown;
+    2. Select the maximum number of coins in your portfolio;
+    3. Select the relative weights for each of these assets;
+    4. Choose a name for your portfolio and click add portfolio;
+    5. Click the 'Update viewer' button.
+
+    The data for this app has been generated using the coincap api and is updated
+    every day to include the previous day's closing prices.
+
+    ''')
+
+# Add select slider to allow
+date_list = date_range(end_date,lookback_years-1)
+start_port_date, end_port_date = st.select_slider(
+    'Select date range (max 4 years)',
+    key="myslider",
+    options=date_list,
+    #value=(date.today() - timedelta(365), date.today()),
+    value = (st.session_state.start_date, st.session_state.end_date),
+    on_change=change_date_range
+)
+
+with st.sidebar:
+    st.subheader("Select a coin and a portfolio to compare on the graph")
+    with st.expander("Portfolio construction settings", expanded = False):
+        st.session_state.max_coins = st.number_input(
+            "Maximum number of coins in portfolio",
+            min_value=1,
+            max_value=50,
+            value=8,
+            key='max_coin_selector',
+            help='''
+            Coins will be added to your "investment set" in order of largest market cap.
+
+            The "investment set" is the group of assets from which your portfolio is
+            constructed. Depending on the portfolio strategy you choose, not all of the
+            assets in your investment set will be included in your portfolio.
+
+            '''
+        )
+
+# calculate weghts for the uniform and markowitz pfs
+uniform_weights = uniform_weights_dict(ids_with_histories[:int(st.session_state.max_coins)])
+update_Markowitz = True
+try:
+    markowitz_weights = markowitz_weights_dict(histories_df,
+        st.session_state.start_date ,ids_with_histories[:int(st.session_state.max_coins)], analysis_days=365)
+except:
+    update_Markowitz = False
+    st.warning('Markowitz weights could not be updated for this date range')
+
+if 'strategy_dict' not in st.session_state:
+    st.session_state.strategy_dict = {'Uniform': uniform_weights, 'Markowitz':markowitz_weights}
+else:
+    st.session_state.strategy_dict['Uniform'] = uniform_weights
+    if update_Markowitz == True:
+        st.session_state.strategy_dict['Markowitz'] = markowitz_weights
+
+#if "strategy_dict" not in st.session_state:
+#    st.session_state.strategy_dict=strategy_dict
+
+with st.sidebar:
+    selected_coin = st.selectbox(
+        'Select coin',
+        names_with_histories
+    )
+    portfolio_type = st.selectbox(
+        'Select portfolio strategy',
+        ['Create your own'] + (list(st.session_state.strategy_dict.keys())),
+        index = st.session_state.start_id
+    )
+
+
+
+
+
+
+# calculate returns for the portfolios and add to it the rebased df for assets
+# with hisories. This is the new returns_df
+rebased_df = gen_rebased_df(histories_df, ids_with_histories,
+    st.session_state.start_date, st.session_state.end_date)
+
+all_returns_df = gen_all_returns(rebased_df, ids_with_histories, st.session_state.strategy_dict)
+
+if portfolio_type != 'Create your own':
+    st.session_state.portfolio_type = portfolio_type
+
+st.subheader("Portfolio vs coin")
+focus=st.radio("Focus on",("Portfolio 📉","Coin 📈"))
+
+chart_df = create_chart_df(all_returns_df, st.session_state.portfolio_type, names2ids_dict[selected_coin])
+
+fig = px.line(chart_df, x=chart_df.index, y='Value (USD)', color='Asset')
+if focus == "Portfolio 📉":
+    fig, port_dd, port_dd_start, port_dd_end = add_drawdown(fig, all_returns_df, st.session_state.portfolio_type)
+else:
+    fig, coin_dd, coin_dd_start, coin_dd_end = add_drawdown(fig, all_returns_df, names2ids_dict[selected_coin])
+
+st.write(fig)
+
+with st.expander("Asset performance"):
+    if focus == "Portfolio 📉":
+        st.subheader("{} portfolio performance".format(st.session_state.portfolio_type))
+
+        cols = st.columns([1,3])
+        outlay = cols[0].number_input('Initial $ amount', min_value=0, value=1000,
+            step=1)
+        final_amount = outlay*all_returns_df[st.session_state.portfolio_type][-1]
+        max_loss=outlay*port_dd
+
+        with cols[1]:
+            st.markdown('''For an initial investment of **${:,}**'''.format(int(outlay)), unsafe_allow_html = True)
+            st.markdown('''You would have ended up with **${:,}**'''.format(int(final_amount)), unsafe_allow_html = True)
+            st.markdown('''You would have suffered a maximum loss of **{:.0f}%** of your portfolio value
+            between **{}** and **{}**'''.format(port_dd*100, port_dd_start, port_dd_end), unsafe_allow_html = True)
+    else:
+        st.subheader("{} coin performance".format(selected_coin))
+
+        cols = st.columns([1,3])
+        outlay = cols[0].number_input('Initial $ amount', min_value=0, value=1000,
+            step=1)
+        final_amount = outlay*all_returns_df[names2ids_dict[selected_coin]][-1]
+        max_loss=outlay*coin_dd
+
+        with cols[1]:
+            st.markdown('''For an initial investment of **${:,}**'''.format(int(outlay)), unsafe_allow_html = True)
+            st.markdown('''You would have ended up with **${:,}**'''.format(int(final_amount)), unsafe_allow_html = True)
+            st.markdown('''You would have suffered a maximum loss of **{:.0f}%** of your investment value
+            between **{}** and **{}**'''.format(coin_dd*100, coin_dd_start, coin_dd_end), unsafe_allow_html = True)
+
+non_zero_coins = [key for key in st.session_state.strategy_dict[st.session_state.portfolio_type].keys() if st.session_state.strategy_dict[st.session_state.portfolio_type][key]>0]
+
+with st.sidebar:
+    if portfolio_type == 'Create your own':
+        st.markdown("Bespoke portfolio weights (relative):" , unsafe_allow_html=False)
+        bespoke_weights = write_coins_custom(names_with_histories[:int(st.session_state.max_coins)])
+        #bespoke_weights = write_bespoke_coins(names_with_histories[:st.session_state.max_coins])
+        bespoke_cols = st.columns(2)
+        bespoke_cols[0].write(" ")
+        bespoke_cols[0].write(" ")
+        add_bespoke = bespoke_cols[0].button("Add portfolio", key='bespoke_button')
+        bespoke_name = bespoke_cols[1].text_input("Choose portfolio name")
+        if add_bespoke:
+            if bespoke_name=="" or bespoke_name in all_returns_df.columns:
+                st.warning("Please give your portfolio a unique name")
+            else:
+                beskpoke_weights_dict={}
+                for i, wt in enumerate(bespoke_weights):
+                    beskpoke_weights_dict[coin_ids[i]] = wt
+                st.session_state.strategy_dict[bespoke_name] = beskpoke_weights_dict
+                st.session_state.start_id = len(st.session_state.strategy_dict)
+                #st.session_state.selected_assets.append(bespoke_name)
+                st.success("Porfolio added, update viewer to see results")
+        st.button('Update viewer', on_click = change_date_range)
+        #st.button('Update viewer', on_click = change_date_range)
+    else:
+        st.markdown(st.session_state.portfolio_type + " portfolio weights (%):" , unsafe_allow_html=False)
+        write_coins(non_zero_coins, st.session_state.strategy_dict[st.session_state.portfolio_type], ids2names_dict)
+
+performance_df = gen_performance_df(all_returns_df, market_cap_dict, st.session_state.strategy_dict)
+
+
+st.subheader("Overview of performance of all available assets")
+st.dataframe(performance_df.style.background_gradient(cmap='Greens',
+    subset=['Risk / return', 'Total return %'])
+    .background_gradient(cmap='Reds',
+    subset=['Annual vol', 'Max loss %']).format("{:,.2f}",
+    subset=['Risk / return', 'Total return %', 'Annual vol',
+    'Max loss %']).format("{:,.0f}", subset=['Market cap $M']))
+
+
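The explainer text in this file says the Markowitz portfolio maximises the Sharpe ratio over the prior 365 days using PyPortfolioOpt's default settings, but markowitz_weights_dict itself lives in port_creator.py, which is not rendered in this view. Below is a minimal sketch of that idea, assuming a DataFrame of daily USD prices with one column per coin id; the function name and signature are illustrative, not the committed ones:

import pandas as pd
from pypfopt import EfficientFrontier, expected_returns, risk_models

def max_sharpe_weights(prices_df: pd.DataFrame) -> dict:
    # Hypothetical sketch: prices_df holds daily USD prices, one column per coin id,
    # indexed by date. The committed port_creator.py may differ.
    mu = expected_returns.mean_historical_return(prices_df)  # annualised mean returns
    cov = risk_models.sample_cov(prices_df)                  # sample covariance matrix
    ef = EfficientFrontier(mu, cov)                          # PyPortfolioOpt defaults
    ef.max_sharpe()                                          # optimise for the highest Sharpe ratio
    return ef.clean_weights()                                # {coin_id: weight}, summing to 1

As the explainer notes, the optimiser can fail for some date ranges and investment sets, which is why the call above is wrapped in a try/except that falls back to the previous Markowitz weights and shows a warning.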
crypto_viewer.py
ADDED
@@ -0,0 +1,314 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
import requests
|
3 |
+
import json
|
4 |
+
import plotly.express as px
|
5 |
+
import pandas as pd
|
6 |
+
import datetime as dt
|
7 |
+
from risk_metrics import annual_return, absolute_return, annual_vol, max_drawdown
|
8 |
+
try:
|
9 |
+
from PIL import Image
|
10 |
+
except ImportError:
|
11 |
+
import Image
|
12 |
+
import numpy as np
|
13 |
+
|
14 |
+
st.markdown(
|
15 |
+
"""
|
16 |
+
<style>
|
17 |
+
|
18 |
+
.css-1inwz65 {
|
19 |
+
font-size: 0px;
|
20 |
+
}
|
21 |
+
</style>
|
22 |
+
""",
|
23 |
+
unsafe_allow_html = True
|
24 |
+
)
|
25 |
+
|
26 |
+
def load_data(limit='10'):
|
27 |
+
'''
|
28 |
+
Returns a dictionary with data for each of the top 'limit' cypto currencies
|
29 |
+
ranked by market cap. The data is generated by querying the coincap API
|
30 |
+
/assets endpoint. See coincap documentation for more info:
|
31 |
+
https://docs.coincap.io/
|
32 |
+
|
33 |
+
Parameters:
|
34 |
+
limit (str): The number of crypto coins that you want to return data for.
|
35 |
+
Ranked in order of market cap.
|
36 |
+
|
37 |
+
Returns:
|
38 |
+
(dict): A dictionary object of data.
|
39 |
+
|
40 |
+
'''
|
41 |
+
url = "https://api.coincap.io/v2/assets"
|
42 |
+
# N.B. here adampt the params dict to only request what you need
|
43 |
+
payload={'limit': limit}
|
44 |
+
headers = {}
|
45 |
+
return requests.request("GET", url, params=payload, headers=headers).json()
|
46 |
+
|
47 |
+
def load_histories(ids_list):
|
48 |
+
url = "http://api.coincap.io/v2/assets/{}/history?interval=d1"
|
49 |
+
|
50 |
+
payload={}
|
51 |
+
headers = {}
|
52 |
+
|
53 |
+
histories_dict = {}
|
54 |
+
for id in ids_list:
|
55 |
+
response_histories = requests.request("GET", url.format(id), headers=headers, data=payload)
|
56 |
+
histories_json = response_histories.json()
|
57 |
+
histories_dict[id] = histories_json['data']
|
58 |
+
return histories_dict
|
59 |
+
|
60 |
+
|
61 |
+
def gen_symbols(assets_json):
|
62 |
+
symbols_list = []
|
63 |
+
names_list = []
|
64 |
+
ids_list =[]
|
65 |
+
for dict in assets_json['data']:
|
66 |
+
symbols_list.append(dict['symbol'])
|
67 |
+
names_list.append(dict['name'])
|
68 |
+
ids_list.append(dict['id'])
|
69 |
+
return symbols_list, names_list, ids_list
|
70 |
+
|
71 |
+
def write_symbols(symbols_list):
|
72 |
+
cols = st.columns(len(symbols_list))
|
73 |
+
for i, symbol in enumerate(symbols_list):
|
74 |
+
col = cols[i]
|
75 |
+
col.image(f'logos/{symbol}.png',width=40)
|
76 |
+
globals()[st.session_state.names[i]] = col.checkbox(symbol, value = 0)
|
77 |
+
#col.checkbox(symbol, st.image(f'logos/{symbol}.png',width=40))
|
78 |
+
|
79 |
+
if "assets_json" not in st.session_state:
|
80 |
+
st.session_state.assets_json = load_data()
|
81 |
+
symbols, names, ids = gen_symbols(st.session_state.assets_json)
|
82 |
+
st.session_state.symbols = symbols
|
83 |
+
st.session_state.names = names
|
84 |
+
st.session_state.ids = ids
|
85 |
+
st.session_state.histories = load_histories(ids)
|
86 |
+
id_symbol_map = {}
|
87 |
+
for i, id in enumerate(ids):
|
88 |
+
id_symbol_map[id]=symbols[i]
|
89 |
+
st.session_state.id_symbol_map = id_symbol_map
|
90 |
+
|
91 |
+
|
92 |
+
|
93 |
+
#write_symbols(st.session_state.symbols)
|
94 |
+
symbols_list = st.session_state.symbols
|
95 |
+
names_list = st.session_state.names
|
96 |
+
ids_list = st.session_state.ids
|
97 |
+
asset_json = st.session_state.assets_json
|
98 |
+
histories_dict = st.session_state.histories
|
99 |
+
id_symbol_map = st.session_state.id_symbol_map
|
100 |
+
|
101 |
+
def date_conv(date):
|
102 |
+
return dt.datetime.strptime(date, '%Y-%m-%d')
|
103 |
+
price_histories_df = pd.DataFrame(columns=['coin','date','price'])
|
104 |
+
return_histories_df = pd.DataFrame(columns=['coin','date','price'])
|
105 |
+
for id in ids_list:
|
106 |
+
price=[]
|
107 |
+
date=[]
|
108 |
+
for observation in histories_dict[id]:
|
109 |
+
date.append(date_conv(observation['date'][0:10]))
|
110 |
+
#date.append(observation['time'])
|
111 |
+
price.append(float(observation['priceUsd']))
|
112 |
+
price_df = pd.DataFrame({"coin": id, "date":date, "price": price})
|
113 |
+
price_histories_df = pd.concat([price_histories_df, price_df])
|
114 |
+
returns = [float(b) / float(a) for b,a in zip(price[1:], price[:-1])]
|
115 |
+
returns_df = pd.DataFrame({"coin": id, "date":date[1:], "price": returns})
|
116 |
+
return_histories_df = pd.concat([return_histories_df, returns_df])
|
117 |
+
|
118 |
+
|
119 |
+
|
120 |
+
start_date = dt.date.today()-dt.timedelta(360)
|
121 |
+
rebased_prices_df = pd.DataFrame(columns=['coin','date','price','rebased_price'])
|
122 |
+
for id in ids_list:
|
123 |
+
temp_rebase_df = return_histories_df[(return_histories_df['date']>=pd.Timestamp(start_date))
|
124 |
+
& (return_histories_df['coin']==id)]
|
125 |
+
rebased_price=[1]
|
126 |
+
for i in range(1,len(temp_rebase_df)):
|
127 |
+
rebased_price.append(temp_rebase_df['price'].iloc[i]*rebased_price[i-1])
|
128 |
+
temp_rebase_df['rebased_price']=rebased_price
|
129 |
+
rebased_prices_df = pd.concat([rebased_prices_df, temp_rebase_df])
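# The loop above chains day-on-day price ratios into an index that starts at 1, so coins
# with very different price levels can be compared. Tiny worked example (made-up prices):
#   prices 100 -> 110 -> 99 give ratios [1.10, 0.90]
#   rebased: 1.0, then 1.0 * 1.10 = 1.10, then 1.10 * 0.90 = 0.99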
|
130 |
+
|
131 |
+
fig2 = px.line(rebased_prices_df, x="date", y="rebased_price", color="coin")
|
132 |
+
st.write(fig2)
|
133 |
+
cols = st.columns(len(symbols_list))
|
134 |
+
checkboxes=[]
|
135 |
+
|
136 |
+
def write_coins(id_symbol_map, n_cols=5):
|
137 |
+
n_coins = len(id_symbol_map)
|
138 |
+
n_rows = 1 + n_coins // int(n_cols)
|
139 |
+
|
140 |
+
rows = [st.container() for _ in range(n_rows)]
|
141 |
+
cols_per_row = [r.columns(n_cols) for r in rows]
|
142 |
+
cols = [column for row in cols_per_row for column in row]
|
143 |
+
|
144 |
+
#cols = st.columns(n_coins)
|
145 |
+
#checkboxes=[]
|
146 |
+
for i, id in enumerate(id_symbol_map):
|
147 |
+
cols[i].image('logos/{}.png'.format(id_symbol_map[id]),width=40)
|
148 |
+
globals()[st.session_state.names[i]] = cols[i].checkbox("include", value=True, key="include_" + id)  # distinct key: reusing the slider's key=id would raise Streamlit's duplicate-key error
|
149 |
+
globals()["slider_"+ids_list[i]] = cols[i].slider(id, min_value=0, max_value=100, value=50, key=id)
|
150 |
+
checkboxes.append(globals()[st.session_state.names[i]])
|
151 |
+
|
152 |
+
write_coins(id_symbol_map)
|
153 |
+
|
154 |
+
|
155 |
+
|
156 |
+
#for i, symbol in enumerate(symbols_list):
|
157 |
+
# col = cols[i]
|
158 |
+
# col.image(f'logos/{symbol}.png',width=40)
|
159 |
+
# globals()[st.session_state.names[i]] = col.checkbox(symbol, value = 1)
|
160 |
+
# checkboxes.append(globals()[st.session_state.names[i]])
|
161 |
+
|
162 |
+
|
163 |
+
|
164 |
+
|
165 |
+
|
166 |
+
|
167 |
+
#if any(checkboxes):
|
168 |
+
# checked_ids=[]
|
169 |
+
# cols2 = st.columns(sum(checkboxes))
|
170 |
+
# j=0
|
171 |
+
# for i, value in enumerate(checkboxes):
|
172 |
+
# if value==1:
|
173 |
+
# checked_ids.append(ids_list[i])
|
174 |
+
# col2=cols2[j]
|
175 |
+
# col2.image(f'logos/{symbols_list[i]}.png',width=20)
|
176 |
+
# j+=1
|
177 |
+
|
178 |
+
def create_grid(top_left, bottom_right):
|
179 |
+
num_rows=3
|
180 |
+
num_cols=7
|
181 |
+
col_positions = np.linspace(top_left[0], bottom_right[0], num=num_cols)
|
182 |
+
row_positions = np.linspace(top_left[1], bottom_right[1], num=num_rows)
|
183 |
+
return [(int(col_positions[i]),int(row_positions[j])) for j in range(num_rows) for i in range(num_cols)]
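# For the top_left/bottom_right values used below, create_grid returns 21 (x, y) tuples
# filled row by row, e.g. the first row is
#   [(300, 300), (358, 300), (416, 300), (475, 300), (533, 300), (591, 300), (650, 300)]
# with the same x positions repeated at y = 375 and y = 450.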
|
184 |
+
|
185 |
+
# These are the coordinates of the top left and bottom right of the cart image
|
186 |
+
# given its current size. You need to change these if you change the size of the
|
187 |
+
# cart
|
188 |
+
top_left=[300,300]
|
189 |
+
bottom_right=[650, 450]
|
190 |
+
|
191 |
+
grid = create_grid(top_left, bottom_right)
|
192 |
+
|
193 |
+
def add_logo(background, symbol, position, size=(70,70)):
|
194 |
+
bg = Image.open(background)
|
195 |
+
fg = Image.open("logos/{}.png".format(symbol))
|
196 |
+
|
197 |
+
bg = bg.convert("RGBA")
|
198 |
+
fg = fg.convert("RGBA")
|
199 |
+
|
200 |
+
# Resize logo
|
201 |
+
fg_resized = fg.resize(size)
|
202 |
+
|
203 |
+
# Overlay logo onto background at position
|
204 |
+
bg.paste(fg_resized,box=position,mask=fg_resized)
|
205 |
+
|
206 |
+
# Save result
|
207 |
+
bg.save(background)
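# Minimal usage sketch (symbol and position here are hypothetical): overlay the BTC logo
# onto the working background, overwriting images/background.png in place; mask=fg_resized
# preserves the logo's transparency.
#   add_logo('images/background.png', 'BTC', position=(300, 300), size=(70, 70))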
|
208 |
+
|
209 |
+
|
210 |
+
|
211 |
+
cart_cols = st.columns([3,2])
|
212 |
+
|
213 |
+
|
214 |
+
|
215 |
+
if any(checkboxes):
|
216 |
+
checked_ids=[]
|
217 |
+
for i, value in enumerate(checkboxes):
|
218 |
+
if value==1:
|
219 |
+
checked_ids.append(ids_list[i])
|
220 |
+
#cart_cols[1].image(f'logos/{symbols_list[i]}.png',width=20)
|
221 |
+
#cart_cols[2].slider(ids_list[i],min_value=0, max_value=100, value=50)
|
222 |
+
|
223 |
+
|
224 |
+
# change the below so it runs only if checked_ids exists - i.e. wrap it up in a function
|
225 |
+
original = Image.open("images/cart.png")
|
226 |
+
original.save('images/background.png')
|
227 |
+
position_ids = [round(x) for x in np.linspace(0, len(grid)-1, num=len(checked_ids))]
|
228 |
+
for i, id in enumerate(checked_ids):
|
229 |
+
size = tuple([int(num * globals()["slider_"+id]/50) for num in (70,70)])
|
230 |
+
|
231 |
+
add_logo('images/background.png', id_symbol_map[id], grid[position_ids[i]], size=size)
|
232 |
+
|
233 |
+
weights=[]
|
234 |
+
for id in checked_ids:
|
235 |
+
weights.append(globals()["slider_"+id])
|
236 |
+
sum_weights = sum(weights)
|
237 |
+
weights = [weight/sum_weights for weight in weights]
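# Slider readings are only relative, so they are normalised to sum to 1, e.g. (hypothetical)
#   weights = [50, 25, 25] -> sum_weights = 100 -> [0.5, 0.25, 0.25]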
|
238 |
+
|
239 |
+
weights_df = pd.DataFrame({'ids':checked_ids, 'weights': weights, 'portfolio': 'port_1'})
|
240 |
+
pie_fig = px.pie(weights_df, values='weights', names='ids')
|
241 |
+
pie_fig.update_layout(showlegend=False)
|
242 |
+
|
243 |
+
bar_fig = px.bar(weights_df, x="portfolio", y="weights", color="ids", width=200)
|
244 |
+
bar_fig.update_layout(showlegend=False)
|
245 |
+
|
246 |
+
cart_cols[0].image('images/background.png', width=400)
|
247 |
+
cart_cols[1].write(bar_fig)
|
248 |
+
gen_port = st.button('Generate portfolio return')
|
249 |
+
|
250 |
+
metrics_dict= {'annual_return' : "Return (annualised)", 'absolute_return': "Return over period",
|
251 |
+
'annual_vol': 'Annual volatility', 'max_drawdown': 'Max loss'}
|
252 |
+
|
253 |
+
def write_metrics(prices, *metrics):
|
254 |
+
for metric in metrics:
|
255 |
+
cols = st.columns(2)
|
256 |
+
if metric.__name__ == 'max_drawdown':
|
257 |
+
cols[0].write(metrics_dict[metric.__name__] +': ')
|
258 |
+
cols[1].write('{:.2%}'.format(metric(prices)[0]))
|
259 |
+
else:
|
260 |
+
cols[0].write(metrics_dict[metric.__name__] +': ')
|
261 |
+
cols[1].write('{:.2%}'.format(metric(prices)))
|
262 |
+
|
263 |
+
if gen_port:
|
264 |
+
# adjust weight calculation to read in from globals()["slider_"+ids_list[i]]
|
265 |
+
#weights = [1/len(checked_ids)]*len(checked_ids)
|
266 |
+
portfolio_dict={checked_ids[i]:weights[i] for i in range(len(checked_ids))}
|
267 |
+
start_date = dt.date.today()-dt.timedelta(360)
|
268 |
+
weighted_prices_df = pd.DataFrame(columns=['coin','date','price','weighted_price'])
|
269 |
+
for id in checked_ids:
|
270 |
+
temp_weight_df = return_histories_df[(return_histories_df['date']>=pd.Timestamp(start_date))
|
271 |
+
& (return_histories_df['coin']==id)]
|
272 |
+
weighted_price=[portfolio_dict[id]]
|
273 |
+
for i in range(1,len(temp_weight_df)):
|
274 |
+
weighted_price.append(temp_weight_df['price'].iloc[i]*weighted_price[i-1])
|
275 |
+
temp_weight_df['weighted_price']=weighted_price
|
276 |
+
weighted_prices_df = pd.concat([weighted_prices_df, temp_weight_df])
|
277 |
+
date_list = [start_date + dt.timedelta(days=x) for x in range(360)]
|
278 |
+
port_returns=[]
|
279 |
+
for date in date_list:
|
280 |
+
port_returns.append(weighted_prices_df['weighted_price'][weighted_prices_df['date']==pd.Timestamp(date)].sum())
|
281 |
+
port_returns_df = pd.DataFrame({'date':date_list, 'price': port_returns})
|
282 |
+
prices = port_returns_df['price']
|
283 |
+
max_dd, start_idx, end_idx = max_drawdown(prices)
|
284 |
+
start_dt = port_returns_df['date'].iloc[start_idx]
|
285 |
+
end_dt = port_returns_df['date'].iloc[end_idx]
|
286 |
+
fig3 = px.line(port_returns_df, x="date", y="price")
|
287 |
+
fig3.add_vline(x=start_dt, line_width=1, line_color="red")
|
288 |
+
fig3.add_vline(x=end_dt, line_width=1, line_color="red")
|
289 |
+
fig3.add_vrect(x0=start_dt, x1=end_dt, line_width=0, fillcolor="red", opacity=0.05, annotation_text="max loss ")
|
290 |
+
st.write(fig3)
|
291 |
+
|
292 |
+
st.title("Risk metrics")
|
293 |
+
write_metrics(prices, absolute_return, annual_return, annual_vol, max_drawdown)
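# Note: write_metrics looks up display labels via each function's __name__, e.g.
# annual_vol.__name__ == 'annual_vol' -> 'Annual volatility'; max_drawdown is special-cased
# because it returns a (value, start_index, end_index) tuple rather than a single number.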
|
294 |
+
|
295 |
+
|
296 |
+
|
297 |
+
|
298 |
+
|
299 |
+
#for i, symbol in enumerate(symbols_list):
|
300 |
+
# col2 = cols2[i]
|
301 |
+
# col.image(f'logos/{symbol}.png',width=40)
|
302 |
+
#price_subset_df = price_histories_df[price_histories_df['coin'].isin(checked_ids)]
|
303 |
+
#rebased_subset_df = rebased_prices_df[rebased_prices_df['coin'].isin(checked_ids)]
|
304 |
+
#fig1 = px.line(price_subset_df, x="date", y="price", color="coin")
|
305 |
+
#st.write(fig1)
|
306 |
+
#fig2 = px.line(rebased_subset_df, x="date", y="rebased_price", color="coin")
|
307 |
+
#st.write(fig2)
|
308 |
+
|
309 |
+
|
310 |
+
|
311 |
+
|
312 |
+
|
313 |
+
|
314 |
+
|
data_creator.py
ADDED
@@ -0,0 +1,150 @@
1 |
+
import requests
|
2 |
+
from datetime import date, timedelta, datetime
|
3 |
+
import time
|
4 |
+
import pandas as pd
|
5 |
+
from os.path import exists
|
6 |
+
import streamlit as st
|
7 |
+
|
8 |
+
@st.cache(persist=True)
|
9 |
+
def create_assets(total_coins=50):
|
10 |
+
'''
|
11 |
+
A function to retrieve info about the largest total_coins number of
|
12 |
+
cryptocurrencies, ranked by market cap, generated by a call to coincap assets
|
13 |
+
api.
|
14 |
+
'''
|
15 |
+
url = "https://api.coincap.io/v2/assets"
|
16 |
+
|
17 |
+
# N.B. here adampt the params dict to only request what you need
|
18 |
+
payload={'limit': total_coins}
|
19 |
+
headers = {}
|
20 |
+
|
21 |
+
assets_json = requests.request("GET", url, params=payload, headers=headers).json()
|
22 |
+
return assets_json
|
23 |
+
|
24 |
+
@st.cache(persist=True)
|
25 |
+
def gen_symbols(assets_json):
|
26 |
+
'''
|
27 |
+
Function to generate three lists: symbols, names and ids, from the result of
|
28 |
+
a call to the coincap assets api, assets_json.
|
29 |
+
'''
|
30 |
+
symbols_list = []
|
31 |
+
names_list = []
|
32 |
+
ids_list =[]
|
33 |
+
for dict in assets_json['data']:
|
34 |
+
symbols_list.append(dict['symbol'])
|
35 |
+
names_list.append(dict['name'])
|
36 |
+
ids_list.append(dict['id'])
|
37 |
+
return symbols_list, names_list, ids_list
|
38 |
+
|
39 |
+
@st.cache(persist=True, show_spinner=False)
|
40 |
+
def create_market_cap_dict(assets_json):
|
41 |
+
market_cap_dict = {}
|
42 |
+
for asset_dict in assets_json['data']:
|
43 |
+
market_cap_dict[asset_dict['id']] = int(float(asset_dict['marketCapUsd']))
|
44 |
+
return market_cap_dict
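# Illustrative output only (made-up figures):
#   create_market_cap_dict(assets_json) -> {'bitcoin': 560000000000, 'ethereum': 230000000000, ...}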
|
45 |
+
|
46 |
+
def load_histories(coin_ids, start, end):
|
47 |
+
'''
|
48 |
+
Function to load daily historic prices for all crypto currencies in the
|
49 |
+
coin_ids list within the time period defined by the interval [start, end].
|
50 |
+
'''
|
51 |
+
url = "http://api.coincap.io/v2/assets/{}/history"
|
52 |
+
|
53 |
+
payload={'interval':'d1', 'start':start, 'end':end}
|
54 |
+
headers = {}
|
55 |
+
|
56 |
+
histories_dict = {}
|
57 |
+
for id in coin_ids:
|
58 |
+
response_histories = requests.request("GET", url.format(id), headers=headers, params=payload)
|
59 |
+
histories_json = response_histories.json()
|
60 |
+
histories_dict[id] = histories_json['data']
|
61 |
+
return histories_dict
|
62 |
+
|
63 |
+
@st.cache(persist=True, show_spinner=False)
|
64 |
+
def date_conv(date):
|
65 |
+
'''
|
66 |
+
Function to convert string to datetime.date.
|
67 |
+
'''
|
68 |
+
return datetime.strptime(date, '%Y-%m-%d').date()
|
69 |
+
|
70 |
+
@st.cache(persist=True)
|
71 |
+
def create_unix_dates(today=date.today(), lookback_years = 5):
|
72 |
+
'''
|
73 |
+
A function to create start_unix and end_unix times in UNIX time in milliseconds
|
74 |
+
'''
|
75 |
+
start_datetime = today-timedelta(365*lookback_years)
|
76 |
+
start_unix = int(time.mktime(start_datetime.timetuple()) * 1000)
|
77 |
+
end_unix = int(time.mktime(date.today().timetuple()) * 1000)
|
78 |
+
return start_unix, end_unix
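# The coincap history endpoint expects millisecond UNIX timestamps, hence the * 1000.
# Sketch of the conversion (value shown assumes the machine runs on UTC; mktime is local-time):
#   time.mktime(date(2021, 1, 1).timetuple()) -> 1609459200.0 seconds
#   int(... * 1000) -> 1609459200000 ms, the format used for the 'start'/'end' params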
|
79 |
+
|
80 |
+
@st.cache(persist=True, show_spinner=False)
|
81 |
+
def create_histories_df(coin_ids, start_unix, end_unix):
|
82 |
+
'''
|
83 |
+
A function to create a dataframe of historical prices for all of the
|
84 |
+
crypto currencies in the coin_ids=ids list, over a period defined by the
|
85 |
+
interval [start_unix, end_unix].
|
86 |
+
N.B. This code uses the data for bitcoin as the first dataframe on which
|
87 |
+
other temp_df are outer joined from the right. This is because bitcoin
|
88 |
+
has the longest history.
|
89 |
+
'''
|
90 |
+
print('Downloading data from coincap.io, may take several minutes...')
|
91 |
+
|
92 |
+
# download histories from coincap.io
|
93 |
+
with st.spinner("You're the first user today so asset histories are being updated. May take several minutes."):
|
94 |
+
histories_dict = load_histories(coin_ids, start_unix, end_unix)
|
95 |
+
|
96 |
+
# convert all dates in histories_dict to python datetime.date objects and remove 'time' key
|
97 |
+
for id in coin_ids:
|
98 |
+
for dict in histories_dict[id]:
|
99 |
+
dict.pop('time')
|
100 |
+
dict['priceUsd']=float(dict['priceUsd'])
|
101 |
+
dict['date'] = date_conv(dict['date'][0:10])
|
102 |
+
|
103 |
+
# convert histories_dict to pd.DataFrame
|
104 |
+
histories_df = pd.json_normalize(histories_dict['bitcoin'])
|
105 |
+
histories_df = histories_df.set_index('date', drop=True)
|
106 |
+
for id in [x for x in coin_ids if x != "bitcoin"]:
|
107 |
+
temp_df = pd.json_normalize(histories_dict[id])
|
108 |
+
temp_df = temp_df.set_index('date', drop=True)
|
109 |
+
histories_df = histories_df.merge(temp_df, how='outer', left_index=True, right_index=True)
|
110 |
+
|
111 |
+
histories_df.columns = coin_ids
|
112 |
+
return histories_df
|
113 |
+
|
114 |
+
# N.B. allow_output_mutation set to True because in create_rebased_df I am
|
115 |
+
# deliberately changing the value returns_df[start_date:start_date] to 0
|
116 |
+
# however I want the cached value to remain unchanged so that if I rebase to a
|
117 |
+
# different start_date we go back to the original returns_df.
|
118 |
+
@st.cache(persist=True, show_spinner=False, allow_output_mutation=True)
|
119 |
+
def create_returns_df(histories_df):
|
120 |
+
return histories_df.pct_change(1)
|
121 |
+
|
122 |
+
@st.cache(persist=True, show_spinner=False)
|
123 |
+
def create_rebased_df(returns_df, start_date, end_date):
|
124 |
+
returns_df[start_date:start_date]=0
|
125 |
+
return (1 + returns_df[start_date:end_date]).cumprod()
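# Toy illustration (made-up prices) of the pct_change/cumprod rebase performed by
# create_returns_df + create_rebased_df:
#   prices = pd.Series([100.0, 110.0, 99.0])
#   rtns = prices.pct_change(1)      # [NaN, 0.10, -0.10]
#   rtns.iloc[0] = 0                 # analogous to returns_df[start_date:start_date] = 0
#   (1 + rtns).cumprod()             # [1.00, 1.10, 0.99]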
|
126 |
+
|
127 |
+
@st.cache(persist=True, show_spinner=False)
|
128 |
+
def date_range(end_date, lookback_years):
|
129 |
+
return [end_date - timedelta(x) for x in range(365 * lookback_years)][::-1]
|
130 |
+
|
131 |
+
@st.cache(persist=True, show_spinner=False)
|
132 |
+
def ids2names_dict(coin_ids, names):
|
133 |
+
ids2names_dict={}
|
134 |
+
for i, id in enumerate(coin_ids):
|
135 |
+
ids2names_dict[id] = names[i]
|
136 |
+
return ids2names_dict
|
137 |
+
|
138 |
+
@st.cache(persist=True, show_spinner=False)
|
139 |
+
def names2ids_dict(names, coin_ids):
|
140 |
+
names2ids_dict={}
|
141 |
+
for i, name in enumerate(names):
|
142 |
+
names2ids_dict[name] = coin_ids[i]
|
143 |
+
return names2ids_dict
|
144 |
+
|
145 |
+
@st.cache(persist=True, show_spinner=False)
|
146 |
+
def gen_rebased_df(histories_df, ids_with_histories, start_date, end_date):
|
147 |
+
returns_df = histories_df[ids_with_histories].pct_change(1)
|
148 |
+
returns_df[start_date:start_date]=0
|
149 |
+
return (1 + returns_df[start_date:end_date]).cumprod()
|
150 |
+
|
frontpage.py
ADDED
@@ -0,0 +1,211 @@
1 |
+
|
2 |
+
|
3 |
+
#-------------------
|
4 |
+
# Imports
|
5 |
+
#-------------------
|
6 |
+
import streamlit as st
|
7 |
+
import yfinance as yf
|
8 |
+
import pandas as pd
|
9 |
+
import numpy as np
|
10 |
+
import plotly.graph_objs as go
|
11 |
+
import plotly.io as pio
|
12 |
+
from bs4 import BeautifulSoup
|
13 |
+
import requests
|
14 |
+
from datetime import datetime
|
15 |
+
|
16 |
+
# today's date
|
17 |
+
today = datetime.today().strftime('%d %B %Y')
|
18 |
+
|
19 |
+
st.set_page_config(layout="wide")
|
20 |
+
|
21 |
+
|
22 |
+
#-------------------
|
23 |
+
# Web scraping Yahoo Finance
|
24 |
+
#-------------------
|
25 |
+
dic = {}
|
26 |
+
url = 'https://finance.yahoo.com/cryptocurrencies?offset=0&count=100'
|
27 |
+
soup = BeautifulSoup(requests.get(url).text, 'html.parser')  # explicit parser avoids bs4's guessed-parser warning
|
28 |
+
|
29 |
+
# store values in separate lists and then in a dictionary
|
30 |
+
for listing in soup.find_all('div', attrs={'id':'fin-scr-res-table'}):
|
31 |
+
symbol_list = []
|
32 |
+
name_list = []
|
33 |
+
price_list = []
|
34 |
+
change_list = []
|
35 |
+
mcap_list = []
|
36 |
+
for symbol in listing.find_all('td', attrs={'aria-label':'Symbol'}):
|
37 |
+
symbol_list.append(symbol.text)
|
38 |
+
dic['Symbol'] = symbol_list
|
39 |
+
for name in listing.find_all('td', attrs={'aria-label':'Name'}):
|
40 |
+
name_list.append(name.text)
|
41 |
+
dic['Name'] = name_list
|
42 |
+
for price in listing.find_all('td', attrs={'aria-label':'Price (Intraday)'}):
|
43 |
+
price_list.append(price.text)
|
44 |
+
dic['Price'] = price_list
|
45 |
+
for change in listing.find_all('td', attrs={'aria-label':'% Change'}):
|
46 |
+
change_list.append(change.text)
|
47 |
+
dic['% Change'] = change_list
|
48 |
+
for mcap in listing.find_all('td', attrs={'aria-label':'Market Cap'}):
|
49 |
+
mcap_list.append(mcap.text)
|
50 |
+
dic['Market Cap'] = mcap_list
|
51 |
+
|
52 |
+
# create a dataframe from dictionary
|
53 |
+
df_scrape = pd.DataFrame(dic)
|
54 |
+
df_scrape.Symbol = df_scrape.Symbol.str.replace('-USD','')
|
55 |
+
df_scrape.Name = df_scrape.Name.str.replace(' USD','')
|
56 |
+
dic1 = dict(zip(df_scrape.Symbol,df_scrape.Name))
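# dic1 maps the cleaned ticker to its display name and is used below as dic1[select_token],
# e.g. (illustrative) {'BTC': 'Bitcoin', 'ETH': 'Ethereum', ...}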
|
57 |
+
|
58 |
+
|
59 |
+
#-------------------
|
60 |
+
# Streamlit Sidebar
|
61 |
+
#-------------------
|
62 |
+
fiat = ['USD','EUR','GBP']
|
63 |
+
tokens = df_scrape.Symbol.values
|
64 |
+
|
65 |
+
# filters selectbox
|
66 |
+
st.sidebar.title("Filters")
|
67 |
+
select_token = st.sidebar.selectbox('Tokens', tokens)
|
68 |
+
select_fiat = st.sidebar.selectbox('Fiat', fiat)
|
69 |
+
|
70 |
+
# special expander objects
|
71 |
+
st.sidebar.markdown('***')
|
72 |
+
with st.sidebar.expander('Help'):
|
73 |
+
st.markdown('''
|
74 |
+
- Select token and fiat of your choice.
|
75 |
+
- Interactive plots can be zoomed or hovered to retrieve more info.
|
76 |
+
- Plots can be downloaded using Plotly tools.''')
|
77 |
+
|
78 |
+
with st.sidebar.expander('Sources'):
|
79 |
+
st.markdown('''
|
80 |
+
- Python Libraries: yfinance, BeautifulSoup, Plotly, Pandas, Streamlit
|
81 |
+
- Prices: https://finance.yahoo.com
|
82 |
+
- Logos: https://cryptologos.cc/
|
83 |
+
''')
|
84 |
+
|
85 |
+
|
86 |
+
st.write("Dashboard adapted from https://github.com/rohithteja/the-crypto-dashboard")
|
87 |
+
#-------------------
|
88 |
+
# Add crypto logo and name
|
89 |
+
#-------------------
|
90 |
+
col1, col2 = st.columns([1,10])
|
91 |
+
with col1:
|
92 |
+
try:
|
93 |
+
st.image(f'logos/{select_token}.png',width=70)
|
94 |
+
except:
|
95 |
+
pass
|
96 |
+
with col2:
|
97 |
+
st.markdown(f'''## {dic1[select_token]}''')
|
98 |
+
|
99 |
+
|
100 |
+
#-------------------
|
101 |
+
# Candlestick chart with moving averages
|
102 |
+
#-------------------
|
103 |
+
st.markdown('''
|
104 |
+
- The following is an interactive Candlestick chart for the price fluctuations over the past 5 years.
|
105 |
+
- Simple moving averages were computed for 20, 50 and 100 day frequencies.
|
106 |
+
- Aids in shaping a trading strategy and in interpreting the price fluctuations.''')
|
107 |
+
|
108 |
+
# download 5 year crypto prices from Yahoo Finance
|
109 |
+
df = yf.download(tickers=f'{select_token}-{select_fiat}', period = '5y', interval = '1d')
|
110 |
+
|
111 |
+
# compute moving averages
|
112 |
+
df['MA100'] = df.Close.rolling(100).mean()
|
113 |
+
df['MA50'] = df.Close.rolling(50).mean()
|
114 |
+
df['MA20'] = df.Close.rolling(20).mean()
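# rolling(n).mean() is a simple n-day moving average and is NaN until n observations exist,
# e.g. (toy series) pd.Series([1.0, 2.0, 3.0, 4.0]).rolling(3).mean() -> [NaN, NaN, 2.0, 3.0]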
|
115 |
+
|
116 |
+
# Plotly candlestick chart
|
117 |
+
fig = go.Figure(data=
|
118 |
+
[go.Candlestick(x=df.index,
|
119 |
+
open=df.Open,
|
120 |
+
high=df.High,
|
121 |
+
low=df.Low,
|
122 |
+
close=df.Close,
|
123 |
+
name=f'{select_token}'),
|
124 |
+
go.Scatter(x=df.index, y=df.MA20,
|
125 |
+
line=dict(color='yellow',width=1),name='MA20'),
|
126 |
+
go.Scatter(x=df.index, y=df.MA50,
|
127 |
+
line=dict(color='green',width=1),name='MA50'),
|
128 |
+
go.Scatter(x=df.index, y=df.MA100,
|
129 |
+
line=dict(color='red',width=1),name='MA100')])
|
130 |
+
|
131 |
+
fig.update_layout(go.Layout(xaxis = {'showgrid': False},
|
132 |
+
yaxis = {'showgrid': False}),
|
133 |
+
title=f'{dic1[select_token]} Price Fluctuation with Moving Averages',
|
134 |
+
yaxis_title=f'Price ({select_fiat})',
|
135 |
+
xaxis_rangeslider_visible=False)
|
136 |
+
|
137 |
+
st.plotly_chart(fig, use_container_width=True)
|
138 |
+
|
139 |
+
#-------------------
|
140 |
+
# Line Chart with daily trends
|
141 |
+
#-------------------
|
142 |
+
st.markdown('## Daily Trends')
|
143 |
+
st.markdown(f'''
|
144 |
+
- Line graph below shows the price fluctuation of {dic1[select_token]} every minute for today's date ({today}).
|
145 |
+
- The data is automatically updated for the current day.
|
146 |
+
- The horizontal line shows the current day's open price.
|
147 |
+
- Green portion indicates the price greater than open price and red for lower.
|
148 |
+
''')
|
149 |
+
|
150 |
+
# download daily crypto prices from Yahoo Finance
|
151 |
+
df = yf.download(tickers=f'{select_token}-{select_fiat}', period = '1d', interval = '1m')
|
152 |
+
|
153 |
+
# Plotly line chart
|
154 |
+
fig = go.Figure()
|
155 |
+
fig.add_scattergl(x=df.index, y=df.Close,
|
156 |
+
line={'color': 'green'},name='Up trend')
|
157 |
+
fig.add_scattergl(x=df.index, y=df.Close.where(df.Close <= df.Open[0]),
|
158 |
+
line={'color': 'red'},name='Down trend')
|
159 |
+
fig.add_hline(y=df.Open[0])
|
160 |
+
fig.update_layout(go.Layout(xaxis = {'showgrid': False},
|
161 |
+
yaxis = {'showgrid': False}),
|
162 |
+
title=f'{dic1[select_token]} Daily Trends in Comparison to Open Price',
|
163 |
+
yaxis_title=f'Price ({select_fiat})',template='plotly_dark',
|
164 |
+
xaxis_rangeslider_visible=False)
|
165 |
+
st.plotly_chart(fig, use_container_width=True)
|
166 |
+
|
167 |
+
#-------------------
|
168 |
+
# Table showing top 25 cryptos
|
169 |
+
#-------------------
|
170 |
+
st.markdown('## Top 25 Cryptocurrency Prices and Stats')
|
171 |
+
st.markdown('''
|
172 |
+
- Realtime price changes (in USD).
|
173 |
+
- Values updated every few minutes.
|
174 |
+
- Colour coded column indicates the increase or decrease in price.
|
175 |
+
''')
|
176 |
+
|
177 |
+
# create table from webscraped data
|
178 |
+
df_scrape = df_scrape.rename(columns={'Symbol':'Token'})
|
179 |
+
df_scrape['% Change'] = df_scrape['% Change'].str.replace('%','').str.replace(',','').astype(float)
|
180 |
+
|
181 |
+
df_scrape["color"] = df_scrape["% Change"].map(lambda x:'red' if x<0 else 'green')
|
182 |
+
cols_to_show = ['Name','Token', 'Price', '% Change', 'Market Cap']
|
183 |
+
|
184 |
+
# to change color of "% change" column
|
185 |
+
fill_color = []
|
186 |
+
n = len(df_scrape)
|
187 |
+
for col in cols_to_show:
|
188 |
+
if col!='% Change':
|
189 |
+
fill_color.append(['black']*n)
|
190 |
+
else:
|
191 |
+
fill_color.append(df_scrape["color"].to_list())
|
192 |
+
|
193 |
+
# Plotly Table
|
194 |
+
data=[go.Table(columnwidth = [20,15,15,15,15],
|
195 |
+
header=dict(values=[f"<b>{col}</b>" for col in cols_to_show],
|
196 |
+
font=dict(color='white', size=20),
|
197 |
+
height=30,
|
198 |
+
line_color='black',
|
199 |
+
fill_color='dimgrey',
|
200 |
+
align=['left','left', 'right','right','right']),
|
201 |
+
cells=dict(values=df_scrape[cols_to_show].values.T,
|
202 |
+
fill_color=fill_color,
|
203 |
+
font=dict(color='white', size=20),
|
204 |
+
height=30,
|
205 |
+
line_color='black',
|
206 |
+
align=['left','left', 'right','right','right']))]
|
207 |
+
|
208 |
+
fig = go.Figure(data=data)
|
209 |
+
fig.update_layout(go.Layout(xaxis = {'showgrid': False},
|
210 |
+
yaxis = {'showgrid': False}))
|
211 |
+
st.plotly_chart(fig, use_container_width=True)
|
goofing.ipynb
ADDED
The diff for this file is too large to render. See raw diff.
histories.csv
ADDED
The diff for this file is too large to render. See raw diff.
images/background.png
ADDED
images/cart.png
ADDED
logos/ADA.png
ADDED
logos/ATOM.png
ADDED
logos/AVAX.png
ADDED
logos/BNB.png
ADDED
logos/BTC.png
ADDED
logos/BUSD.png
ADDED
logos/CRO.png
ADDED
logos/DAI.png
ADDED
logos/DOGE.png
ADDED
logos/DOT.png
ADDED
logos/ETH.png
ADDED
logos/HEX.png
ADDED
logos/LINK.png
ADDED
logos/LTC.png
ADDED
logos/LUNA.png
ADDED
logos/MATIC.png
ADDED
logos/NEAR.png
ADDED
logos/SHIB.png
ADDED
logos/SOL.png
ADDED
logos/USDC.png
ADDED
logos/USDT.png
ADDED
logos/UST.png
ADDED
logos/WBTC.png
ADDED
logos/XRP.png
ADDED
performance analysis.ipynb
ADDED
@@ -0,0 +1,1206 @@
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 60,
|
6 |
+
"id": "00769472-1fe0-4010-8a2f-248e5154fc92",
|
7 |
+
"metadata": {},
|
8 |
+
"outputs": [],
|
9 |
+
"source": [
|
10 |
+
"import requests\n",
|
11 |
+
"import json\n",
|
12 |
+
"import plotly.graph_objs as go\n",
|
13 |
+
"import plotly.express as px\n",
|
14 |
+
"import pandas as pd\n",
|
15 |
+
"import numpy as np\n",
|
16 |
+
"import time\n",
|
17 |
+
"from datetime import date, timedelta, datetime\n",
|
18 |
+
"from pypfopt import EfficientFrontier # nb pypfopt is shorthand for the package pyportfolioopt\n",
|
19 |
+
"from pypfopt import risk_models\n",
|
20 |
+
"from pypfopt import expected_returns"
|
21 |
+
]
|
22 |
+
},
|
23 |
+
{
|
24 |
+
"cell_type": "code",
|
25 |
+
"execution_count": 56,
|
26 |
+
"id": "ca4202a4-9a1b-4046-9bc6-d03568b26e5b",
|
27 |
+
"metadata": {},
|
28 |
+
"outputs": [],
|
29 |
+
"source": [
|
30 |
+
"def create_assets(total_coins=100):\n",
|
31 |
+
" '''\n",
|
32 |
+
" A function to retrieve info about the largest total_coins number of\n",
|
33 |
+
" cryptocurrencies, ranked by market cap, generated by a call to coincap assets\n",
|
34 |
+
" api.\n",
|
35 |
+
" '''\n",
|
36 |
+
" url = \"https://api.coincap.io/v2/assets\"\n",
|
37 |
+
"\n",
|
38 |
+
" # N.B. here adampt the params dict to only request what you need\n",
|
39 |
+
" payload={'limit': total_coins}\n",
|
40 |
+
" headers = {}\n",
|
41 |
+
"\n",
|
42 |
+
" assets_json = requests.request(\"GET\", url, params=payload, headers=headers).json()\n",
|
43 |
+
" return assets_json\n",
|
44 |
+
"\n",
|
45 |
+
"def gen_symbols(assets_json):\n",
|
46 |
+
" '''\n",
|
47 |
+
" Function to generate three lists: symbols, names and ids, from the result of\n",
|
48 |
+
" a call to the coincap assets api, assets_json.\n",
|
49 |
+
" '''\n",
|
50 |
+
" symbols_list = []\n",
|
51 |
+
" names_list = []\n",
|
52 |
+
" ids_list =[]\n",
|
53 |
+
" for dict in assets_json['data']:\n",
|
54 |
+
" symbols_list.append(dict['symbol'])\n",
|
55 |
+
" names_list.append(dict['name'])\n",
|
56 |
+
" ids_list.append(dict['id'])\n",
|
57 |
+
" return symbols_list, names_list, ids_list\n",
|
58 |
+
"\n",
|
59 |
+
"def load_histories(coin_ids, start, end):\n",
|
60 |
+
" '''\n",
|
61 |
+
" Function to load daily historic prices for all crypto currencies in the\n",
|
62 |
+
" coin_ids list within the time period defined by the interval [start, end].\n",
|
63 |
+
" '''\n",
|
64 |
+
" url = \"http://api.coincap.io/v2/assets/{}/history\"\n",
|
65 |
+
"\n",
|
66 |
+
" payload={'interval':'d1', 'start':start, 'end':end}\n",
|
67 |
+
" headers = {}\n",
|
68 |
+
"\n",
|
69 |
+
" histories_dict = {}\n",
|
70 |
+
" for id in coin_ids:\n",
|
71 |
+
" response_histories = requests.request(\"GET\", url.format(id), headers=headers, params=payload)\n",
|
72 |
+
" histories_json = response_histories.json()\n",
|
73 |
+
" histories_dict[id] = histories_json['data']\n",
|
74 |
+
" return histories_dict\n",
|
75 |
+
"\n",
|
76 |
+
"def create_unix_dates(today=date.today(), lookback_years = 5):\n",
|
77 |
+
" '''\n",
|
78 |
+
" A function to create start_unix and end_unix times in UNIX time in milliseconds\n",
|
79 |
+
" '''\n",
|
80 |
+
" start_datetime = today-timedelta(365*lookback_years)\n",
|
81 |
+
" start_unix = int(time.mktime(start_datetime.timetuple()) * 1000)\n",
|
82 |
+
" end_unix = int(time.mktime(date.today().timetuple()) * 1000)\n",
|
83 |
+
" return start_unix, end_unix"
|
84 |
+
]
|
85 |
+
},
|
86 |
+
{
|
87 |
+
"cell_type": "code",
|
88 |
+
"execution_count": 63,
|
89 |
+
"id": "8f806ff9-d219-4935-b2fa-142409796951",
|
90 |
+
"metadata": {},
|
91 |
+
"outputs": [],
|
92 |
+
"source": [
|
93 |
+
"assets_json = create_assets(total_coins=100)"
|
94 |
+
]
|
95 |
+
},
|
96 |
+
{
|
97 |
+
"cell_type": "code",
|
98 |
+
"execution_count": 64,
|
99 |
+
"id": "eb3ac8a0-24ee-4cfb-b410-2bc4792e7485",
|
100 |
+
"metadata": {},
|
101 |
+
"outputs": [],
|
102 |
+
"source": [
|
103 |
+
"symbols, names, coin_ids = gen_symbols(assets_json)"
|
104 |
+
]
|
105 |
+
},
|
106 |
+
{
|
107 |
+
"cell_type": "code",
|
108 |
+
"execution_count": 65,
|
109 |
+
"id": "9b4f9e03-5f5d-4c35-b515-3ed956191603",
|
110 |
+
"metadata": {},
|
111 |
+
"outputs": [],
|
112 |
+
"source": [
|
113 |
+
"start_unix, end_unix = create_unix_dates(today=date.today(), lookback_years=5)"
|
114 |
+
]
|
115 |
+
},
|
116 |
+
{
|
117 |
+
"cell_type": "code",
|
118 |
+
"execution_count": 67,
|
119 |
+
"id": "c4c15661-17fb-420c-bce1-0850066ab683",
|
120 |
+
"metadata": {},
|
121 |
+
"outputs": [],
|
122 |
+
"source": [
|
123 |
+
"url = \"http://api.coincap.io/v2/assets/{}/history\"\n",
|
124 |
+
"\n",
|
125 |
+
"payload={'interval':'d1', 'start':start_unix, 'end':end_unix}\n",
|
126 |
+
"headers = {}\n",
|
127 |
+
"\n",
|
128 |
+
"histories_dict = {}\n",
|
129 |
+
"for id in coin_ids:\n",
|
130 |
+
" response_histories = requests.request(\"GET\", url.format(id), headers=headers, params=payload)\n",
|
131 |
+
" histories_json = response_histories.json()\n",
|
132 |
+
" histories_dict[id] = histories_json['data']"
|
133 |
+
]
|
134 |
+
},
|
135 |
+
{
|
136 |
+
"cell_type": "code",
|
137 |
+
"execution_count": 66,
|
138 |
+
"id": "de780763-c0ad-4c82-acd0-f279d46e54d3",
|
139 |
+
"metadata": {
|
140 |
+
"collapsed": true,
|
141 |
+
"jupyter": {
|
142 |
+
"outputs_hidden": true
|
143 |
+
},
|
144 |
+
"tags": []
|
145 |
+
},
|
146 |
+
"outputs": [
|
147 |
+
{
|
148 |
+
"ename": "JSONDecodeError",
|
149 |
+
"evalue": "Expecting value: line 1 column 1 (char 0)",
|
150 |
+
"output_type": "error",
|
151 |
+
"traceback": [
|
152 |
+
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
153 |
+
"\u001b[0;31mJSONDecodeError\u001b[0m Traceback (most recent call last)",
|
154 |
+
"\u001b[0;32m/var/folders/ff/pmf9d5156jz_pr_s8ybs3x780000gn/T/ipykernel_63357/1421263752.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mhistories_dict\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mload_histories\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcoin_ids\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstart_unix\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mend_unix\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
|
155 |
+
"\u001b[0;32m/var/folders/ff/pmf9d5156jz_pr_s8ybs3x780000gn/T/ipykernel_63357/437834857.py\u001b[0m in \u001b[0;36mload_histories\u001b[0;34m(coin_ids, start, end)\u001b[0m\n\u001b[1;32m 41\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mid\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mcoin_ids\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 42\u001b[0m \u001b[0mresponse_histories\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mrequests\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"GET\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0murl\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mid\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mheaders\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mheaders\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mparams\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mpayload\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 43\u001b[0;31m \u001b[0mhistories_json\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mresponse_histories\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjson\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 44\u001b[0m \u001b[0mhistories_dict\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mid\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mhistories_json\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'data'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 45\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mhistories_dict\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
156 |
+
"\u001b[0;32m/usr/local/lib/python3.9/site-packages/requests/models.py\u001b[0m in \u001b[0;36mjson\u001b[0;34m(self, **kwargs)\u001b[0m\n\u001b[1;32m 864\u001b[0m \u001b[0;31m# used.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 865\u001b[0m \u001b[0;32mpass\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 866\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mcomplexjson\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mloads\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtext\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 867\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 868\u001b[0m \u001b[0;34m@\u001b[0m\u001b[0mproperty\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
157 |
+
"\u001b[0;32m/usr/local/Cellar/[email protected]/3.9.5/Frameworks/Python.framework/Versions/3.9/lib/python3.9/json/__init__.py\u001b[0m in \u001b[0;36mloads\u001b[0;34m(s, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)\u001b[0m\n\u001b[1;32m 344\u001b[0m \u001b[0mparse_int\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mparse_float\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m \u001b[0;32mand\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 345\u001b[0m parse_constant is None and object_pairs_hook is None and not kw):\n\u001b[0;32m--> 346\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0m_default_decoder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdecode\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ms\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 347\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcls\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 348\u001b[0m \u001b[0mcls\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mJSONDecoder\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
158 |
+
"\u001b[0;32m/usr/local/Cellar/[email protected]/3.9.5/Frameworks/Python.framework/Versions/3.9/lib/python3.9/json/decoder.py\u001b[0m in \u001b[0;36mdecode\u001b[0;34m(self, s, _w)\u001b[0m\n\u001b[1;32m 335\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 336\u001b[0m \"\"\"\n\u001b[0;32m--> 337\u001b[0;31m \u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mraw_decode\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ms\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0midx\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0m_w\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ms\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 338\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_w\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ms\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mend\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 339\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ms\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
159 |
+
"\u001b[0;32m/usr/local/Cellar/[email protected]/3.9.5/Frameworks/Python.framework/Versions/3.9/lib/python3.9/json/decoder.py\u001b[0m in \u001b[0;36mraw_decode\u001b[0;34m(self, s, idx)\u001b[0m\n\u001b[1;32m 353\u001b[0m \u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mend\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mscan_once\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ms\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 354\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mStopIteration\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0merr\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 355\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mJSONDecodeError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"Expecting value\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ms\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0merr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 356\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mend\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
160 |
+
"\u001b[0;31mJSONDecodeError\u001b[0m: Expecting value: line 1 column 1 (char 0)"
|
161 |
+
]
|
162 |
+
}
|
163 |
+
],
|
164 |
+
"source": [
|
165 |
+
"histories_dict = load_histories(coin_ids, start_unix, end_unix)"
|
166 |
+
]
|
167 |
+
},
|
168 |
+
{
|
169 |
+
"cell_type": "code",
|
170 |
+
"execution_count": 2,
|
171 |
+
"id": "c687db6d-bfd8-4e43-9e7a-b03923eed314",
|
172 |
+
"metadata": {},
|
173 |
+
"outputs": [],
|
174 |
+
"source": [
|
175 |
+
"def date_conv(date):\n",
|
176 |
+
" return datetime.strptime(date, '%Y-%m-%d')"
|
177 |
+
]
|
178 |
+
},
|
179 |
+
{
|
180 |
+
"cell_type": "code",
|
181 |
+
"execution_count": 3,
|
182 |
+
"id": "8a6a3771-f0f0-41dc-b34b-e9f5cd403dc4",
|
183 |
+
"metadata": {},
|
184 |
+
"outputs": [],
|
185 |
+
"source": [
|
186 |
+
"histories_df= pd.read_csv('histories.csv')\n",
|
187 |
+
"histories_df['date'] = list(map(date_conv,histories_df['date']))\n",
|
188 |
+
"histories_df = histories_df.set_index('date')"
|
189 |
+
]
|
190 |
+
},
|
191 |
+
{
|
192 |
+
"cell_type": "code",
|
193 |
+
"execution_count": 4,
|
194 |
+
"id": "93ccd49c-55d4-4984-a768-2d1d961adf7c",
|
195 |
+
"metadata": {},
|
196 |
+
"outputs": [],
|
197 |
+
"source": [
|
198 |
+
"def ids_with_histories(histories_df, start_date, end_date):\n",
|
199 |
+
" investment_df = histories_df[start_date:end_date]\n",
|
200 |
+
" investment_df.dropna(axis=1, inplace=True) # drop cols with any NaN values\n",
|
201 |
+
" return investment_df.columns\n",
|
202 |
+
"\n",
|
203 |
+
"def uniform_weights_dict(ids_with_histories):\n",
|
204 |
+
" weight = 1/len(ids_with_histories)\n",
|
205 |
+
" uniform_weights_dict = {}\n",
|
206 |
+
" for id in ids_with_histories:\n",
|
207 |
+
" uniform_weights_dict[id] = weight\n",
|
208 |
+
" return uniform_weights_dict\n",
|
209 |
+
"\n",
|
210 |
+
"\n",
|
211 |
+
"def markowitz_weights_dict(histories_df,start_port_date,ids_with_histories, analysis_days=365):\n",
|
212 |
+
" start_analysis_date = start_port_date - timedelta(analysis_days)\n",
|
213 |
+
" analysis_df = histories_df[start_analysis_date:start_port_date][ids_with_histories]\n",
|
214 |
+
"\n",
|
215 |
+
" # Calculate expected returns and sample covariance\n",
|
216 |
+
" mu = expected_returns.mean_historical_return(analysis_df)\n",
|
217 |
+
" S = risk_models.sample_cov(analysis_df)\n",
|
218 |
+
" # Optimize for maximal Sharpe ratio\n",
|
219 |
+
" attempts=0\n",
|
220 |
+
" while attempts < 10:\n",
|
221 |
+
" try:\n",
|
222 |
+
" ef = EfficientFrontier(mu, S, weight_bounds=(0, 1))\n",
|
223 |
+
" ef.max_sharpe()\n",
|
224 |
+
" break\n",
|
225 |
+
" except Exception as e:\n",
|
226 |
+
" attempts += 1\n",
|
227 |
+
" try:\n",
|
228 |
+
" cleaned_weights = ef.clean_weights()\n",
|
229 |
+
" except Exception as e:\n",
|
230 |
+
" print(\"Could not find optimal solution, try changing optimisation constraints or investment set\")\n",
|
231 |
+
" return cleaned_weights\n",
|
232 |
+
"\n",
|
233 |
+
"\n",
|
234 |
+
"def gen_port_rtns(rebased_df, weights_dict):\n",
|
235 |
+
" return rebased_df[list(weights_dict.keys())].dot(list(weights_dict.values()))\n",
|
236 |
+
"\n",
|
237 |
+
"def gen_rebased_df(histories_df, ids_with_histories, start_date, end_date):\n",
|
238 |
+
" returns_df = histories_df[ids_with_histories].pct_change(1)\n",
|
239 |
+
" returns_df[start_date:start_date]=0\n",
|
240 |
+
" return (1 + returns_df[start_date:end_date]).cumprod()"
|
241 |
+
]
|
242 |
+
},
|
243 |
+
{
|
244 |
+
"cell_type": "code",
|
245 |
+
"execution_count": 5,
|
246 |
+
"id": "6092ed85-0472-447d-90e5-ab0e357d1a5f",
|
247 |
+
"metadata": {},
|
248 |
+
"outputs": [
|
249 |
+
{
|
250 |
+
"name": "stderr",
|
251 |
+
"output_type": "stream",
|
252 |
+
"text": [
|
253 |
+
"/var/folders/ff/pmf9d5156jz_pr_s8ybs3x780000gn/T/ipykernel_63357/2151836904.py:3: SettingWithCopyWarning: \n",
|
254 |
+
"A value is trying to be set on a copy of a slice from a DataFrame\n",
|
255 |
+
"\n",
|
256 |
+
"See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
|
257 |
+
" investment_df.dropna(axis=1, inplace=True) # drop cols with any NaN values\n"
|
258 |
+
]
|
259 |
+
}
|
260 |
+
],
|
261 |
+
"source": [
|
262 |
+
"lookback_years = 5\n",
|
263 |
+
"start_date = date.today() - timedelta(365)\n",
|
264 |
+
"end_date = date.today()\n",
|
265 |
+
"ids_with_histories = ids_with_histories(histories_df,\n",
|
266 |
+
" start_date, end_date)"
|
267 |
+
]
|
268 |
+
},
|
269 |
+
{
|
270 |
+
"cell_type": "code",
|
271 |
+
"execution_count": 6,
|
272 |
+
"id": "6044aecd-752f-4097-8598-2ca9b1693c2f",
|
273 |
+
"metadata": {},
|
274 |
+
"outputs": [],
|
275 |
+
"source": [
|
276 |
+
"uniform_weights_dict = uniform_weights_dict(ids_with_histories[:10])\n",
|
277 |
+
"markowitz_weights_dict = markowitz_weights_dict(histories_df,\n",
|
278 |
+
" start_date ,ids_with_histories[:10], analysis_days=365)\n",
|
279 |
+
"rebased_df = gen_rebased_df(histories_df, ids_with_histories,\n",
|
280 |
+
" start_date, end_date)"
|
281 |
+
]
|
282 |
+
},
|
283 |
+
{
|
284 |
+
"cell_type": "code",
|
285 |
+
"execution_count": 9,
|
286 |
+
"id": "0741b8f0-32f5-411a-a412-f40320297ef2",
|
287 |
+
"metadata": {},
|
288 |
+
"outputs": [],
|
289 |
+
"source": [
|
290 |
+
"def gen_all_returns(rebased_df, ids_with_histories,uniform_weights_dict,\n",
|
291 |
+
" markowitz_weights_dict):\n",
|
292 |
+
" '''\n",
|
293 |
+
" A function to generate returns for all portfolios and all coins with full\n",
|
294 |
+
" histories over the backtest period, rebased to the start of the backtest\n",
|
295 |
+
" period.\n",
|
296 |
+
" '''\n",
|
297 |
+
" uniform_returns = gen_port_rtns(rebased_df, uniform_weights_dict)\n",
|
298 |
+
" uniform_returns.name = \"Uniform\"\n",
|
299 |
+
" markowitz_returns = gen_port_rtns(rebased_df, markowitz_weights_dict)\n",
|
300 |
+
" markowitz_returns.name = \"Markowitz\"\n",
|
301 |
+
" port_returns = uniform_returns.to_frame().join(markowitz_returns)\n",
|
302 |
+
" return port_returns.join(rebased_df[ids_with_histories])"
|
303 |
+
]
|
304 |
+
},
|
305 |
+
{
|
306 |
+
"cell_type": "code",
|
307 |
+
"execution_count": 10,
|
308 |
+
"id": "0b67f3d3-ae6f-4d8e-b52c-1c4c7c18d662",
|
309 |
+
"metadata": {},
|
310 |
+
"outputs": [],
|
311 |
+
"source": [
|
312 |
+
"all_returns_df = gen_all_returns(rebased_df, ids_with_histories,uniform_weights_dict,\n",
|
313 |
+
" markowitz_weights_dict)"
|
314 |
+
]
|
315 |
+
},
|
316 |
+
{
|
317 |
+
"cell_type": "code",
|
318 |
+
"execution_count": 11,
|
319 |
+
"id": "7dd39fac-3015-4024-a045-04f8a273ffec",
|
320 |
+
"metadata": {},
|
321 |
+
"outputs": [
|
322 |
+
{
|
323 |
+
"data": {
|
324 |
+
"text/html": [
|
325 |
+
"<div>\n",
|
326 |
+
"<style scoped>\n",
|
327 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
328 |
+
" vertical-align: middle;\n",
|
329 |
+
" }\n",
|
330 |
+
"\n",
|
331 |
+
" .dataframe tbody tr th {\n",
|
332 |
+
" vertical-align: top;\n",
|
333 |
+
" }\n",
|
334 |
+
"\n",
|
335 |
+
" .dataframe thead th {\n",
|
336 |
+
" text-align: right;\n",
|
337 |
+
" }\n",
|
338 |
+
"</style>\n",
|
339 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
340 |
+
" <thead>\n",
|
341 |
+
" <tr style=\"text-align: right;\">\n",
|
342 |
+
" <th></th>\n",
|
343 |
+
" <th>Uniform</th>\n",
|
344 |
+
" <th>Markowitz</th>\n",
|
345 |
+
" <th>bitcoin</th>\n",
|
346 |
+
" <th>ethereum</th>\n",
|
347 |
+
" <th>tether</th>\n",
|
348 |
+
" <th>usd-coin</th>\n",
|
349 |
+
" <th>binance-coin</th>\n",
|
350 |
+
" <th>xrp</th>\n",
|
351 |
+
" <th>binance-usd</th>\n",
|
352 |
+
" <th>cardano</th>\n",
|
353 |
+
" <th>...</th>\n",
|
354 |
+
" <th>arweave</th>\n",
|
355 |
+
" <th>compound</th>\n",
|
356 |
+
" <th>kava</th>\n",
|
357 |
+
" <th>holo</th>\n",
|
358 |
+
" <th>gatetoken</th>\n",
|
359 |
+
" <th>fei-protocol</th>\n",
|
360 |
+
" <th>kyber-network</th>\n",
|
361 |
+
" <th>qtum</th>\n",
|
362 |
+
" <th>bancor</th>\n",
|
363 |
+
" <th>1inch</th>\n",
|
364 |
+
" </tr>\n",
|
365 |
+
" <tr>\n",
|
366 |
+
" <th>date</th>\n",
|
367 |
+
" <th></th>\n",
|
368 |
+
" <th></th>\n",
|
369 |
+
" <th></th>\n",
|
370 |
+
" <th></th>\n",
|
371 |
+
" <th></th>\n",
|
372 |
+
" <th></th>\n",
|
373 |
+
" <th></th>\n",
|
374 |
+
" <th></th>\n",
|
375 |
+
" <th></th>\n",
|
376 |
+
" <th></th>\n",
|
377 |
+
" <th></th>\n",
|
378 |
+
" <th></th>\n",
|
379 |
+
" <th></th>\n",
|
380 |
+
" <th></th>\n",
|
381 |
+
" <th></th>\n",
|
382 |
+
" <th></th>\n",
|
383 |
+
" <th></th>\n",
|
384 |
+
" <th></th>\n",
|
385 |
+
" <th></th>\n",
|
386 |
+
" <th></th>\n",
|
387 |
+
" <th></th>\n",
|
388 |
+
" </tr>\n",
|
389 |
+
" </thead>\n",
|
390 |
+
" <tbody>\n",
|
391 |
+
" <tr>\n",
|
392 |
+
" <th>2021-05-24</th>\n",
|
393 |
+
" <td>1.000000</td>\n",
|
394 |
+
" <td>1.000000</td>\n",
|
395 |
+
" <td>1.000000</td>\n",
|
396 |
+
" <td>1.000000</td>\n",
|
397 |
+
" <td>1.000000</td>\n",
|
398 |
+
" <td>1.000000</td>\n",
|
399 |
+
" <td>1.000000</td>\n",
|
400 |
+
" <td>1.000000</td>\n",
|
401 |
+
" <td>1.000000</td>\n",
|
402 |
+
" <td>1.000000</td>\n",
|
403 |
+
" <td>...</td>\n",
|
404 |
+
" <td>1.000000</td>\n",
|
405 |
+
" <td>1.000000</td>\n",
|
406 |
+
" <td>1.000000</td>\n",
|
407 |
+
" <td>1.000000</td>\n",
|
408 |
+
" <td>1.000000</td>\n",
|
409 |
+
" <td>1.000000</td>\n",
|
410 |
+
" <td>1.000000</td>\n",
|
411 |
+
" <td>1.000000</td>\n",
|
412 |
+
" <td>1.000000</td>\n",
|
413 |
+
" <td>1.000000</td>\n",
|
414 |
+
" </tr>\n",
|
415 |
+
" <tr>\n",
|
416 |
+
" <th>2021-05-25</th>\n",
|
417 |
+
" <td>1.057165</td>\n",
|
418 |
+
" <td>1.015792</td>\n",
|
419 |
+
" <td>1.035396</td>\n",
|
420 |
+
" <td>1.104084</td>\n",
|
421 |
+
" <td>1.004346</td>\n",
|
422 |
+
" <td>1.008332</td>\n",
|
423 |
+
" <td>1.117736</td>\n",
|
424 |
+
" <td>1.141806</td>\n",
|
425 |
+
" <td>1.004241</td>\n",
|
426 |
+
" <td>1.067167</td>\n",
|
427 |
+
" <td>...</td>\n",
|
428 |
+
" <td>1.015432</td>\n",
|
429 |
+
" <td>1.051228</td>\n",
|
430 |
+
" <td>1.084118</td>\n",
|
431 |
+
" <td>1.231873</td>\n",
|
432 |
+
" <td>1.106947</td>\n",
|
433 |
+
" <td>1.004524</td>\n",
|
434 |
+
" <td>1.037904</td>\n",
|
435 |
+
" <td>1.264676</td>\n",
|
436 |
+
" <td>1.078122</td>\n",
|
437 |
+
" <td>1.064602</td>\n",
|
438 |
+
" </tr>\n",
|
439 |
+
" <tr>\n",
|
440 |
+
" <th>2021-05-26</th>\n",
|
441 |
+
" <td>1.103699</td>\n",
|
442 |
+
" <td>1.018414</td>\n",
|
443 |
+
" <td>1.059262</td>\n",
|
444 |
+
" <td>1.182178</td>\n",
|
445 |
+
" <td>0.998764</td>\n",
|
446 |
+
" <td>0.998087</td>\n",
|
447 |
+
" <td>1.211152</td>\n",
|
448 |
+
" <td>1.172456</td>\n",
|
449 |
+
" <td>0.998892</td>\n",
|
450 |
+
" <td>1.180429</td>\n",
|
451 |
+
" <td>...</td>\n",
|
452 |
+
" <td>1.157293</td>\n",
|
453 |
+
" <td>1.142980</td>\n",
|
454 |
+
" <td>1.216742</td>\n",
|
455 |
+
" <td>1.508202</td>\n",
|
456 |
+
" <td>1.167414</td>\n",
|
457 |
+
" <td>1.001874</td>\n",
|
458 |
+
" <td>1.153683</td>\n",
|
459 |
+
" <td>1.288506</td>\n",
|
460 |
+
" <td>1.173015</td>\n",
|
461 |
+
" <td>1.220068</td>\n",
|
462 |
+
" </tr>\n",
|
463 |
+
" <tr>\n",
|
464 |
+
" <th>2021-05-27</th>\n",
|
465 |
+
" <td>1.094057</td>\n",
|
466 |
+
" <td>1.016329</td>\n",
|
467 |
+
" <td>1.047055</td>\n",
|
468 |
+
" <td>1.171337</td>\n",
|
469 |
+
" <td>0.998721</td>\n",
|
470 |
+
" <td>0.997785</td>\n",
|
471 |
+
" <td>1.209819</td>\n",
|
472 |
+
" <td>1.151604</td>\n",
|
473 |
+
" <td>0.998899</td>\n",
|
474 |
+
" <td>1.166563</td>\n",
|
475 |
+
" <td>...</td>\n",
|
476 |
+
" <td>1.159935</td>\n",
|
477 |
+
" <td>1.114319</td>\n",
|
478 |
+
" <td>1.253248</td>\n",
|
479 |
+
" <td>1.432746</td>\n",
|
480 |
+
" <td>1.155625</td>\n",
|
481 |
+
" <td>0.999451</td>\n",
|
482 |
+
" <td>1.178437</td>\n",
|
483 |
+
" <td>1.424998</td>\n",
|
484 |
+
" <td>1.192224</td>\n",
|
485 |
+
" <td>1.226486</td>\n",
|
486 |
+
" </tr>\n",
|
487 |
+
" <tr>\n",
|
488 |
+
" <th>2021-05-28</th>\n",
|
489 |
+
" <td>1.035221</td>\n",
|
490 |
+
" <td>1.005436</td>\n",
|
491 |
+
" <td>0.988150</td>\n",
|
492 |
+
" <td>1.081310</td>\n",
|
493 |
+
" <td>0.998400</td>\n",
|
494 |
+
" <td>0.997946</td>\n",
|
495 |
+
" <td>1.118547</td>\n",
|
496 |
+
" <td>1.055065</td>\n",
|
497 |
+
" <td>0.998932</td>\n",
|
498 |
+
" <td>1.070248</td>\n",
|
499 |
+
" <td>...</td>\n",
|
500 |
+
" <td>1.011761</td>\n",
|
501 |
+
" <td>1.007871</td>\n",
|
502 |
+
" <td>1.218521</td>\n",
|
503 |
+
" <td>1.269149</td>\n",
|
504 |
+
" <td>1.053198</td>\n",
|
505 |
+
" <td>0.994776</td>\n",
|
506 |
+
" <td>1.072810</td>\n",
|
507 |
+
" <td>1.336266</td>\n",
|
508 |
+
" <td>1.095105</td>\n",
|
509 |
+
" <td>1.125980</td>\n",
|
510 |
+
" </tr>\n",
|
511 |
+
" </tbody>\n",
|
512 |
+
"</table>\n",
|
513 |
+
"<p>5 rows × 87 columns</p>\n",
|
514 |
+
"</div>"
|
515 |
+
],
|
516 |
+
"text/plain": [
|
517 |
+
" Uniform Markowitz bitcoin ethereum tether usd-coin \\\n",
|
518 |
+
"date \n",
|
519 |
+
"2021-05-24 1.000000 1.000000 1.000000 1.000000 1.000000 1.000000 \n",
|
520 |
+
"2021-05-25 1.057165 1.015792 1.035396 1.104084 1.004346 1.008332 \n",
|
521 |
+
"2021-05-26 1.103699 1.018414 1.059262 1.182178 0.998764 0.998087 \n",
|
522 |
+
"2021-05-27 1.094057 1.016329 1.047055 1.171337 0.998721 0.997785 \n",
|
523 |
+
"2021-05-28 1.035221 1.005436 0.988150 1.081310 0.998400 0.997946 \n",
|
524 |
+
"\n",
|
525 |
+
" binance-coin xrp binance-usd cardano ... arweave \\\n",
|
526 |
+
"date ... \n",
|
527 |
+
"2021-05-24 1.000000 1.000000 1.000000 1.000000 ... 1.000000 \n",
|
528 |
+
"2021-05-25 1.117736 1.141806 1.004241 1.067167 ... 1.015432 \n",
|
529 |
+
"2021-05-26 1.211152 1.172456 0.998892 1.180429 ... 1.157293 \n",
|
530 |
+
"2021-05-27 1.209819 1.151604 0.998899 1.166563 ... 1.159935 \n",
|
531 |
+
"2021-05-28 1.118547 1.055065 0.998932 1.070248 ... 1.011761 \n",
|
532 |
+
"\n",
|
533 |
+
" compound kava holo gatetoken fei-protocol \\\n",
|
534 |
+
"date \n",
|
535 |
+
"2021-05-24 1.000000 1.000000 1.000000 1.000000 1.000000 \n",
|
536 |
+
"2021-05-25 1.051228 1.084118 1.231873 1.106947 1.004524 \n",
|
537 |
+
"2021-05-26 1.142980 1.216742 1.508202 1.167414 1.001874 \n",
|
538 |
+
"2021-05-27 1.114319 1.253248 1.432746 1.155625 0.999451 \n",
|
539 |
+
"2021-05-28 1.007871 1.218521 1.269149 1.053198 0.994776 \n",
|
540 |
+
"\n",
|
541 |
+
" kyber-network qtum bancor 1inch \n",
|
542 |
+
"date \n",
|
543 |
+
"2021-05-24 1.000000 1.000000 1.000000 1.000000 \n",
|
544 |
+
"2021-05-25 1.037904 1.264676 1.078122 1.064602 \n",
|
545 |
+
"2021-05-26 1.153683 1.288506 1.173015 1.220068 \n",
|
546 |
+
"2021-05-27 1.178437 1.424998 1.192224 1.226486 \n",
|
547 |
+
"2021-05-28 1.072810 1.336266 1.095105 1.125980 \n",
|
548 |
+
"\n",
|
549 |
+
"[5 rows x 87 columns]"
|
550 |
+
]
|
551 |
+
},
|
552 |
+
"execution_count": 11,
|
553 |
+
"metadata": {},
|
554 |
+
"output_type": "execute_result"
|
555 |
+
}
|
556 |
+
],
|
557 |
+
"source": [
|
558 |
+
"all_returns_df.head()"
|
559 |
+
]
|
560 |
+
},
|
561 |
+
{
|
562 |
+
"cell_type": "code",
|
563 |
+
"execution_count": 12,
|
564 |
+
"id": "ebe9263f-1a3f-4dc8-ba19-732b83d30e50",
|
565 |
+
"metadata": {},
|
566 |
+
"outputs": [],
|
567 |
+
"source": [
|
568 |
+
"def absolute_return(prices):\n",
|
569 |
+
" 'a function to calculate the absolute return given a daily price series'\n",
|
570 |
+
" abs_rtn = ((prices.iloc[-1]/prices[0])-1)\n",
|
571 |
+
" return abs_rtn\n",
|
572 |
+
"\n",
|
573 |
+
"def annual_return(prices):\n",
|
574 |
+
" 'a function to calculate the annualised return given a daily price series'\n",
|
575 |
+
" abs_rtn = absolute_return(prices)\n",
|
576 |
+
" annual_rnt = (pow((abs_rtn/100)+1, 365/len(prices))-1)*100\n",
|
577 |
+
" return annual_rnt\n",
|
578 |
+
"\n",
|
579 |
+
"def max_drawdown(prices):\n",
|
580 |
+
" '''\n",
|
581 |
+
" A function to calculate the max drawdown for a given price series \"prices\"\n",
|
582 |
+
" as well as the index of the start of the max drawdown period, \"start_idx\"\n",
|
583 |
+
" and the index of end of the max drawdwon period, \"end index\"\n",
|
584 |
+
" '''\n",
|
585 |
+
" if type(prices)==type(pd.Series(dtype='object')):\n",
|
586 |
+
" prices = prices.values\n",
|
587 |
+
" end_idx = np.argmax(np.maximum.accumulate(prices) - prices) # end of the period\n",
|
588 |
+
" start_idx = np.argmax(prices[:end_idx]) # start of period\n",
|
589 |
+
" max_dd = (prices[start_idx]-prices[end_idx])/prices[start_idx]\n",
|
590 |
+
" return max_dd, start_idx, end_idx\n",
|
591 |
+
"\n",
|
592 |
+
"def annual_vol(prices):\n",
|
593 |
+
" '''\n",
|
594 |
+
" A function to calculate the annuaised volatility of a price series assuming\n",
|
595 |
+
" cryptos trade 365 days a year\n",
|
596 |
+
" '''\n",
|
597 |
+
" return prices.pct_change().std()*(365**0.5)"
|
598 |
+
]
|
599 |
+
},
|
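A quick sanity check of the helpers above on a made-up price series (a sketch only; it assumes the notebook's existing pandas/numpy imports, and the toy numbers are invented):

toy = pd.Series([1.0, 1.2, 0.9, 1.1, 0.8, 1.05])
max_dd, start_idx, end_idx = max_drawdown(toy)
# peak at position 1 (1.2), trough at position 4 (0.8) -> drawdown of (1.2-0.8)/1.2 ≈ 0.333
print(round(absolute_return(toy), 3), round(max_dd, 3), start_idx, end_idx)  # 0.05 0.333 1 4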
600 |
+
{
|
601 |
+
"cell_type": "code",
|
602 |
+
"execution_count": 26,
|
603 |
+
"id": "c7cfd009-dbe6-4afe-86a8-9706beb1f351",
|
604 |
+
"metadata": {},
|
605 |
+
"outputs": [
|
606 |
+
{
|
607 |
+
"name": "stdout",
|
608 |
+
"output_type": "stream",
|
609 |
+
"text": [
|
610 |
+
"CPU times: user 38.2 ms, sys: 22.1 ms, total: 60.3 ms\n",
|
611 |
+
"Wall time: 677 ms\n"
|
612 |
+
]
|
613 |
+
}
|
614 |
+
],
|
615 |
+
"source": [
|
616 |
+
"%%time \n",
|
617 |
+
"\n",
|
618 |
+
"url = \"https://api.coincap.io/v2/assets\"\n",
|
619 |
+
"\n",
|
620 |
+
"# N.B. here adampt the params dict to only request what you need\n",
|
621 |
+
"payload={'limit': '100'}\n",
|
622 |
+
"headers = {}\n",
|
623 |
+
"\n",
|
624 |
+
"response_assets = requests.request(\"GET\", url, params=payload, headers=headers)\n",
|
625 |
+
"assets_json = response_assets.json()"
|
626 |
+
]
|
627 |
+
},
|
628 |
+
{
|
629 |
+
"cell_type": "code",
|
630 |
+
"execution_count": 51,
|
631 |
+
"id": "9f8abf03-8214-498d-8cd1-778f77ff308e",
|
632 |
+
"metadata": {},
|
633 |
+
"outputs": [],
|
634 |
+
"source": [
|
635 |
+
"market_cap_dict = {}\n",
|
636 |
+
"for asset_dict in assets_json['data']:\n",
|
637 |
+
" market_cap_dict[asset_dict['id']] = int(float(asset_dict['marketCapUsd']))"
|
638 |
+
]
|
639 |
+
},
|
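A hedged variant of the cell above: if the CoinCap response ever contains an asset with no market cap, marketCapUsd could be None and int(float(...)) would raise. A defensive sketch (the None case is an assumption, not something observed here) might look like:

market_cap_dict = {}
for asset_dict in assets_json['data']:
    cap = asset_dict.get('marketCapUsd')  # may be missing/None for some assets (assumption)
    market_cap_dict[asset_dict['id']] = int(float(cap)) if cap else 0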
640 |
+
{
|
641 |
+
"cell_type": "code",
|
642 |
+
"execution_count": 53,
|
643 |
+
"id": "68a777da-401f-41cb-8485-688bd3d6df70",
|
644 |
+
"metadata": {},
|
645 |
+
"outputs": [],
|
646 |
+
"source": [
|
647 |
+
"assets = all_returns_df.columns\n",
|
648 |
+
"performance_df = pd.DataFrame(index = assets)\n",
|
649 |
+
"performance_df['Type'] = [\"Portfolio\" if x in ['Uniform','Markowitz'] else \"Coin\" for x in assets]\n",
|
650 |
+
"abs_return = all_returns_df.apply(absolute_return)\n",
|
651 |
+
"ann_vol = all_returns_df.apply(annual_vol)\n",
|
652 |
+
"drawdown_triples = all_returns_df.apply(max_drawdown)\n",
|
653 |
+
"sharpe = abs_return.divide(ann_vol)\n",
|
654 |
+
"market_caps=[]\n",
|
655 |
+
"for asset in assets:\n",
|
656 |
+
" try:\n",
|
657 |
+
" market_caps.append(int(market_cap_dict[asset]))\n",
|
658 |
+
" except:\n",
|
659 |
+
" market_caps.append(0)\n",
|
660 |
+
"performance_df['Risk adjusted return'] = sharpe *100\n",
|
661 |
+
"performance_df['Return over period'] = abs_return * 100\n",
|
662 |
+
"performance_df['Annual volatility'] = ann_vol *100\n",
|
663 |
+
"performance_df['Max loss'] = drawdown_triples.iloc[0] *100\n",
|
664 |
+
"performance_df['Market cap'] = market_caps"
|
665 |
+
]
|
666 |
+
},
|
667 |
+
{
|
668 |
+
"cell_type": "code",
|
669 |
+
"execution_count": 55,
|
670 |
+
"id": "3bf84fd6-795b-4e06-b82b-bdf4e0714224",
|
671 |
+
"metadata": {},
|
672 |
+
"outputs": [
|
673 |
+
{
|
674 |
+
"data": {
|
675 |
+
"text/html": [
|
676 |
+
"<div>\n",
|
677 |
+
"<style scoped>\n",
|
678 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
679 |
+
" vertical-align: middle;\n",
|
680 |
+
" }\n",
|
681 |
+
"\n",
|
682 |
+
" .dataframe tbody tr th {\n",
|
683 |
+
" vertical-align: top;\n",
|
684 |
+
" }\n",
|
685 |
+
"\n",
|
686 |
+
" .dataframe thead th {\n",
|
687 |
+
" text-align: right;\n",
|
688 |
+
" }\n",
|
689 |
+
"</style>\n",
|
690 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
691 |
+
" <thead>\n",
|
692 |
+
" <tr style=\"text-align: right;\">\n",
|
693 |
+
" <th></th>\n",
|
694 |
+
" <th>Type</th>\n",
|
695 |
+
" <th>Risk adjusted return</th>\n",
|
696 |
+
" <th>Return over period</th>\n",
|
697 |
+
" <th>Annual volatility</th>\n",
|
698 |
+
" <th>Max loss</th>\n",
|
699 |
+
" <th>Market cap</th>\n",
|
700 |
+
" </tr>\n",
|
701 |
+
" </thead>\n",
|
702 |
+
" <tbody>\n",
|
703 |
+
" <tr>\n",
|
704 |
+
" <th>Uniform</th>\n",
|
705 |
+
" <td>Portfolio</td>\n",
|
706 |
+
" <td>-22.24</td>\n",
|
707 |
+
" <td>-12.20</td>\n",
|
708 |
+
" <td>54.84</td>\n",
|
709 |
+
" <td>62.39</td>\n",
|
710 |
+
" <td>0</td>\n",
|
711 |
+
" </tr>\n",
|
712 |
+
" <tr>\n",
|
713 |
+
" <th>Markowitz</th>\n",
|
714 |
+
" <td>Portfolio</td>\n",
|
715 |
+
" <td>-32.44</td>\n",
|
716 |
+
" <td>-4.10</td>\n",
|
717 |
+
" <td>12.65</td>\n",
|
718 |
+
" <td>20.49</td>\n",
|
719 |
+
" <td>0</td>\n",
|
720 |
+
" </tr>\n",
|
721 |
+
" <tr>\n",
|
722 |
+
" <th>bitcoin</th>\n",
|
723 |
+
" <td>Coin</td>\n",
|
724 |
+
" <td>-32.53</td>\n",
|
725 |
+
" <td>-18.42</td>\n",
|
726 |
+
" <td>56.61</td>\n",
|
727 |
+
" <td>57.98</td>\n",
|
728 |
+
" <td>564237420636</td>\n",
|
729 |
+
" </tr>\n",
|
730 |
+
" <tr>\n",
|
731 |
+
" <th>ethereum</th>\n",
|
732 |
+
" <td>Coin</td>\n",
|
733 |
+
" <td>-18.21</td>\n",
|
734 |
+
" <td>-12.59</td>\n",
|
735 |
+
" <td>69.11</td>\n",
|
736 |
+
" <td>59.35</td>\n",
|
737 |
+
" <td>238817158476</td>\n",
|
738 |
+
" </tr>\n",
|
739 |
+
" <tr>\n",
|
740 |
+
" <th>tether</th>\n",
|
741 |
+
" <td>Coin</td>\n",
|
742 |
+
" <td>-20.42</td>\n",
|
743 |
+
" <td>-0.35</td>\n",
|
744 |
+
" <td>1.74</td>\n",
|
745 |
+
" <td>1.38</td>\n",
|
746 |
+
" <td>73268815333</td>\n",
|
747 |
+
" </tr>\n",
|
748 |
+
" </tbody>\n",
|
749 |
+
"</table>\n",
|
750 |
+
"</div>"
|
751 |
+
],
|
752 |
+
"text/plain": [
|
753 |
+
" Type Risk adjusted return Return over period \\\n",
|
754 |
+
"Uniform Portfolio -22.24 -12.20 \n",
|
755 |
+
"Markowitz Portfolio -32.44 -4.10 \n",
|
756 |
+
"bitcoin Coin -32.53 -18.42 \n",
|
757 |
+
"ethereum Coin -18.21 -12.59 \n",
|
758 |
+
"tether Coin -20.42 -0.35 \n",
|
759 |
+
"\n",
|
760 |
+
" Annual volatility Max loss Market cap \n",
|
761 |
+
"Uniform 54.84 62.39 0 \n",
|
762 |
+
"Markowitz 12.65 20.49 0 \n",
|
763 |
+
"bitcoin 56.61 57.98 564237420636 \n",
|
764 |
+
"ethereum 69.11 59.35 238817158476 \n",
|
765 |
+
"tether 1.74 1.38 73268815333 "
|
766 |
+
]
|
767 |
+
},
|
768 |
+
"execution_count": 55,
|
769 |
+
"metadata": {},
|
770 |
+
"output_type": "execute_result"
|
771 |
+
}
|
772 |
+
],
|
773 |
+
"source": [
|
774 |
+
"performance_df.round(2).head()\n",
|
775 |
+
" "
|
776 |
+
]
|
777 |
+
},
|
778 |
+
{
|
779 |
+
"cell_type": "code",
|
780 |
+
"execution_count": 48,
|
781 |
+
"id": "62360da2-742f-418d-b071-2f9256855341",
|
782 |
+
"metadata": {},
|
783 |
+
"outputs": [],
|
784 |
+
"source": [
|
785 |
+
"market_caps=[]\n",
|
786 |
+
"for asset in assets:\n",
|
787 |
+
" try:\n",
|
788 |
+
" market_caps.append(int(market_cap_dict[asset]))\n",
|
789 |
+
" except:\n",
|
790 |
+
" market_caps.append(0)"
|
791 |
+
]
|
792 |
+
},
|
793 |
+
{
|
794 |
+
"cell_type": "code",
|
795 |
+
"execution_count": 81,
|
796 |
+
"id": "8c4e6c8b-5496-4ea7-b599-370fab33f4e1",
|
797 |
+
"metadata": {
|
798 |
+
"tags": []
|
799 |
+
},
|
800 |
+
"outputs": [
|
801 |
+
{
|
802 |
+
"ename": "ValueError",
|
803 |
+
"evalue": "['Uniform', 'bitcoin'] is not in list",
|
804 |
+
"output_type": "error",
|
805 |
+
"traceback": [
|
806 |
+
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
807 |
+
"\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
|
808 |
+
"\u001b[0;32m/var/folders/ff/pmf9d5156jz_pr_s8ybs3x780000gn/T/ipykernel_63357/545937437.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mlist\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mall_returns_df\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcolumns\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'Uniform'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'bitcoin'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
|
809 |
+
"\u001b[0;31mValueError\u001b[0m: ['Uniform', 'bitcoin'] is not in list"
|
810 |
+
]
|
811 |
+
}
|
812 |
+
],
|
813 |
+
"source": [
|
814 |
+
"list(all_returns_df.columns).index(['Uniform','bitcoin'])"
|
815 |
+
]
|
816 |
+
},
|
817 |
+
{
|
818 |
+
"cell_type": "code",
|
819 |
+
"execution_count": 82,
|
820 |
+
"id": "4c8bdb96-620f-418d-8d76-3cb7d633168c",
|
821 |
+
"metadata": {},
|
822 |
+
"outputs": [],
|
823 |
+
"source": [
|
824 |
+
"N = [i for i in range(len(all_returns_df.columns)) if all_returns_df.columns[i] in ['Uniform','bitcoin']]"
|
825 |
+
]
|
826 |
+
},
|
827 |
+
{
|
828 |
+
"cell_type": "code",
|
829 |
+
"execution_count": 83,
|
830 |
+
"id": "f21e4471-a78d-42d9-8d44-782a36b80931",
|
831 |
+
"metadata": {},
|
832 |
+
"outputs": [
|
833 |
+
{
|
834 |
+
"data": {
|
835 |
+
"text/plain": [
|
836 |
+
"[0, 2]"
|
837 |
+
]
|
838 |
+
},
|
839 |
+
"execution_count": 83,
|
840 |
+
"metadata": {},
|
841 |
+
"output_type": "execute_result"
|
842 |
+
}
|
843 |
+
],
|
844 |
+
"source": [
|
845 |
+
"N"
|
846 |
+
]
|
847 |
+
},
|
848 |
+
{
|
849 |
+
"cell_type": "code",
|
850 |
+
"execution_count": 95,
|
851 |
+
"id": "6cce98be-4c73-4b2b-98d2-4f995e3be60f",
|
852 |
+
"metadata": {},
|
853 |
+
"outputs": [],
|
854 |
+
"source": [
|
855 |
+
"dic={'a':1, 'b':2}"
|
856 |
+
]
|
857 |
+
},
|
858 |
+
{
|
859 |
+
"cell_type": "code",
|
860 |
+
"execution_count": 98,
|
861 |
+
"id": "37c4c19f-21ec-4817-a142-cde2b767a179",
|
862 |
+
"metadata": {},
|
863 |
+
"outputs": [
|
864 |
+
{
|
865 |
+
"ename": "SyntaxError",
|
866 |
+
"evalue": "invalid syntax (1228105859.py, line 1)",
|
867 |
+
"output_type": "error",
|
868 |
+
"traceback": [
|
869 |
+
"\u001b[0;36m File \u001b[0;32m\"/var/folders/ff/pmf9d5156jz_pr_s8ybs3x780000gn/T/ipykernel_63357/1228105859.py\"\u001b[0;36m, line \u001b[0;32m1\u001b[0m\n\u001b[0;31m for temp in del dic['a']:\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m invalid syntax\n"
|
870 |
+
]
|
871 |
+
}
|
872 |
+
],
|
873 |
+
"source": [
|
874 |
+
"for temp in del dic['a']:\n",
|
875 |
+
" print('yes')"
|
876 |
+
]
|
877 |
+
},
|
878 |
+
{
|
879 |
+
"cell_type": "code",
|
880 |
+
"execution_count": 101,
|
881 |
+
"id": "72199468-c241-4550-9686-c4349c7c0734",
|
882 |
+
"metadata": {},
|
883 |
+
"outputs": [],
|
884 |
+
"source": [
|
885 |
+
"del dic['b']"
|
886 |
+
]
|
887 |
+
},
|
888 |
+
{
|
889 |
+
"cell_type": "code",
|
890 |
+
"execution_count": 104,
|
891 |
+
"id": "09e95fe9-5000-419e-be53-686ca0e88a12",
|
892 |
+
"metadata": {},
|
893 |
+
"outputs": [
|
894 |
+
{
|
895 |
+
"data": {
|
896 |
+
"text/plain": [
|
897 |
+
"False"
|
898 |
+
]
|
899 |
+
},
|
900 |
+
"execution_count": 104,
|
901 |
+
"metadata": {},
|
902 |
+
"output_type": "execute_result"
|
903 |
+
}
|
904 |
+
],
|
905 |
+
"source": [
|
906 |
+
"len(dic)!=0"
|
907 |
+
]
|
908 |
+
},
|
909 |
+
{
|
910 |
+
"cell_type": "code",
|
911 |
+
"execution_count": 108,
|
912 |
+
"id": "46b8e046-810c-4284-be8b-fb24fcf46588",
|
913 |
+
"metadata": {},
|
914 |
+
"outputs": [],
|
915 |
+
"source": [
|
916 |
+
"strategy_dict = {'Uniform': {'a':2}, 'Markowitz':{'b':3}}"
|
917 |
+
]
|
918 |
+
},
|
919 |
+
{
|
920 |
+
"cell_type": "code",
|
921 |
+
"execution_count": 113,
|
922 |
+
"id": "770e265a-d31d-43bd-a7f6-053a1f9cdcf3",
|
923 |
+
"metadata": {},
|
924 |
+
"outputs": [
|
925 |
+
{
|
926 |
+
"name": "stdout",
|
927 |
+
"output_type": "stream",
|
928 |
+
"text": [
|
929 |
+
"Uniform\n",
|
930 |
+
"Markowitz\n"
|
931 |
+
]
|
932 |
+
}
|
933 |
+
],
|
934 |
+
"source": [
|
935 |
+
"for name, weights in strategy_dict.items():\n",
|
936 |
+
" print(name)"
|
937 |
+
]
|
938 |
+
},
|
939 |
+
{
|
940 |
+
"cell_type": "code",
|
941 |
+
"execution_count": 114,
|
942 |
+
"id": "ed0aea65-24c0-48a2-a957-0b1a1a4b518d",
|
943 |
+
"metadata": {},
|
944 |
+
"outputs": [],
|
945 |
+
"source": [
|
946 |
+
"port_returns = gen_port_rtns(rebased_df, uniform_weights_dict)"
|
947 |
+
]
|
948 |
+
},
|
949 |
+
{
|
950 |
+
"cell_type": "code",
|
951 |
+
"execution_count": 115,
|
952 |
+
"id": "72ec929c-629e-4e3c-841a-9fcae6610d2c",
|
953 |
+
"metadata": {},
|
954 |
+
"outputs": [
|
955 |
+
{
|
956 |
+
"data": {
|
957 |
+
"text/plain": [
|
958 |
+
"date\n",
|
959 |
+
"2021-05-24 1.000000\n",
|
960 |
+
"2021-05-25 1.057165\n",
|
961 |
+
"2021-05-26 1.103699\n",
|
962 |
+
"2021-05-27 1.094057\n",
|
963 |
+
"2021-05-28 1.035221\n",
|
964 |
+
" ... \n",
|
965 |
+
"2022-05-13 0.858603\n",
|
966 |
+
"2022-05-14 0.842769\n",
|
967 |
+
"2022-05-15 0.863536\n",
|
968 |
+
"2022-05-16 0.868936\n",
|
969 |
+
"2022-05-17 0.878021\n",
|
970 |
+
"Length: 359, dtype: float64"
|
971 |
+
]
|
972 |
+
},
|
973 |
+
"execution_count": 115,
|
974 |
+
"metadata": {},
|
975 |
+
"output_type": "execute_result"
|
976 |
+
}
|
977 |
+
],
|
978 |
+
"source": [
|
979 |
+
"port_returns"
|
980 |
+
]
|
981 |
+
},
|
982 |
+
{
|
983 |
+
"cell_type": "code",
|
984 |
+
"execution_count": 116,
|
985 |
+
"id": "b0694bbc-9941-41a4-bf49-96d581f82907",
|
986 |
+
"metadata": {},
|
987 |
+
"outputs": [],
|
988 |
+
"source": [
|
989 |
+
"port_returns = pd.DataFrame({'Uniform': port_returns})"
|
990 |
+
]
|
991 |
+
},
|
992 |
+
{
|
993 |
+
"cell_type": "code",
|
994 |
+
"execution_count": 117,
|
995 |
+
"id": "02b1c5fd-0b51-4b4d-9bf1-f5dcf46477a1",
|
996 |
+
"metadata": {},
|
997 |
+
"outputs": [
|
998 |
+
{
|
999 |
+
"data": {
|
1000 |
+
"text/html": [
|
1001 |
+
"<div>\n",
|
1002 |
+
"<style scoped>\n",
|
1003 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
1004 |
+
" vertical-align: middle;\n",
|
1005 |
+
" }\n",
|
1006 |
+
"\n",
|
1007 |
+
" .dataframe tbody tr th {\n",
|
1008 |
+
" vertical-align: top;\n",
|
1009 |
+
" }\n",
|
1010 |
+
"\n",
|
1011 |
+
" .dataframe thead th {\n",
|
1012 |
+
" text-align: right;\n",
|
1013 |
+
" }\n",
|
1014 |
+
"</style>\n",
|
1015 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
1016 |
+
" <thead>\n",
|
1017 |
+
" <tr style=\"text-align: right;\">\n",
|
1018 |
+
" <th></th>\n",
|
1019 |
+
" <th>Uniform</th>\n",
|
1020 |
+
" </tr>\n",
|
1021 |
+
" <tr>\n",
|
1022 |
+
" <th>date</th>\n",
|
1023 |
+
" <th></th>\n",
|
1024 |
+
" </tr>\n",
|
1025 |
+
" </thead>\n",
|
1026 |
+
" <tbody>\n",
|
1027 |
+
" <tr>\n",
|
1028 |
+
" <th>2021-05-24</th>\n",
|
1029 |
+
" <td>1.000000</td>\n",
|
1030 |
+
" </tr>\n",
|
1031 |
+
" <tr>\n",
|
1032 |
+
" <th>2021-05-25</th>\n",
|
1033 |
+
" <td>1.057165</td>\n",
|
1034 |
+
" </tr>\n",
|
1035 |
+
" <tr>\n",
|
1036 |
+
" <th>2021-05-26</th>\n",
|
1037 |
+
" <td>1.103699</td>\n",
|
1038 |
+
" </tr>\n",
|
1039 |
+
" <tr>\n",
|
1040 |
+
" <th>2021-05-27</th>\n",
|
1041 |
+
" <td>1.094057</td>\n",
|
1042 |
+
" </tr>\n",
|
1043 |
+
" <tr>\n",
|
1044 |
+
" <th>2021-05-28</th>\n",
|
1045 |
+
" <td>1.035221</td>\n",
|
1046 |
+
" </tr>\n",
|
1047 |
+
" <tr>\n",
|
1048 |
+
" <th>...</th>\n",
|
1049 |
+
" <td>...</td>\n",
|
1050 |
+
" </tr>\n",
|
1051 |
+
" <tr>\n",
|
1052 |
+
" <th>2022-05-13</th>\n",
|
1053 |
+
" <td>0.858603</td>\n",
|
1054 |
+
" </tr>\n",
|
1055 |
+
" <tr>\n",
|
1056 |
+
" <th>2022-05-14</th>\n",
|
1057 |
+
" <td>0.842769</td>\n",
|
1058 |
+
" </tr>\n",
|
1059 |
+
" <tr>\n",
|
1060 |
+
" <th>2022-05-15</th>\n",
|
1061 |
+
" <td>0.863536</td>\n",
|
1062 |
+
" </tr>\n",
|
1063 |
+
" <tr>\n",
|
1064 |
+
" <th>2022-05-16</th>\n",
|
1065 |
+
" <td>0.868936</td>\n",
|
1066 |
+
" </tr>\n",
|
1067 |
+
" <tr>\n",
|
1068 |
+
" <th>2022-05-17</th>\n",
|
1069 |
+
" <td>0.878021</td>\n",
|
1070 |
+
" </tr>\n",
|
1071 |
+
" </tbody>\n",
|
1072 |
+
"</table>\n",
|
1073 |
+
"<p>359 rows × 1 columns</p>\n",
|
1074 |
+
"</div>"
|
1075 |
+
],
|
1076 |
+
"text/plain": [
|
1077 |
+
" Uniform\n",
|
1078 |
+
"date \n",
|
1079 |
+
"2021-05-24 1.000000\n",
|
1080 |
+
"2021-05-25 1.057165\n",
|
1081 |
+
"2021-05-26 1.103699\n",
|
1082 |
+
"2021-05-27 1.094057\n",
|
1083 |
+
"2021-05-28 1.035221\n",
|
1084 |
+
"... ...\n",
|
1085 |
+
"2022-05-13 0.858603\n",
|
1086 |
+
"2022-05-14 0.842769\n",
|
1087 |
+
"2022-05-15 0.863536\n",
|
1088 |
+
"2022-05-16 0.868936\n",
|
1089 |
+
"2022-05-17 0.878021\n",
|
1090 |
+
"\n",
|
1091 |
+
"[359 rows x 1 columns]"
|
1092 |
+
]
|
1093 |
+
},
|
1094 |
+
"execution_count": 117,
|
1095 |
+
"metadata": {},
|
1096 |
+
"output_type": "execute_result"
|
1097 |
+
}
|
1098 |
+
],
|
1099 |
+
"source": [
|
1100 |
+
"port_returns"
|
1101 |
+
]
|
1102 |
+
},
|
1103 |
+
{
|
1104 |
+
"cell_type": "code",
|
1105 |
+
"execution_count": 118,
|
1106 |
+
"id": "9f276fcd-b0aa-4faf-980d-3766737d4e35",
|
1107 |
+
"metadata": {},
|
1108 |
+
"outputs": [],
|
1109 |
+
"source": [
|
1110 |
+
"list1 = ['a','b', 'c']\n",
|
1111 |
+
"list2=['a','b']"
|
1112 |
+
]
|
1113 |
+
},
|
1114 |
+
{
|
1115 |
+
"cell_type": "code",
|
1116 |
+
"execution_count": 121,
|
1117 |
+
"id": "9d95a028-39f4-4bc4-b15e-1ac98c6c8242",
|
1118 |
+
"metadata": {},
|
1119 |
+
"outputs": [
|
1120 |
+
{
|
1121 |
+
"data": {
|
1122 |
+
"text/plain": [
|
1123 |
+
"['a', 'b']"
|
1124 |
+
]
|
1125 |
+
},
|
1126 |
+
"execution_count": 121,
|
1127 |
+
"metadata": {},
|
1128 |
+
"output_type": "execute_result"
|
1129 |
+
}
|
1130 |
+
],
|
1131 |
+
"source": [
|
1132 |
+
"[x for x in list1 if x in list2]"
|
1133 |
+
]
|
1134 |
+
},
|
1135 |
+
{
|
1136 |
+
"cell_type": "code",
|
1137 |
+
"execution_count": 122,
|
1138 |
+
"id": "ef331045-44ad-4ff5-8ba7-3b4584a818c5",
|
1139 |
+
"metadata": {},
|
1140 |
+
"outputs": [],
|
1141 |
+
"source": [
|
1142 |
+
"dic = {'a':1, 'b':2}"
|
1143 |
+
]
|
1144 |
+
},
|
1145 |
+
{
|
1146 |
+
"cell_type": "code",
|
1147 |
+
"execution_count": 123,
|
1148 |
+
"id": "087dc4d9-b6b5-4cd5-9c76-b227fc56e31c",
|
1149 |
+
"metadata": {},
|
1150 |
+
"outputs": [],
|
1151 |
+
"source": [
|
1152 |
+
"dic['a'] =3"
|
1153 |
+
]
|
1154 |
+
},
|
1155 |
+
{
|
1156 |
+
"cell_type": "code",
|
1157 |
+
"execution_count": 124,
|
1158 |
+
"id": "688edc89-974a-4562-81db-0bc30798c30c",
|
1159 |
+
"metadata": {},
|
1160 |
+
"outputs": [
|
1161 |
+
{
|
1162 |
+
"data": {
|
1163 |
+
"text/plain": [
|
1164 |
+
"{'a': 3, 'b': 2}"
|
1165 |
+
]
|
1166 |
+
},
|
1167 |
+
"execution_count": 124,
|
1168 |
+
"metadata": {},
|
1169 |
+
"output_type": "execute_result"
|
1170 |
+
}
|
1171 |
+
],
|
1172 |
+
"source": [
|
1173 |
+
"dic"
|
1174 |
+
]
|
1175 |
+
},
|
1176 |
+
{
|
1177 |
+
"cell_type": "code",
|
1178 |
+
"execution_count": null,
|
1179 |
+
"id": "6ef4a1cb-d075-4d09-8038-caf04767565b",
|
1180 |
+
"metadata": {},
|
1181 |
+
"outputs": [],
|
1182 |
+
"source": []
|
1183 |
+
}
|
1184 |
+
],
|
1185 |
+
"metadata": {
|
1186 |
+
"kernelspec": {
|
1187 |
+
"display_name": "Python 3 (ipykernel)",
|
1188 |
+
"language": "python",
|
1189 |
+
"name": "python3"
|
1190 |
+
},
|
1191 |
+
"language_info": {
|
1192 |
+
"codemirror_mode": {
|
1193 |
+
"name": "ipython",
|
1194 |
+
"version": 3
|
1195 |
+
},
|
1196 |
+
"file_extension": ".py",
|
1197 |
+
"mimetype": "text/x-python",
|
1198 |
+
"name": "python",
|
1199 |
+
"nbconvert_exporter": "python",
|
1200 |
+
"pygments_lexer": "ipython3",
|
1201 |
+
"version": "3.9.5"
|
1202 |
+
}
|
1203 |
+
},
|
1204 |
+
"nbformat": 4,
|
1205 |
+
"nbformat_minor": 5
|
1206 |
+
}
|
persist.py
ADDED
@@ -0,0 +1,23 @@
1 |
+
from streamlit import session_state as _state
|
2 |
+
|
3 |
+
_PERSIST_STATE_KEY = f"{__name__}_PERSIST"
|
4 |
+
|
5 |
+
|
6 |
+
def persist(key: str) -> str:
|
7 |
+
"""Mark widget state as persistent."""
|
8 |
+
if _PERSIST_STATE_KEY not in _state:
|
9 |
+
_state[_PERSIST_STATE_KEY] = set()
|
10 |
+
|
11 |
+
_state[_PERSIST_STATE_KEY].add(key)
|
12 |
+
|
13 |
+
return key
|
14 |
+
|
15 |
+
|
16 |
+
def load_widget_state():
|
17 |
+
"""Load persistent widget state."""
|
18 |
+
if _PERSIST_STATE_KEY in _state:
|
19 |
+
_state.update({
|
20 |
+
key: value
|
21 |
+
for key, value in _state.items()
|
22 |
+
if key in _state[_PERSIST_STATE_KEY]
|
23 |
+
})
|
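A minimal usage sketch for persist.py (hypothetical page code, not part of this commit): wrap a widget's key in persist() and call load_widget_state() at the top of each page so the selection survives page switches.

import streamlit as st
from persist import persist, load_widget_state

load_widget_state()  # restore any values previously marked as persistent
st.selectbox("Portfolio strategy", ["Uniform", "Markowitz"], key=persist("strategy"))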
plot_creator.py
ADDED
@@ -0,0 +1,154 @@
1 |
+
import pandas as pd
|
2 |
+
import plotly.express as px
|
3 |
+
import streamlit as st
|
4 |
+
from PIL import Image
|
5 |
+
import glob
|
6 |
+
from risk_metrics import absolute_return, annual_vol, max_drawdown
|
7 |
+
from streamlit_custom_slider import st_custom_slider
|
8 |
+
|
9 |
+
@st.cache(persist=True, show_spinner=False)
|
10 |
+
def create_rebase_chart(rebased_df, num_coins):
|
11 |
+
melt_df = pd.melt(rebased_df.iloc[:,:num_coins], ignore_index=False)
|
12 |
+
melt_df.columns=['coin','price (USD)']
|
13 |
+
return melt_df
|
14 |
+
|
15 |
+
@st.cache(persist=True, show_spinner=False)
|
16 |
+
def create_chart_df(all_returns_df, portfolio, coin):
|
17 |
+
melt_df = pd.melt(all_returns_df, value_vars=[portfolio, coin], ignore_index=False)
|
18 |
+
melt_df.columns=['Asset','Value (USD)']
|
19 |
+
return melt_df
|
20 |
+
|
21 |
+
@st.cache(persist=True, show_spinner=False)
|
22 |
+
def create_comparison_df(all_returns_df, selected_assets):
|
23 |
+
selected_assets_present = [asset for asset in selected_assets if asset in list(all_returns_df.columns)]
|
24 |
+
melt_df = pd.melt(all_returns_df, value_vars=selected_assets_present, ignore_index=False)
|
25 |
+
melt_df.columns=['Asset','Value (USD)']
|
26 |
+
return melt_df
|
27 |
+
|
28 |
+
@st.cache(persist=True, show_spinner=False)
|
29 |
+
def ordered_dict(dictionary):
|
30 |
+
return {k: v for k, v in sorted(dictionary.items(), key=lambda item: item[1], reverse=True)}
|
31 |
+
|
32 |
+
# allow output mutation in this function because I'm not worried about mutation
|
33 |
+
# and I want to reduce the time it takes streamlit to check it hasn't mutated.
|
34 |
+
@st.cache(persist=True, show_spinner=False, allow_output_mutation=True)
|
35 |
+
def load_images():
|
36 |
+
image_dict = {}
|
37 |
+
for filename in glob.glob('logos/*.png'): # assuming all logos are png format
|
38 |
+
im=Image.open(filename)
|
39 |
+
image_dict[filename[6:][:-4]]=im
|
40 |
+
return image_dict
|
41 |
+
|
42 |
+
@st.cache(persist=True, show_spinner=False)
|
43 |
+
def gen_performance_df(all_returns_df, market_cap_dict, strategy_dict):
|
44 |
+
assets = all_returns_df.columns
|
45 |
+
performance_df = pd.DataFrame(index = assets)
|
46 |
+
performance_df['Type'] = ["Portfolio" if x in list(strategy_dict.keys()) else "Coin" for x in assets]
|
47 |
+
abs_return = all_returns_df.apply(absolute_return)
|
48 |
+
ann_vol = all_returns_df.apply(annual_vol)
|
49 |
+
drawdown_triples = all_returns_df.apply(max_drawdown)
|
50 |
+
sharpe = abs_return.divide(ann_vol)
|
51 |
+
market_caps=[]
|
52 |
+
for asset in assets:
|
53 |
+
try:
|
54 |
+
market_caps.append(int(market_cap_dict[asset]))
|
55 |
+
except:
|
56 |
+
market_caps.append(0)
|
57 |
+
performance_df['Total return %'] = abs_return * 100
|
58 |
+
performance_df['Risk / return'] = sharpe *100
|
59 |
+
performance_df['Annual vol'] = ann_vol *100
|
60 |
+
performance_df['Max loss %'] = drawdown_triples.iloc[0] *100
|
61 |
+
performance_df['Market cap $M'] = [cap/1000000 for cap in market_caps]
|
62 |
+
return performance_df
|
63 |
+
|
64 |
+
@st.cache(persist=True, show_spinner=False)
|
65 |
+
def gen_performance_ag_df(all_returns_df, market_cap_dict, strategy_dict):
|
66 |
+
assets = all_returns_df.columns
|
67 |
+
performance_df = pd.DataFrame(index=assets)
|
68 |
+
performance_df['Asset'] = assets
|
69 |
+
performance_df['Type'] = ["Portfolio" if x in list(strategy_dict.keys()) else "Coin" for x in assets]
|
70 |
+
abs_return = all_returns_df.apply(absolute_return)
|
71 |
+
ann_vol = all_returns_df.apply(annual_vol)
|
72 |
+
drawdown_triples = all_returns_df.apply(max_drawdown)
|
73 |
+
sharpe = abs_return.divide(ann_vol)
|
74 |
+
market_caps=[]
|
75 |
+
for asset in assets:
|
76 |
+
try:
|
77 |
+
market_caps.append(int(market_cap_dict[asset]))
|
78 |
+
except:
|
79 |
+
market_caps.append(0)
|
80 |
+
performance_df['Risk adjusted return %'] = sharpe *100
|
81 |
+
performance_df['Return over period %'] = abs_return * 100
|
82 |
+
performance_df['Annual volatility'] = ann_vol *100
|
83 |
+
performance_df['Max loss %'] = drawdown_triples.iloc[0] *100
|
84 |
+
performance_df['Market cap $M'] = [cap/1000000 for cap in market_caps]
|
85 |
+
return performance_df
|
86 |
+
|
87 |
+
@st.cache(persist=True, show_spinner=False)
|
88 |
+
def add_drawdown(fig, all_returns_df, selected_asset):
|
89 |
+
#calculate max drawdown
|
90 |
+
max_dd, start_idx, end_idx = max_drawdown(all_returns_df[selected_asset])
|
91 |
+
start_dd = all_returns_df.index[start_idx]
|
92 |
+
end_dd = all_returns_df.index[end_idx]
|
93 |
+
fig.add_vline(x=start_dd, line_width=1, line_color="red")
|
94 |
+
fig.add_vline(x=end_dd, line_width=1, line_color="red")
|
95 |
+
fig.add_vrect(x0=start_dd, x1=end_dd, line_width=0, fillcolor="red", opacity=0.05, annotation_text=selected_asset + " maxdd")
|
96 |
+
return fig, max_dd, start_dd, end_dd
|
97 |
+
|
98 |
+
def write_coins(non_zero_coins, weights_dict, ids2names_dict, n_cols=2):
|
99 |
+
n_coins = len(non_zero_coins)
|
100 |
+
n_rows = 1 + n_coins // int(n_cols)
|
101 |
+
|
102 |
+
rows = [st.container() for _ in range(n_rows)]
|
103 |
+
cols_per_row = [r.columns(n_cols) for r in rows]
|
104 |
+
cols = [column for row in cols_per_row for column in row]
|
105 |
+
|
106 |
+
#cols = st.columns(n_coins)
|
107 |
+
#checkboxes=[]
|
108 |
+
for i, coin_id in enumerate(non_zero_coins):
|
109 |
+
cols[i].slider(ids2names_dict[coin_id], min_value=0, max_value=100,
|
110 |
+
value=int(weights_dict[coin_id]*100), key=coin_id,
|
111 |
+
disabled=True)
|
112 |
+
|
113 |
+
def write_bespoke_coins(coin_names, n_cols=2):
|
114 |
+
n_coins = len(coin_names)
|
115 |
+
n_rows = 1 + n_coins // int(n_cols)
|
116 |
+
|
117 |
+
rows = [st.container() for _ in range(n_rows)]
|
118 |
+
cols_per_row = [r.columns(n_cols) for r in rows]
|
119 |
+
cols = [column for row in cols_per_row for column in row]
|
120 |
+
|
121 |
+
#cols = st.columns(n_coins)
|
122 |
+
#checkboxes=[]
|
123 |
+
weights_list = []
|
124 |
+
for i, coin_name in enumerate(coin_names):
|
125 |
+
weight = cols[i].slider(coin_name, min_value=0, max_value=100,
|
126 |
+
value=50, key=coin_name,
|
127 |
+
disabled=False)
|
128 |
+
weights_list.append(weight)
|
129 |
+
weights_list = [weight/sum(weights_list) for weight in weights_list]
|
130 |
+
return weights_list
|
131 |
+
|
132 |
+
|
133 |
+
def write_coins_custom(coin_names, n_cols=2):
|
134 |
+
n_coins = len(coin_names)
|
135 |
+
n_rows = 1 + n_coins // int(n_cols)
|
136 |
+
|
137 |
+
rows = [st.container() for _ in range(n_rows)]
|
138 |
+
cols_per_row = [r.columns(n_cols) for r in rows]
|
139 |
+
cols = [column for row in cols_per_row for column in row]
|
140 |
+
|
141 |
+
#cols = st.columns(n_coins)
|
142 |
+
#checkboxes=[]
|
143 |
+
weights_list = []
|
144 |
+
for i, coin_name in enumerate(coin_names):
|
145 |
+
with cols[i]:
|
146 |
+
weight = st_custom_slider(coin_name, min_value=0, max_value=100,
|
147 |
+
value=50, key=coin_name)
|
148 |
+
weights_list.append(weight)
|
149 |
+
weights_list = [weight/sum(weights_list) for weight in weights_list]
|
150 |
+
return weights_list
|
151 |
+
|
152 |
+
@st.cache(persist=True, show_spinner=False)
|
153 |
+
def get_pre_selected_idx(assets, pre_selected):
|
154 |
+
return [i for i in range(len(assets)) if assets[i] in pre_selected]
|
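A minimal sketch of the row/column flattening trick used by write_coins and write_bespoke_coins above (the item names are placeholders): one st.container() per row, each split into n_cols columns, then flattened so widgets can be placed with a single running index.

import streamlit as st

items = ['bitcoin', 'ethereum', 'tether']  # placeholder names
n_cols = 2
n_rows = 1 + len(items) // n_cols
rows = [st.container() for _ in range(n_rows)]
cols = [col for row in rows for col in row.columns(n_cols)]
for i, name in enumerate(items):
    cols[i].write(name)  # one grid cell per item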
port_creator.py
ADDED
@@ -0,0 +1,141 @@
1 |
+
import pandas as pd
|
2 |
+
import streamlit as st
|
3 |
+
from pypfopt import EfficientFrontier
|
4 |
+
from pypfopt import risk_models
|
5 |
+
from pypfopt import expected_returns
|
6 |
+
from datetime import date, timedelta
|
7 |
+
|
8 |
+
@st.cache(persist=True, show_spinner=False)
|
9 |
+
def uniform(returns_df, num_coins, start_date, end_date):
|
10 |
+
# THERE IS AN ERROR
|
11 |
+
# Need to change this from num_coins being a number to investment_set being
|
12 |
+
# a list of assets available. otherwise there could be assets in your
|
13 |
+
# portfolio that are not included in your investment set graph
|
14 |
+
'''
|
15 |
+
A function to return a uniform distribution of weights across all assets with
|
16 |
+
a full returns history (no NaN values) between start_date and end_date.
|
17 |
+
|
18 |
+
Returns:
|
19 |
+
weights: a vector of weights of dimension num_coins.
|
20 |
+
investment_cols: a vector of column names for coins with full histories.
|
21 |
+
'''
|
22 |
+
investment_df = returns_df[start_date:end_date]
|
23 |
+
investment_df.dropna(axis=1, inplace=True) # drop cols with any NaN values
|
24 |
+
investment_cols = investment_df.columns[0:num_coins]
|
25 |
+
weights = [1/num_coins for _ in range(num_coins)]
|
26 |
+
return weights, investment_cols
|
27 |
+
|
28 |
+
@st.cache(persist=True, show_spinner=False)
|
29 |
+
def markowitz(returns_df):
|
30 |
+
pass
|
31 |
+
|
32 |
+
@st.cache(persist=True, show_spinner=False, allow_output_mutation=True)
|
33 |
+
def create_port_rtns(returns_df, weights, investment_cols, start_date, end_date):
|
34 |
+
investment_df = returns_df[investment_cols]
|
35 |
+
investment_df[start_date:start_date]=0
|
36 |
+
rebased_df = (1 + investment_df[start_date:end_date]).cumprod()
|
37 |
+
port_returns = rebased_df.dot(weights)
|
38 |
+
port_returns.index.name = 'date'
|
39 |
+
port_returns.name = 'price (USD)'
|
40 |
+
return port_returns
|
41 |
+
|
42 |
+
@st.cache(persist=True, show_spinner=False)
|
43 |
+
def markowitz_weights(histories_df,start_port_date,investment_cols, analysis_days=365):
|
44 |
+
start_analysis_date = start_port_date - timedelta(analysis_days)
|
45 |
+
analysis_df = histories_df[start_analysis_date:start_port_date][investment_cols]
|
46 |
+
|
47 |
+
# Calculate expected returns and sample covariance
|
48 |
+
mu = expected_returns.mean_historical_return(analysis_df)
|
49 |
+
S = risk_models.sample_cov(analysis_df)
|
50 |
+
# Optimize for maximal Sharpe ratio
|
51 |
+
attempts=0
|
52 |
+
while attempts < 50:
|
53 |
+
try:
|
54 |
+
ef = EfficientFrontier(mu, S, weight_bounds=(0, 1))
|
55 |
+
ef.max_sharpe()
|
56 |
+
break
|
57 |
+
except Exception as e:
|
58 |
+
attempts += 1
|
59 |
+
try:
|
60 |
+
cleaned_weights = ef.clean_weights()
|
61 |
+
except Exception as e:
|
62 |
+
print("Could not find optimal solution, try changing optimisation constraints or investment set")
|
63 |
+
return cleaned_weights
|
64 |
+
|
65 |
+
@st.cache(persist=True, show_spinner=False)
|
66 |
+
def create_weights_df(weights_dict, strategy):
|
67 |
+
return pd.DataFrame({
|
68 |
+
'strategy': strategy,
|
69 |
+
'assets': list(weights_dict.keys()),
|
70 |
+
'weights': list(weights_dict.values())
|
71 |
+
})
|
72 |
+
|
73 |
+
@st.cache(persist=True, show_spinner=False)
|
74 |
+
def ids_with_histories(histories_df, start_date, end_date):
|
75 |
+
investment_df = histories_df[start_date:end_date]
|
76 |
+
investment_df.dropna(axis=1, inplace=True) # drop cols with any NaN values
|
77 |
+
return investment_df.columns
|
78 |
+
|
79 |
+
@st.cache(persist=True, show_spinner=False)
|
80 |
+
def uniform_weights_dict(ids_with_histories):
|
81 |
+
weight = 1/len(ids_with_histories)
|
82 |
+
uniform_weights_dict = {}
|
83 |
+
for id in ids_with_histories:
|
84 |
+
uniform_weights_dict[id] = weight
|
85 |
+
return uniform_weights_dict
|
86 |
+
|
87 |
+
@st.cache(persist=True, show_spinner=False)
|
88 |
+
def markowitz_weights_dict(histories_df,start_port_date,ids_with_histories, analysis_days=365):
|
89 |
+
start_analysis_date = start_port_date - timedelta(analysis_days)
|
90 |
+
analysis_df = histories_df[start_analysis_date:start_port_date][ids_with_histories]
|
91 |
+
|
92 |
+
# Calculate expected returns and sample covariance
|
93 |
+
mu = expected_returns.mean_historical_return(analysis_df)
|
94 |
+
S = risk_models.sample_cov(analysis_df)
|
95 |
+
# Optimize for maximal Sharpe ratio
|
96 |
+
attempts=0
|
97 |
+
while attempts < 10:
|
98 |
+
try:
|
99 |
+
ef = EfficientFrontier(mu, S, weight_bounds=(0, 1))
|
100 |
+
ef.max_sharpe()
|
101 |
+
break
|
102 |
+
except Exception as e:
|
103 |
+
attempts += 1
|
104 |
+
try:
|
105 |
+
cleaned_weights = ef.clean_weights()
|
106 |
+
except Exception as e:
|
107 |
+
print("Could not find optimal solution, try changing optimisation constraints or investment set")
|
108 |
+
return {k: v for k, v in sorted(cleaned_weights.items(), key=lambda item: item[1], reverse=True)}
|
109 |
+
#return cleaned_weights
|
110 |
+
|
111 |
+
@st.cache(persist=True, show_spinner=False)
|
112 |
+
def gen_port_rtns(rebased_df, weights_dict):
|
113 |
+
new_weights_dict = {k: v for k, v in weights_dict.items() if k in rebased_df.columns}
|
114 |
+
new_weights_dict = {k: v/sum(new_weights_dict.values()) for k, v in new_weights_dict.items()}
|
115 |
+
return rebased_df[list(new_weights_dict.keys())].dot(list(new_weights_dict.values()))
|
116 |
+
#return rebased_df[list(weights_dict.keys())].dot(list(weights_dict.values()))
|
117 |
+
|
118 |
+
@st.cache(persist=True, show_spinner=False)
|
119 |
+
def gen_all_returns(rebased_df, ids_with_histories, strategy_dict):
|
120 |
+
'''
|
121 |
+
A function to generate returns for all portfolios and all coins with full
|
122 |
+
histories over the backtest period, rebased to the start of the backtest
|
123 |
+
period.
|
124 |
+
'''
|
125 |
+
port_returns = gen_port_rtns(rebased_df, strategy_dict['Uniform'])
|
126 |
+
port_returns = pd.DataFrame({'Uniform': port_returns})
|
127 |
+
temp_dict = {k: v for k, v in strategy_dict.items() if k != 'Uniform'}
|
128 |
+
if len(temp_dict)!=0:
|
129 |
+
for name, weights in temp_dict.items():
|
130 |
+
temp_returns = gen_port_rtns(rebased_df, weights)
|
131 |
+
temp_returns.name = name
|
132 |
+
port_returns = port_returns.join(temp_returns)
|
133 |
+
return port_returns.join(rebased_df[ids_with_histories])
|
134 |
+
|
135 |
+
#uniform_returns = gen_port_rtns(rebased_df, uniform_weights_dict)
|
136 |
+
#uniform_returns.name = "Uniform"
|
137 |
+
#markowitz_returns = gen_port_rtns(rebased_df, markowitz_weights_dict)
|
138 |
+
#markowitz_returns.name = "Markowitz"
|
139 |
+
#port_returns = uniform_returns.to_frame().join(markowitz_returns)
|
140 |
+
#return port_returns.join(rebased_df[ids_with_histories])
|
141 |
+
|
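A worked toy example of what gen_port_rtns computes (numbers invented): the rebased price columns are filtered to the weights that are present, re-normalised, and dotted with the weight vector to give one portfolio value per day.

import pandas as pd

rebased = pd.DataFrame({'bitcoin': [1.0, 1.1, 1.2], 'ethereum': [1.0, 0.9, 1.0]})
weights = {'bitcoin': 0.5, 'ethereum': 0.5}
port = rebased[list(weights.keys())].dot(list(weights.values()))
# day 0: 1.0, day 1: 0.5*1.1 + 0.5*0.9 = 1.0, day 2: 0.5*1.2 + 0.5*1.0 = 1.1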
port_viewer.py
ADDED
@@ -0,0 +1,159 @@
1 |
+
import streamlit as st
|
2 |
+
import json
|
3 |
+
import plotly.express as px
|
4 |
+
import pandas as pd
|
5 |
+
from datetime import date, timedelta, datetime
|
6 |
+
from risk_metrics import annual_return, absolute_return, annual_vol, max_drawdown
|
7 |
+
import numpy as np
|
8 |
+
from data_creator import create_assets, gen_symbols, create_histories_df, create_unix_dates, create_returns_df, create_rebased_df, date_range
|
9 |
+
from plot_creator import create_rebase_chart
|
10 |
+
from port_creator import uniform, create_port_rtns, markowitz_weights, create_weights_df
|
11 |
+
from risk_metrics import max_drawdown
|
12 |
+
|
13 |
+
|
14 |
+
# load start and end dates for investment analysis
|
15 |
+
lookback_years = 5
|
16 |
+
start_date = date.today() - timedelta(365)
|
17 |
+
end_date = date.today()
|
18 |
+
|
19 |
+
|
20 |
+
with st.sidebar:
|
21 |
+
investment_set = st.number_input(
|
22 |
+
"How many coins to would you like in your investment set?",
|
23 |
+
min_value=1,
|
24 |
+
max_value=50,
|
25 |
+
value=10,
|
26 |
+
help="Coins will be added to your investment set in order of largest market cap"
|
27 |
+
#("Top 5 coins", "Top 10 coins")
|
28 |
+
)
|
29 |
+
portfolio_type = st.selectbox(
|
30 |
+
'Select portfolio strategy',
|
31 |
+
('Uniform', 'Markowitz'),
|
32 |
+
help='''
|
33 |
+
Uniform: An equal proportion of your initial investment is allocated to each
|
34 |
+
asset in the investment set (provided the asset existed at the start date of
|
35 |
+
your investment period). \n
|
36 |
+
Markowitz: Your initial investment is allocated to each asset in the
|
37 |
+
investment set to achieve the "market portfolio" using a mean-variance
|
38 |
+
analysis (provided the asset existed at the start date of your investment
|
39 |
+
period).
|
40 |
+
'''
|
41 |
+
)
|
42 |
+
|
43 |
+
|
44 |
+
# Pull down histories from coincap, and create dataframes for historic prices,
|
45 |
+
# returns and rebased cumulative price; histories_df, returns_df, and
|
46 |
+
# rebased_df, respectively.
|
47 |
+
# All of the functions in the block below have been decorated with st.cache()
|
48 |
+
# and so will only be re-run if their arguments, or their underlying code, are
|
49 |
+
# changed
|
50 |
+
assets_json = create_assets(total_coins=100)
|
51 |
+
symbols, names, coin_ids = gen_symbols(assets_json)
|
52 |
+
start_unix, end_unix = create_unix_dates(today=date.today(), lookback_years=lookback_years)
|
53 |
+
histories_df = create_histories_df(coin_ids, start_unix, end_unix)
|
54 |
+
returns_df = create_returns_df(histories_df)
|
55 |
+
rebased_df = create_rebased_df(returns_df, start_date=start_date, end_date=end_date)
|
56 |
+
|
57 |
+
if 'rebased_df' not in st.session_state:
|
58 |
+
st.session_state.rebased_df = rebased_df
|
59 |
+
|
60 |
+
#def adjust_rebased(returns_df, start_date, end_date):
|
61 |
+
def adjust_rebased():
|
62 |
+
st.session_state.rebased_df = create_rebased_df(returns_df,
|
63 |
+
start_date = st.session_state.myslider[0],
|
64 |
+
end_date=st.session_state.myslider[1])
|
65 |
+
|
66 |
+
|
67 |
+
# Draw rebased graph
|
68 |
+
melt_df = create_rebase_chart(st.session_state.rebased_df, num_coins=investment_set)
|
69 |
+
fig = px.line(melt_df, x=melt_df.index, y='price (USD)', color='coin')
|
70 |
+
|
71 |
+
with st.expander('Quick explanation', expanded=True):
|
72 |
+
st.subheader("What's this all about then, eh?")
|
73 |
+
st.write('''
|
74 |
+
The app allows you to construct portfolios of crypto currencies and to
|
75 |
+
backtest their historic performance.
|
76 |
+
|
77 |
+
You can select how many coins you would like in your investment set using the
|
78 |
+
dropdown box in the sidebar.
|
79 |
+
|
80 |
+
You can select from two different portfolio constructions
|
81 |
+
strategies using the dropdown box in the sidebar:
|
82 |
+
|
83 |
+
- Uniform - An equal proportion of your initial investment is allocated to each coin.
|
84 |
+
- Markowitz - Your initial investment is allocated to each coin to achieve the portfolio with the highest Sharpe ratio in the 365-day period prior to the investment start date.
|
85 |
+
|
86 |
+
You can adjust the date range for the portfolio backtest using the slider widget below.
|
87 |
+
|
88 |
+
If you would like to see the performance of the individual coins in your investment set
|
89 |
+
over the backtest period, click the + icon in the Coin view expander.
|
90 |
+
|
91 |
+
To collapse this expander click the - icon at the top right.
|
92 |
+
|
93 |
+
''')
|
94 |
+
|
95 |
+
# Add select slider to allow
|
96 |
+
date_list = date_range(end_date,lookback_years-1)
|
97 |
+
start_port_date, end_port_date = st.select_slider(
|
98 |
+
'Select date range for portfolio backtest',
|
99 |
+
key="myslider",
|
100 |
+
options=date_list,
|
101 |
+
value=(date.today() - timedelta(365), date.today()),
|
102 |
+
on_change=adjust_rebased
|
103 |
+
)
|
104 |
+
|
105 |
+
with st.expander("Coin view", expanded=False):
|
106 |
+
st.subheader('Individual coin performance')
|
107 |
+
st.write(fig)
|
108 |
+
|
109 |
+
uniform_weights, investment_cols = uniform(returns_df, num_coins=investment_set,
|
110 |
+
start_date=start_port_date, end_date=end_port_date)
|
111 |
+
|
112 |
+
uniform_weights_dict = {}
|
113 |
+
for i, coin in enumerate(investment_cols):
|
114 |
+
uniform_weights_dict[coin] = uniform_weights[i]
|
115 |
+
markowitz_weights_dict = markowitz_weights(histories_df,start_port_date,investment_cols, analysis_days=365)
|
116 |
+
|
117 |
+
uniform_returns = create_port_rtns(returns_df, uniform_weights, investment_cols, start_port_date, end_port_date)
|
118 |
+
markowitz_returns = create_port_rtns(returns_df, list(markowitz_weights_dict.values()), investment_cols, start_port_date, end_port_date)
|
119 |
+
returns_dict = {'Uniform': uniform_returns, 'Markowitz': markowitz_returns}
|
120 |
+
strategy_dict = {'Uniform': uniform_weights_dict, 'Markowitz':markowitz_weights_dict}
|
121 |
+
port_return = returns_dict[portfolio_type]
|
122 |
+
|
123 |
+
#calculate max drawdown
|
124 |
+
max_dd, start_idx, end_idx = max_drawdown(port_return)
|
125 |
+
start_dd = port_return.index[start_idx]
|
126 |
+
end_dd = port_return.index[end_idx]
|
127 |
+
|
128 |
+
|
129 |
+
port_fig = px.line(port_return, x=port_return.index, y='price (USD)')
|
130 |
+
port_fig.add_vline(x=start_dd, line_width=1, line_color="red")
|
131 |
+
port_fig.add_vline(x=end_dd, line_width=1, line_color="red")
|
132 |
+
port_fig.add_vrect(x0=start_dd, x1=end_dd, line_width=0, fillcolor="red", opacity=0.05, annotation_text="max drawdown ")
|
133 |
+
st.subheader("{} portfolio performance".format(portfolio_type))
|
134 |
+
|
135 |
+
weights_df = create_weights_df(strategy_dict[portfolio_type], portfolio_type)
|
136 |
+
|
137 |
+
bar_fig = px.bar(weights_df, x="strategy", y="weights", color="assets", width=200)
|
138 |
+
bar_fig.update_layout(showlegend=False, xaxis={'visible': False}, )
|
139 |
+
|
140 |
+
|
141 |
+
cols = st.columns([8,1])
|
142 |
+
cols[0].write(port_fig)
|
143 |
+
cols[1].write(bar_fig)
|
144 |
+
|
145 |
+
cols = st.columns([1,3])
|
146 |
+
outlay = cols[0].number_input('Initial $ amount', min_value=0, value=1000,
|
147 |
+
step=1)
|
148 |
+
final_amount = outlay*port_return[-1]
|
149 |
+
max_loss=outlay*max_dd
|
150 |
+
|
151 |
+
cols[1].write('''For an initial investment of **${:,}**\n
|
152 |
+
|
153 |
+
You would have ended up with **${:,}** \n
|
154 |
+
|
155 |
+
You would have suffered a maximum loss of **{:.0f}%** of your portfolio value
|
156 |
+
between **{}** and **{}**'''.format(
|
157 |
+
outlay, int(final_amount), max_dd*100, start_dd, end_dd))
|
158 |
+
|
159 |
+
|
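For the closing message in port_viewer.py, a small worked example of the arithmetic (all numbers invented): with a $1,000 outlay, a final rebased portfolio value of 0.878 and a max drawdown of 62%, the figures reported to the user would be:

outlay = 1000
final_rebased = 0.878  # last value of port_return (illustrative)
max_dd = 0.62          # max drawdown as a fraction (illustrative)
final_amount = outlay * final_rebased  # ~ $878 ending value
max_loss = outlay * max_dd             # ~ $620 lost between the drawdown dates
print(int(final_amount), int(max_loss))  # 878 620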