Invicto69 committed (verified)
Commit 98d3a70 · 1 Parent(s): edba830

Update indicators.py

Files changed (1)
  1. indicators.py +629 -252
indicators.py CHANGED
@@ -1,252 +1,629 @@
- import pandas as pd
- import numpy as np
- import plotly.graph_objects as go
- from plotly.subplots import make_subplots
-
- class SMC:
-     def __init__(self, data, swing_hl_window_sz=10):
-         self.data = data
-         self.data['Date'] = self.data.index.to_series()
-         self.swing_hl_window_sz = swing_hl_window_sz
-         self.order_blocks = self.order_block()
-
-     def backtest_buy_signal(self):
-         bull_ob = self.order_blocks[(self.order_blocks['OB']==1) & (self.order_blocks['MitigatedIndex']!=0)]
-         arr = np.zeros(len(self.data))
-         arr[bull_ob['MitigatedIndex'].apply(lambda x: int(x))] = 1
-         return arr
-
-     def backtest_sell_signal(self):
-         bear_ob = self.order_blocks[(self.order_blocks['OB'] == -1) & (self.order_blocks['MitigatedIndex'] != 0)]
-         arr = np.zeros(len(self.data))
-         arr[bear_ob['MitigatedIndex'].apply(lambda x: int(x))] = -1
-         return arr
-
-     def swing_highs_lows(self, window_size):
-         l = self.data['Low'].reset_index(drop=True)
-         h = self.data['High'].reset_index(drop=True)
-         swing_highs = (h.rolling(window_size, center=True).max() / h == 1.)
-         swing_lows = (l.rolling(window_size, center=True).min() / l == 1.)
-         return pd.DataFrame({'Date':self.data.index.to_series(), 'highs':swing_highs.values, 'lows':swing_lows.values})
-
-     def fvg(self):
-         """
-         FVG - Fair Value Gap
-         A fair value gap is when the previous high is lower than the next low if the current candle is bullish.
-         Or when the previous low is higher than the next high if the current candle is bearish.
-
-         parameters:
-
-         returns:
-         FVG = 1 if bullish fair value gap, -1 if bearish fair value gap
-         Top = the top of the fair value gap
-         Bottom = the bottom of the fair value gap
-         MitigatedIndex = the index of the candle that mitigated the fair value gap
-         """
-
-         fvg = np.where(
-             (
-                 (self.data["High"].shift(1) < self.data["Low"].shift(-1))
-                 & (self.data["Close"] > self.data["Open"])
-             )
-             | (
-                 (self.data["Low"].shift(1) > self.data["High"].shift(-1))
-                 & (self.data["Close"] < self.data["Open"])
-             ),
-             np.where(self.data["Close"] > self.data["Open"], 1, -1),
-             np.nan,
-         )
-
-         top = np.where(
-             ~np.isnan(fvg),
-             np.where(
-                 self.data["Close"] > self.data["Open"],
-                 self.data["Low"].shift(-1),
-                 self.data["Low"].shift(1),
-             ),
-             np.nan,
-         )
-
-         bottom = np.where(
-             ~np.isnan(fvg),
-             np.where(
-                 self.data["Close"] > self.data["Open"],
-                 self.data["High"].shift(1),
-                 self.data["High"].shift(-1),
-             ),
-             np.nan,
-         )
-
-         mitigated_index = np.zeros(len(self.data), dtype=np.int32)
-         for i in np.where(~np.isnan(fvg))[0]:
-             mask = np.zeros(len(self.data), dtype=np.bool_)
-             if fvg[i] == 1:
-                 mask = self.data["Low"][i + 2:] <= top[i]
-             elif fvg[i] == -1:
-                 mask = self.data["High"][i + 2:] >= bottom[i]
-             if np.any(mask):
-                 j = np.argmax(mask) + i + 2
-                 mitigated_index[i] = j
-
-         mitigated_index = np.where(np.isnan(fvg), np.nan, mitigated_index)
-
-         return pd.concat(
-             [
-                 pd.Series(fvg.flatten(), name="FVG"),
-                 pd.Series(top.flatten(), name="Top"),
-                 pd.Series(bottom.flatten(), name="Bottom"),
-                 pd.Series(mitigated_index.flatten(), name="MitigatedIndex"),
-             ],
-             axis=1,
-         )
-
-     def order_block(self, imb_perc=.1, join_consecutive=True):
-         hl = self.swing_highs_lows(self.swing_hl_window_sz)
-
-         ob = np.where(
-             (
-                 ((self.data["High"]*((100+imb_perc)/100)) < self.data["Low"].shift(-2))
-                 & ((hl['lows']==True) | (hl['lows'].shift(1)==True))
-             )
-             | (
-                 (self.data["Low"] > (self.data["High"].shift(-2)*((100+imb_perc)/100)))
-                 & ((hl['highs']==True) | (hl['highs'].shift(1)==True))
-             ),
-             np.where(((hl['lows']==True) | (hl['lows'].shift(1)==True)), 1, -1),
-             np.nan,
-         )
-
-         # print(ob)
-
-         top = np.where(
-             ~np.isnan(ob),
-             np.where(
-                 self.data["Close"] > self.data["Open"],
-                 self.data["Low"].shift(-2),
-                 self.data["Low"],
-             ),
-             np.nan,
-         )
-
-         bottom = np.where(
-             ~np.isnan(ob),
-             np.where(
-                 self.data["Close"] > self.data["Open"],
-                 self.data["High"],
-                 self.data["High"].shift(-2),
-             ),
-             np.nan,
-         )
-
-         # if join_consecutive:
-         #     for i in range(len(ob) - 1):
-         #         if ob[i] == ob[i + 1]:
-         #             top[i + 1] = max(top[i], top[i + 1])
-         #             bottom[i + 1] = min(bottom[i], bottom[i + 1])
-         #             ob[i] = top[i] = bottom[i] = np.nan
-
-         mitigated_index = np.zeros(len(self.data), dtype=np.int32)
-         for i in np.where(~np.isnan(ob))[0]:
-             mask = np.zeros(len(self.data), dtype=np.bool_)
-             if ob[i] == 1:
-                 mask = self.data["Low"][i + 3:] <= top[i]
-             elif ob[i] == -1:
-                 mask = self.data["High"][i + 3:] >= bottom[i]
-             if np.any(mask):
-                 j = np.argmax(mask) + i + 3
-                 mitigated_index[i] = int(j)
-         ob = ob.flatten()
-         mitigated_index1 = np.where(np.isnan(ob), np.nan, mitigated_index)
-
-         return pd.concat(
-             [
-                 pd.Series(ob.flatten(), name="OB"),
-                 pd.Series(top.flatten(), name="Top"),
-                 pd.Series(bottom.flatten(), name="Bottom"),
-                 pd.Series(mitigated_index1.flatten(), name="MitigatedIndex"),
-             ],
-             axis=1,
-         ).dropna(subset=['OB'])
-
-     def plot(self, swing_hl=True, show=True):
-         fig = make_subplots(1, 1)
-
-         # plot the candle stick graph
-         fig.add_trace(go.Candlestick(x=self.data.index.to_series(),
-                                      open=self.data['Open'],
-                                      high=self.data['High'],
-                                      low=self.data['Low'],
-                                      close=self.data['Close'],
-                                      name='ohlc'))
-
-         # grab first and last observations from df.date and make a continuous date range from that
-         dt_all = pd.date_range(start=self.data['Date'].iloc[0], end=self.data['Date'].iloc[-1], freq='5min')
-
-         # check which dates from your source that also accur in the continuous date range
-         dt_obs = [d.strftime("%Y-%m-%d %H:%M:%S") for d in self.data['Date']]
-
-         # isolate missing timestamps
-         dt_breaks = [d for d in dt_all.strftime("%Y-%m-%d %H:%M:%S").tolist() if not d in dt_obs]
-
-         # adjust xaxis for rangebreaks
-         fig.update_xaxes(rangebreaks=[dict(dvalue=5 * 60 * 1000, values=dt_breaks)])
-
-         print(self.order_blocks.head())
-         print(self.order_blocks.index.to_list())
-
-         ob_df = self.data.iloc[self.order_blocks.index.to_list()]
-         # print(ob_df)
-
-         fig.add_trace(go.Scatter(
-             x=ob_df['Date'],
-             y=ob_df['Low'],
-             name="Order Block",
-             mode='markers',
-             marker_symbol='diamond-dot',
-             marker_size=13,
-             marker_line_width=2,
-             # offsetgroup=0,
-         ))
-
-         if swing_hl:
-             hl = self.swing_highs_lows(self.swing_hl_window_sz)
-             h = hl[(hl['highs']==True)]
-             l = hl[hl['lows']==True]
-             # print(h)
-             # exit(0)
-             fig.add_trace(go.Scatter(
-                 x=h['Date'],
-                 y=self.data[self.data.Date.isin(h['Date'])]['High']*(100.1/100),
-                 mode='markers',
-                 marker_symbol="triangle-up-dot",
-                 marker_size=10,
-                 name='Swing High',
-                 # offsetgroup=2,
-             ))
-             fig.add_trace(go.Scatter(
-                 x=l['Date'],
-                 y=self.data[self.data.Date.isin(l['Date'])]['Low']*(99.9/100),
-                 mode='markers',
-                 marker_symbol="triangle-down-dot",
-                 marker_size=10,
-                 name='Swing Low',
-                 marker_color='red',
-                 # offsetgroup=2,
-             ))
-
-         fig.update_layout(xaxis_rangeslider_visible=False)
-         if show:
-             fig.show()
-         return fig
-
-
- def EMA(array, n):
-     return pd.Series(array).ewm(span=n, adjust=False).mean()
-
- if __name__ == "__main__":
-     from data_fetcher import fetch
-     data = fetch('ICICIBANK.NS', period='1mo', interval='15m')
-     # data = fetch('RELIANCE.NS', period='1mo', interval='15m')
-
-     # print(SMC(data).backtest_buy_signal())
-     SMC(data).plot()
+ import pandas as pd
+ import numpy as np
+ import plotly.graph_objects as go
+ from plotly.subplots import make_subplots
+
+ class SMC:
+     def __init__(self, data, swing_hl_window_sz=10):
+         """
+         Smart Money Concept
+         :param data:
+             Should contain Open, High, Low, Close columns and a datetime index (copied into a 'Date' column).
+         :type data: pd.DataFrame
+         :param swing_hl_window_sz:
+             CHoCH detection period (swing high/low window size).
+         :type swing_hl_window_sz: int
+         """
+         self.data = data
+         self.data['Date'] = self.data.index.to_series()
+         self.swing_hl_window_sz = swing_hl_window_sz
+         self.order_blocks = self.order_block()
+         self.swing_hl = self.swing_highs_lows_v2(self.swing_hl_window_sz)
+         self.structure_map = self.bos_choch(self.swing_hl)
+
+     def backtest_buy_signal_ob(self):
+         """
+         :return:
+             Buy signals derived from order-block mitigation indices.
+         :rtype: np.ndarray
+         """
+         # Get only bullish order blocks which are mitigated.
+         bull_ob = self.order_blocks[(self.order_blocks['OB']==1) & (self.order_blocks['MitigatedIndex']!=0)]
+         arr = np.zeros(len(self.data))
+         # Mark the mitigated indices with 1.
+         arr[bull_ob['MitigatedIndex'].apply(lambda x: int(x))] = 1
+         return arr
+
+     def backtest_sell_signal_ob(self):
+         """
+         :return:
+             Sell signals derived from order-block mitigation indices.
+         :rtype: np.ndarray
+         """
+         # Get only bearish order blocks which are mitigated.
+         bear_ob = self.order_blocks[(self.order_blocks['OB'] == -1) & (self.order_blocks['MitigatedIndex'] != 0)]
+         arr = np.zeros(len(self.data))
+         # Mark the mitigated indices with -1.
+         arr[bear_ob['MitigatedIndex'].apply(lambda x: int(x))] = -1
+         return arr
+
+     def backtest_buy_signal_structure(self):
+         """
+         :return:
+             Buy signals derived from bullish structure break indices.
+         :rtype: np.ndarray
+         """
+         # Get only bullish structure.
+         bull_struct = self.structure_map[(self.structure_map['BOS'] == 1) | (self.structure_map['CHOCH'] == 1)]
+         arr = np.zeros(len(self.data))
+         # Mark the broken indices with 1.
+         arr[bull_struct['BrokenIndex'].apply(lambda x: int(x))] = 1
+         return arr
+
+     def backtest_sell_signal_structure(self):
+         """
+         :return:
+             Sell signals derived from bearish structure break indices.
+         :rtype: np.ndarray
+         """
+         # Get only bearish structure.
+         bear_struct = self.structure_map[(self.structure_map['BOS'] == -1) | (self.structure_map['CHOCH'] == -1)]
+         arr = np.zeros(len(self.data))
+         # Mark the broken indices with -1.
+         arr[bear_struct['BrokenIndex'].apply(lambda x: int(x))] = -1
+         return arr
+
+     def swing_highs_lows(self, window_size):
+         """
+         Basic version of swing highs and lows. Suitable for finding swing order blocks.
+         :param window_size:
+             Window size for searching swing highs and lows.
+         :type window_size: int
+         :return:
+             DataFrame with Date, highs(bool), lows(bool) columns.
+         :rtype: pd.DataFrame
+         """
+         l = self.data['Low'].reset_index(drop=True)
+         h = self.data['High'].reset_index(drop=True)
+         swing_highs = (h.rolling(window_size, center=True).max() / h == 1.)
+         swing_lows = (l.rolling(window_size, center=True).min() / l == 1.)
+         return pd.DataFrame({'Date':self.data.index.to_series(), 'highs':swing_highs.values, 'lows':swing_lows.values})
+
+     def swing_highs_lows_v2(self, window_size):
+         """
+         Updated version of swing_highs_lows(). Suitable for BOS and CHoCH detection.
+         :param window_size:
+             Window size for searching swing highs and lows.
+         :type window_size: int
+         :return:
+             DataFrame with HighLow (1 for swing high, -1 for swing low) and Level columns.
+         :rtype: pd.DataFrame
+         """
+         # Reverse the series so .rolling() scans right to left (forward-looking windows).
+         l = self.data['Low'][::-1].reset_index(drop=True)
+         h = self.data['High'][::-1].reset_index(drop=True)
+         swing_highs = (h.rolling(window_size, min_periods=1).max() / h == 1.)[::-1]
+         swing_lows = (l.rolling(window_size, min_periods=1).min() / l == 1.)[::-1]
+
+         swing_highs.reset_index(drop=True, inplace=True)
+         swing_lows.reset_index(drop=True, inplace=True)
+
+         # Mark swing highs as 1 and swing lows as -1.
+         swings = np.where((swing_highs | swing_lows), np.where(swing_highs, 1, -1), 0)
+
+         # Keep only alternating swings: one swing high between two swing lows and vice-versa.
+         state = 1
+         for i in range(1, swings.shape[0]):
+             if swings[i] == state or swings[i] == 0:
+                 swings[i] = 0
+             else:
+                 state *= -1
+
+         # Replace 0 with NaN.
+         swing_highs_lows = np.where(swings==0, np.nan, swings)
+
+         # Get positions of swing_highs_lows where elements are not np.nan.
+         pos = np.where(~np.isnan(swing_highs_lows))[0]
+
+         # Force the series to start and end with a swing opposite to the first/last detected one.
+         if len(pos) > 0:
+             if swing_highs_lows[pos[0]] == 1:
+                 swing_highs_lows[0] = -1
+             if swing_highs_lows[pos[0]] == -1:
+                 swing_highs_lows[0] = 1
+             if swing_highs_lows[pos[-1]] == -1:
+                 swing_highs_lows[-1] = 1
+             if swing_highs_lows[pos[-1]] == 1:
+                 swing_highs_lows[-1] = -1
+
+         level = np.where(
+             ~np.isnan(swing_highs_lows),
+             np.where(swing_highs_lows == 1, self.data.High, self.data.Low),
+             np.nan,
+         )
+
+         return pd.concat(
+             [
+                 pd.Series(swing_highs_lows, name="HighLow"),
+                 pd.Series(level, name="Level"),
+             ],
+             axis=1,
+         )
+
+     def bos_choch(self, swing_highs_lows):
+         """
+         Break of Structure and Change of Character
+         :param swing_highs_lows: A DataFrame which contains swing highs and lows.
+             Format should be the same as returned by swing_highs_lows_v2().
+         :type swing_highs_lows: pd.DataFrame
+         :return: A DataFrame with BOS (1 for bullish, -1 for bearish),
+             CHOCH (1 for bullish, -1 for bearish), Level, BrokenIndex as columns.
+         :rtype: pd.DataFrame
+         """
+         level_order = []
+         highs_lows_order = []
+
+         bos = np.zeros(len(self.data), dtype=np.int32)
+         choch = np.zeros(len(self.data), dtype=np.int32)
+         level = np.zeros(len(self.data), dtype=np.float32)
+
+         last_positions = []
+
+         for i in range(len(swing_highs_lows["HighLow"])):
+             if not np.isnan(swing_highs_lows["HighLow"][i]):
+                 level_order.append(swing_highs_lows["Level"][i])
+                 highs_lows_order.append(swing_highs_lows["HighLow"][i])
+                 if len(level_order) >= 4:
+                     # Bullish BOS: swings go low(-4), high(-3), low(-2), high(-1)
+                     # with level(-4) < level(-2) < level(-3) < level(-1); the broken level is the -3 swing high.
+                     bos[last_positions[-2]] = (
+                         1
+                         if (
+                             np.all(highs_lows_order[-4:] == [-1, 1, -1, 1])
+                             and np.all(
+                                 level_order[-4]
+                                 < level_order[-2]
+                                 < level_order[-3]
+                                 < level_order[-1]
+                             )
+                         )
+                         else 0
+                     )
+                     level[last_positions[-2]] = (
+                         level_order[-3] if bos[last_positions[-2]] != 0 else 0
+                     )
+
+                     # Bearish BOS: swings go high(-4), low(-3), high(-2), low(-1)
+                     # with level(-4) > level(-2) > level(-3) > level(-1); the broken level is the -3 swing low.
+                     bos[last_positions[-2]] = (
+                         -1
+                         if (
+                             np.all(highs_lows_order[-4:] == [1, -1, 1, -1])
+                             and np.all(
+                                 level_order[-4]
+                                 > level_order[-2]
+                                 > level_order[-3]
+                                 > level_order[-1]
+                             )
+                         )
+                         else bos[last_positions[-2]]
+                     )
+                     level[last_positions[-2]] = (
+                         level_order[-3] if bos[last_positions[-2]] != 0 else 0
+                     )
+
+                     # Bullish CHoCH: swings go low(-4), high(-3), low(-2), high(-1)
+                     # with level(-1) > level(-3) > level(-4) > level(-2); the broken level is the -3 swing high.
+                     choch[last_positions[-2]] = (
+                         1
+                         if (
+                             np.all(highs_lows_order[-4:] == [-1, 1, -1, 1])
+                             and np.all(
+                                 level_order[-1]
+                                 > level_order[-3]
+                                 > level_order[-4]
+                                 > level_order[-2]
+                             )
+                         )
+                         else 0
+                     )
+                     level[last_positions[-2]] = (
+                         level_order[-3]
+                         if choch[last_positions[-2]] != 0
+                         else level[last_positions[-2]]
+                     )
+
+                     # Bearish CHoCH: swings go high(-4), low(-3), high(-2), low(-1)
+                     # with level(-1) < level(-3) < level(-4) < level(-2); the broken level is the -3 swing low.
+                     choch[last_positions[-2]] = (
+                         -1
+                         if (
+                             np.all(highs_lows_order[-4:] == [1, -1, 1, -1])
+                             and np.all(
+                                 level_order[-1]
+                                 < level_order[-3]
+                                 < level_order[-4]
+                                 < level_order[-2]
+                             )
+                         )
+                         else choch[last_positions[-2]]
+                     )
+                     level[last_positions[-2]] = (
+                         level_order[-3]
+                         if choch[last_positions[-2]] != 0
+                         else level[last_positions[-2]]
+                     )
+
+                 last_positions.append(i)
+
+         broken = np.zeros(len(self.data), dtype=np.int32)
+         for i in np.where(np.logical_or(bos != 0, choch != 0))[0]:
+             mask = np.zeros(len(self.data), dtype=np.bool_)
+             # For a bullish break, check whether a later close moves above the level.
+             if bos[i] == 1 or choch[i] == 1:
+                 mask = self.data.Close[i + 2:] > level[i]
+             # For a bearish break, check whether a later close moves below the level.
+             elif bos[i] == -1 or choch[i] == -1:
+                 mask = self.data.Close[i + 2:] < level[i]
+             if np.any(mask):
+                 j = np.argmax(mask) + i + 2
+                 broken[i] = j
+                 # If any unbroken BOS or CHoCH started before this one and ended after it, remove them.
+                 for k in np.where(np.logical_or(bos != 0, choch != 0))[0]:
+                     if k < i and broken[k] >= j:
+                         bos[k] = 0
+                         choch[k] = 0
+                         level[k] = 0
+
+         # Remove the ones that aren't broken.
+         for i in np.where(
+             np.logical_and(np.logical_or(bos != 0, choch != 0), broken == 0)
+         )[0]:
+             bos[i] = 0
+             choch[i] = 0
+             level[i] = 0
+
+         # Replace all the 0s with np.nan.
+         bos = np.where(bos != 0, bos, np.nan)
+         choch = np.where(choch != 0, choch, np.nan)
+         level = np.where(level != 0, level, np.nan)
+         broken = np.where(broken != 0, broken, np.nan)
+
+         bos = pd.Series(bos, name="BOS")
+         choch = pd.Series(choch, name="CHOCH")
+         level = pd.Series(level, name="Level")
+         broken = pd.Series(broken, name="BrokenIndex")
+
+         return pd.concat([bos, choch, level, broken], axis=1)
+
+     def fvg(self):
+         """
+         FVG - Fair Value Gap
+         A fair value gap is when the previous high is lower than the next low if the current candle is bullish.
+         Or when the previous low is higher than the next high if the current candle is bearish.
+
+         :return:
+             FVG = 1 if bullish fair value gap, -1 if bearish fair value gap
+             Top = the top of the fair value gap
+             Bottom = the bottom of the fair value gap
+             MitigatedIndex = the index of the candle that mitigated the fair value gap
+         :rtype: pd.DataFrame
+         """
+
+         fvg = np.where(
+             (
+                 (self.data["High"].shift(1) < self.data["Low"].shift(-1))
+                 & (self.data["Close"] > self.data["Open"])
+             )
+             | (
+                 (self.data["Low"].shift(1) > self.data["High"].shift(-1))
+                 & (self.data["Close"] < self.data["Open"])
+             ),
+             np.where(self.data["Close"] > self.data["Open"], 1, -1),
+             np.nan,
+         )
+
+         top = np.where(
+             ~np.isnan(fvg),
+             np.where(
+                 self.data["Close"] > self.data["Open"],
+                 self.data["Low"].shift(-1),
+                 self.data["Low"].shift(1),
+             ),
+             np.nan,
+         )
+
+         bottom = np.where(
+             ~np.isnan(fvg),
+             np.where(
+                 self.data["Close"] > self.data["Open"],
+                 self.data["High"].shift(1),
+                 self.data["High"].shift(-1),
+             ),
+             np.nan,
+         )
+
+         mitigated_index = np.zeros(len(self.data), dtype=np.int32)
+         for i in np.where(~np.isnan(fvg))[0]:
+             mask = np.zeros(len(self.data), dtype=np.bool_)
+             if fvg[i] == 1:
+                 mask = self.data["Low"][i + 2:] <= top[i]
+             elif fvg[i] == -1:
+                 mask = self.data["High"][i + 2:] >= bottom[i]
+             if np.any(mask):
+                 j = np.argmax(mask) + i + 2
+                 mitigated_index[i] = j
+
+         mitigated_index = np.where(np.isnan(fvg), np.nan, mitigated_index)
+
+         return pd.concat(
+             [
+                 pd.Series(fvg.flatten(), name="FVG"),
+                 pd.Series(top.flatten(), name="Top"),
+                 pd.Series(bottom.flatten(), name="Bottom"),
+                 pd.Series(mitigated_index.flatten(), name="MitigatedIndex"),
+             ],
+             axis=1,
+         )
+
+     def order_block(self, imb_perc=.1, join_consecutive=True):
+         """
+         OB - Order Block
+         An order block is the presence of a chunk of market orders that results in a sudden rise or fall in the market.
+
+         :return:
+             OB = 1 if bullish order block, -1 if bearish order block
+             Top = the top of the order block
+             Bottom = the bottom of the order block
+             MitigatedIndex = the index of the candle that mitigated the order block
+         :rtype: pd.DataFrame
+         """
+         hl = self.swing_highs_lows(self.swing_hl_window_sz)
+
+         ob = np.where(
+             (
+                 ((self.data["High"]*((100+imb_perc)/100)) < self.data["Low"].shift(-2))
+                 & ((hl['lows']==True) | (hl['lows'].shift(1)==True))
+             )
+             | (
+                 (self.data["Low"] > (self.data["High"].shift(-2)*((100+imb_perc)/100)))
+                 & ((hl['highs']==True) | (hl['highs'].shift(1)==True))
+             ),
+             np.where(((hl['lows']==True) | (hl['lows'].shift(1)==True)), 1, -1),
+             np.nan,
+         )
+
+         # print(ob)
+
+         top = np.where(
+             ~np.isnan(ob),
+             np.where(
+                 self.data["Close"] > self.data["Open"],
+                 self.data["Low"].shift(-2),
+                 self.data["Low"],
+             ),
+             np.nan,
+         )
+
+         bottom = np.where(
+             ~np.isnan(ob),
+             np.where(
+                 self.data["Close"] > self.data["Open"],
+                 self.data["High"],
+                 self.data["High"].shift(-2),
+             ),
+             np.nan,
+         )
+
+         # if join_consecutive:
+         #     for i in range(len(ob) - 1):
+         #         if ob[i] == ob[i + 1]:
+         #             top[i + 1] = max(top[i], top[i + 1])
+         #             bottom[i + 1] = min(bottom[i], bottom[i + 1])
+         #             ob[i] = top[i] = bottom[i] = np.nan
+
+         mitigated_index = np.zeros(len(self.data), dtype=np.int32)
+         for i in np.where(~np.isnan(ob))[0]:
+             mask = np.zeros(len(self.data), dtype=np.bool_)
+             if ob[i] == 1:
+                 mask = self.data["Low"][i + 3:] <= top[i]
+             elif ob[i] == -1:
+                 mask = self.data["High"][i + 3:] >= bottom[i]
+             if np.any(mask):
+                 j = np.argmax(mask) + i + 3
+                 mitigated_index[i] = int(j)
+         ob = ob.flatten()
+         mitigated_index1 = np.where(np.isnan(ob), np.nan, mitigated_index)
+
+         return pd.concat(
+             [
+                 pd.Series(ob.flatten(), name="OB"),
+                 pd.Series(top.flatten(), name="Top"),
+                 pd.Series(bottom.flatten(), name="Bottom"),
+                 pd.Series(mitigated_index1.flatten(), name="MitigatedIndex"),
+             ],
+             axis=1,
+         ).dropna(subset=['OB'])
+
+     def plot(self, order_blocks=False, swing_hl=False, swing_hl_v2=False, structure=False, show=True):
+         """
+         :param order_blocks: Plot markers at detected order blocks.
+         :param swing_hl: Plot swing highs/lows from swing_highs_lows().
+         :param swing_hl_v2: Plot swing highs/lows from swing_highs_lows_v2().
+         :param structure: Draw BOS/CHoCH levels from the structure map.
+         :param show: Call fig.show() before returning.
+         :return: The resulting Plotly figure.
+         """
+         fig = make_subplots(1, 1)
+
+         # plot the candle stick graph
+         fig.add_trace(go.Candlestick(x=self.data.index.to_series(),
+                                      open=self.data['Open'],
+                                      high=self.data['High'],
+                                      low=self.data['Low'],
+                                      close=self.data['Close'],
+                                      name='ohlc'))
+
+         # grab first and last observations from df.date and make a continuous date range from that
+         dt_all = pd.date_range(start=self.data['Date'].iloc[0], end=self.data['Date'].iloc[-1], freq='5min')
+
+         # check which dates from your source also occur in the continuous date range
+         dt_obs = [d.strftime("%Y-%m-%d %H:%M:%S") for d in self.data['Date']]
+
+         # isolate missing timestamps
+         dt_breaks = [d for d in dt_all.strftime("%Y-%m-%d %H:%M:%S").tolist() if not d in dt_obs]
+
+         # adjust xaxis for rangebreaks
+         fig.update_xaxes(rangebreaks=[dict(dvalue=5 * 60 * 1000, values=dt_breaks)])
+
+         if order_blocks:
+             print(self.order_blocks.head())
+             print(self.order_blocks.index.to_list())
+
+             ob_df = self.data.iloc[self.order_blocks.index.to_list()]
+             # print(ob_df)
+
+             fig.add_trace(go.Scatter(
+                 x=ob_df['Date'],
+                 y=ob_df['Low'],
+                 name="Order Block",
+                 mode='markers',
+                 marker_symbol='diamond-dot',
+                 marker_size=13,
+                 marker_line_width=2,
+                 # offsetgroup=0,
+             ))
+
+         if swing_hl:
+             hl = self.swing_highs_lows(self.swing_hl_window_sz)
+             h = hl[(hl['highs']==True)]
+             l = hl[hl['lows']==True]
+
+             fig.add_trace(go.Scatter(
+                 x=h['Date'],
+                 y=self.data[self.data.Date.isin(h['Date'])]['High']*(100.1/100),
+                 mode='markers',
+                 marker_symbol="triangle-up-dot",
+                 marker_size=10,
+                 name='Swing High',
+                 # offsetgroup=2,
+             ))
+             fig.add_trace(go.Scatter(
+                 x=l['Date'],
+                 y=self.data[self.data.Date.isin(l['Date'])]['Low']*(99.9/100),
+                 mode='markers',
+                 marker_symbol="triangle-down-dot",
+                 marker_size=10,
+                 name='Swing Low',
+                 marker_color='red',
+                 # offsetgroup=2,
+             ))
+
+         if swing_hl_v2:
+             hl = self.swing_hl
+             h = hl[hl['HighLow']==1]
+             l = hl[hl['HighLow']==-1]
+
+             fig.add_trace(go.Scatter(
+                 x=self.data['Date'].iloc[h.index],
+                 y=h['Level'],
+                 mode='markers',
+                 marker_symbol="triangle-up-dot",
+                 marker_size=10,
+                 name='Swing High',
+                 marker_color='green',
+             ))
+             fig.add_trace(go.Scatter(
+                 x=self.data['Date'].iloc[l.index],
+                 y=l['Level'],
+                 mode='markers',
+                 marker_symbol="triangle-down-dot",
+                 marker_size=10,
+                 name='Swing Low',
+                 marker_color='red',
+             ))
+
+         if structure:
+             # Work on a filtered copy so self.structure_map itself is not modified.
+             struct = self.structure_map.dropna(subset=['Level'])
+
+             for i in range(len(struct)):
+                 x0 = self.data['Date'].iloc[struct.index[i]]
+                 x1 = self.data['Date'].iloc[int(struct['BrokenIndex'].iloc[i])]
+                 y = struct['Level'].iloc[i]
+                 label = "BOS" if np.isnan(struct['CHOCH'].iloc[i]) else "CHOCH"
+                 direction = struct[label].iloc[i]
+
+                 # Add scatter trace for the line
+                 fig.add_trace(go.Scatter(
+                     x=[x0, x1],  # x-coordinates of the line
+                     y=[y, y],  # y-coordinates of the line
+                     mode="lines+text",  # Line and optional label
+                     line=dict(color="blue" if label=="BOS" else "orange"),  # Customize line color
+                     text=[None, label],  # Add label only at one end
+                     textposition="top left" if direction==1 else "bottom left",  # Adjust label position
+                     name=label,  # Legend entry for this line
+                     showlegend=False
+                 ))
+
+         fig.update_layout(xaxis_rangeslider_visible=False)
+         if show:
+             fig.show()
+         return fig
+
+
+ def EMA(array, n):
+     """
+     :param array: price of the stock
+     :param n: window size
+     :type n: int
+     :return: Exponential moving average
+     :rtype: pd.Series
+     """
+     return pd.Series(array).ewm(span=n, adjust=False).mean()
+
+ if __name__ == "__main__":
+     from data_fetcher import fetch
+     data = fetch('ICICIBANK.NS', period='1mo', interval='15m')
+     data = fetch('RELIANCE.NS', period='1mo', interval='15m')
+     data['Date'] = data.index.to_series()
+     filter = pd.to_datetime('2024-12-17 09:50:00.0000000011',
+                             format='%Y-%m-%d %H:%M:%S.%f')
+     # data = data[data['Date']<filter]
+     # print(SMC(data).backtest_buy_signal())
+     # print(SMC(data).swing_highs_lows_v3(10).to_string())
+     # print(data.tail())
+     SMC(data).plot(order_blocks=False, swing_hl=False, swing_hl_v2=True, structure=True, show=True)
+     # struct = SMC(data).structure_map
+     # print(struct)
+     #
+     # for i in range(len(data)):
+     #     print(i, data['Date'][i], struct['BrokenIndex'].iloc[i])
+     # SMC(data).structure()
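For quick reference, below is a minimal usage sketch of the SMC class added in this commit. It is not part of the diff: it assumes indicators.py from this commit is importable and substitutes a small synthetic random-walk OHLC frame for data_fetcher.fetch; column names and the datetime index follow the __init__ docstring.

    # Hypothetical example, not part of the commit.
    import numpy as np
    import pandas as pd
    from indicators import SMC

    # Build a small random-walk OHLC frame with a datetime index, as __init__ expects.
    rng = np.random.default_rng(42)
    idx = pd.date_range("2024-12-02 09:15", periods=300, freq="15min")
    close = 100 + rng.normal(0, 0.5, len(idx)).cumsum()
    open_ = np.roll(close, 1)
    open_[0] = close[0]
    high = np.maximum(open_, close) + rng.uniform(0, 0.3, len(idx))
    low = np.minimum(open_, close) - rng.uniform(0, 0.3, len(idx))
    data = pd.DataFrame({"Open": open_, "High": high, "Low": low, "Close": close}, index=idx)

    smc = SMC(data, swing_hl_window_sz=10)
    print(smc.order_blocks.head())                             # OB direction, Top/Bottom, MitigatedIndex
    print(smc.structure_map.dropna(subset=["Level"]).head())   # BOS/CHOCH levels and BrokenIndex
    fig = smc.plot(swing_hl_v2=True, structure=True, show=False)  # returns the Plotly figure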