Jayabalambika committed on
Commit 815161b · 1 Parent(s): 033d96d

Create app.py

Files changed (1)
  1. app.py +157 -0
app.py ADDED
@@ -0,0 +1,157 @@
+ import gradio as gr
+ import numpy as np
+ import matplotlib.pyplot as plt
+
+ from scipy.linalg import toeplitz, cholesky
+ from sklearn.covariance import LedoitWolf, OAS
+
+ np.random.seed(0)
+
+ # Ground-truth covariance: a Toeplitz matrix whose Cholesky factor is used
+ # to colour i.i.d. Gaussian samples.
+ n_features = 100
+ r = 0.1
+ real_cov = toeplitz(r ** np.arange(n_features))
+ coloring_matrix = cholesky(real_cov)
+
+
+ def plot_mse(samples_range, lw_mse, oa_mse):
+     # Plot the mean squared estimation error of both estimators.
+     fig = plt.figure()
+     plt.errorbar(
+         samples_range,
+         lw_mse.mean(1),
+         yerr=lw_mse.std(1),
+         label="Ledoit-Wolf",
+         color="navy",
+         lw=2,
+     )
+     plt.errorbar(
+         samples_range,
+         oa_mse.mean(1),
+         yerr=oa_mse.std(1),
+         label="OAS",
+         color="darkorange",
+         lw=2,
+     )
+     plt.ylabel("Squared error")
+     plt.legend(loc="upper right")
+     plt.title("Comparison of covariance estimators")
+     plt.xlim(5, 31)
+     return fig
+
+
+ def plot_shrinkage(samples_range, lw_shrinkage, oa_shrinkage):
+     # Plot the shrinkage coefficient chosen by each estimator.
+     fig = plt.figure()
+     plt.errorbar(
+         samples_range,
+         lw_shrinkage.mean(1),
+         yerr=lw_shrinkage.std(1),
+         label="Ledoit-Wolf",
+         color="navy",
+         lw=2,
+     )
+     plt.errorbar(
+         samples_range,
+         oa_shrinkage.mean(1),
+         yerr=oa_shrinkage.std(1),
+         label="OAS",
+         color="darkorange",
+         lw=2,
+     )
+     plt.xlabel("n_samples")
+     plt.ylabel("Shrinkage")
+     plt.legend(loc="lower right")
+     plt.ylim(plt.ylim()[0], 1.0 + (plt.ylim()[1] - plt.ylim()[0]) / 10.0)
+     plt.xlim(5, 31)
+     return fig
+
+
+ def generate_plots(min_slider_samples_range, max_slider_samples_range):
+     # Simulate data for every sample size in the selected range, fit the
+     # Ledoit-Wolf and OAS estimators, and return both comparison figures.
+     samples_range = np.arange(min_slider_samples_range, max_slider_samples_range, 1)
+     repeat = 100
+     lw_mse = np.zeros((samples_range.size, repeat))
+     oa_mse = np.zeros((samples_range.size, repeat))
+     lw_shrinkage = np.zeros((samples_range.size, repeat))
+     oa_shrinkage = np.zeros((samples_range.size, repeat))
+
+     for i, n_samples in enumerate(samples_range):
+         for j in range(repeat):
+             X = np.dot(np.random.normal(size=(n_samples, n_features)), coloring_matrix.T)
+
+             lw = LedoitWolf(store_precision=False, assume_centered=True)
+             lw.fit(X)
+             lw_mse[i, j] = lw.error_norm(real_cov, scaling=False)
+             lw_shrinkage[i, j] = lw.shrinkage_
+
+             oa = OAS(store_precision=False, assume_centered=True)
+             oa.fit(X)
+             oa_mse[i, j] = oa.error_norm(real_cov, scaling=False)
+             oa_shrinkage[i, j] = oa.shrinkage_
+
+     return (
+         plot_mse(samples_range, lw_mse, oa_mse),
+         plot_shrinkage(samples_range, lw_shrinkage, oa_shrinkage),
+     )
+
+
+ title = "Ledoit-Wolf vs OAS estimation"
+
+ with gr.Blocks(title=title, theme=gr.themes.Default(font=[gr.themes.GoogleFont("Inconsolata"), "Arial", "sans-serif"])) as demo:
+     gr.Markdown(f"# {title}")
+
+     gr.Markdown(
+         """
+ The usual covariance maximum likelihood estimate can be regularized using shrinkage. Ledoit and Wolf proposed a closed-form formula to compute the asymptotically optimal shrinkage parameter (minimizing an MSE criterion), yielding the Ledoit-Wolf covariance estimate.
+
+ Chen et al. proposed an improvement of the Ledoit-Wolf shrinkage parameter, the OAS coefficient, whose convergence is significantly better under the assumption that the data are Gaussian.
+
+ This example, inspired by Chen's publication [1], compares the estimated MSE of the LW and OAS methods using Gaussian-distributed data.
+
+ [1] "Shrinkage Algorithms for MMSE Covariance Estimation", Chen et al., IEEE Trans. on Sign. Proc., Volume 58, Issue 10, October 2010.
+ """)
+
+     gr.Markdown("**[Demo is based on the scikit-learn docs](https://scikit-learn.org/stable/auto_examples/covariance/plot_lw_vs_oas.html)**")
+
+     min_slider_samples_range = gr.Slider(6, 31, value=6, step=1, label="min_samples_range", info="Choose between 6 and 31")
+     max_slider_samples_range = gr.Slider(6, 31, value=31, step=1, label="max_samples_range", info="Choose between 6 and 31")
+
+     gr.Label(value="Comparison of Covariance Estimators")
+     mse_plot = gr.Plot(label="Squared error vs n_samples")
+     shrinkage_plot = gr.Plot(label="Shrinkage coefficient vs n_samples")
+
+     sliders = [min_slider_samples_range, max_slider_samples_range]
+     # Redraw both figures whenever either slider changes, and once at start-up.
+     min_slider_samples_range.change(generate_plots, inputs=sliders, outputs=[mse_plot, shrinkage_plot])
+     max_slider_samples_range.change(generate_plots, inputs=sliders, outputs=[mse_plot, shrinkage_plot])
+     demo.load(generate_plots, inputs=sliders, outputs=[mse_plot, shrinkage_plot])
+
+ demo.launch()
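
For reference, the core computation that the app repeats for each slider setting can be exercised outside Gradio. The sketch below is a minimal, standalone version of that comparison for a single sample size; the names rng, true_cov and coloring are illustrative and not part of the committed app.

import numpy as np
from scipy.linalg import toeplitz, cholesky
from sklearn.covariance import LedoitWolf, OAS

rng = np.random.RandomState(0)
n_features, n_samples, r = 100, 20, 0.1

# Ground-truth covariance and the matrix used to colour i.i.d. Gaussian noise.
true_cov = toeplitz(r ** np.arange(n_features))
coloring = cholesky(true_cov)
X = rng.normal(size=(n_samples, n_features)) @ coloring.T

for est in (LedoitWolf(assume_centered=True), OAS(assume_centered=True)):
    est.fit(X)
    # Squared Frobenius distance between the estimate and the true covariance.
    err = est.error_norm(true_cov, scaling=False)
    print(f"{type(est).__name__}: shrinkage={est.shrinkage_:.3f}, squared error={err:.3f}")

Averaging this error over many repetitions and a range of sample sizes is what generate_plots does before handing the two figures to the gr.Plot components.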