Jayabalambika committed on
Commit
8649ed2
·
1 Parent(s): baa0af9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +135 -0
app.py CHANGED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
import time  # NOTE(review): unused in the visible code — candidate for removal
import numpy as np
import matplotlib.pyplot as plt

from scipy.linalg import toeplitz, cholesky
from sklearn.covariance import LedoitWolf, OAS

# Fix the RNG seed so the Monte-Carlo comparison below is reproducible
# across app restarts and repeated slider interactions.
np.random.seed(0)
13
def generate_plots(min_slider_samples_range, max_slider_samples_range,
                   n_features=100, repeat=100, r=0.1):
    """Compare Ledoit-Wolf and OAS covariance estimation over sample sizes.

    For each sample count in ``[min_slider_samples_range,
    max_slider_samples_range)``, draws ``repeat`` Gaussian datasets with a
    Toeplitz ground-truth covariance, fits both estimators, and plots the
    squared estimation error (top subplot) and the shrinkage coefficient
    (bottom subplot) of each.

    Parameters
    ----------
    min_slider_samples_range : int
        Smallest number of samples to simulate (inclusive).
    max_slider_samples_range : int
        Largest number of samples to simulate (exclusive).
    n_features : int, default 100
        Dimensionality of the simulated data.
    repeat : int, default 100
        Monte-Carlo repetitions per sample size.
    r : float, default 0.1
        Decay rate of the Toeplitz ground-truth covariance (entry |i-j| is r**|i-j|).

    Returns
    -------
    module
        The ``matplotlib.pyplot`` module holding the rendered figure
        (accepted by ``gr.Plot`` as an output value).
    """
    sample_counts = np.arange(min_slider_samples_range, max_slider_samples_range, 1)

    # Ground-truth covariance and the coloring matrix used to correlate the
    # samples. Computed locally (instead of reading module-level globals that
    # the original defined *after* this function) so the function is
    # self-contained; values are identical for the default arguments.
    real_cov = toeplitz(r ** np.arange(n_features))
    coloring_matrix = cholesky(real_cov)

    lw_mse = np.zeros((sample_counts.size, repeat))
    oa_mse = np.zeros((sample_counts.size, repeat))
    lw_shrinkage = np.zeros((sample_counts.size, repeat))
    oa_shrinkage = np.zeros((sample_counts.size, repeat))

    for i, n_samples in enumerate(sample_counts):
        for j in range(repeat):
            # Correlated Gaussian data: white noise colored by the Cholesky factor.
            X = np.dot(np.random.normal(size=(n_samples, n_features)), coloring_matrix.T)

            lw = LedoitWolf(store_precision=False, assume_centered=True)
            lw.fit(X)
            lw_mse[i, j] = lw.error_norm(real_cov, scaling=False)
            lw_shrinkage[i, j] = lw.shrinkage_

            oa = OAS(store_precision=False, assume_centered=True)
            oa.fit(X)
            oa_mse[i, j] = oa.error_norm(real_cov, scaling=False)
            oa_shrinkage[i, j] = oa.shrinkage_

    # Start from a fresh figure: without this, every Gradio callback keeps
    # drawing into the same implicit pyplot figure and artists accumulate.
    plt.figure()

    # Top subplot: mean squared estimation error with std-dev error bars.
    plt.subplot(2, 1, 1)
    plt.errorbar(
        sample_counts,
        lw_mse.mean(1),
        yerr=lw_mse.std(1),
        label="Ledoit-Wolf",
        color="navy",
        lw=2,
    )
    plt.errorbar(
        sample_counts,
        oa_mse.mean(1),
        yerr=oa_mse.std(1),
        label="OAS",
        color="darkorange",
        lw=2,
    )
    plt.ylabel("Squared error")
    plt.legend(loc="upper right")
    plt.title("Comparison of covariance estimators")
    plt.xlim(5, 31)

    # Bottom subplot: the shrinkage coefficient each estimator selected.
    plt.subplot(2, 1, 2)
    plt.errorbar(
        sample_counts,
        lw_shrinkage.mean(1),
        yerr=lw_shrinkage.std(1),
        label="Ledoit-Wolf",
        color="navy",
        lw=2,
    )
    plt.errorbar(
        sample_counts,
        oa_shrinkage.mean(1),
        yerr=oa_shrinkage.std(1),
        label="OAS",
        color="darkorange",
        lw=2,
    )
    plt.xlabel("n_samples")
    plt.ylabel("Shrinkage")
    plt.legend(loc="lower right")
    # Pad the top of the y-axis by 10% so the legend does not cover the curves.
    plt.ylim(plt.ylim()[0], 1.0 + (plt.ylim()[1] - plt.ylim()[0]) / 10.0)
    plt.xlim(5, 31)

    return plt
87
+
88
+
89
+
90
+
91
+
92
+
93
title = "Ledoit-Wolf vs OAS estimation"

# Build the Gradio UI: description, two sliders selecting the sample-size
# range, and a single plot that re-renders when either slider changes.
with gr.Blocks(title=title, theme=gr.themes.Default(font=[gr.themes.GoogleFont("Inconsolata"), "Arial", "sans-serif"])) as demo:
    gr.Markdown(f"# {title}")

    gr.Markdown(
        """
    The usual covariance maximum likelihood estimate can be regularized using shrinkage. Ledoit and Wolf proposed a close formula to compute the asymptotically optimal shrinkage parameter (minimizing a MSE criterion), yielding the Ledoit-Wolf covariance estimate.

    Chen et al. proposed an improvement of the Ledoit-Wolf shrinkage parameter, the OAS coefficient, whose convergence is significantly better under the assumption that the data are Gaussian.

    This example, inspired from Chen’s publication [1], shows a comparison of the estimated MSE of the LW and OAS methods, using Gaussian distributed data.

    [1] “Shrinkage Algorithms for MMSE Covariance Estimation” Chen et al., IEEE Trans. on Sign. Proc., Volume 58, Issue 10, October 2010.
    """)

    n_features = 100

    min_slider_samples_range = gr.Slider(6, 31, value=6, step=1, label="min_samples_range", info="Choose between 6 and 31")
    max_slider_samples_range = gr.Slider(6, 31, value=31, step=1, label="max_samples_range", info="Choose between 6 and 31")

    # Ground-truth covariance and coloring matrix kept at module level for
    # backward compatibility with code that reads these globals.
    r = 0.1
    real_cov = toeplitz(r ** np.arange(n_features))
    coloring_matrix = cholesky(real_cov)

    gr.Markdown(" **[Demo is based on sklearn docs](https://scikit-learn.org/stable/auto_examples/covariance/plot_lw_vs_oas.html)**")
    gr.Label(value="Comparison of Covariance Estimators")

    # One shared output component. The original wired only the min slider:
    # its `if component is not None ... elif ...` made the second branch dead
    # code (Gradio components are never None), so moving the max slider never
    # refreshed the plot. Register both handlers explicitly instead.
    plot = gr.Plot()
    min_slider_samples_range.change(
        generate_plots,
        inputs=[min_slider_samples_range, max_slider_samples_range],
        outputs=plot,
    )
    max_slider_samples_range.change(
        generate_plots,
        inputs=[min_slider_samples_range, max_slider_samples_range],
        outputs=plot,
    )

demo.launch()