eswardivi commited on
Commit
d27e5fe
·
1 Parent(s): 609b5ae

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +88 -0
app.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import numpy as np
import matplotlib

# Select the non-interactive Agg backend BEFORE pyplot is imported —
# required so figures render server-side without a display.
matplotlib.use("Agg")
import matplotlib.pyplot as plt
from sklearn import linear_model
import gradio as gr

# Seed NumPy's global RNG once at import time so the jittered training
# copies drawn in plot_it() are reproducible across process restarts.
np.random.seed(0)
10
+
11
+
12
def plot_it(X_train_x, X_train_y, Y_train_x, Y_train_y, X_test_x, X_test_y, alpha):
    """Compare the variance of OLS vs. Ridge fits on a tiny 2-point dataset.

    For each estimator, fits 6 jittered copies of the training points
    (gray lines) plus the unperturbed points (blue line), illustrating how
    Ridge's regularization damps the fit's sensitivity to noise.

    Parameters
    ----------
    X_train_x, X_train_y : float
        Feature values of the two training samples.
    Y_train_x, Y_train_y : float
        Target values of the two training samples.
    X_test_x, X_test_y : float
        Feature values of the two test samples (x-range of plotted lines).
    alpha : float
        Ridge regularization strength.

    Returns
    -------
    matplotlib.figure.Figure
        A figure with one subplot per estimator ("ols" and "ridge").
    """
    # Re-seed on every call so repeated clicks with identical slider values
    # produce the identical figure. (The module-level seed alone makes each
    # successive call draw different jitter and thus a different plot.)
    np.random.seed(0)

    # Column vectors of shape (2, 1): two samples, one feature each.
    X_train = np.array([[X_train_x, X_train_y]]).T
    y_train = [Y_train_x, Y_train_y]
    X_test = np.array([[X_test_x, X_test_y]]).T

    classifiers = dict(
        ols=linear_model.LinearRegression(), ridge=linear_model.Ridge(alpha=alpha)
    )

    fig, axs = plt.subplots(ncols=len(classifiers), figsize=(8, 4))

    for i, (name, clf) in enumerate(classifiers.items()):
        ax = axs[i]

        # Fit on 6 noisy perturbations of the training data; each gray line
        # shows how much the fitted line moves under small input noise.
        for _ in range(6):
            this_X = 0.1 * np.random.normal(size=(2, 1)) + X_train
            clf.fit(this_X, y_train)

            ax.plot(X_test, clf.predict(X_test), color="gray")
            ax.scatter(this_X, y_train, s=3, c="gray", marker="o", zorder=10)

        # Fit on the unperturbed training data and overlay in blue/red.
        clf.fit(X_train, y_train)
        ax.plot(X_test, clf.predict(X_test), linewidth=2, color="blue")
        ax.scatter(X_train, y_train, s=30, c="red", marker="+", zorder=10)

        ax.set_title(name)
        ax.set_xlim(0, 2)
        ax.set_ylim((0, 1.6))
        ax.set_xlabel("X")
        ax.set_ylabel("y")

    return fig
44
+
45
+
46
# Declarative UI: component creation order inside the Blocks context
# determines on-page layout, so statement order here is load-bearing.
with gr.Blocks() as demo:
    gr.Markdown("# Ordinary Least Squares and Ridge Regression Variance")
    # NOTE(review): this description claims a synthetic dataset with "a large
    # number of samples" and coefficient-variance plots, which does not match
    # plot_it's two-point training set and fitted-line plots — verify wording.
    gr.Markdown(
        "This interactive demo is based on the [Ordinary Least Squares and Ridge Regression Variance](https://scikit-learn.org/stable/auto_examples/linear_model/plot_ols_ridge_variance.html) shows how to use linear regression with OLS and ridge regression, and compare the variance of the coefficients. It generates a synthetic dataset with a small number of features and a large number of samples, fits both models to the data, and plots the variance of the coefficients for each model. It demonstrates that ridge regression can reduce the variance of coefficients when there is multicollinearity between the features, making it a useful tool in certain regression scenarios."
    )

    gr.Markdown("Select Training points For X_train and Y_train")

    # Two side-by-side columns: feature sliders (X_train_*) on the left,
    # target sliders (Y_train_*) on the right.
    with gr.Row():
        with gr.Column():
            X_train_x = gr.Slider(
                value=0.5, minimum=0, maximum=100, step=0.1, label="X_train_x"
            )
            X_train_y = gr.Slider(
                value=1, minimum=0, maximum=100, step=0.1, label="X_train_y"
            )
        with gr.Column():
            Y_train_x = gr.Slider(
                value=0.5, minimum=0, maximum=100, step=0.1, label="Y_train_x"
            )
            Y_train_y = gr.Slider(
                value=1, minimum=0, maximum=100, step=0.1, label="Y_train_y"
            )
    gr.Markdown("X_test")
    # Test-point sliders define the x-range of the plotted regression lines.
    with gr.Row():
        X_test_x = gr.Slider(
            value=0, minimum=0, maximum=100, step=0.1, label="X_test_x"
        )
        X_test_y = gr.Slider(
            value=2, minimum=0, maximum=100, step=0.1, label="X_test_y"
        )

    gr.Markdown("Select Classifier parameters")
    # Ridge regularization strength passed straight through to plot_it.
    alpha = gr.Slider(value=0.5, minimum=0, maximum=100, step=0.1, label="alpha")

    # The gr.Plot() output component is instantiated inline here, so it is
    # rendered at this point in the layout (below the button).
    gr.Button("Plot").click(
        plot_it,
        inputs=[X_train_x, X_train_y, Y_train_x, Y_train_y, X_test_x, X_test_y, alpha],
        outputs=gr.Plot(),
    )


demo.launch()