Update description and remove max_depth adjustment
Browse files
app.py
CHANGED
@@ -14,7 +14,7 @@ def train_estimators(max_depth,n_estimators):
|
|
14 |
|
15 |
regr_1 = DecisionTreeRegressor(max_depth=4)
|
16 |
regr_2 = AdaBoostRegressor(
|
17 |
-
DecisionTreeRegressor(max_depth=
|
18 |
)
|
19 |
regr_1.fit(X, y)
|
20 |
regr_2.fit(X, y)
|
@@ -24,8 +24,8 @@ def train_estimators(max_depth,n_estimators):
|
|
24 |
|
25 |
fig, ax = plt.subplots()
|
26 |
ax.scatter(X, y, color=colors[0], label="training samples")
|
27 |
-
ax.plot(X, y_1, color=colors[1], label="Decision tree (
|
28 |
-
ax.plot(X, y_2, color=colors[2], label=f"Adaboost (
|
29 |
ax.set_xlabel("data")
|
30 |
ax.set_ylabel("target")
|
31 |
ax.legend()
|
@@ -34,14 +34,18 @@ def train_estimators(max_depth,n_estimators):
|
|
34 |
title = "Decision Tree Regression with AdaBoost"
|
35 |
with gr.Blocks(title=title) as demo:
|
36 |
gr.Markdown(f"## {title}")
|
37 |
-
gr.Markdown("
|
|
|
|
|
|
|
|
|
|
|
|
|
38 |
|
39 |
-
|
40 |
-
n_estimators = gr.Slider(minimum=1, maximum=300, step=1, label = "Number of Estimators")
|
41 |
|
42 |
plot = gr.Plot(label=title)
|
43 |
-
n_estimators.change(fn=train_estimators, inputs=
|
44 |
-
max_depth.change(fn=train_estimators, inputs=[max_depth,n_estimators], outputs=[plot])
|
45 |
|
46 |
demo.launch()
|
47 |
|
|
|
14 |
|
15 |
regr_1 = DecisionTreeRegressor(max_depth=4)
|
16 |
regr_2 = AdaBoostRegressor(
|
17 |
+
DecisionTreeRegressor(max_depth=4), n_estimators=n_estimators, random_state=rng
|
18 |
)
|
19 |
regr_1.fit(X, y)
|
20 |
regr_2.fit(X, y)
|
|
|
24 |
|
25 |
fig, ax = plt.subplots()
|
26 |
ax.scatter(X, y, color=colors[0], label="training samples")
|
27 |
+
ax.plot(X, y_1, color=colors[1], label=f"Decision tree (estimators=1)", linewidth=2)
|
28 |
+
ax.plot(X, y_2, color=colors[2], label=f"Adaboost (estimators={n_estimators})", linewidth=2)
|
29 |
ax.set_xlabel("data")
|
30 |
ax.set_ylabel("target")
|
31 |
ax.legend()
|
|
|
34 |
title = "Decision Tree Regression with AdaBoost"
|
35 |
with gr.Blocks(title=title) as demo:
|
36 |
gr.Markdown(f"## {title}")
|
37 |
+
gr.Markdown("""
|
38 |
+
This app demonstrates boosting of a decision tree regressor using AdaBoost. Boosting algorithms work by combining multiple models (weak learners) to reach the final output (a strong learner).
|
39 |
+
A single decision tree trained on a randomly generated regression dataset is used as a baseline and compared with a boosted decision tree trained on the same dataset.
|
40 |
+
The outputs of each model are visualized together with the actual data in the plot.
|
41 |
+
The number of estimators used in the boosted decision tree can be adjusted, and the effect of this adjustment can be seen in the resulting plot.
|
42 |
+
This app is based on the [scikit-learn example](https://scikit-learn.org/stable/auto_examples/ensemble/plot_adaboost_regression.html#sphx-glr-auto-examples-ensemble-plot-adaboost-regression-py)
|
43 |
+
""")
|
44 |
|
45 |
+
n_estimators = gr.Slider(minimum=2, maximum=300, step=1, label = "Number of Estimators")
|
|
|
46 |
|
47 |
plot = gr.Plot(label=title)
|
48 |
+
n_estimators.change(fn=train_estimators, inputs=n_estimators, outputs=[plot])
|
|
|
49 |
|
50 |
demo.launch()
|
51 |
|