File size: 2,282 Bytes
ac34f15
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
91ae28e
ac34f15
 
 
 
 
 
 
 
 
91ae28e
 
ac34f15
 
 
 
 
 
 
 
91ae28e
 
 
 
 
 
 
ac34f15
91ae28e
ac34f15
 
91ae28e
ac34f15
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
import gradio as gr
import numpy as np
from sklearn.ensemble import AdaBoostRegressor
from sklearn.tree import DecisionTreeRegressor
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import seaborn as sns

def train_estimators(max_depth, n_estimators):
    """Train a single decision tree and an AdaBoost ensemble on a noisy
    1-D sine dataset and return a matplotlib figure comparing their fits.

    Parameters
    ----------
    max_depth : int
        Maximum depth of the base decision trees. (Previously this
        argument was ignored and a hard-coded depth of 4 was used.)
    n_estimators : int
        Number of boosting stages in the AdaBoost ensemble.

    Returns
    -------
    matplotlib.figure.Figure
        Scatter of the training samples plus both models' predictions.
    """
    # Fixed seed so the synthetic dataset (and therefore the plot) is reproducible.
    rng = np.random.RandomState(1)
    X = np.linspace(0, 6, 100)[:, np.newaxis]
    y = np.sin(X).ravel() + np.sin(6 * X).ravel() + rng.normal(0, 0.1, X.shape[0])

    # BUG FIX: honor the max_depth argument instead of the hard-coded 4.
    regr_1 = DecisionTreeRegressor(max_depth=max_depth)
    regr_2 = AdaBoostRegressor(
        DecisionTreeRegressor(max_depth=max_depth),
        n_estimators=n_estimators,
        random_state=rng,
    )
    regr_1.fit(X, y)
    regr_2.fit(X, y)
    y_1 = regr_1.predict(X)
    y_2 = regr_2.predict(X)
    colors = sns.color_palette("colorblind")

    fig, ax = plt.subplots()
    ax.scatter(X, y, color=colors[0], label="training samples")
    # A single tree is one estimator by definition, so this label is static.
    ax.plot(X, y_1, color=colors[1], label="Decision tree (estimators=1)", linewidth=2)
    ax.plot(X, y_2, color=colors[2], label=f"Adaboost (estimators={n_estimators})", linewidth=2)
    ax.set_xlabel("data")
    ax.set_ylabel("target")
    ax.legend()
    return fig

title = "Decision Tree Regression with AdaBoost"
with gr.Blocks(title=title) as demo:
    gr.Markdown(f"## {title}")
    gr.Markdown("""
    This app demonstrates boosting of a decision tree regressor using Adaboost. Boosting algorithms work by combining multiple models (weak learners) to reach the final output (strong learners).
    A single decision tree trained on a randomly generated regression dataset is used as a baseline and compared with a boosted decision tree trained on the same dataset.
    The outputs of each model are visualized together with the actual data in the plot.
    The number of estimators used in the boosted decision tree can be adjusted and the effect of this adjustment can be seen in the resulting plot.
    This app is developed based on [scikit-learn example](https://scikit-learn.org/stable/auto_examples/ensemble/plot_adaboost_regression.html#sphx-glr-auto-examples-ensemble-plot-adaboost-regression-py)
    """)

    # Controls for the base-tree depth and the number of boosting stages.
    # BUG FIX: train_estimators takes (max_depth, n_estimators); the original
    # wiring passed only one input, so every change raised a TypeError.
    max_depth = gr.Slider(minimum=1, maximum=10, step=1, value=4, label="Maximum Depth")
    n_estimators = gr.Slider(minimum=2, maximum=300, step=1, value=50, label="Number of Estimators")

    plot = gr.Plot(label=title)
    # Re-train and re-draw whenever either control changes; also render once
    # on page load so the plot is not empty before the first interaction.
    controls = [max_depth, n_estimators]
    max_depth.change(fn=train_estimators, inputs=controls, outputs=[plot])
    n_estimators.change(fn=train_estimators, inputs=controls, outputs=[plot])
    demo.load(fn=train_estimators, inputs=controls, outputs=[plot])

demo.launch()