import numpy as np
import matplotlib

# Use a non-interactive backend so figures can be rendered without a display
# (e.g. on a headless server); this must be set before importing pyplot
matplotlib.use("Agg")
import matplotlib.pyplot as plt
from sklearn import linear_model
import gradio as gr

# Fix the random seed so the jittered training sets are reproducible
np.random.seed(0)


def plot_it(X_train_x, X_train_y, Y_train_x, Y_train_y, X_test_x, X_test_y, alpha):
    # Assemble the training and test data: two single-feature samples each,
    # shaped (n_samples, n_features) as scikit-learn expects
    X_train = np.array([[X_train_x, X_train_y]]).T
    y_train = [Y_train_x, Y_train_y]
    X_test = np.array([[X_test_x, X_test_y]]).T

    # Define the two regression models: Ordinary Least Squares (OLS) and
    # Ridge Regression (OLS plus an L2 penalty with strength alpha)
    models = dict(
        ols=linear_model.LinearRegression(), ridge=linear_model.Ridge(alpha=alpha)
    )

    # Create a figure with one subplot per model
    fig, axs = plt.subplots(ncols=len(models), figsize=(8, 6))

    # Fit each model and plot its predictions
    for i, (name, clf) in enumerate(models.items()):
        ax = axs[i]

        # Refit on six slightly jittered copies of the training data to
        # visualize how sensitive each model's fit is to small perturbations
        for _ in range(6):
            this_X = 0.1 * np.random.normal(size=(2, 1)) + X_train
            clf.fit(this_X, y_train)

            ax.plot(X_test, clf.predict(X_test), color="gray")
            ax.scatter(this_X, y_train, s=3, c="gray", marker="o", zorder=10)

        # Fit the model to the original training data
        clf.fit(X_train, y_train)

        # Plot the fitted line and the training data points
        ax.plot(X_test, clf.predict(X_test), linewidth=2, color="blue")
        ax.scatter(X_train, y_train, s=30, c="red", marker="+", zorder=10)
        # Get the regression coefficients
        coef = clf.coef_
        intercept = clf.intercept_

        # Create a text box with the regression coefficients
        text_box = f"Intercept: {intercept:.2f}\nCoefficient: {coef[0]:.2f}"

        # Add the text box to the plot
        ax.text(
            0.05,
            0.95,
            text_box,
            transform=ax.transAxes,
            fontsize=10,
            verticalalignment="top",
            bbox=dict(facecolor="white", alpha=0.8),
        )

        ax.set_title(name)
        ax.set_xlabel("X")
        ax.set_ylabel("y")

    return fig
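

# A minimal illustrative sketch, separate from the demo itself: ridge
# regression solves the regularized normal equations
# (X^T X + alpha * I) w = X^T y, which makes explicit why a larger alpha
# shrinks the coefficients and hence their variance. This helper is our own
# addition, not part of scikit-learn, and ignores the intercept (which
# sklearn's Ridge fits but does not penalize).
def ridge_closed_form(X, y, alpha):
    """Return ridge coefficients (no intercept) for a design matrix X."""
    n_features = X.shape[1]
    A = X.T @ X + alpha * np.eye(n_features)
    return np.linalg.solve(A, X.T @ y)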


with gr.Blocks() as demo:
    # Introduction and explanation of the demo
    gr.Markdown("# Ordinary Least Squares and Ridge Regression Variance")
    gr.Markdown(
        "This interactive demo is based on scikit-learn's [Ordinary Least Squares and Ridge Regression Variance](https://scikit-learn.org/stable/auto_examples/linear_model/plot_ols_ridge_variance.html) example. It fits both Ordinary Least Squares (OLS) and Ridge Regression to a tiny two-point dataset and compares the variance of the fitted coefficients. You can choose your own training points, fit both models to slightly perturbed copies of the data, and observe how much each fit changes. The demo shows how ridge regularization reduces coefficient variance, which is especially valuable when features are collinear."
    )

    # Explanation of selecting training points for X_train and Y_train
    gr.Markdown("""
    ## Select Training Points for the Training Features (X_train) and Training Labels (Y_train)

    Example:
    - X_train_x = 2.0
    - X_train_y = 0.5
    - Y_train_x = 1.5
    - Y_train_y = 2.5

    This example selects (2.0, 0.5) as the training features and (1.5, 2.5) as the training labels, i.e. the two training samples (2.0, 1.5) and (0.5, 2.5). You can adjust the sliders to choose different coordinates for your training set.
        """)
    gr.Markdown(
        "In regression tasks, we split the available data into a training set and a test set. The training set is used to train the regression model, and the test set is used to evaluate its performance. Here, you can select the coordinates of the training points that form the training set."
    )
    with gr.Row():
        with gr.Column():
            gr.Markdown("X_train consists of training points (X_train_x, X_train_y)")
            X_train_x = gr.Slider(
                value=0.5, minimum=0, maximum=3, step=0.1, label="X_train_x"
            )
            X_train_y = gr.Slider(
                value=1, minimum=0, maximum=3, step=0.1, label="X_train_y"
            )
        with gr.Column():
            gr.Markdown("Y_train consists of training points (Y_train_x, Y_train_y)")
            Y_train_x = gr.Slider(
                value=0.5, minimum=0, maximum=3, step=0.1, label="Y_train_x"
            )
            Y_train_y = gr.Slider(
                value=1, minimum=0, maximum=3, step=0.1, label="Y_train_y"
            )

    # Explanation of selecting X_test
    gr.Markdown("## Select Test Point (X_test)")
    gr.Markdown(
        "To evaluate the trained regression model, we need a test point that is not part of the training set. Here, you can select the coordinates of the test point, which will be used to predict the target value based on the learned regression function."
    )
    with gr.Row():
        X_test_x = gr.Slider(value=0, minimum=0, maximum=3, step=0.1, label="X_test_x")
        X_test_y = gr.Slider(value=2, minimum=0, maximum=3, step=0.1, label="X_test_y")

    # Explanation of selecting model parameters
    gr.Markdown("## Select Model Parameters")
    gr.Markdown(
        "In this demo, we compare two regression models: Ordinary Least Squares (OLS) and Ridge Regression. You can adjust the 'alpha' parameter of the Ridge Regression model, which controls the amount of regularization: higher values of alpha mean stronger regularization and lower coefficient variance (at the cost of some bias)."
    )
    alpha = gr.Slider(value=0.5, minimum=0, maximum=3, step=0.1, label="alpha")

    # Button to trigger the plot; the gr.Plot output component is
    # instantiated inline and renders below the button
    gr.Button("Plot").click(
        plot_it,
        inputs=[X_train_x, X_train_y, Y_train_x, Y_train_y, X_test_x, X_test_y, alpha],
        outputs=gr.Plot(),
    )

demo.launch()
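
# To run this demo locally (assuming gradio, scikit-learn, and matplotlib
# are installed), save the file as e.g. app.py and run `python app.py`;
# Gradio serves the interface on http://127.0.0.1:7860 by default.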