import numpy as np
import matplotlib

# Select a non-interactive backend before importing pyplot; the app only
# renders figures for Gradio and never opens a display window.
matplotlib.use("Agg")

import matplotlib.pyplot as plt
from sklearn import linear_model
import gradio as gr

# Fix the seed so the noisy refits drawn in the plots are reproducible.
np.random.seed(0)


def plot_it(X_train_x, X_train_y, Y_train_x, Y_train_y, X_test_x, X_test_y, alpha):
    # Two training samples with a single feature each: shape (2, 1).
    X_train = np.array([[X_train_x, X_train_y]]).T
    y_train = [Y_train_x, Y_train_y]
    # Two test inputs; each fitted line is drawn through the predictions here.
    X_test = np.array([[X_test_x, X_test_y]]).T

    # Both entries are regressors despite the dict name, which is kept from
    # the upstream scikit-learn example.
    classifiers = dict(
        ols=linear_model.LinearRegression(), ridge=linear_model.Ridge(alpha=alpha)
    )
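    # Note (added for reference): Ridge fits its coefficients by minimizing
    #   ||y - X w||^2 + alpha * ||w||^2,
    # so alpha = 0 recovers ordinary least squares, while larger alpha shrinks
    # the coefficient toward zero, trading a little bias for lower variance.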

    # One panel per model, side by side.
    fig, axs = plt.subplots(ncols=len(classifiers), figsize=(8, 6))

    for i, (name, clf) in enumerate(classifiers.items()):
        ax = axs[i]

        # Refit on several noise-perturbed copies of the training inputs and
        # draw each resulting prediction line in gray; the spread of these
        # gray lines visualizes the model's variance.
        for _ in range(6):
            this_X = 0.1 * np.random.normal(size=(2, 1)) + X_train
            clf.fit(this_X, y_train)

            ax.plot(X_test, clf.predict(X_test), color="gray")
            ax.scatter(this_X, y_train, s=3, c="gray", marker="o", zorder=10)

        # Fit once on the unperturbed training data for the reference line.
        clf.fit(X_train, y_train)

        ax.plot(X_test, clf.predict(X_test), linewidth=2, color="blue")
        ax.scatter(X_train, y_train, s=30, c="red", marker="+", zorder=10)

        coef = clf.coef_
        intercept = clf.intercept_

        # Annotate the panel with the fitted parameters.
        text_box = f"Intercept: {intercept:.2f}\nCoefficient: {coef[0]:.2f}"

        ax.text(
            0.05,
            0.95,
            text_box,
            transform=ax.transAxes,
            fontsize=10,
            verticalalignment="top",
            bbox=dict(facecolor="white", alpha=0.8),
        )

        ax.set_title(name)
        ax.set_xlabel("X")
        ax.set_ylabel("y")

    return fig
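

# Minimal sketch (added; not called by the app) of the effect the plot shows:
# refit OLS and Ridge on noisy copies of the same two points and compare the
# spread of the learned slopes. All names here are illustrative only.
def coefficient_spread(alpha=0.5, n_repeats=200, noise=0.1):
    X = np.array([[0.5, 1.0]]).T
    y = [0.5, 1.0]
    slopes = {"ols": [], "ridge": []}
    for _ in range(n_repeats):
        noisy_X = X + noise * np.random.normal(size=X.shape)
        slopes["ols"].append(linear_model.LinearRegression().fit(noisy_X, y).coef_[0])
        slopes["ridge"].append(linear_model.Ridge(alpha=alpha).fit(noisy_X, y).coef_[0])
    # The ridge slope typically varies less across refits than the OLS slope.
    return {name: float(np.std(vals)) for name, vals in slopes.items()}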


with gr.Blocks() as demo:
    gr.Markdown("# Ordinary Least Squares and Ridge Regression Variance")
    gr.Markdown(
        "This interactive demo is based on the scikit-learn example "
        "[Ordinary Least Squares and Ridge Regression Variance](https://scikit-learn.org/stable/auto_examples/linear_model/plot_ols_ridge_variance.html). "
        "It fits Ordinary Least Squares (OLS) and ridge regression models to the same small dataset and compares the variance of their coefficients. "
        "You can pick your own training points, fit both models, and watch how much each fitted line moves when the training inputs are perturbed by noise. "
        "Ridge regression trades a little bias for a reduction in coefficient variance, which makes it valuable when the data are noisy or the features are multicollinear."
    )

    gr.Markdown("""
## Select Training Points for the Training Features (X_train) and Training Labels (Y_train)

Example:

- X_train_x = 2.0
- X_train_y = 0.5
- Y_train_x = 1.5
- Y_train_y = 2.5

With these values the training features are (2.0, 0.5) and the training labels are (1.5, 2.5), i.e. the two training samples are (2.0, 1.5) and (0.5, 2.5). Adjust the sliders to choose different coordinates for your training set.
""")
    gr.Markdown(
        "In regression tasks, the available data is split into a training set and a test set: the training set is used to fit the regression model, and the test set is used to evaluate it. Here you select the coordinates of the two points that form the training set."
    )
    with gr.Row():
        with gr.Column():
            gr.Markdown("X_train holds the two feature values (X_train_x, X_train_y)")
            X_train_x = gr.Slider(
                value=0.5, minimum=0, maximum=3, step=0.1, label="X_train_x"
            )
            X_train_y = gr.Slider(
                value=1, minimum=0, maximum=3, step=0.1, label="X_train_y"
            )
        with gr.Column():
            gr.Markdown("Y_train holds the two target values (Y_train_x, Y_train_y)")
            Y_train_x = gr.Slider(
                value=0.5, minimum=0, maximum=3, step=0.1, label="Y_train_x"
            )
            Y_train_y = gr.Slider(
                value=1, minimum=0, maximum=3, step=0.1, label="Y_train_y"
            )
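    # Note (added): the four sliders above define two 1-D training samples,
    # X_train = [[X_train_x], [X_train_y]] and y_train = [Y_train_x, Y_train_y],
    # matching the arrays built at the top of plot_it.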

    gr.Markdown("## Select Test Points (X_test)")
    gr.Markdown(
        "To see what the trained models predict beyond the training set, choose two test inputs that are not part of it. Each panel draws the fitted regression line through the model's predictions at these two positions."
    )
    with gr.Row():
        X_test_x = gr.Slider(value=0, minimum=0, maximum=3, step=0.1, label="X_test_x")
        X_test_y = gr.Slider(value=2, minimum=0, maximum=3, step=0.1, label="X_test_y")

    gr.Markdown("## Select Model Parameters")
    gr.Markdown(
        "The demo compares two regression models: Ordinary Least Squares (OLS) and Ridge Regression. The 'alpha' slider controls the amount of regularization applied by the ridge model: higher values of alpha mean stronger regularization and hence lower variance in the fitted coefficients."
    )
    alpha = gr.Slider(value=0.5, minimum=0, maximum=3, step=0.1, label="alpha")
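    # Note (added): alpha is passed straight through to linear_model.Ridge;
    # at the slider minimum (alpha = 0) ridge coincides with plain OLS.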

    gr.Button("Plot").click(
        plot_it,
        inputs=[X_train_x, X_train_y, Y_train_x, Y_train_y, X_test_x, X_test_y, alpha],
        outputs=gr.Plot(),
    )
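    # Note (added): instantiating gr.Plot() inside the Blocks context renders
    # the component below the button and registers it as the click output.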

demo.launch()