Update matplotlib configuration
app.py CHANGED
@@ -1,52 +1,44 @@
+"""
+Demo is based on the [Kernel PCA] - (https://scikit-learn.org/stable/auto_examples/decomposition/plot_kernel_pca.html#sphx-glr-auto-examples-decomposition-plot-kernel-pca-py
+"""
+
 from sklearn.datasets import make_circles
 from sklearn.model_selection import train_test_split
 from sklearn.decomposition import PCA, KernelPCA
+import matplotlib
+matplotlib.use('agg')
+import matplotlib.pyplot as plt
 import gradio as gr

-X, y = make_circles(n_samples=1_000, factor=0.3, noise=0.05, random_state=0)
-X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y, random_state=0)
-
 def fit_plot(n_comp, gamma, alpha):
-
+    X, y = make_circles(n_samples=1_000, factor=0.3, noise=0.05, random_state=0)
+    X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y, random_state=0)
     pca = PCA(n_components=n_comp)
-    kernel_pca = KernelPCA(
-        n_components=None, kernel="rbf", gamma=gamma, fit_inverse_transform=True, alpha=alpha
-    )
-
+    kernel_pca = KernelPCA(n_components=None, kernel="rbf", gamma=gamma, fit_inverse_transform=True, alpha=alpha)
     X_test_pca = pca.fit(X_train).transform(X_test)
     X_test_kernel_pca = kernel_pca.fit(X_train).transform(X_test)
-
-    fig1, (orig_data_ax, pca_proj_ax, kernel_pca_proj_ax) = plt.subplots(
-        ncols=3, figsize=(14, 4)
-    )
-
+    fig, (orig_data_ax, pca_proj_ax, kernel_pca_proj_ax) = plt.subplots(ncols=3, figsize=(14, 4))
     orig_data_ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test)
     orig_data_ax.set_ylabel("Feature #1")
     orig_data_ax.set_xlabel("Feature #0")
     orig_data_ax.set_title("Testing data")
-
     pca_proj_ax.scatter(X_test_pca[:, 0], X_test_pca[:, 1], c=y_test)
     pca_proj_ax.set_ylabel("Principal component #1")
     pca_proj_ax.set_xlabel("Principal component #0")
     pca_proj_ax.set_title("Projection of testing data\n using PCA")
-
     kernel_pca_proj_ax.scatter(X_test_kernel_pca[:, 0], X_test_kernel_pca[:, 1], c=y_test)
     kernel_pca_proj_ax.set_ylabel("Principal component #1")
     kernel_pca_proj_ax.set_xlabel("Principal component #0")
     _ = kernel_pca_proj_ax.set_title("Projection of testing data\n using KernelPCA")
-
-    return fig1
-
+    return fig

 with gr.Blocks() as demo:
     gr.Markdown("## PCA vs Kernel PCA")
-    gr.Markdown("Demo is based on the [Kernel PCA](https://scikit-learn.org/stable/auto_examples/decomposition/plot_kernel_pca.html#sphx-glr-auto-examples-decomposition-plot-kernel-pca-py")
     with gr.Row():
         p1 = gr.Slider(0, 10, label="Number of PCs", value=2, step=1)
         p2 = gr.Slider(0, 10, label="Kernel coefficient", value=10, step=1e-3)
         p3 = gr.Slider(0, 1, label="Hyperparameter for ridge regression", value=0.1, step=1e-3)
-
     btn = gr.Button(value="Submit")
-    btn.click(fit_plot, inputs=
+    btn.click(fit_plot, inputs=[p1,p2,p3], outputs=gr.Plot(label="Projecting data with PCA and Kernel PCA "))

 demo.launch()
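
For reference, the pattern the updated app.py relies on is sketched below: select matplotlib's non-interactive Agg backend before pyplot is imported (a Space runs headless, with no display server), build the figure inside the event handler, and return it so gr.Plot can render it. This is a minimal illustration under those assumptions, not the Space's own code; the sine-wave callback, slider range, and labels are made up for the example.

import matplotlib
matplotlib.use("agg")  # non-interactive backend, set before pyplot is imported, as in the updated app.py
import matplotlib.pyplot as plt
import numpy as np
import gradio as gr

def plot_sine(freq):
    # Build a fresh figure inside the callback and return it; gr.Plot renders the returned Figure.
    x = np.linspace(0, 2 * np.pi, 500)
    fig, ax = plt.subplots(figsize=(6, 3))
    ax.plot(x, np.sin(freq * x))
    ax.set_title(f"sin({freq:.1f}x)")
    return fig

with gr.Blocks() as demo:
    freq = gr.Slider(1, 10, value=2, step=0.5, label="Frequency")  # illustrative control
    out = gr.Plot(label="Figure")
    btn = gr.Button("Plot")
    btn.click(plot_sine, inputs=[freq], outputs=out)  # slider value is passed as a positional argument

demo.launch()

In the Space itself, the same wiring maps the three sliders to fit_plot's n_comp, gamma, and alpha arguments, and the returned figure is shown in the gr.Plot output declared in btn.click. In scikit-learn's KernelPCA, gamma is the RBF kernel coefficient and alpha regularizes the ridge regression used to learn the inverse transform when fit_inverse_transform=True, which is what the "Kernel coefficient" and "Hyperparameter for ridge regression" sliders control.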