| """ | |
| Demo is based on the [Kernel PCA] - (https://scikit-learn.org/stable/auto_examples/decomposition/plot_kernel_pca.html#sphx-glr-auto-examples-decomposition-plot-kernel-pca-py | |
| """ | |
from sklearn.datasets import make_circles
from sklearn.model_selection import train_test_split
from sklearn.decomposition import PCA, KernelPCA
import matplotlib
matplotlib.use('agg')  # non-interactive backend so figures can be rendered server-side
import matplotlib.pyplot as plt
import gradio as gr
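
# Fit PCA and KernelPCA on the two-circles dataset and return a figure
# comparing the test-set projections of both methods.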
def fit_plot(kernel, gamma, alpha, degree, coef0):
    X, y = make_circles(n_samples=1_000, factor=0.3, noise=0.05, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y, random_state=0)
    pca = PCA(n_components=2)
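    # Configure KernelPCA with only the hyperparameters that apply to the selected kernel.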
    if kernel == "linear":
        kernel_pca = KernelPCA(n_components=2, kernel=kernel, fit_inverse_transform=True, alpha=alpha)
    elif kernel == "poly":
        kernel_pca = KernelPCA(n_components=2, kernel=kernel, gamma=gamma, degree=degree, coef0=coef0, fit_inverse_transform=True, alpha=alpha)
    elif kernel == "rbf":
        kernel_pca = KernelPCA(n_components=2, kernel=kernel, gamma=gamma, fit_inverse_transform=True, alpha=alpha)
    elif kernel == "cosine":
        kernel_pca = KernelPCA(n_components=2, kernel=kernel, fit_inverse_transform=True, alpha=alpha)
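    # Fit both models on the training split and project the held-out test split.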
    X_test_pca = pca.fit(X_train).transform(X_test)
    X_test_kernel_pca = kernel_pca.fit(X_train).transform(X_test)
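    # Three panels: original test data, linear PCA projection, and kernel PCA projection.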
    fig, (orig_data_ax, pca_proj_ax, kernel_pca_proj_ax) = plt.subplots(ncols=3, figsize=(14, 4))
    orig_data_ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test)
    orig_data_ax.set_ylabel("Feature #1")
    orig_data_ax.set_xlabel("Feature #0")
    orig_data_ax.set_title("Testing data")
    pca_proj_ax.scatter(X_test_pca[:, 0], X_test_pca[:, 1], c=y_test)
    pca_proj_ax.set_ylabel("Principal component #1")
    pca_proj_ax.set_xlabel("Principal component #0")
    pca_proj_ax.set_title("Projection of testing data\n using PCA")
    kernel_pca_proj_ax.scatter(X_test_kernel_pca[:, 0], X_test_kernel_pca[:, 1], c=y_test)
    kernel_pca_proj_ax.set_ylabel("Principal component #1")
    kernel_pca_proj_ax.set_xlabel("Principal component #0")
    kernel_pca_proj_ax.set_title("Projection of testing data\n using KernelPCA")
    return fig
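
# Gradio UI: a kernel dropdown plus sliders for gamma, alpha, degree, and coef0,
# wired to fit_plot through the Submit button.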
with gr.Blocks() as demo:
    gr.Markdown("## PCA vs Kernel PCA")
    with gr.Row(variant='panel').style(equal_height=True):
        p1 = gr.Dropdown(choices=["linear", "poly", "rbf", "cosine"], label="Kernel", value="rbf", interactive=True)
    with gr.Row(variant='panel').style(equal_height=True):
        p2 = gr.Slider(0, 10, label="Kernel coefficient (for rbf, poly and sigmoid kernels)", value=None, step=1e-3, interactive=True)
        p3 = gr.Slider(0, 1, label="Alpha of ridge regression (for non-precomputed kernels)", value=1, step=1e-3, interactive=True)
    with gr.Row(variant='panel').style(equal_height=True):
        p4 = gr.Slider(0, 10, label="Degree (for poly kernel)", value=3, step=1, interactive=True)
        p5 = gr.Slider(0, 10, label="Independent term (for poly and sigmoid kernels)", value=1, step=1e-1, interactive=True)
    btn = gr.Button(value="Submit")
    out = gr.Plot(label="Projecting data with PCA and Kernel PCA")
    btn.click(fit_plot, inputs=[p1, p2, p3, p4, p5], outputs=out)

demo.launch()