import streamlit as st
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_circles, make_moons, make_classification
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.optimizers import Adam
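# Page configuration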
st.set_page_config(page_title="TF Playground", layout="wide")
st.title("🧠 TensorFlow Playground")
# Sidebar controls
st.sidebar.header("1. Dataset Options")
dataset = st.sidebar.selectbox("Select Dataset", ["circle", "moons", "linear", "gaussian"])
noise = st.sidebar.slider("Noise Level", 0.0, 0.5, 0.1, 0.01)
perc_train = st.sidebar.slider("Train/Test Split %", 10, 90, 50)
st.sidebar.header("2. Network Settings")
layers = st.sidebar.text_input("Neural Network Layers (e.g., 4,2)", "4,2")
activation = st.sidebar.selectbox("Activation Function", ["tanh", "relu", "sigmoid"])
learning_rate = st.sidebar.slider("Learning Rate", 0.001, 0.1, 0.03)
epochs = st.sidebar.slider("Epochs", 10, 300, 100)
batch_size = st.sidebar.slider("Batch Size", 1, 100, 10)
# Generate dataset
if dataset == "circle":
    X, y = make_circles(n_samples=500, noise=noise, factor=0.5, random_state=0)
elif dataset == "moons":
    X, y = make_moons(n_samples=500, noise=noise, random_state=0)
else:
    X, y = make_classification(n_samples=500, n_features=2, n_redundant=0, n_clusters_per_class=1,
                               n_informative=2, n_classes=2, class_sep=1.0, flip_y=noise, random_state=0)
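# Standardize features and split into train/test sets based on the chosen percentage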
X = StandardScaler().fit_transform(X)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=(100 - perc_train)/100, random_state=42)
# Build model
model = Sequential()
layer_sizes = [int(n.strip()) for n in layers.split(",") if n.strip().isdigit()]
if not layer_sizes:
    layer_sizes = [4, 2]  # fall back to the default architecture if the input can't be parsed
model.add(Dense(layer_sizes[0], input_dim=2, activation=activation))
for size in layer_sizes[1:]:
    model.add(Dense(size, activation=activation))
model.add(Dense(1, activation="sigmoid"))
model.compile(optimizer=Adam(learning_rate=learning_rate), loss="binary_crossentropy", metrics=["accuracy"])
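# Train the network with the selected hyperparameters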
with st.spinner("Training model..."):
    history = model.fit(X_train, y_train, validation_data=(X_test, y_test),
                        epochs=epochs, batch_size=batch_size, verbose=0)
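# Evaluate final accuracy on the train and test splits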
train_acc = model.evaluate(X_train, y_train, verbose=0)[1]
test_acc = model.evaluate(X_test, y_test, verbose=0)[1]
st.success(f"Train Accuracy: {train_acc:.3f} | Test Accuracy: {test_acc:.3f}")
# Accuracy plot
fig, ax = plt.subplots()
ax.plot(history.history["accuracy"], label="Train")
ax.plot(history.history["val_accuracy"], label="Test")
ax.set_title("Accuracy")
ax.set_xlabel("Epoch")
ax.set_ylabel("Accuracy")
ax.legend()
st.pyplot(fig)
# Decision boundary
def plot_boundary(X, y, model, ax):
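    """Shade the model's predicted probability over a grid covering X and scatter the true labels on top."""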
    h = 0.02
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))
    grid = np.c_[xx.ravel(), yy.ravel()]
    preds = model.predict(grid)
    preds = preds.reshape(xx.shape)
    ax.contourf(xx, yy, preds, cmap=plt.cm.RdBu, alpha=0.6)
    ax.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.RdBu, edgecolors='k')
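# Draw the learned decision boundary over the held-out test set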
fig2, ax2 = plt.subplots()
plot_boundary(X_test, y_test, model, ax2)
ax2.set_title("Decision Boundary")
st.pyplot(fig2)