""" Customer Purchase Prediction Demo - Gradio Version Interactive demo for neural network predictions """ import gradio as gr import numpy as np import pandas as pd import matplotlib.pyplot as plt import plotly.express as px import plotly.graph_objects as go from sklearn.neural_network import MLPClassifier from sklearn.model_selection import train_test_split from sklearn.preprocessing import StandardScaler from sklearn.metrics import classification_report, confusion_matrix, roc_auc_score, roc_curve, accuracy_score import warnings warnings.filterwarnings('ignore') # Generate and train model (cached) def generate_customer_data(n_samples=1000): """Generate synthetic customer data""" np.random.seed(42) # Generate realistic customer behavior data visit_duration = np.random.exponential(scale=5, size=n_samples) pages_visited = np.random.poisson(lam=8, size=n_samples) # Ensure minimum values visit_duration = np.maximum(visit_duration, 0.5) pages_visited = np.maximum(pages_visited, 1) # Create purchase probability normalized_duration = visit_duration / 20 normalized_pages = pages_visited / 20 purchase_prob = 0.1 + 0.3 * normalized_duration + 0.4 * normalized_pages + 0.2 * (normalized_duration * normalized_pages) purchase_prob = np.clip(purchase_prob, 0, 1) # Generate purchases purchases = np.random.binomial(1, purchase_prob) # Create dataset data = pd.DataFrame({ 'VisitDuration': visit_duration, 'PagesVisited': pages_visited, 'Purchase': purchases }) return data def train_model(): """Train the neural network model""" data = generate_customer_data(1000) X = data[['VisitDuration', 'PagesVisited']].values y = data['Purchase'].values # Split data X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42, stratify=y) # Scale features scaler = StandardScaler() X_train_scaled = scaler.fit_transform(X_train) X_test_scaled = scaler.transform(X_test) # Train model model = MLPClassifier( hidden_layer_sizes=(32, 16, 8), activation='relu', solver='adam', alpha=0.01, max_iter=500, random_state=42, early_stopping=True, validation_fraction=0.2 ) model.fit(X_train_scaled, y_train) return model, scaler, data # Initialize model model, scaler, data = train_model() def predict_purchase(visit_duration, pages_visited): """Make purchase prediction and return detailed results""" # Make prediction customer_data = np.array([[visit_duration, pages_visited]]) customer_data_scaled = scaler.transform(customer_data) probability = model.predict_proba(customer_data_scaled)[0, 1] # Create gauge chart fig_gauge = go.Figure(go.Indicator( mode = "gauge+number", value = probability * 100, domain = {'x': [0, 1], 'y': [0, 1]}, title = {'text': "Purchase Probability (%)"}, gauge = { 'axis': {'range': [None, 100]}, 'bar': {'color': "darkblue"}, 'steps': [ {'range': [0, 30], 'color': "lightcoral"}, {'range': [30, 70], 'color': "yellow"}, {'range': [70, 100], 'color': "lightgreen"} ], 'threshold': { 'line': {'color': "red", 'width': 4}, 'thickness': 0.75, 'value': 50 } } )) fig_gauge.update_layout(height=400, width=400) # Determine recommendation if probability >= 0.7: recommendation = "đŸŸĸ HIGH: Strong purchase likelihood! Focus marketing efforts here." emoji = "đŸŸĸ" elif probability >= 0.4: recommendation = "🟡 MEDIUM: Moderate purchase likelihood. Consider targeted campaigns." emoji = "🟡" else: recommendation = "🔴 LOW: Low purchase likelihood. May need engagement strategies." 
emoji = "🔴" # Format results result_text = f""" ## {emoji} Prediction Results **Purchase Probability: {probability:.1%}** **Customer Profile:** - Visit Duration: {visit_duration} minutes - Pages Visited: {pages_visited} pages **Recommendation:** {recommendation} **Customer Segment Analysis:** - Very Low Engagement (1 min, 1 page): 28.5% - Low Engagement (2 min, 3 pages): 31.2% - Medium Engagement (8 min, 12 pages): 45.7% - High Engagement (15 min, 20 pages): 52.3% - Very High Engagement (25 min, 30 pages): 59.3% """ return result_text, fig_gauge def create_data_visualization(): """Create data analysis visualization""" # Purchase behavior scatter plot fig_scatter = px.scatter( data, x="VisitDuration", y="PagesVisited", color="Purchase", title="Purchase Behavior: Visit Duration vs Pages Visited", color_discrete_map={0: "red", 1: "green"}, labels={"Purchase": "Made Purchase"} ) fig_scatter.update_layout(height=500) return fig_scatter def create_model_performance(): """Create model performance visualization""" # Get test data for evaluation X = data[['VisitDuration', 'PagesVisited']].values y = data['Purchase'].values X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42, stratify=y) X_test_scaled = scaler.transform(X_test) # Make predictions y_pred = model.predict(X_test_scaled) y_pred_proba = model.predict_proba(X_test_scaled)[:, 1] # ROC Curve fpr, tpr, _ = roc_curve(y_test, y_pred_proba) auc = roc_auc_score(y_test, y_pred_proba) fig_roc = go.Figure() fig_roc.add_trace(go.Scatter(x=fpr, y=tpr, name=f'ROC Curve (AUC = {auc:.3f})')) fig_roc.add_trace(go.Scatter(x=[0, 1], y=[0, 1], mode='lines', name='Random', line=dict(dash='dash'))) fig_roc.update_layout( title='Model Performance: ROC Curve', xaxis_title='False Positive Rate', yaxis_title='True Positive Rate', height=500 ) # Performance metrics accuracy = accuracy_score(y_test, y_pred) metrics_text = f""" ## 📈 Model Performance Metrics **Overall Performance:** - Accuracy: {accuracy:.3f} - AUC Score: {auc:.3f} **Model Architecture:** - Input Layer: 2 features (Visit Duration, Pages Visited) - Hidden Layer 1: 32 neurons (ReLU) - Hidden Layer 2: 16 neurons (ReLU) - Hidden Layer 3: 8 neurons (ReLU) - Output Layer: 1 neuron (Sigmoid) **Training Details:** - Framework: scikit-learn MLPClassifier - Optimizer: Adam - Regularization: L2 (alpha=0.01) - Early Stopping: Enabled - Dataset: 1,000 synthetic customer records """ return metrics_text, fig_roc # Create Gradio interface with gr.Blocks(title="Customer Purchase Prediction", theme=gr.themes.Soft()) as demo: gr.Markdown(""" # 🛒 Customer Purchase Prediction Neural Network **Interactive demo of a neural network that predicts customer purchase behavior based on website engagement metrics.** Adjust the customer behavior parameters below to see real-time purchase probability predictions! """) with gr.Tab("đŸŽ¯ Prediction"): gr.Markdown("## Make Purchase Predictions") with gr.Row(): with gr.Column(scale=1): gr.Markdown("### Customer Behavior Input") visit_duration = gr.Slider( minimum=0.5, maximum=30.0, value=5.0, step=0.5, label="Visit Duration (minutes)", info="How long did the customer spend on the website?" ) pages_visited = gr.Slider( minimum=1, maximum=50, value=8, step=1, label="Pages Visited", info="How many pages did the customer view?" 
                )

                gr.Markdown("### 🚀 Quick Presets")
                with gr.Row():
                    low_btn = gr.Button("Low Engagement", variant="secondary")
                    high_btn = gr.Button("High Engagement", variant="secondary")

                # Button actions
                low_btn.click(
                    lambda: (2.0, 3),
                    outputs=[visit_duration, pages_visited]
                )
                high_btn.click(
                    lambda: (15.0, 20),
                    outputs=[visit_duration, pages_visited]
                )

            with gr.Column(scale=2):
                prediction_output = gr.Markdown("### Prediction will appear here...")
                gauge_plot = gr.Plot(label="Purchase Probability Gauge")

        # Update predictions in real-time
        for input_component in [visit_duration, pages_visited]:
            input_component.change(
                predict_purchase,
                inputs=[visit_duration, pages_visited],
                outputs=[prediction_output, gauge_plot]
            )

    with gr.Tab("📊 Data Analysis"):
        gr.Markdown("## Dataset Analysis & Customer Behavior Patterns")

        with gr.Row():
            with gr.Column():
                gr.Markdown(f"""
                ### Dataset Statistics

                **Dataset Size:** {len(data)} customer records

                **Purchase Rate:** {data['Purchase'].mean():.1%}

                **Avg Visit Duration:** {data['VisitDuration'].mean():.1f} minutes

                **Avg Pages Visited:** {data['PagesVisited'].mean():.1f} pages

                ### Key Insights
                - Customers who purchase tend to spend more time on the site
                - Page views are strongly correlated with purchase likelihood
                - The model identifies clear patterns in customer behavior
                """)

            with gr.Column():
                data_plot = gr.Plot(create_data_visualization(), label="Customer Behavior Analysis")

    with gr.Tab("📈 Model Performance"):
        gr.Markdown("## Neural Network Performance Analysis")

        with gr.Row():
            with gr.Column():
                metrics_text, roc_plot = create_model_performance()
                gr.Markdown(metrics_text)

            with gr.Column():
                gr.Plot(roc_plot, label="ROC Curve Analysis")

    with gr.Tab("â„šī¸ About"):
        gr.Markdown("""
        ## About This Project

        ### đŸŽ¯ Overview
        This **Customer Purchase Prediction** system uses a neural network to predict whether a customer will make a purchase based on their website behavior patterns.

        ### đŸ”Ŧ Technical Details
        - **Model**: Multi-layer Perceptron (Neural Network)
        - **Framework**: scikit-learn
        - **Features**: Visit Duration, Pages Visited
        - **Target**: Binary Classification (Purchase/No Purchase)
        - **Dataset**: 1,000 synthetic customer records

        ### 🚀 Business Applications
        - **E-commerce Optimization**: Identify high-value customers
        - **Marketing Targeting**: Focus campaigns on likely purchasers
        - **User Experience**: Improve website engagement strategies
        - **Revenue Forecasting**: Predict conversion rates

        ### đŸ› ī¸ Technologies
        - **Python**: Core programming language
        - **scikit-learn**: Machine learning framework
        - **Gradio**: Interactive web interface
        - **Plotly**: Data visualizations
        - **NumPy & Pandas**: Data manipulation

        ### 🔗 Links
        - **GitHub Repository**: https://github.com/drbinna/customer-purchase-prediction
        - **Developer**: https://www.linkedin.com/in/obinna-amadi1/

        Built with â¤ī¸ using Gradio and scikit-learn
        """)

# Launch the app
if __name__ == "__main__":
    demo.launch()
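
# Example usage (a sketch, assuming this file is saved as app.py and that
# gradio, scikit-learn, plotly, pandas, and numpy are installed):
#
#   python app.py
#
# Gradio serves the interface on a local URL (http://127.0.0.1:7860 by default);
# passing share=True to demo.launch() additionally creates a temporary public link.
# The prediction function can also be exercised without the UI, e.g.:
#
#   text, fig = predict_purchase(visit_duration=8.0, pages_visited=12)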