import gradio as gr
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.ensemble import IsolationForest
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LinearRegression
from sklearn.metrics import r2_score
import warnings

warnings.filterwarnings('ignore')


class F1TelemetryAnalyzer:
    def __init__(self):
        self.scaler = StandardScaler()
        self.anomaly_detector = IsolationForest(contamination=0.1, random_state=42)
        self.tire_model = LinearRegression()
        self.fuel_model = LinearRegression()
        self.is_trained = False
        self.current_data = None
        self.required_columns = ['speed', 'throttle', 'brake_pressure', 'tire_temp', 'engine_temp']
        self.optional_columns = ['lap_time', 'lap_number', 'tire_degradation', 'fuel_remaining']

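    # Synthetic-data notes: the generator below draws base signals from normal/uniform
    # distributions, couples speed and temperatures to throttle and braking, and then
    # overwrites roughly 5% of samples with low speeds and overheated tires so the
    # anomaly detector has something to find.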
    def generate_sample_data(self, num_samples=1000):
        """Generate realistic F1 telemetry data"""
        np.random.seed(42)

        lap_time = np.random.normal(90, 5, num_samples)
        speed = np.random.normal(200, 30, num_samples)
        throttle = np.random.uniform(0, 100, num_samples)
        brake_pressure = np.random.uniform(0, 100, num_samples)
        tire_temp = np.random.normal(80, 15, num_samples)
        engine_temp = np.random.normal(95, 10, num_samples)

        speed = np.clip(speed + throttle * 0.5 - brake_pressure * 0.3, 50, 300)
        tire_temp = np.clip(tire_temp + speed * 0.1 + throttle * 0.2, 40, 120)
        engine_temp = np.clip(engine_temp + throttle * 0.15 + speed * 0.05, 70, 130)

        lap_number = np.random.randint(1, 60, num_samples)

        tire_degradation = 100 - (lap_number * 0.8 + np.random.normal(0, 2, num_samples))
        tire_degradation = np.clip(tire_degradation, 60, 100)

        fuel_remaining = 100 - (lap_number * 1.5 + np.random.normal(0, 3, num_samples))
        fuel_remaining = np.clip(fuel_remaining, 0, 100)

        anomaly_indices = np.random.choice(num_samples, size=int(num_samples * 0.05), replace=False)
        speed[anomaly_indices] = np.random.uniform(20, 50, len(anomaly_indices))
        tire_temp[anomaly_indices] = np.random.uniform(130, 150, len(anomaly_indices))

        return pd.DataFrame({
            'lap_time': lap_time,
            'speed': speed,
            'throttle': throttle,
            'brake_pressure': brake_pressure,
            'tire_temp': tire_temp,
            'engine_temp': engine_temp,
            'lap_number': lap_number,
            'tire_degradation': tire_degradation,
            'fuel_remaining': fuel_remaining
        })

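    # --- File ingestion and column-mapping helpers ---
    # The upload workflow is: parse the file, suggest a column mapping, validate the
    # mapping, then normalize the mapped columns into the internal telemetry schema.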
    def parse_uploaded_file(self, file):
        """Parse an uploaded file and return a DataFrame"""
        try:
            # gr.File(type="filepath") passes a plain path string; older Gradio versions
            # pass a temp-file object with a .name attribute, so handle both.
            file_path = file if isinstance(file, str) else file.name
            file_extension = file_path.split('.')[-1].lower()

            if file_extension == 'csv':
                df = pd.read_csv(file_path)
            elif file_extension in ['xlsx', 'xls']:
                df = pd.read_excel(file_path)
            elif file_extension == 'json':
                df = pd.read_json(file_path)
            else:
                return None, "Unsupported file format. Please upload CSV, Excel, or JSON files."

            return df, f"File loaded successfully! Shape: {df.shape}"
        except Exception as e:
            return None, f"Error loading file: {str(e)}"

    def get_column_suggestions(self, df):
        """Suggest column mappings based on common telemetry column names"""
        suggestions = {}
        lowered_columns = [str(col).lower() for col in df.columns]

        mapping_patterns = {
            'speed': ['speed', 'velocity', 'spd', 'v'],
            'throttle': ['throttle', 'thr', 'accelerator', 'gas'],
            'brake_pressure': ['brake', 'brk', 'brake_pressure', 'brake_force'],
            'tire_temp': ['tire_temp', 'tyre_temp', 'tire_temperature', 'tyre_temperature', 'temp_tire'],
            'engine_temp': ['engine_temp', 'engine_temperature', 'water_temp', 'coolant_temp'],
            'lap_time': ['lap_time', 'laptime', 'time', 'sector_time'],
            'lap_number': ['lap', 'lap_number', 'lap_num', 'lap_count'],
            'tire_degradation': ['tire_deg', 'tyre_deg', 'tire_wear', 'tyre_wear'],
            'fuel_remaining': ['fuel', 'fuel_remaining', 'fuel_level', 'fuel_load']
        }

        for telemetry_field, patterns in mapping_patterns.items():
            for pattern in patterns:
                # Take the first column whose lower-cased name contains the pattern
                matches = [original for original, lowered in zip(df.columns, lowered_columns)
                           if pattern in lowered]
                if matches:
                    suggestions[telemetry_field] = matches[0]
                    break

        return suggestions

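    # Illustrative example: for uploaded columns named 'Speed_kmh' and 'Tyre_Temp_FL',
    # the matcher above suggests {'speed': 'Speed_kmh', 'tire_temp': 'Tyre_Temp_FL'}.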
    def validate_mapped_data(self, df, column_mapping):
        """Validate that mapped data meets requirements"""
        missing_required = []
        for col in self.required_columns:
            # Unmapped dropdowns come through as None or an empty string
            if not column_mapping.get(col):
                missing_required.append(col)

        if missing_required:
            return False, f"Missing required columns: {', '.join(missing_required)}"

        for telemetry_col, df_col in column_mapping.items():
            if df_col and df_col not in df.columns:
                return False, f"Column '{df_col}' not found in uploaded data"

        return True, "Data validation successful"

    def process_uploaded_data(self, df, column_mapping):
        """Process uploaded data with column mapping"""
        processed_df = pd.DataFrame()

        for telemetry_col, df_col in column_mapping.items():
            if df_col and df_col in df.columns:
                processed_df[telemetry_col] = df[df_col]

        if 'lap_time' not in processed_df.columns:
            processed_df['lap_time'] = np.random.normal(90, 5, len(processed_df))

        if 'lap_number' not in processed_df.columns:
            processed_df['lap_number'] = range(1, len(processed_df) + 1)

        if 'tire_degradation' not in processed_df.columns:
            if 'lap_number' in processed_df.columns:
                processed_df['tire_degradation'] = 100 - (processed_df['lap_number'] * 0.8)
            else:
                processed_df['tire_degradation'] = np.random.uniform(70, 100, len(processed_df))

        if 'fuel_remaining' not in processed_df.columns:
            if 'lap_number' in processed_df.columns:
                processed_df['fuel_remaining'] = 100 - (processed_df['lap_number'] * 1.5)
            else:
                processed_df['fuel_remaining'] = np.random.uniform(50, 100, len(processed_df))

        processed_df = processed_df.dropna()

        processed_df['speed'] = np.clip(processed_df['speed'], 0, 400)
        processed_df['throttle'] = np.clip(processed_df['throttle'], 0, 100)
        processed_df['brake_pressure'] = np.clip(processed_df['brake_pressure'], 0, 100)
        processed_df['tire_temp'] = np.clip(processed_df['tire_temp'], 20, 200)
        processed_df['engine_temp'] = np.clip(processed_df['engine_temp'], 50, 150)

        return processed_df

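    # IsolationForest.fit_predict labels inliers as 1 and outliers as -1; the plots and
    # report code below rely on those two label values.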
    def detect_anomalies(self, data):
        """Detect anomalies in telemetry data"""
        features = ['speed', 'throttle', 'brake_pressure', 'tire_temp', 'engine_temp']
        X = data[features]

        anomalies = self.anomaly_detector.fit_predict(X)
        data['anomaly'] = anomalies

        return data

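    # Note: the R² scores returned below are computed on the same data the models were
    # fitted on (in-sample), so they are optimistic rather than a held-out estimate.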
    def train_predictive_models(self, data):
        """Train tire degradation and fuel consumption models"""
        features = ['lap_number', 'speed', 'throttle', 'tire_temp', 'engine_temp']
        X = data[features]

        y_tire = data['tire_degradation']
        self.tire_model.fit(X, y_tire)

        y_fuel = data['fuel_remaining']
        self.fuel_model.fit(X, y_fuel)

        self.is_trained = True

        tire_pred = self.tire_model.predict(X)
        fuel_pred = self.fuel_model.predict(X)

        tire_r2 = r2_score(y_tire, tire_pred)
        fuel_r2 = r2_score(y_fuel, fuel_pred)

        return tire_r2, fuel_r2

    def predict_performance(self, lap_number, speed, throttle, tire_temp, engine_temp):
        """Predict tire degradation and fuel consumption"""
        if not self.is_trained:
            return "Model not trained yet!", ""

        # Use the same feature names the models were trained with
        features = pd.DataFrame(
            [[lap_number, speed, throttle, tire_temp, engine_temp]],
            columns=['lap_number', 'speed', 'throttle', 'tire_temp', 'engine_temp']
        )

        tire_pred = self.tire_model.predict(features)[0]
        fuel_pred = self.fuel_model.predict(features)[0]

        return f"Predicted Tire Performance: {tire_pred:.1f}%", f"Predicted Fuel Remaining: {fuel_pred:.1f}%"

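    # The figure below is a 2x2 grid: speed vs lap time with anomalies highlighted,
    # tire-temperature histograms, tire degradation per lap, and fuel remaining per lap.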
    def create_visualizations(self, data):
        """Create telemetry visualizations"""
        fig, axes = plt.subplots(2, 2, figsize=(15, 12))

        normal_data = data[data['anomaly'] == 1]
        anomaly_data = data[data['anomaly'] == -1]

        axes[0, 0].scatter(normal_data['speed'], normal_data['lap_time'],
                           alpha=0.6, label='Normal', color='blue')
        axes[0, 0].scatter(anomaly_data['speed'], anomaly_data['lap_time'],
                           alpha=0.8, label='Anomaly', color='red')
        axes[0, 0].set_xlabel('Speed (km/h)')
        axes[0, 0].set_ylabel('Lap Time (s)')
        axes[0, 0].set_title('Speed vs Lap Time (Anomaly Detection)')
        axes[0, 0].legend()
        axes[0, 0].grid(True, alpha=0.3)

        axes[0, 1].hist(normal_data['tire_temp'], bins=30, alpha=0.7, label='Normal', color='blue')
        axes[0, 1].hist(anomaly_data['tire_temp'], bins=30, alpha=0.7, label='Anomaly', color='red')
        axes[0, 1].set_xlabel('Tire Temperature (°C)')
        axes[0, 1].set_ylabel('Frequency')
        axes[0, 1].set_title('Tire Temperature Distribution')
        axes[0, 1].legend()
        axes[0, 1].grid(True, alpha=0.3)

        axes[1, 0].scatter(data['lap_number'], data['tire_degradation'], alpha=0.6, color='green')
        axes[1, 0].set_xlabel('Lap Number')
        axes[1, 0].set_ylabel('Tire Performance (%)')
        axes[1, 0].set_title('Tire Degradation Over Race')
        axes[1, 0].grid(True, alpha=0.3)

        axes[1, 1].scatter(data['lap_number'], data['fuel_remaining'], alpha=0.6, color='orange')
        axes[1, 1].set_xlabel('Lap Number')
        axes[1, 1].set_ylabel('Fuel Remaining (%)')
        axes[1, 1].set_title('Fuel Consumption Over Race')
        axes[1, 1].grid(True, alpha=0.3)

        plt.tight_layout()
        return fig


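# Minimal headless usage sketch (no Gradio UI), mirroring analyze_sample_data() below:
#
#     analyzer = F1TelemetryAnalyzer()
#     data = analyzer.generate_sample_data(500)
#     data = analyzer.detect_anomalies(data)
#     tire_r2, fuel_r2 = analyzer.train_predictive_models(data)
#     print(analyzer.predict_performance(lap_number=12, speed=210, throttle=80,
#                                         tire_temp=85, engine_temp=98))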
analyzer = F1TelemetryAnalyzer()

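# --- Gradio callbacks ---
# Each function below is wired to a UI event and returns values in the order of the
# `outputs` list declared for that event.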
def load_file(file):
    """Load and preview uploaded file"""
    if file is None:
        return None, "No file uploaded", {}, ""

    df, message = analyzer.parse_uploaded_file(file)
    if df is None:
        return None, message, {}, ""

    suggestions = analyzer.get_column_suggestions(df)

    preview = df.head(10).to_string()

    return df, message, suggestions, f"Data Preview (first 10 rows):\n{preview}"

def analyze_uploaded_data(df, speed_col, throttle_col, brake_col, tire_temp_col, engine_temp_col,
                          lap_time_col, lap_num_col, tire_deg_col, fuel_col):
    """Analyze uploaded telemetry data"""
    if df is None:
        return None, "No data loaded. Please upload a file first."

    column_mapping = {
        'speed': speed_col,
        'throttle': throttle_col,
        'brake_pressure': brake_col,
        'tire_temp': tire_temp_col,
        'engine_temp': engine_temp_col,
        'lap_time': lap_time_col,
        'lap_number': lap_num_col,
        'tire_degradation': tire_deg_col,
        'fuel_remaining': fuel_col
    }

    is_valid, validation_message = analyzer.validate_mapped_data(df, column_mapping)
    if not is_valid:
        return None, validation_message

    try:
        processed_data = analyzer.process_uploaded_data(df, column_mapping)
        analyzer.current_data = processed_data

        processed_data = analyzer.detect_anomalies(processed_data)

        tire_r2, fuel_r2 = analyzer.train_predictive_models(processed_data)

        fig = analyzer.create_visualizations(processed_data)

        total_samples = len(processed_data)
        anomalies_detected = len(processed_data[processed_data['anomaly'] == -1])
        anomaly_percentage = (anomalies_detected / total_samples) * 100

        report = f"""
## F1 Telemetry Analysis Report (Uploaded Data)

**Data Summary:**
- Total samples analyzed: {total_samples}
- Anomalies detected: {anomalies_detected} ({anomaly_percentage:.1f}%)

**Model Performance:**
- Tire Degradation Model R²: {tire_r2:.3f}
- Fuel Consumption Model R²: {fuel_r2:.3f}

**Key Insights:**
- Average lap time: {processed_data['lap_time'].mean():.1f} seconds
- Average speed: {processed_data['speed'].mean():.1f} km/h
- Maximum tire temperature: {processed_data['tire_temp'].max():.1f}°C
- Minimum tire performance: {processed_data['tire_degradation'].min():.1f}%

**Anomaly Analysis:**
- Anomalies primarily detected in: Low speed conditions and high tire temperatures
- Recommended action: Investigate cooling systems and potential mechanical issues
"""

        return fig, report

    except Exception as e:
        return None, f"Error processing data: {str(e)}"

def analyze_sample_data():
    """Analyze sample telemetry data"""
    data = analyzer.generate_sample_data(1000)
    analyzer.current_data = data

    data = analyzer.detect_anomalies(data)

    tire_r2, fuel_r2 = analyzer.train_predictive_models(data)

    fig = analyzer.create_visualizations(data)

    total_samples = len(data)
    anomalies_detected = len(data[data['anomaly'] == -1])
    anomaly_percentage = (anomalies_detected / total_samples) * 100

    report = f"""
## F1 Telemetry Analysis Report (Sample Data)

**Data Summary:**
- Total samples analyzed: {total_samples}
- Anomalies detected: {anomalies_detected} ({anomaly_percentage:.1f}%)

**Model Performance:**
- Tire Degradation Model R²: {tire_r2:.3f}
- Fuel Consumption Model R²: {fuel_r2:.3f}

**Key Insights:**
- Average lap time: {data['lap_time'].mean():.1f} seconds
- Average speed: {data['speed'].mean():.1f} km/h
- Maximum tire temperature: {data['tire_temp'].max():.1f}°C
- Minimum tire performance: {data['tire_degradation'].min():.1f}%

**Anomaly Analysis:**
- Anomalies primarily detected in: Low speed conditions and high tire temperatures
- Recommended action: Investigate cooling systems and potential mechanical issues
"""

    return fig, report

def predict_telemetry(lap_number, speed, throttle, tire_temp, engine_temp):
    """Predict tire and fuel performance"""
    tire_pred, fuel_pred = analyzer.predict_performance(lap_number, speed, throttle, tire_temp, engine_temp)
    return tire_pred, fuel_pred

def update_column_dropdowns(df, suggestions):
    """Update dropdown options based on loaded data"""
    if df is None:
        return [gr.Dropdown(choices=[], value=None)] * 9

    columns = [""] + list(df.columns)

    return [
        gr.Dropdown(choices=columns, value=suggestions.get('speed', ''), label="Speed Column"),
        gr.Dropdown(choices=columns, value=suggestions.get('throttle', ''), label="Throttle Column"),
        gr.Dropdown(choices=columns, value=suggestions.get('brake_pressure', ''), label="Brake Pressure Column"),
        gr.Dropdown(choices=columns, value=suggestions.get('tire_temp', ''), label="Tire Temperature Column"),
        gr.Dropdown(choices=columns, value=suggestions.get('engine_temp', ''), label="Engine Temperature Column"),
        gr.Dropdown(choices=columns, value=suggestions.get('lap_time', ''), label="Lap Time Column"),
        gr.Dropdown(choices=columns, value=suggestions.get('lap_number', ''), label="Lap Number Column"),
        gr.Dropdown(choices=columns, value=suggestions.get('tire_degradation', ''), label="Tire Degradation Column"),
        gr.Dropdown(choices=columns, value=suggestions.get('fuel_remaining', ''), label="Fuel Remaining Column")
    ]


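# UI layout: four tabs. "Upload Real Data" handles file upload and column mapping,
# "Sample Data Analysis" generates synthetic data, "Performance Prediction" feeds slider
# values to the trained models, and "About" documents the tool.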
with gr.Blocks(title="F1 Telemetry Data Analyzer", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# F1 Telemetry Data Analyzer")
    gr.Markdown("Advanced AI-powered analysis of Formula 1 telemetry data with anomaly detection and predictive modeling.")
    gr.Markdown("**Choose your data source:** Upload your own telemetry files or generate synthetic data for testing!")

    uploaded_df = gr.State(None)
    # Shared state for the column-mapping suggestions produced by load_file
    column_suggestions = gr.State({})

    with gr.Tab("Upload Real Data"):
        gr.Markdown("### Upload your own telemetry data files")
        gr.Markdown("**Supported formats:** CSV, Excel (.xlsx/.xls), JSON | **Perfect for:** Real racing data, simulator exports, custom datasets")

        with gr.Row():
            file_upload = gr.File(
                label="Upload Telemetry Data",
                file_types=[".csv", ".xlsx", ".xls", ".json"],
                type="filepath"
            )

        load_status = gr.Textbox(label="Load Status", interactive=False)
        data_preview = gr.Textbox(label="Data Preview", lines=10, interactive=False)

        gr.Markdown("### Map Your Data Columns")
        gr.Markdown("**Required columns** (marked with *): Speed*, Throttle*, Brake Pressure*, Tire Temperature*, Engine Temperature*")

        with gr.Row():
            with gr.Column():
                speed_col = gr.Dropdown(label="Speed Column *", choices=[], value="")
                throttle_col = gr.Dropdown(label="Throttle Column *", choices=[], value="")
                brake_col = gr.Dropdown(label="Brake Pressure Column *", choices=[], value="")
                tire_temp_col = gr.Dropdown(label="Tire Temperature Column *", choices=[], value="")
                engine_temp_col = gr.Dropdown(label="Engine Temperature Column *", choices=[], value="")

            with gr.Column():
                lap_time_col = gr.Dropdown(label="Lap Time Column", choices=[], value="")
                lap_num_col = gr.Dropdown(label="Lap Number Column", choices=[], value="")
                tire_deg_col = gr.Dropdown(label="Tire Degradation Column", choices=[], value="")
                fuel_col = gr.Dropdown(label="Fuel Remaining Column", choices=[], value="")

        analyze_uploaded_btn = gr.Button("🔍 Analyze Uploaded Data", variant="primary")

        with gr.Row():
            with gr.Column(scale=2):
                uploaded_plot_output = gr.Plot(label="Telemetry Visualizations")
            with gr.Column(scale=1):
                uploaded_report_output = gr.Markdown(label="Analysis Report")

        file_upload.upload(
            load_file,
            inputs=[file_upload],
            outputs=[uploaded_df, load_status, column_suggestions, data_preview]
        ).then(
            update_column_dropdowns,
            inputs=[uploaded_df, column_suggestions],
            outputs=[speed_col, throttle_col, brake_col, tire_temp_col, engine_temp_col,
                     lap_time_col, lap_num_col, tire_deg_col, fuel_col]
        )

        analyze_uploaded_btn.click(
            analyze_uploaded_data,
            inputs=[uploaded_df, speed_col, throttle_col, brake_col, tire_temp_col, engine_temp_col,
                    lap_time_col, lap_num_col, tire_deg_col, fuel_col],
            outputs=[uploaded_plot_output, uploaded_report_output]
        )

    with gr.Tab("Sample Data Analysis"):
        gr.Markdown("### Generate and analyze synthetic telemetry data")
        gr.Markdown("**Perfect for testing and learning!** Generate realistic F1 telemetry data with built-in anomalies and patterns.")
        analyze_btn = gr.Button("Generate & Analyze Sample Data", variant="primary")

        with gr.Row():
            with gr.Column(scale=2):
                plot_output = gr.Plot(label="Telemetry Visualizations")
            with gr.Column(scale=1):
                report_output = gr.Markdown(label="Analysis Report")

        analyze_btn.click(
            analyze_sample_data,
            outputs=[plot_output, report_output]
        )

    with gr.Tab("Performance Prediction"):
        gr.Markdown("### Predict tire performance and fuel consumption")
        gr.Markdown("*Note: Run analysis first to train the models*")

        with gr.Row():
            with gr.Column():
                lap_input = gr.Slider(1, 60, value=10, label="Lap Number")
                speed_input = gr.Slider(50, 300, value=200, label="Speed (km/h)")
                throttle_input = gr.Slider(0, 100, value=75, label="Throttle (%)")
                tire_temp_input = gr.Slider(40, 120, value=80, label="Tire Temperature (°C)")
                engine_temp_input = gr.Slider(70, 130, value=95, label="Engine Temperature (°C)")

                predict_btn = gr.Button("🎯 Predict Performance", variant="secondary")

            with gr.Column():
                tire_pred_output = gr.Textbox(label="Tire Performance Prediction")
                fuel_pred_output = gr.Textbox(label="Fuel Consumption Prediction")

        predict_btn.click(
            predict_telemetry,
            inputs=[lap_input, speed_input, throttle_input, tire_temp_input, engine_temp_input],
            outputs=[tire_pred_output, fuel_pred_output]
        )

    with gr.Tab("About"):
        gr.Markdown("""
## About This Tool

This F1 Telemetry Data Analyzer demonstrates advanced AI techniques used in Formula 1 racing:

**Data Upload Features:**
- Support for CSV, Excel, and JSON file formats
- Automatic column detection and mapping suggestions
- Data validation and cleaning
- Flexible data structure handling

**Synthetic Data Generation:**
- Generate realistic F1 telemetry data for testing
- Built-in anomalies and realistic correlations
- Perfect for learning and demonstration
- No data upload required

**Anomaly Detection:**
- Uses the Isolation Forest algorithm to detect unusual patterns in telemetry data
- Identifies potential mechanical issues or performance anomalies
- Helps engineers spot problems before they become critical

**Predictive Modeling:**
- Machine learning models predict tire degradation and fuel consumption
- Based on real-time telemetry inputs (speed, throttle, temperatures)
- Enables strategic decision-making during races

**Key Features:**
- Real-time telemetry processing simulation
- Advanced visualization of racing data
- Performance prediction for race strategy
- Anomaly detection for preventive maintenance
- Upload and analyze your own telemetry data

**Technical Stack:**
- Python with scikit-learn for ML models
- Isolation Forest for anomaly detection
- Linear regression for performance prediction
- Matplotlib for visualizations
- Gradio for the interactive web interface

**Data Format Requirements:**
- **Required columns:** Speed, Throttle, Brake Pressure, Tire Temperature, Engine Temperature
- **Optional columns:** Lap Time, Lap Number, Tire Degradation, Fuel Remaining
- Missing optional columns will be estimated automatically
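
**Example CSV header (illustrative only; your column names can differ and are mapped in the Upload Real Data tab):**

```text
speed,throttle,brake_pressure,tire_temp,engine_temp,lap_number
245.3,88.0,2.5,92.1,101.4,12
```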
""")


if __name__ == "__main__":
    demo.launch()