CCockrum committed · Commit 696c1f9 · verified · 1 Parent(s): e0873b5

Update app.py

Files changed (1):
  1. app.py +298 -8
app.py CHANGED
@@ -7,6 +7,8 @@ from sklearn.preprocessing import StandardScaler
 from sklearn.linear_model import LinearRegression
 from sklearn.metrics import mean_squared_error, r2_score
 import warnings
+import io
+import json
 warnings.filterwarnings('ignore')

 class F1TelemetryAnalyzer:
@@ -16,6 +18,9 @@ class F1TelemetryAnalyzer:
         self.tire_model = LinearRegression()
         self.fuel_model = LinearRegression()
         self.is_trained = False
+        self.current_data = None
+        self.required_columns = ['speed', 'throttle', 'brake_pressure', 'tire_temp', 'engine_temp']
+        self.optional_columns = ['lap_time', 'lap_number', 'tire_degradation', 'fuel_remaining']

     def generate_sample_data(self, num_samples=1000):
         """Generate realistic F1 telemetry data"""
@@ -62,6 +67,110 @@ class F1TelemetryAnalyzer:
             'fuel_remaining': fuel_remaining
         })

+    def parse_uploaded_file(self, file):
+        """Parse uploaded file and return DataFrame"""
+        try:
+            file_extension = file.name.split('.')[-1].lower()
+
+            if file_extension == 'csv':
+                df = pd.read_csv(file.name)
+            elif file_extension in ['xlsx', 'xls']:
+                df = pd.read_excel(file.name)
+            elif file_extension == 'json':
+                df = pd.read_json(file.name)
+            else:
+                return None, "Unsupported file format. Please upload CSV, Excel, or JSON files."
+
+            return df, f"File loaded successfully! Shape: {df.shape}"
+        except Exception as e:
+            return None, f"Error loading file: {str(e)}"
+
+    def get_column_suggestions(self, df):
+        """Suggest column mappings based on common telemetry column names"""
+        suggestions = {}
+        column_names = df.columns.str.lower()
+
+        # Common mapping patterns
+        mapping_patterns = {
+            'speed': ['speed', 'velocity', 'spd', 'v'],
+            'throttle': ['throttle', 'thr', 'accelerator', 'gas'],
+            'brake_pressure': ['brake', 'brk', 'brake_pressure', 'brake_force'],
+            'tire_temp': ['tire_temp', 'tyre_temp', 'tire_temperature', 'tyre_temperature', 'temp_tire'],
+            'engine_temp': ['engine_temp', 'engine_temperature', 'water_temp', 'coolant_temp'],
+            'lap_time': ['lap_time', 'laptime', 'time', 'sector_time'],
+            'lap_number': ['lap', 'lap_number', 'lap_num', 'lap_count'],
+            'tire_degradation': ['tire_deg', 'tyre_deg', 'tire_wear', 'tyre_wear'],
+            'fuel_remaining': ['fuel', 'fuel_remaining', 'fuel_level', 'fuel_load']
+        }
+
+        for telemetry_field, patterns in mapping_patterns.items():
+            for pattern in patterns:
+                matches = column_names[column_names.str.contains(pattern, na=False)]
+                if len(matches) > 0:
+                    suggestions[telemetry_field] = df.columns[matches.index[0]]
+                    break
+
+        return suggestions
+
+    def validate_mapped_data(self, df, column_mapping):
+        """Validate that mapped data meets requirements"""
+        missing_required = []
+        for col in self.required_columns:
+            if col not in column_mapping or column_mapping[col] is None:
+                missing_required.append(col)
+
+        if missing_required:
+            return False, f"Missing required columns: {', '.join(missing_required)}"
+
+        # Check if mapped columns exist in DataFrame
+        for telemetry_col, df_col in column_mapping.items():
+            if df_col and df_col not in df.columns:
+                return False, f"Column '{df_col}' not found in uploaded data"
+
+        return True, "Data validation successful"
+
+    def process_uploaded_data(self, df, column_mapping):
+        """Process uploaded data with column mapping"""
+        processed_df = pd.DataFrame()
+
+        # Map columns
+        for telemetry_col, df_col in column_mapping.items():
+            if df_col and df_col in df.columns:
+                processed_df[telemetry_col] = df[df_col]
+
+        # Fill missing optional columns with defaults or calculated values
+        if 'lap_time' not in processed_df.columns:
+            processed_df['lap_time'] = np.random.normal(90, 5, len(processed_df))
+
+        if 'lap_number' not in processed_df.columns:
+            processed_df['lap_number'] = range(1, len(processed_df) + 1)
+
+        if 'tire_degradation' not in processed_df.columns:
+            # Estimate tire degradation based on available data
+            if 'lap_number' in processed_df.columns:
+                processed_df['tire_degradation'] = 100 - (processed_df['lap_number'] * 0.8)
+            else:
+                processed_df['tire_degradation'] = np.random.uniform(70, 100, len(processed_df))
+
+        if 'fuel_remaining' not in processed_df.columns:
+            # Estimate fuel consumption based on available data
+            if 'lap_number' in processed_df.columns:
+                processed_df['fuel_remaining'] = 100 - (processed_df['lap_number'] * 1.5)
+            else:
+                processed_df['fuel_remaining'] = np.random.uniform(50, 100, len(processed_df))
+
+        # Clean data
+        processed_df = processed_df.dropna()
+
+        # Clip values to reasonable ranges
+        processed_df['speed'] = np.clip(processed_df['speed'], 0, 400)
+        processed_df['throttle'] = np.clip(processed_df['throttle'], 0, 100)
+        processed_df['brake_pressure'] = np.clip(processed_df['brake_pressure'], 0, 100)
+        processed_df['tire_temp'] = np.clip(processed_df['tire_temp'], 20, 200)
+        processed_df['engine_temp'] = np.clip(processed_df['engine_temp'], 50, 150)
+
+        return processed_df
+
     def detect_anomalies(self, data):
         """Detect anomalies in telemetry data"""
         features = ['speed', 'throttle', 'brake_pressure', 'tire_temp', 'engine_temp']
@@ -157,10 +266,98 @@
 # Initialize the analyzer
 analyzer = F1TelemetryAnalyzer()

-def analyze_telemetry():
-    """Main function to run telemetry analysis"""
+def load_file(file):
+    """Load and preview uploaded file"""
+    if file is None:
+        return None, "No file uploaded", {}, ""
+
+    df, message = analyzer.parse_uploaded_file(file)
+    if df is None:
+        return None, message, {}, ""
+
+    # Get column suggestions
+    suggestions = analyzer.get_column_suggestions(df)
+
+    # Create preview
+    preview = df.head(10).to_string()
+
+    return df, message, suggestions, f"Data Preview (first 10 rows):\n{preview}"
+
+def analyze_uploaded_data(df, speed_col, throttle_col, brake_col, tire_temp_col, engine_temp_col,
+                          lap_time_col, lap_num_col, tire_deg_col, fuel_col):
+    """Analyze uploaded telemetry data"""
+    if df is None:
+        return None, "No data loaded. Please upload a file first."
+
+    # Create column mapping
+    column_mapping = {
+        'speed': speed_col,
+        'throttle': throttle_col,
+        'brake_pressure': brake_col,
+        'tire_temp': tire_temp_col,
+        'engine_temp': engine_temp_col,
+        'lap_time': lap_time_col,
+        'lap_number': lap_num_col,
+        'tire_degradation': tire_deg_col,
+        'fuel_remaining': fuel_col
+    }
+
+    # Validate mapping
+    is_valid, validation_message = analyzer.validate_mapped_data(df, column_mapping)
+    if not is_valid:
+        return None, validation_message
+
+    # Process data
+    try:
+        processed_data = analyzer.process_uploaded_data(df, column_mapping)
+        analyzer.current_data = processed_data
+
+        # Detect anomalies
+        processed_data = analyzer.detect_anomalies(processed_data)
+
+        # Train models
+        tire_r2, fuel_r2 = analyzer.train_predictive_models(processed_data)
+
+        # Create visualizations
+        fig = analyzer.create_visualizations(processed_data)
+
+        # Generate report
+        total_samples = len(processed_data)
+        anomalies_detected = len(processed_data[processed_data['anomaly'] == -1])
+        anomaly_percentage = (anomalies_detected / total_samples) * 100
+
+        report = f"""
+        ## F1 Telemetry Analysis Report (Uploaded Data)
+
+        **Data Summary:**
+        - Total samples analyzed: {total_samples}
+        - Anomalies detected: {anomalies_detected} ({anomaly_percentage:.1f}%)
+
+        **Model Performance:**
+        - Tire Degradation Model R²: {tire_r2:.3f}
+        - Fuel Consumption Model R²: {fuel_r2:.3f}
+
+        **Key Insights:**
+        - Average lap time: {processed_data['lap_time'].mean():.1f} seconds
+        - Average speed: {processed_data['speed'].mean():.1f} km/h
+        - Maximum tire temperature: {processed_data['tire_temp'].max():.1f}°C
+        - Minimum tire performance: {processed_data['tire_degradation'].min():.1f}%
+
+        **Anomaly Analysis:**
+        - Anomalies primarily detected in: Low speed conditions and high tire temperatures
+        - Recommended action: Investigate cooling systems and potential mechanical issues
+        """
+
+        return fig, report
+
+    except Exception as e:
+        return None, f"Error processing data: {str(e)}"
+
+def analyze_sample_data():
+    """Analyze sample telemetry data"""
     # Generate sample data
     data = analyzer.generate_sample_data(1000)
+    analyzer.current_data = data

     # Detect anomalies
     data = analyzer.detect_anomalies(data)
@@ -177,7 +374,7 @@ def analyze_telemetry():
     anomaly_percentage = (anomalies_detected / total_samples) * 100

     report = f"""
-    ## F1 Telemetry Analysis Report
+    ## F1 Telemetry Analysis Report (Sample Data)

     **Data Summary:**
     - Total samples analyzed: {total_samples}
@@ -205,14 +402,95 @@ def predict_telemetry(lap_number, speed, throttle, tire_temp, engine_temp):
     tire_pred, fuel_pred = analyzer.predict_performance(lap_number, speed, throttle, tire_temp, engine_temp)
     return tire_pred, fuel_pred

+def update_column_dropdowns(df, suggestions):
+    """Update dropdown options based on loaded data"""
+    if df is None:
+        return [gr.Dropdown(choices=[], value=None)] * 9
+
+    columns = [""] + list(df.columns)
+
+    return [
+        gr.Dropdown(choices=columns, value=suggestions.get('speed', ''), label="Speed Column"),
+        gr.Dropdown(choices=columns, value=suggestions.get('throttle', ''), label="Throttle Column"),
+        gr.Dropdown(choices=columns, value=suggestions.get('brake_pressure', ''), label="Brake Pressure Column"),
+        gr.Dropdown(choices=columns, value=suggestions.get('tire_temp', ''), label="Tire Temperature Column"),
+        gr.Dropdown(choices=columns, value=suggestions.get('engine_temp', ''), label="Engine Temperature Column"),
+        gr.Dropdown(choices=columns, value=suggestions.get('lap_time', ''), label="Lap Time Column"),
+        gr.Dropdown(choices=columns, value=suggestions.get('lap_number', ''), label="Lap Number Column"),
+        gr.Dropdown(choices=columns, value=suggestions.get('tire_degradation', ''), label="Tire Degradation Column"),
+        gr.Dropdown(choices=columns, value=suggestions.get('fuel_remaining', ''), label="Fuel Remaining Column")
+    ]
+
 # Create Gradio interface
 with gr.Blocks(title="F1 Telemetry Data Analyzer", theme=gr.themes.Soft()) as demo:
     gr.Markdown("# F1 Telemetry Data Analyzer")
     gr.Markdown("Advanced AI-powered analysis of Formula 1 telemetry data with anomaly detection and predictive modeling.")

-    with gr.Tab("Data Analysis"):
-        gr.Markdown("### Generate and analyze telemetry data")
-        analyze_btn = gr.Button("Analyze Telemetry Data", variant="primary")
+    # Store dataframe in state
+    uploaded_df = gr.State(None)
+
+    with gr.Tab("Upload Data"):
+        gr.Markdown("### Upload your telemetry data files")
+        gr.Markdown("**Supported formats:** CSV, Excel (.xlsx/.xls), JSON")
+
+        with gr.Row():
+            file_upload = gr.File(
+                label="Upload Telemetry Data",
+                file_types=[".csv", ".xlsx", ".xls", ".json"],
+                type="filepath"
+            )
+
+        load_status = gr.Textbox(label="Load Status", interactive=False)
+        data_preview = gr.Textbox(label="Data Preview", lines=10, interactive=False)
+
+        gr.Markdown("### Map Your Data Columns")
+        gr.Markdown("**Required columns** (marked with *): Speed*, Throttle*, Brake Pressure*, Tire Temperature*, Engine Temperature*")
+
+        with gr.Row():
+            with gr.Column():
+                speed_col = gr.Dropdown(label="Speed Column *", choices=[], value="")
+                throttle_col = gr.Dropdown(label="Throttle Column *", choices=[], value="")
+                brake_col = gr.Dropdown(label="Brake Pressure Column *", choices=[], value="")
+                tire_temp_col = gr.Dropdown(label="Tire Temperature Column *", choices=[], value="")
+                engine_temp_col = gr.Dropdown(label="Engine Temperature Column *", choices=[], value="")
+
+            with gr.Column():
+                lap_time_col = gr.Dropdown(label="Lap Time Column", choices=[], value="")
+                lap_num_col = gr.Dropdown(label="Lap Number Column", choices=[], value="")
+                tire_deg_col = gr.Dropdown(label="Tire Degradation Column", choices=[], value="")
+                fuel_col = gr.Dropdown(label="Fuel Remaining Column", choices=[], value="")
+
+        analyze_uploaded_btn = gr.Button("🔍 Analyze Uploaded Data", variant="primary")
+
+        with gr.Row():
+            with gr.Column(scale=2):
+                uploaded_plot_output = gr.Plot(label="Telemetry Visualizations")
+            with gr.Column(scale=1):
+                uploaded_report_output = gr.Markdown(label="Analysis Report")
+
+        # File upload event
+        file_upload.upload(
+            load_file,
+            inputs=[file_upload],
+            outputs=[uploaded_df, load_status, gr.State(), data_preview]
+        ).then(
+            lambda df, suggestions: update_column_dropdowns(df, suggestions),
+            inputs=[uploaded_df, gr.State()],
+            outputs=[speed_col, throttle_col, brake_col, tire_temp_col, engine_temp_col,
+                     lap_time_col, lap_num_col, tire_deg_col, fuel_col]
+        )
+
+        # Analyze uploaded data
+        analyze_uploaded_btn.click(
+            analyze_uploaded_data,
+            inputs=[uploaded_df, speed_col, throttle_col, brake_col, tire_temp_col, engine_temp_col,
+                    lap_time_col, lap_num_col, tire_deg_col, fuel_col],
+            outputs=[uploaded_plot_output, uploaded_report_output]
+        )
+
+    with gr.Tab("Sample Data Analysis"):
+        gr.Markdown("### Generate and analyze sample telemetry data")
+        analyze_btn = gr.Button("Analyze Sample Data", variant="primary")

         with gr.Row():
             with gr.Column(scale=2):
@@ -221,13 +499,13 @@ with gr.Blocks(title="F1 Telemetry Data Analyzer", theme=gr.themes.Soft()) as de
                 report_output = gr.Markdown(label="Analysis Report")

         analyze_btn.click(
-            analyze_telemetry,
+            analyze_sample_data,
             outputs=[plot_output, report_output]
         )

     with gr.Tab("Performance Prediction"):
         gr.Markdown("### Predict tire performance and fuel consumption")
-        gr.Markdown("*Note: Run the analysis first to train the models*")
+        gr.Markdown("*Note: Run analysis first to train the models*")

         with gr.Row():
             with gr.Column():
@@ -255,6 +533,12 @@ with gr.Blocks(title="F1 Telemetry Data Analyzer", theme=gr.themes.Soft()) as de

     This F1 Telemetry Data Analyzer demonstrates advanced AI techniques used in Formula 1 racing:

+    **Data Upload Features:**
+    - Support for CSV, Excel, and JSON file formats
+    - Automatic column detection and mapping suggestions
+    - Data validation and cleaning
+    - Flexible data structure handling
+
     **Anomaly Detection:**
     - Uses Isolation Forest algorithm to detect unusual patterns in telemetry data
     - Identifies potential mechanical issues or performance anomalies
@@ -270,6 +554,7 @@ with gr.Blocks(title="F1 Telemetry Data Analyzer", theme=gr.themes.Soft()) as de
     - Advanced visualization of racing data
     - Performance prediction for race strategy
     - Anomaly detection for preventive maintenance
+    - Upload and analyze your own telemetry data

     **Technical Stack:**
     - Python with scikit-learn for ML models
@@ -277,6 +562,11 @@ with gr.Blocks(title="F1 Telemetry Data Analyzer", theme=gr.themes.Soft()) as de
     - Linear regression for performance prediction
     - Matplotlib for advanced visualizations
     - Gradio for interactive web interface
+
+    **Data Format Requirements:**
+    - **Required columns:** Speed, Throttle, Brake Pressure, Tire Temperature, Engine Temperature
+    - **Optional columns:** Lap Time, Lap Number, Tire Degradation, Fuel Remaining
+    - Missing optional columns will be estimated automatically
     """)

 if __name__ == "__main__":
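
Below is an illustrative, headless walk-through of the column-mapping pipeline this commit adds (map, validate, process). It is only a sketch, not part of the commit: it assumes app.py is importable without launching the Gradio UI, the raw column names are invented for the example, and the mapping dict is written out by hand the way the Upload Data tab builds it from its dropdowns.

# Sketch only: exercise the new mapping/validation helpers outside the Gradio UI.
# Assumes app.py is on the import path; the raw column names below are made up.
import pandas as pd
from app import F1TelemetryAnalyzer

analyzer = F1TelemetryAnalyzer()

# Hypothetical telemetry export whose headers differ from the analyzer's schema.
raw = pd.DataFrame({
    'Velocity': [312, 298, 305],          # km/h
    'Throttle_Pct': [98, 87, 92],         # %
    'Brake_Force': [0, 45, 12],           # %
    'Tyre_Temperature': [96, 103, 99],    # °C
    'Coolant_Temp': [108, 111, 110],      # °C
})

# Map raw headers onto the analyzer's required fields (optional fields left unmapped).
column_mapping = {
    'speed': 'Velocity',
    'throttle': 'Throttle_Pct',
    'brake_pressure': 'Brake_Force',
    'tire_temp': 'Tyre_Temperature',
    'engine_temp': 'Coolant_Temp',
    'lap_time': None,
    'lap_number': None,
    'tire_degradation': None,
    'fuel_remaining': None,
}

ok, message = analyzer.validate_mapped_data(raw, column_mapping)
print(message)
if ok:
    # Missing optional columns (lap time, lap number, tire wear, fuel) are filled
    # with the estimates implemented in process_uploaded_data.
    processed = analyzer.process_uploaded_data(raw, column_mapping)
    print(processed.columns.tolist())
    print(processed.head())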
 
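
The About text says anomalies are flagged with an Isolation Forest over the five required channels, and the report code counts rows where the anomaly column equals -1. The body of detect_anomalies is not part of this diff, so the following is only a minimal sketch of that standard scikit-learn pattern; the contamination rate, scaling step, and synthetic values are assumptions for illustration, not the committed implementation.

# Sketch only: typical Isolation Forest anomaly flagging over the five telemetry features.
import numpy as np
import pandas as pd
from sklearn.ensemble import IsolationForest
from sklearn.preprocessing import StandardScaler

def detect_anomalies_sketch(data: pd.DataFrame) -> pd.DataFrame:
    features = ['speed', 'throttle', 'brake_pressure', 'tire_temp', 'engine_temp']
    X = StandardScaler().fit_transform(data[features])
    # fit_predict labels inliers as 1 and anomalies as -1, matching the report
    # logic above that counts rows where data['anomaly'] == -1.
    model = IsolationForest(contamination=0.05, random_state=42)
    data = data.copy()
    data['anomaly'] = model.fit_predict(X)
    return data

# Example with synthetic rows (values invented for illustration):
sample = pd.DataFrame({
    'speed': np.random.uniform(80, 340, 200),
    'throttle': np.random.uniform(0, 100, 200),
    'brake_pressure': np.random.uniform(0, 100, 200),
    'tire_temp': np.random.uniform(80, 120, 200),
    'engine_temp': np.random.uniform(100, 120, 200),
})
flagged = detect_anomalies_sketch(sample)
print((flagged['anomaly'] == -1).sum(), "rows flagged as anomalous")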