import gradio as gr
import numpy as np
import pandas as pd
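import matplotlib
matplotlib.use("Agg")  # optional: non-interactive backend so figures render reliably in headless/server environments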
import matplotlib.pyplot as plt
from sklearn.ensemble import IsolationForest
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, r2_score
import warnings
warnings.filterwarnings('ignore')

class F1TelemetryAnalyzer:
    def __init__(self):
        self.scaler = StandardScaler()
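        # contamination=0.1 tells IsolationForest to expect roughly 10% of points to be outliers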
        self.anomaly_detector = IsolationForest(contamination=0.1, random_state=42)
        self.tire_model = LinearRegression()
        self.fuel_model = LinearRegression()
        self.is_trained = False
        
    def generate_sample_data(self, num_samples=1000):
        """Generate realistic F1 telemetry data"""
        np.random.seed(42)
        
        # Base parameters
        lap_time = np.random.normal(90, 5, num_samples)  # seconds
        speed = np.random.normal(200, 30, num_samples)  # km/h
        throttle = np.random.uniform(0, 100, num_samples)  # %
        brake_pressure = np.random.uniform(0, 100, num_samples)  # %
        tire_temp = np.random.normal(80, 15, num_samples)  # °C
        engine_temp = np.random.normal(95, 10, num_samples)  # °C
        
        # Introduce some realistic correlations
        speed = np.clip(speed + throttle * 0.5 - brake_pressure * 0.3, 50, 300)
        tire_temp = np.clip(tire_temp + speed * 0.1 + throttle * 0.2, 40, 120)
        engine_temp = np.clip(engine_temp + throttle * 0.15 + speed * 0.05, 70, 130)
        
        # Lap number for degradation modeling
        lap_number = np.random.randint(1, 60, num_samples)
        
        # Tire degradation (performance decreases over laps)
        tire_degradation = 100 - (lap_number * 0.8 + np.random.normal(0, 2, num_samples))
        tire_degradation = np.clip(tire_degradation, 60, 100)
        
        # Fuel remaining (decreases as laps accumulate)
        fuel_remaining = 100 - (lap_number * 1.5 + np.random.normal(0, 3, num_samples))
        fuel_remaining = np.clip(fuel_remaining, 0, 100)
        
        # Add some anomalies
        anomaly_indices = np.random.choice(num_samples, size=int(num_samples * 0.05), replace=False)
        speed[anomaly_indices] = np.random.uniform(20, 50, len(anomaly_indices))  # Very slow speeds
        tire_temp[anomaly_indices] = np.random.uniform(130, 150, len(anomaly_indices))  # Overheating
        
        return pd.DataFrame({
            'lap_time': lap_time,
            'speed': speed,
            'throttle': throttle,
            'brake_pressure': brake_pressure,
            'tire_temp': tire_temp,
            'engine_temp': engine_temp,
            'lap_number': lap_number,
            'tire_degradation': tire_degradation,
            'fuel_remaining': fuel_remaining
        })
    
    def detect_anomalies(self, data):
        """Detect anomalies in telemetry data"""
        features = ['speed', 'throttle', 'brake_pressure', 'tire_temp', 'engine_temp']
        X = data[features]
        
        # Fit and predict anomalies
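        # (fit_predict labels inliers as 1 and anomalies as -1)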
        anomalies = self.anomaly_detector.fit_predict(X)
        data['anomaly'] = anomalies
        
        return data
    
    def train_predictive_models(self, data):
        """Train tire degradation and fuel consumption models"""
        # Prepare features for prediction
        features = ['lap_number', 'speed', 'throttle', 'tire_temp', 'engine_temp']
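        # predict_performance() must supply these features in the same order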
        X = data[features]
        
        # Train tire degradation model
        y_tire = data['tire_degradation']
        self.tire_model.fit(X, y_tire)
        
        # Train fuel consumption model
        y_fuel = data['fuel_remaining']
        self.fuel_model.fit(X, y_fuel)
        
        self.is_trained = True
        
        # Calculate model performance
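        # (in-sample R² on the training data; no hold-out split in this demo)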
        tire_pred = self.tire_model.predict(X)
        fuel_pred = self.fuel_model.predict(X)
        
        tire_r2 = r2_score(y_tire, tire_pred)
        fuel_r2 = r2_score(y_fuel, fuel_pred)
        
        return tire_r2, fuel_r2
    
    def predict_performance(self, lap_number, speed, throttle, tire_temp, engine_temp):
        """Predict tire degradation and fuel consumption"""
        if not self.is_trained:
            return "Model not trained yet!", ""
        
        features = np.array([[lap_number, speed, throttle, tire_temp, engine_temp]])
        
        tire_pred = self.tire_model.predict(features)[0]
        fuel_pred = self.fuel_model.predict(features)[0]
        
        return f"Predicted Tire Performance: {tire_pred:.1f}%", f"Predicted Fuel Remaining: {fuel_pred:.1f}%"
    
    def create_visualizations(self, data):
        """Create telemetry visualizations"""
        fig, axes = plt.subplots(2, 2, figsize=(15, 12))
        
        # Speed vs Lap Time
        normal_data = data[data['anomaly'] == 1]
        anomaly_data = data[data['anomaly'] == -1]
        
        axes[0, 0].scatter(normal_data['speed'], normal_data['lap_time'], 
                          alpha=0.6, label='Normal', color='blue')
        axes[0, 0].scatter(anomaly_data['speed'], anomaly_data['lap_time'], 
                          alpha=0.8, label='Anomaly', color='red')
        axes[0, 0].set_xlabel('Speed (km/h)')
        axes[0, 0].set_ylabel('Lap Time (s)')
        axes[0, 0].set_title('Speed vs Lap Time (Anomaly Detection)')
        axes[0, 0].legend()
        axes[0, 0].grid(True, alpha=0.3)
        
        # Tire Temperature Distribution
        axes[0, 1].hist(normal_data['tire_temp'], bins=30, alpha=0.7, label='Normal', color='blue')
        axes[0, 1].hist(anomaly_data['tire_temp'], bins=30, alpha=0.7, label='Anomaly', color='red')
        axes[0, 1].set_xlabel('Tire Temperature (°C)')
        axes[0, 1].set_ylabel('Frequency')
        axes[0, 1].set_title('Tire Temperature Distribution')
        axes[0, 1].legend()
        axes[0, 1].grid(True, alpha=0.3)
        
        # Tire Degradation over Laps
        axes[1, 0].scatter(data['lap_number'], data['tire_degradation'], alpha=0.6, color='green')
        axes[1, 0].set_xlabel('Lap Number')
        axes[1, 0].set_ylabel('Tire Performance (%)')
        axes[1, 0].set_title('Tire Degradation Over Race')
        axes[1, 0].grid(True, alpha=0.3)
        
        # Fuel Consumption
        axes[1, 1].scatter(data['lap_number'], data['fuel_remaining'], alpha=0.6, color='orange')
        axes[1, 1].set_xlabel('Lap Number')
        axes[1, 1].set_ylabel('Fuel Remaining (%)')
        axes[1, 1].set_title('Fuel Consumption Over Race')
        axes[1, 1].grid(True, alpha=0.3)
        
        plt.tight_layout()
        return fig

# Initialize the analyzer
analyzer = F1TelemetryAnalyzer()
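# Note: this single module-level instance is shared by all Gradio sessions (global state).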

def analyze_telemetry():
    """Main function to run telemetry analysis"""
    # Generate sample data
    data = analyzer.generate_sample_data(1000)
    
    # Detect anomalies
    data = analyzer.detect_anomalies(data)
    
    # Train predictive models
    tire_r2, fuel_r2 = analyzer.train_predictive_models(data)
    
    # Create visualizations
    fig = analyzer.create_visualizations(data)
    
    # Generate summary report
    total_samples = len(data)
    anomalies_detected = len(data[data['anomaly'] == -1])
    anomaly_percentage = (anomalies_detected / total_samples) * 100
    
    report = f"""
    ## F1 Telemetry Analysis Report
    
    **Data Summary:**
    - Total samples analyzed: {total_samples}
    - Anomalies detected: {anomalies_detected} ({anomaly_percentage:.1f}%)
    
    **Model Performance:**
    - Tire Degradation Model R²: {tire_r2:.3f}
    - Fuel Consumption Model R²: {fuel_r2:.3f}
    
    **Key Insights:**
    - Average lap time: {data['lap_time'].mean():.1f} seconds
    - Average speed: {data['speed'].mean():.1f} km/h
    - Maximum tire temperature: {data['tire_temp'].max():.1f}°C
    - Minimum tire performance: {data['tire_degradation'].min():.1f}%
    
    **Anomaly Analysis:**
    - Anomalies primarily detected in low-speed conditions and at high tire temperatures
    - Recommended action: Investigate cooling systems and potential mechanical issues
    """
    
    return fig, report

def predict_telemetry(lap_number, speed, throttle, tire_temp, engine_temp):
    """Predict tire and fuel performance"""
    tire_pred, fuel_pred = analyzer.predict_performance(lap_number, speed, throttle, tire_temp, engine_temp)
    return tire_pred, fuel_pred

# Create Gradio interface
with gr.Blocks(title="F1 Telemetry Data Analyzer", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🏎️ F1 Telemetry Data Analyzer")
    gr.Markdown("Advanced AI-powered analysis of Formula 1 telemetry data with anomaly detection and predictive modeling.")
    
    with gr.Tab("📊 Data Analysis"):
        gr.Markdown("### Generate and analyze telemetry data")
        analyze_btn = gr.Button("🔍 Analyze Telemetry Data", variant="primary")
        
        with gr.Row():
            with gr.Column(scale=2):
                plot_output = gr.Plot(label="Telemetry Visualizations")
            with gr.Column(scale=1):
                report_output = gr.Markdown(label="Analysis Report")
        
        analyze_btn.click(
            analyze_telemetry,
            outputs=[plot_output, report_output]
        )
    
    with gr.Tab("🔮 Performance Prediction"):
        gr.Markdown("### Predict tire performance and fuel consumption")
        gr.Markdown("*Note: Run the analysis first to train the models*")
        
        with gr.Row():
            with gr.Column():
                lap_input = gr.Slider(1, 60, value=10, label="Lap Number")
                speed_input = gr.Slider(50, 300, value=200, label="Speed (km/h)")
                throttle_input = gr.Slider(0, 100, value=75, label="Throttle (%)")
                tire_temp_input = gr.Slider(40, 120, value=80, label="Tire Temperature (°C)")
                engine_temp_input = gr.Slider(70, 130, value=95, label="Engine Temperature (°C)")
                
                predict_btn = gr.Button("🎯 Predict Performance", variant="secondary")
            
            with gr.Column():
                tire_pred_output = gr.Textbox(label="Tire Performance Prediction")
                fuel_pred_output = gr.Textbox(label="Fuel Consumption Prediction")
        
        predict_btn.click(
            predict_telemetry,
            inputs=[lap_input, speed_input, throttle_input, tire_temp_input, engine_temp_input],
            outputs=[tire_pred_output, fuel_pred_output]
        )
    
    with gr.Tab("ℹ️ About"):
        gr.Markdown("""
        ## About This Tool
        
        This F1 Telemetry Data Analyzer demonstrates advanced AI techniques used in Formula 1 racing:
        
        **🔍 Anomaly Detection:**
        - Uses the Isolation Forest algorithm to detect unusual patterns in telemetry data
        - Identifies potential mechanical issues or performance anomalies
        - Helps engineers spot problems before they become critical
        
        **📈 Predictive Modeling:**
        - Machine learning models predict tire degradation and fuel consumption
        - Based on real-time telemetry inputs (speed, throttle, temperatures)
        - Enables strategic decision-making during races
        
        **🎯 Key Features:**
        - Real-time telemetry processing simulation
        - Advanced visualization of racing data
        - Performance prediction for race strategy
        - Anomaly detection for preventive maintenance
        
        **🏗️ Technical Stack:**
        - Python with scikit-learn for ML models
        - Isolation Forest for anomaly detection
        - Linear regression for performance prediction
        - Matplotlib for advanced visualizations
        - Gradio for interactive web interface
        """)

if __name__ == "__main__":
    demo.launch()