"""Flask app that generates synthetic Fang/French translation pairs with the Gemini API."""

from flask import Flask, render_template, request, jsonify, send_file

import threading
import time
import os
import json
import uuid
from datetime import datetime
from typing import List

from google import genai
from pydantic import BaseModel, Field

app = Flask(__name__)

# Configuration. The API key is read from the environment instead of being
# hardcoded in the source (never commit real credentials).
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY")
MODEL_ID = "gemini-2.0-flash"
UPLOAD_FOLDER = 'uploads'
RESULTS_FOLDER = 'results'
TOTAL_REQUESTS = 470  # generation requests per task

os.makedirs(UPLOAD_FOLDER, exist_ok=True)
os.makedirs(RESULTS_FOLDER, exist_ok=True)
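
# Example launch, assuming this file is saved as app.py (the key value is a
# placeholder, not a real credential):
#   export GOOGLE_API_KEY="your-key-here"
#   python app.py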


# NOTE: the Field descriptions and titles are deliberately kept in French: the
# schema is sent to the model to steer a French/Fang generation task.
class TranslationPair(BaseModel):
    fang: str = Field(description="Phrase en langue fang")
    francais: str = Field(description="Traduction française de la phrase")


class SyntheticDataResponse(BaseModel):
    # Pydantic v2 (required by google-genai) removed the `fields` Config key,
    # so the per-field titles are set directly on Field().
    request_number: int = Field(title="Numéro de requête", description="Numéro de la requête")
    generated_pairs: List[TranslationPair] = Field(title="Paires générées", description="Liste des paires de traduction générées")
    timestamp: str = Field(title="Horodatage", description="Horodatage de la génération")
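

# For reference, one parsed SyntheticDataResponse serializes to JSON shaped like
# this (placeholder values, not real data):
#
#   {
#     "request_number": 1,
#     "generated_pairs": [{"fang": "...", "francais": "..."}],
#     "timestamp": "2025-01-01T12:00:00"
#   }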


class TaskManager:
    """In-memory, thread-safe registry of background generation tasks."""

    def __init__(self):
        self.tasks = {}
        # Flask request threads and the worker thread both touch self.tasks,
        # so every access goes through this lock.
        self._lock = threading.Lock()

    def create_task(self, task_id):
        with self._lock:
            self.tasks[task_id] = {
                'status': 'running',
                'progress': 0,
                'total': TOTAL_REQUESTS,
                'results_file': f'results_{task_id}.json',
                'start_time': datetime.now(),
                'errors': [],
                'last_update': datetime.now(),
                'all_data': []
            }

    def update_progress(self, task_id, progress, data=None):
        with self._lock:
            if task_id in self.tasks:
                self.tasks[task_id]['progress'] = progress
                self.tasks[task_id]['last_update'] = datetime.now()
                if data:
                    self.tasks[task_id]['all_data'].append(data)

    def add_error(self, task_id, error):
        with self._lock:
            if task_id in self.tasks:
                self.tasks[task_id]['errors'].append(error)

    def complete_task(self, task_id):
        with self._lock:
            if task_id in self.tasks:
                self.tasks[task_id]['status'] = 'completed'
                self.tasks[task_id]['last_update'] = datetime.now()

    def fail_task(self, task_id):
        with self._lock:
            if task_id in self.tasks:
                self.tasks[task_id]['status'] = 'failed'
                self.tasks[task_id]['last_update'] = datetime.now()

    def get_task(self, task_id):
        with self._lock:
            return self.tasks.get(task_id)


task_manager = TaskManager()


def generate_synthetic_data(file_path, task_id):
    """Run all TOTAL_REQUESTS generation requests in the background, writing structured JSON output."""
    try:
        client = genai.Client(api_key=GOOGLE_API_KEY)

        # Upload the source corpus once; passing the path lets the SDK infer the MIME type.
        uploaded_file = client.files.upload(file=file_path)

        # The prompt is deliberately left in French: it drives a Fang/French
        # generation task and is sent to the model verbatim.
        prompt = """À partir du contenu de ce fichier, génère exactement 400 nouvelles paires de phrases :
- Une phrase en langue fang
- Sa traduction en français

Varie les structures grammaticales, les contextes et le vocabulaire pour créer des données d'entraînement diversifiées.
Assure-toi que chaque paire soit cohérente et naturelle dans les deux langues."""

        results_file = os.path.join(RESULTS_FOLDER, f'results_{task_id}.json')

        all_results = {
            "metadata": {
                "task_id": task_id,
                "start_time": datetime.now().isoformat(),
                "total_requests": TOTAL_REQUESTS,
                "model_used": MODEL_ID,
                "schema_version": "1.0"
            },
            "requests": [],
            "summary": {
                "total_pairs": 0,
                "completed_requests": 0,
                "failed_requests": 0,
                "errors": []
            }
        }

        for i in range(TOTAL_REQUESTS):
            try:
                print(f"Processing request {i+1}/{TOTAL_REQUESTS}...")

                response = client.models.generate_content(
                    model=MODEL_ID,
                    contents=[uploaded_file, prompt],
                    config={
                        'response_mime_type': 'application/json',
                        'response_schema': SyntheticDataResponse,
                    }
                )

                try:
                    # Prefer the SDK's parsed Pydantic object when it is available.
                    if hasattr(response, 'parsed') and response.parsed:
                        structured_data = response.parsed
                        request_data = {
                            "request_number": i + 1,
                            "timestamp": datetime.now().isoformat(),
                            "response": {
                                "request_number": structured_data.request_number,
                                "generated_pairs": [
                                    {"fang": pair.fang, "francais": pair.francais}
                                    for pair in structured_data.generated_pairs
                                ],
                                "timestamp": structured_data.timestamp
                            },
                            "pairs_count": len(structured_data.generated_pairs),
                            "status": "success"
                        }
                    else:
                        # Fall back to parsing the raw JSON text.
                        response_json = json.loads(response.text)
                        request_data = {
                            "request_number": i + 1,
                            "timestamp": datetime.now().isoformat(),
                            "response": response_json,
                            "pairs_count": len(response_json.get("generated_pairs", [])),
                            "status": "success"
                        }

                    all_results["requests"].append(request_data)
                    all_results["summary"]["total_pairs"] += request_data["pairs_count"]
                    all_results["summary"]["completed_requests"] += 1

                except (json.JSONDecodeError, AttributeError) as parse_error:
                    error_data = {
                        "request_number": i + 1,
                        "timestamp": datetime.now().isoformat(),
                        "raw_response": response.text,
                        "pairs_count": 0,
                        "status": "parse_error",
                        "error": str(parse_error)
                    }
                    all_results["requests"].append(error_data)
                    all_results["summary"]["failed_requests"] += 1

                    error_msg = f"Parse error on request {i+1}: {str(parse_error)}"
                    task_manager.add_error(task_id, error_msg)
                    all_results["summary"]["errors"].append({
                        "request_number": i + 1,
                        "error": error_msg,
                        "timestamp": datetime.now().isoformat()
                    })

                # Persist after every request so partial downloads always see fresh data.
                with open(results_file, 'w', encoding='utf-8') as f:
                    json.dump(all_results, f, ensure_ascii=False, indent=2)

                task_manager.update_progress(task_id, i + 1)

                # Report from the record just appended: on a parse error, request_data
                # is never bound, so referencing it here would raise NameError.
                last_record = all_results["requests"][-1]
                print(f"Request {i+1}/{TOTAL_REQUESTS} finished with {last_record.get('pairs_count', 0)} pairs")

                # Pause between requests to stay under rate limits.
                time.sleep(2)

            except Exception as e:
                error_msg = f"Error on request {i+1}: {str(e)}"
                task_manager.add_error(task_id, error_msg)

                error_data = {
                    "request_number": i + 1,
                    "timestamp": datetime.now().isoformat(),
                    "pairs_count": 0,
                    "status": "request_error",
                    "error": error_msg
                }
                all_results["requests"].append(error_data)
                all_results["summary"]["failed_requests"] += 1
                all_results["summary"]["errors"].append({
                    "request_number": i + 1,
                    "error": error_msg,
                    "timestamp": datetime.now().isoformat()
                })

                with open(results_file, 'w', encoding='utf-8') as f:
                    json.dump(all_results, f, ensure_ascii=False, indent=2)

                print(error_msg)

                # Back off a little longer after a failed request.
                time.sleep(5)
all_results["metadata"]["end_time"] = datetime.now().isoformat() |
|
start_time = datetime.fromisoformat(all_results["metadata"]["start_time"]) |
|
duration = (datetime.now() - start_time).total_seconds() |
|
all_results["metadata"]["duration_seconds"] = duration |
|
all_results["metadata"]["duration_minutes"] = round(duration / 60, 2) |
|
|
|
|
|
all_results["summary"]["success_rate"] = round( |
|
(all_results["summary"]["completed_requests"] / 470) * 100, 2 |
|
) |
|
|
|
with open(results_file, 'w', encoding='utf-8') as f: |
|
json.dump(all_results, f, ensure_ascii=False, indent=2) |
|
|
|
task_manager.complete_task(task_id) |
|
print(f"Tâche {task_id} terminée avec succès") |
|
print(f"Total de paires générées: {all_results['summary']['total_pairs']}") |
|
print(f"Taux de succès: {all_results['summary']['success_rate']}%") |
|
|
|
except Exception as e: |
|
error_msg = f"Erreur générale: {str(e)}" |
|
task_manager.add_error(task_id, error_msg) |
|
print(error_msg) |
|
|
|
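

# For downstream training use, the pairs in a results file can be flattened into
# a plain list. A minimal sketch (the path and task id are placeholders):
#
#   with open('results/results_<task_id>.json', encoding='utf-8') as f:
#       results = json.load(f)
#   pairs = [pair
#            for req in results['requests'] if req['status'] == 'success'
#            for pair in req['response']['generated_pairs']]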


@app.route('/')
def index():
    return render_template('index.html')


@app.route('/upload', methods=['POST'])
def upload_file():
    if 'file' not in request.files:
        return jsonify({'error': 'No file provided'}), 400

    file = request.files['file']
    if file.filename == '':
        return jsonify({'error': 'No file selected'}), 400

    # A single UUID ties together the upload, the task record, and the results file.
    task_id = str(uuid.uuid4())

    filename = f"input_{task_id}.txt"
    file_path = os.path.join(UPLOAD_FOLDER, filename)
    file.save(file_path)

    task_manager.create_task(task_id)

    # Run generation in a daemon thread so this request returns immediately.
    thread = threading.Thread(
        target=generate_synthetic_data,
        args=(file_path, task_id)
    )
    thread.daemon = True
    thread.start()

    return jsonify({
        'task_id': task_id,
        'message': 'Processing started in the background',
        # The 2 s pause alone totals roughly 16 minutes over 470 requests,
        # and model latency adds more.
        'estimated_duration': '20+ minutes'
    })
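
# Upload example (the filename is a placeholder; the form field must be named "file"):
#   curl -F "file=@corpus_fang.txt" http://localhost:5000/upload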


@app.route('/status/<task_id>')
def get_status(task_id):
    task = task_manager.get_task(task_id)
    if not task:
        return jsonify({'error': 'Task not found'}), 404

    return jsonify({
        'status': task['status'],
        'progress': task['progress'],
        'total': task['total'],
        'percentage': round((task['progress'] / task['total']) * 100, 2),
        'errors_count': len(task['errors']),
        'start_time': task['start_time'].strftime('%Y-%m-%d %H:%M:%S'),
        'last_update': task['last_update'].strftime('%Y-%m-%d %H:%M:%S'),
        # Rough remaining time in seconds: the ~2 s pause per outstanding request.
        'estimated_remaining': max(0, (task['total'] - task['progress']) * 2) if task['status'] == 'running' else 0
    })
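
# Polling example (the task id is a placeholder returned by /upload):
#   curl http://localhost:5000/status/<task_id>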


@app.route('/download/<task_id>')
def download_results(task_id):
    task = task_manager.get_task(task_id)
    if not task:
        return jsonify({'error': 'Task not found'}), 404

    results_file = os.path.join(RESULTS_FOLDER, f'results_{task_id}.json')

    if not os.path.exists(results_file):
        return jsonify({'error': 'Results file not found'}), 404

    is_partial = request.args.get('partial', 'false').lower() == 'true'

    if is_partial and task['status'] == 'running':
        # Snapshot the current results into a temporary file and tag it as partial.
        temp_file = os.path.join(RESULTS_FOLDER, f'temp_results_{task_id}.json')

        try:
            with open(results_file, 'r', encoding='utf-8') as f:
                current_data = json.load(f)

            current_data["partial_download"] = {
                "downloaded_at": datetime.now().isoformat(),
                "is_partial": True,
                "progress": f"{task['progress']}/{task['total']}",
                "percentage": round((task['progress'] / task['total']) * 100, 2)
            }

            with open(temp_file, 'w', encoding='utf-8') as f:
                json.dump(current_data, f, ensure_ascii=False, indent=2)

            return send_file(
                temp_file,
                as_attachment=True,
                download_name=f'donnees_synthetiques_partiel_{task_id}.json'
            )
        except Exception as e:
            return jsonify({'error': f'Error building the partial file: {str(e)}'}), 500

    download_name = f'donnees_synthetiques_{"complet" if task["status"] == "completed" else "actuel"}_{task_id}.json'

    return send_file(
        results_file,
        as_attachment=True,
        download_name=download_name
    )
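
# A running task's partial results can be fetched without waiting for completion:
#   curl -OJ "http://localhost:5000/download/<task_id>?partial=true"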


@app.route('/tasks')
def list_tasks():
    """List all known tasks, most recent first."""
    task_list = []
    # Iterate over a snapshot so the worker thread can keep mutating the registry.
    for task_id, task_info in list(task_manager.tasks.items()):
        task_list.append({
            'id': task_id,
            'status': task_info['status'],
            'progress': task_info['progress'],
            'total': task_info['total'],
            'percentage': round((task_info['progress'] / task_info['total']) * 100, 2),
            'start_time': task_info['start_time'].strftime('%Y-%m-%d %H:%M:%S'),
            'last_update': task_info['last_update'].strftime('%Y-%m-%d %H:%M:%S'),
            'errors_count': len(task_info['errors'])
        })

    task_list.sort(key=lambda x: x['start_time'], reverse=True)

    return jsonify(task_list)


@app.route('/cleanup')
def cleanup_temp_files():
    """Delete the temporary files produced by partial downloads."""
    try:
        temp_files_deleted = 0
        for filename in os.listdir(RESULTS_FOLDER):
            if filename.startswith('temp_results_') and filename.endswith('.json'):
                file_path = os.path.join(RESULTS_FOLDER, filename)
                os.remove(file_path)
                temp_files_deleted += 1

        return jsonify({
            'message': f'{temp_files_deleted} temporary files deleted'
        })
    except Exception as e:
        return jsonify({'error': f'Cleanup error: {str(e)}'}), 500


@app.route('/preview/<task_id>')
def preview_results(task_id):
    """Preview of the JSON results with statistics."""
    task = task_manager.get_task(task_id)
    if not task:
        return jsonify({'error': 'Task not found'}), 404

    results_file = os.path.join(RESULTS_FOLDER, f'results_{task_id}.json')

    if not os.path.exists(results_file):
        return jsonify({'error': 'Results file not found'}), 404

    try:
        with open(results_file, 'r', encoding='utf-8') as f:
            data = json.load(f)

        preview = {
            "metadata": data.get("metadata", {}),
            "summary": data.get("summary", {}),
            "sample_requests": data.get("requests", [])[:3],
            "total_requests": len(data.get("requests", [])),
            "structure_info": {
                "schema_used": "SyntheticDataResponse",
                "fields": ["request_number", "generated_pairs", "timestamp"],
                "pair_structure": ["fang", "francais"]
            }
        }

        return jsonify(preview)

    except Exception as e:
        return jsonify({'error': f'Error reading the file: {str(e)}'}), 500


@app.route('/schema')
def get_schema_info():
    """Describe the structured-output schema used for generation."""
    # The field descriptions mirror the (intentionally French) Pydantic schema.
    schema_info = {
        "schema_version": "1.0",
        "models": {
            "TranslationPair": {
                "fields": {
                    "fang": "string - Phrase en langue fang",
                    "francais": "string - Traduction française"
                }
            },
            "SyntheticDataResponse": {
                "fields": {
                    "request_number": "integer - Numéro de la requête",
                    "generated_pairs": "array[TranslationPair] - Liste des paires générées",
                    "timestamp": "string - Horodatage ISO 8601"
                }
            }
        },
        "api_configuration": {
            "model": MODEL_ID,
            "response_mime_type": "application/json",
            "structured_output": True
        }
    }

    return jsonify(schema_info)


if __name__ == '__main__':
    if not GOOGLE_API_KEY:
        raise SystemExit("GOOGLE_API_KEY is not set; export it before starting the server.")

    print("🚀 Starting the server with the Gemini API configuration...")
    print("📂 Folders ready:", UPLOAD_FOLDER, RESULTS_FOLDER)
    print("🌐 Application available at: http://localhost:5000")
    print("📊 Structured JSON output enabled via Pydantic schemas")
    print("🔧 Model in use:", MODEL_ID)
    print("📋 Schema endpoint available at: /schema")
    # Development server only; use a production WSGI server for deployment.
    app.run(debug=True, threaded=True)