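"""Flask app that accepts an uploaded exercise image and streams Gemini's
step-by-step LaTeX solution back to the client as Server-Sent Events."""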
from flask import Flask, render_template, request, jsonify, Response, stream_with_context
import google.generativeai as genai
import os
from PIL import Image
import tempfile
import io

app = Flask(__name__)

# Gemini configuration
token = os.environ.get("TOKEN")
genai.configure(api_key=token)

safety_settings = [
    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
]

mm = """ resous cet exercice. tu répondras en détaillant au maximum ton procédé de calcul. réponse attendue uniquement en Latex"""
model = genai.GenerativeModel(
model_name="gemini-exp-1206",
safety_settings=safety_settings
)
@app.route('/')
def home():
    return render_template('index.html')


@app.route('/generate', methods=['POST'])
def generate():
    if 'image' not in request.files:
        return jsonify({'error': 'No image uploaded'}), 400

    image_file = request.files['image']

    # Generator that yields the model's answer as Server-Sent Events chunks
    def generate_stream():
        with tempfile.NamedTemporaryFile(delete=False, suffix='.png') as temp_file:
            image_file.save(temp_file.name)
            try:
                image = Image.open(temp_file.name)

                # Convert the image to bytes for the API request
                img_byte_arr = io.BytesIO()
                image.save(img_byte_arr, format='PNG')
                img_byte_arr = img_byte_arr.getvalue()

                # Generate the content in streaming mode
                response = model.generate_content(
                    [mm, {"mime_type": "image/png", "data": img_byte_arr}],
                    stream=True
                )
                for chunk in response:
                    yield f"data: {chunk.text}\n\n"
            except Exception as e:
                yield f"data: Error: {str(e)}\n\n"
            finally:
                # Clean up the temporary file
                os.unlink(temp_file.name)

    # stream_with_context keeps the request context (and the uploaded file)
    # alive while the generator is consumed during streaming
    return Response(stream_with_context(generate_stream()), mimetype='text/event-stream')

if __name__ == '__main__':
app.run(debug=True)
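
# Example client (sketch): one way to consume the /generate SSE stream from Python.
# It assumes the app runs locally on port 5000 and that a file named "exercise.png"
# exists; both names are assumptions, not part of this app. It also uses the
# third-party `requests` package, which this file does not depend on.
#
#   import requests
#
#   with open("exercise.png", "rb") as f:
#       resp = requests.post(
#           "http://localhost:5000/generate",
#           files={"image": f},
#           stream=True,
#       )
#   for line in resp.iter_lines(decode_unicode=True):
#       if line and line.startswith("data: "):
#           print(line[len("data: "):])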