Docfile committed on
Commit b53d6b9 · verified · 1 Parent(s): dd0179d

Update app.py

Files changed (1): app.py (+27 -21)
app.py CHANGED
@@ -1,8 +1,9 @@
-from flask import Flask, render_template, request, jsonify
+from flask import Flask, render_template, request, jsonify, Response
 import google.generativeai as genai
 import os
 from PIL import Image
 import tempfile
+import io
 
 app = Flask(__name__)
 
@@ -10,7 +11,6 @@ app = Flask(__name__)
 token = os.environ.get("TOKEN")
 genai.configure(api_key=token)
 
-
 safety_settings = [
     {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
     {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
@@ -21,7 +21,7 @@ safety_settings = [
 mm = """ resous cet exercice. tu répondras en détaillant au maximum ton procédé de calcul. réponse attendue uniquement en Latex"""
 
 model = genai.GenerativeModel(
-    model_name="gemini-exp-1206",
+    model_name="gemini-pro-vision",
     safety_settings=safety_settings
 )
 
@@ -33,25 +33,31 @@ def home():
 def generate():
     if 'image' not in request.files:
         return jsonify({'error': 'No image uploaded'}), 400
-
+
     image_file = request.files['image']
-
-    # Temporarily save the image
-    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
-        image_file.save(temp_file.name)
-        try:
-            image = Image.open(temp_file.name)
-
-            # Generate the content
-            response = model.generate_content([mm, image])
-            result = response.text
-
-            return jsonify({"result": result})
-        except Exception as e:
-            return jsonify({'error': str(e)}), 500
-        finally:
-            # Clean up the temporary file
-            os.unlink(temp_file.name)
+
+    # Function to generate response chunks
+    def generate_stream():
+        with tempfile.NamedTemporaryFile(delete=False, suffix='.png') as temp_file:
+            image_file.save(temp_file.name)
+            try:
+                image = Image.open(temp_file.name)
+                # Convert the image to bytes for streaming
+                img_byte_arr = io.BytesIO()
+                image.save(img_byte_arr, format='PNG')
+                img_byte_arr = img_byte_arr.getvalue()
+
+                # Generate the content in streaming mode
+                response = model.generate_content([mm, {"mime_type": "image/png", "data": img_byte_arr}], stream=True)
+                for chunk in response:
+                    yield f"data: {chunk.text}\n\n"
+            except Exception as e:
+                yield f"data: Error: {str(e)}\n\n"
+            finally:
+                # Clean up the temporary file
+                os.unlink(temp_file.name)
+
+    return Response(generate_stream(), mimetype='text/event-stream')
 
 if __name__ == '__main__':
     app.run(debug=True)
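
With this change the endpoint no longer returns a single JSON payload; it streams the model's answer as server-sent events, one "data:" line per chunk, followed by a blank line. Below is a minimal client sketch, assuming the view above is registered at a POST /generate route, the app runs locally on port 5000 (neither the route decorator nor the host appears in this diff), and exercise.png is a local image file:

# Minimal client sketch for the new streaming endpoint.
# Assumptions (not shown in the diff): the view is routed at POST /generate,
# the app listens on http://127.0.0.1:5000, and exercise.png exists locally.
import requests

url = "http://127.0.0.1:5000/generate"  # assumed host and route

with open("exercise.png", "rb") as f:
    # The view reads the upload from request.files['image'],
    # so the multipart field must be named "image".
    resp = requests.post(url, files={"image": f}, stream=True)

resp.raise_for_status()

# Each event arrives as a "data: ..." line; blank lines separate events.
for line in resp.iter_lines(decode_unicode=True):
    if line.startswith("data: "):
        print(line[len("data: "):])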