# Provenance: Hugging Face upload by Cloud110702 ("Upload 2 files",
# commit 022342b, 2.96 kB). The original page chrome (raw/history/blame
# links) was scraped into this file and has been converted to this comment.
from flask import Flask, request, jsonify
import numpy as np
import tensorflow as tf
from tensorflow.lite.python.interpreter import Interpreter
import os
import google.generativeai as genai
app = Flask(__name__)

# Load the TensorFlow Lite model once at startup; requests reuse this
# single interpreter (Flask dev server is single-threaded by default).
interpreter = Interpreter(model_path="model.tflite")
interpreter.allocate_tensors()

# Cache tensor metadata so each request only sets/reads tensors.
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

# Class labels, indexed by the model's output argmax.
# NOTE(review): assumed to match the training label order — confirm.
data_cat = ['disposable cups', 'paper', 'plastic bottle']
img_height, img_width = 224, 224

# Configure the Gemini API.
# SECURITY: a previous revision embedded a real API key as the
# os.getenv() fallback default. That key is compromised (committed to a
# public file) and must be revoked; the key now comes exclusively from
# the environment, and startup fails fast if it is missing.
GEMINI_API_KEY = os.getenv('GEMINI_API_KEY')
if not GEMINI_API_KEY:
    raise RuntimeError("GEMINI_API_KEY environment variable is not set")
genai.configure(api_key=GEMINI_API_KEY)

# Text model used to generate repurposing recommendations.
gemini_model = genai.GenerativeModel('gemini-pro')
@app.route('/predict', methods=['POST'])
def predict():
    """Classify an uploaded waste image and return recycling insights.

    Expects a multipart/form-data POST with an ``image`` file field.

    Returns:
        200 JSON ``{"class": str, "confidence": float (percent),
        "insights": str}`` on success;
        400 JSON ``{"error": ...}`` when no image is attached;
        500 JSON ``{"error": ...}`` on any processing failure.
    """
    if 'image' not in request.files:
        return jsonify({"error": "No image uploaded"}), 400
    file = request.files['image']
    try:
        # Decode and resize to the model's fixed input size.
        img = tf.image.decode_image(file.read(), channels=3)
        img = tf.image.resize(img, [img_height, img_width])
        # NOTE(review): no /255 normalization — assumes the TFLite model
        # was exported to take raw 0-255 float32 inputs; confirm.
        img_bat = np.expand_dims(img, 0).astype(np.float32)

        # Run inference.
        interpreter.set_tensor(input_details[0]['index'], img_bat)
        interpreter.invoke()
        output_data = interpreter.get_tensor(output_details[0]['index'])

        # BUG FIX: np.argmax/np.max return numpy scalars, and Flask's
        # jsonify cannot serialize numpy.float32 (raises TypeError,
        # which previously turned every success into a 500). Cast to
        # native Python int/float before building the response.
        predicted_class = data_cat[int(np.argmax(output_data))]
        confidence = float(np.max(output_data)) * 100

        # Generate sustainability insights with Gemini API
        prompt = f"""
You are a sustainability-focused AI. Analyze the {predicted_class} (solid dry waste)
and generate the top three innovative, eco-friendly recommendations for repurposing it.
Each recommendation should:
- Provide a title
- Be practical and easy to implement
- Be environmentally beneficial
- Include a one or two-sentence explanation
Format each recommendation with a clear title followed by the explanation on a new line.
"""
        try:
            response = gemini_model.generate_content(prompt)
            insights = response.text.strip()
        except Exception as e:
            # Best-effort: a Gemini failure must not fail the whole
            # prediction — degrade to an error string in "insights".
            insights = f"Error generating insights: {str(e)}"
            print(f"Gemini API error: {str(e)}")  # For debugging

        return jsonify({
            "class": predicted_class,
            "confidence": confidence,
            "insights": insights
        })
    except Exception as e:
        # Top-level boundary: surface the failure to the client as 500.
        return jsonify({"error": str(e)}), 500
# Run the Flask development server when executed directly.
# NOTE(review): debug=True enables the Werkzeug debugger and reloader —
# dev-only; serve behind gunicorn/uwsgi in production.
if __name__ == "__main__":
    app.run(debug=True)