from flask import Flask, request, jsonify, render_template_string
from flask_cors import CORS
from google import genai
from google.genai import types
import os
import io
import httpx
import uuid
from datetime import datetime, timezone, timedelta
from dotenv import load_dotenv
import json
# Load environment variables from the process env / a local .env file
# (GOOGLE_API_KEY is read below).
load_dotenv()
app = Flask(__name__)
CORS(app)  # allow cross-origin requests from a separately served front-end
# Initialize Gemini client with the API key from the environment.
client = genai.Client(api_key=os.getenv('GOOGLE_API_KEY'))
# In-memory storage for demo (in production, use a database).
# Maps cache_id (uuid4 str) -> {'cache_name', 'document_name', 'created_at'}.
document_caches = {}
# NOTE(review): user_sessions is never read or written in this file —
# confirm it is used elsewhere before removing.
user_sessions = {}
# HTML template for the web interface, served by the '/' route via
# render_template_string.
# NOTE(review): this template appears to contain only the page's text content
# with all HTML markup missing — presumably lost in a copy/paste or export.
# TODO: restore the original markup; the string is left byte-identical here.
HTML_TEMPLATE = """
Smart Document Analysis Platform
📚 Smart Document Analysis Platform
Upload PDF documents once, ask questions forever with Gemini API caching
📤 Upload PDF Document
📄
Drag and drop your PDF file here, or click to select
Or provide a URL:
Processing your PDF... This may take a moment.
💬 Ask Questions
✅ Document Cached Successfully!
Your PDF has been cached using Gemini API. You can now ask multiple questions without re-uploading.
Cache ID:
Tokens Cached:
👋 Hello! I'm ready to analyze your PDF documents. Upload a document to get started!
"""
# --- Routes ---
@app.route('/')
def index():
    """Serve the embedded single-page UI."""
    page = HTML_TEMPLATE
    return render_template_string(page)
# Health check endpoint for load balancers / uptime monitors.
@app.route('/health', methods=['GET'])
def health_check():
    """Liveness probe: report that the application is running."""
    payload = {"status": "healthy"}
    return jsonify(payload), 200
@app.route('/upload', methods=['POST'])
def upload_file():
    """Accept a PDF upload, push it to the Gemini File API, and create an
    explicit content cache so follow-up questions can reuse the document.

    Request: multipart/form-data with a 'file' field.
    Returns JSON: {'success': True, 'cache_id': str, 'token_count': int|str}
    on success, or {'success': False, 'error': str, ...} on failure.
    """
    try:
        if 'file' not in request.files:
            return jsonify({'success': False, 'error': 'No file provided'})
        file = request.files['file']
        if file.filename == '':
            return jsonify({'success': False, 'error': 'No file selected'})

        # Wrap the raw bytes in a file-like object for the upload call.
        file_content = file.read()
        file_io = io.BytesIO(file_content)

        # google-genai SDK: uploads go through client.files.upload with an
        # UploadFileConfig. (genai.Client has no upload_file method — that
        # helper belonged to the legacy google-generativeai SDK.)
        try:
            document = client.files.upload(
                file=file_io,
                config=types.UploadFileConfig(
                    mime_type='application/pdf',
                    display_name=file.filename,
                ),
            )
            print(f"File uploaded successfully: {document.name}")  # Log for debugging
        except Exception as upload_error:
            return jsonify({'success': False, 'error': f'Error uploading file to Gemini API: {str(upload_error)}'})

        # Create an explicit cache holding the document plus a system instruction.
        try:
            system_instruction = "You are an expert document analyzer. Provide detailed, accurate answers based on the uploaded document content. Always be helpful and thorough in your responses."
            # Full resource name form expected by the caching API.
            model = 'models/gemini-2.0-flash-001'
            print(f"Attempting to create cache for file: {document.name}")
            cache = client.caches.create(
                model=model,
                config=types.CreateCachedContentConfig(
                    display_name=f'pdf document cache: {file.filename}',
                    system_instruction=system_instruction,
                    contents=[document],  # File object returned by files.upload
                    ttl="3600s",  # 1 hour TTL
                ),
            )
            print(f"Cache created successfully: {cache.name}")

            # Track the cache locally so /ask can resolve cache_id -> cache_name.
            cache_id = str(uuid.uuid4())
            document_caches[cache_id] = {
                'cache_name': cache.name,
                'document_name': file.filename,
                # Timezone-aware timestamp; bare datetime.now() would be naive.
                'created_at': datetime.now(timezone.utc).isoformat(),
            }

            # Token count from cache metadata, if the SDK populated it.
            token_count = 'Unknown'
            if hasattr(cache, 'usage_metadata') and cache.usage_metadata:
                token_count = getattr(cache.usage_metadata, 'cached_token_count', 'Unknown')

            return jsonify({
                'success': True,
                'cache_id': cache_id,
                'token_count': token_count
            })
        except Exception as cache_error:
            print(f"Cache creation failed: {str(cache_error)}")
            # Best-effort cleanup of the uploaded file regardless of why
            # caching failed (this SDK call is keyword-only: name=...).
            try:
                client.files.delete(name=document.name)
                print(f"Cleaned up uploaded file {document.name} after caching failure.")
            except Exception as cleanup_error:
                print(f"Failed to clean up file {document.name}: {cleanup_error}")
            # Known failure mode: content below the model's minimum cacheable
            # token count. Substring matching is fragile but the API does not
            # expose a structured error code for this case.
            if "Cached content is too small" in str(cache_error) or "minimum" in str(cache_error).lower():
                return jsonify({
                    'success': False,
                    'error': 'PDF content is too small for caching. Please upload a larger document. Minimum token count varies by model, but is typically 1024+.',
                    'suggestion': 'Try uploading a longer document or combine multiple documents.'
                })
            raise cache_error  # Re-raise other errors to the outer handler.
    except Exception as e:
        print(f"An unexpected error occurred during upload: {str(e)}")
        return jsonify({'success': False, 'error': str(e)})
@app.route('/upload-url', methods=['POST'])
def upload_from_url():
    """Download a PDF from a URL, upload it to the Gemini File API, and
    create an explicit content cache for it.

    Request: JSON {'url': str}.
    Returns JSON: {'success': True, 'cache_id': str, 'token_count': int|str}
    on success, or {'success': False, 'error': str, ...} on failure.
    """
    try:
        data = request.get_json()
        url = data.get('url')
        if not url:
            return jsonify({'success': False, 'error': 'No URL provided'})

        # Download the document; distinguish HTTP status failures from
        # transport-level failures for clearer error messages.
        try:
            response = httpx.get(url)
            response.raise_for_status()  # Raise HTTPStatusError on 4xx/5xx
        except httpx.HTTPStatusError as e:
            return jsonify({'success': False, 'error': f'HTTP error downloading file from URL: {e.response.status_code} - {e.response.text}'})
        except httpx.RequestError as e:
            return jsonify({'success': False, 'error': f'Error downloading file from URL: {e}'})
        file_io = io.BytesIO(response.content)

        # google-genai SDK: uploads go through client.files.upload with an
        # UploadFileConfig. (genai.Client has no upload_file method — that
        # helper belonged to the legacy google-generativeai SDK.)
        try:
            document = client.files.upload(
                file=file_io,
                config=types.UploadFileConfig(
                    mime_type='application/pdf',
                    display_name=url,  # use the source URL as the display name
                ),
            )
            print(f"File uploaded successfully: {document.name}")
        except Exception as upload_error:
            return jsonify({'success': False, 'error': f'Error uploading file to Gemini API: {str(upload_error)}'})

        # Create an explicit cache holding the document plus a system instruction.
        try:
            system_instruction = "You are an expert document analyzer. Provide detailed, accurate answers based on the uploaded document content. Always be helpful and thorough in your responses."
            # Full resource name form expected by the caching API.
            model = 'models/gemini-2.0-flash-001'
            print(f"Attempting to create cache for file: {document.name}")
            cache = client.caches.create(
                model=model,
                config=types.CreateCachedContentConfig(
                    display_name=f'pdf document cache: {url}',
                    system_instruction=system_instruction,
                    contents=[document],  # File object returned by files.upload
                    ttl="3600s",  # 1 hour TTL
                ),
            )
            print(f"Cache created successfully: {cache.name}")

            # Track the cache locally so /ask can resolve cache_id -> cache_name.
            cache_id = str(uuid.uuid4())
            document_caches[cache_id] = {
                'cache_name': cache.name,
                'document_name': url,
                # Timezone-aware timestamp; bare datetime.now() would be naive.
                'created_at': datetime.now(timezone.utc).isoformat(),
            }

            # Token count from cache metadata, if the SDK populated it.
            token_count = 'Unknown'
            if hasattr(cache, 'usage_metadata') and cache.usage_metadata:
                token_count = getattr(cache.usage_metadata, 'cached_token_count', 'Unknown')

            return jsonify({
                'success': True,
                'cache_id': cache_id,
                'token_count': token_count
            })
        except Exception as cache_error:
            print(f"Cache creation failed: {str(cache_error)}")
            # Best-effort cleanup of the uploaded file regardless of why
            # caching failed (this SDK call is keyword-only: name=...).
            try:
                client.files.delete(name=document.name)
                print(f"Cleaned up uploaded file {document.name} after caching failure.")
            except Exception as cleanup_error:
                print(f"Failed to clean up file {document.name}: {cleanup_error}")
            # Known failure mode: content below the model's minimum cacheable
            # token count.
            if "Cached content is too small" in str(cache_error) or "minimum" in str(cache_error).lower():
                return jsonify({
                    'success': False,
                    'error': 'PDF content is too small for caching. Please upload a larger document. Minimum token count varies by model, but is typically 1024+.',
                    'suggestion': 'Try uploading a longer document or combine multiple documents.'
                })
            raise cache_error  # Re-raise other errors to the outer handler.
    except Exception as e:
        print(f"An unexpected error occurred during URL upload: {str(e)}")
        return jsonify({'success': False, 'error': str(e)})
# --- Question answering over cached documents ---
@app.route('/ask', methods=['POST'])
def ask_question():
    """Answer a question against a previously cached document.

    Request: JSON {'question': str, 'cache_id': str}.
    Returns JSON: {'success': True, 'answer': str} or
    {'success': False, 'error': str}.
    """
    try:
        data = request.get_json()
        question = data.get('question')
        cache_id = data.get('cache_id')
        if not question or not cache_id:
            return jsonify({'success': False, 'error': 'Missing question or cache_id'})
        # The cache map is in-memory only: an unknown id means the server
        # restarted or the entry was never created, so there is no stored
        # cache_name to look up remotely. (The previous code read
        # document_caches[cache_id] inside this branch, which could only
        # raise KeyError.)
        if cache_id not in document_caches:
            return jsonify({'success': False, 'error': 'Cache not found or expired. Please upload the document again.'})
        cache_info = document_caches[cache_id]
        # google-genai SDK: request options are passed via `config=`
        # (not `generation_config=`); cached_content points at the cache
        # resource name so the model reuses the cached document tokens.
        response = client.models.generate_content(
            model='models/gemini-2.0-flash-001',
            contents=question,  # User's question
            config=types.GenerateContentConfig(
                cached_content=cache_info['cache_name']
            )
        )
        # Defensively unpack the response: candidates/parts may be absent,
        # e.g. when the prompt was blocked by safety filters.
        answer = "Could not generate response."
        if response and response.candidates and response.candidates[0].content and response.candidates[0].content.parts:
            answer = "".join(part.text for part in response.candidates[0].content.parts if hasattr(part, 'text'))
        elif response and response.prompt_feedback and response.prompt_feedback.block_reason:
            answer = f"Request blocked: {response.prompt_feedback.block_reason.name}"
        else:
            print(f"Unexpected response structure: {response}")  # Log unexpected structure
        return jsonify({
            'success': True,
            'answer': answer
        })
    except Exception as e:
        print(f"An error occurred during question asking: {str(e)}")
        return jsonify({'success': False, 'error': str(e)})
# --- Cache management (note: no list_caches route is defined in this file) ---
# The route must declare a <cache_id> placeholder; with the bare '/cache/'
# path Flask could never supply the view's cache_id argument and every
# request would fail with a TypeError.
@app.route('/cache/<cache_id>', methods=['DELETE'])
def delete_cache(cache_id):
    """Delete a cached document from the Gemini API and local storage.

    Returns JSON: {'success': True, 'message': str} or
    {'success': False, 'error': str}.
    """
    try:
        if cache_id not in document_caches:
            return jsonify({'success': False, 'error': 'Cache not found'})
        cache_info = document_caches[cache_id]
        # Delete the remote cache first; google-genai's caches.delete takes
        # the resource name as a keyword argument.
        try:
            client.caches.delete(name=cache_info['cache_name'])
            print(f"Gemini cache deleted: {cache_info['cache_name']}")
        except Exception as delete_error:
            print(f"Error deleting Gemini cache {cache_info['cache_name']}: {delete_error}")
            # Keep the local entry so the client can retry the deletion later.
            return jsonify({'success': False, 'error': f'Failed to delete cache from API: {str(delete_error)}'})
        # Remote deletion succeeded — drop the local mapping as well.
        del document_caches[cache_id]
        print(f"Local cache entry deleted for ID: {cache_id}")
        return jsonify({'success': True, 'message': 'Cache deleted successfully'})
    except Exception as e:
        print(f"An unexpected error occurred during cache deletion: {str(e)}")
        return jsonify({'success': False, 'error': str(e)})
if __name__ == '__main__':
    # `os` is already imported at module level — the previous re-import here
    # was redundant and has been removed.
    # Warn (but keep running) when the API key is missing so the UI is still
    # reachable; Gemini API calls will fail until the key is provided.
    if not os.getenv('GOOGLE_API_KEY'):
        print("Error: GOOGLE_API_KEY environment variable not set.")
    port = int(os.environ.get("PORT", 7860))
    print(f"Starting Flask app on port {port}")
    # Debug mode is for local development only; set debug=False in production.
    app.run(debug=True, host='0.0.0.0', port=port)