FuturesonyAi / app.py
from flask import Flask, request, jsonify, render_template
from huggingface_hub import InferenceClient
import os

# Initialize the Flask app
app = Flask(__name__)

# Initialize the Hugging Face Inference Client
client = InferenceClient("Futuresony/future_ai_12_10_2024.gguf")
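# Text-generation requests made through this client are sent to the Hugging
# Face hosted inference service for the model repo named above.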
@app.route("/")
def home():
# Render the HTML template
return render_template("index.html")
@app.route("/message", methods=["POST"])
def fetch_message():
data = request.json
message = data.get("text", "")
if not message:
return jsonify({"error": "No input provided."}), 400
# Process the message using the Hugging Face model
try:
response = client.text_generation(message)
return jsonify({"response": response})
except Exception as e:
return jsonify({"error": str(e)}), 500


if __name__ == "__main__":
    # Use PORT environment variable or default to 7860
    port = int(os.getenv("PORT", 7860))
    app.run(host="0.0.0.0", port=port)
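

# Example client call (a sketch, assuming the server is running locally on the
# default port 7860 and the third-party `requests` package is installed):
#
#   import requests
#   r = requests.post("http://localhost:7860/message", json={"text": "Hello"})
#   print(r.json())  # -> {"response": "<generated text>"} on success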