from flask import Flask, request, jsonify, render_template
from huggingface_hub import InferenceClient
import os

# Initialize the Flask app
app = Flask(__name__)

# Initialize the Hugging Face Inference Client
client = InferenceClient("Futuresony/future_ai_12_10_2024.gguf")

@app.route("/")
def home():
    # Render the HTML template
    return render_template("index.html")

@app.route("/message", methods=["POST"])
def fetch_message():
    data = request.json
    message = data.get("text", "")
    if not message:
        return jsonify({"error": "No input provided."}), 400

    # Process the message using the Hugging Face model
    try:
        response = client.text_generation(message)
        return jsonify({"response": response})
    except Exception as e:
        # Surface generation failures to the caller instead of crashing the endpoint
        return jsonify({"error": str(e)}), 500

if __name__ == "__main__":
    # Use PORT environment variable or default to 7860
    port = int(os.getenv("PORT", 7860))
    app.run(host="0.0.0.0", port=port)
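
# Example client call (a minimal sketch, not part of the app itself): assuming
# the server is running locally on the default port 7860, the /message endpoint
# above can be exercised from a separate Python session like this:
#
#   import requests
#   resp = requests.post(
#       "http://localhost:7860/message",
#       json={"text": "Hello!"},
#   )
#   print(resp.status_code, resp.json())  # 200 and {"response": "..."} on success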