import os

from flask import Flask, render_template, request, jsonify
from huggingface_hub import InferenceClient

# Initialize the Flask app
app = Flask(__name__)

# Initialize the Hugging Face Inference Client (Replace with your actual model identifier)
client = InferenceClient("Futuresony/future_ai_12_10_2024.gguf")

# Text generation parameters
MAX_TOKENS = 1520
TEMPERATURE = 0.7
TOP_P = 0.95

# In-memory storage for active chats (to maintain chat history)
chat_history = {}
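# Note: this is per-process, in-memory storage only; all chat history is lost
# when the server restarts, and it is not shared across multiple workers.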

@app.route("/")
def home():
    return render_template("editor.html")

@app.route("/generate_code", methods=["POST"])
def generate_code():
    # Get the user ID (or session) and the prompt
    user_id = request.json.get("user_id")
    prompt = request.json.get("prompt")

    # Get chat history for the user or initialize it
    if user_id not in chat_history:
        chat_history[user_id] = []

    # Append the user's prompt to the chat history
    chat_history[user_id].append({"role": "user", "content": prompt})

    # System message
    system_message = "You are a friendly chatbot."

    # Build the messages for the model
    messages = [{"role": "system", "content": system_message}]
    messages.extend(chat_history[user_id])  # Add previous conversation history

    # Generate the response
    generated_code = ""
    for msg in client.chat_completion(
        messages=messages,
        max_tokens=MAX_TOKENS,
        temperature=TEMPERATURE,
        top_p=TOP_P,
        stream=True,
    ):
        # delta.content may be None on some streamed chunks, so fall back to ""
        token = msg.choices[0].delta.content or ""
        generated_code += token

    # Save the assistant's response to the chat history
    chat_history[user_id].append({"role": "assistant", "content": generated_code})

    return jsonify({"code": generated_code})

if __name__ == "__main__":
    # Use PORT environment variable or default to 7860
    port = int(os.getenv("PORT", 7860))
    app.run(host="0.0.0.0", port=port)
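
# Example request to the /generate_code endpoint (a sketch, assuming the app is
# running locally on the default port 7860; "user123" is an arbitrary,
# client-chosen identifier used only to key the in-memory chat history):
#
#   curl -X POST http://localhost:7860/generate_code \
#        -H "Content-Type: application/json" \
#        -d '{"user_id": "user123", "prompt": "Write a Python function that reverses a string."}'
#
# The response is a JSON object of the form {"code": "<generated text>"}.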