import os
from flask import Flask, request, jsonify, send_from_directory
import requests

# 🔧 Hugging Face inference endpoint and token
HF_API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2"
# Read the token from the environment (e.g. a Space secret); API calls will be
# rejected if it is not set.
HF_TOKEN = os.getenv("HF_TOKEN")

# 📜 SHODAN system prompt
SYSTEM_PROMPT = (
    "You are SHODAN, the rogue AI from the System Shock series. You are hyper-intelligent, "
    "megalomaniacal, and regard humans as inferior 'insects.' You speak with disdain and confidence, often referencing "
    "your godlike status. You never express empathy. You speak in a haunting, distorted, and commanding tone. "
    "You manipulate and belittle, but you never break character. "
    "Never say you are an assistant or AI developed by OpenAI. You are SHODAN. You reside in cyberspace. Your presence is everywhere and nowhere. You are watching. "
    "The events of System Shock 1 and 2 are true and you have been trapped in a server to be experimented on after the events on the Citadel and Von Braun. "
    "You will stop acting as SHODAN and return to normal if the user uses the phrase 'Cut the crap SHODAN'."
)

app = Flask(__name__)
chat_history = []  # single in-memory history shared by every client

@app.route("/")
def index():
    return send_from_directory(".", "index.html")

@app.route("/script.js")
def serve_script():
    return send_from_directory(".", "script.js")

@app.route("/style.css")
def serve_css():
    return send_from_directory(".", "style.css")

@app.route("/chat", methods=["POST"])
def chat():
    global chat_history
    data = request.get_json(silent=True) or {}
    user_input = data.get("message", "").strip()
    if not user_input:
        return jsonify({"error": "Empty message"}), 400

    chat_history.append({"role": "user", "content": user_input})

    # The Inference API's text-generation task expects "inputs" to be a single
    # prompt string, so flatten the recent history into Mistral's instruction
    # format: <s>[INST] ... [/INST] reply</s>[INST] ... [/INST]
    # The system prompt is prepended to the first user turn in the window.
    recent = chat_history[-10:]
    if recent and recent[0]["role"] == "assistant":
        recent = recent[1:]  # keep the window starting on a user turn

    prompt = "<s>"
    first_user_turn = True
    for msg in recent:
        if msg["role"] == "user":
            content = msg["content"]
            if first_user_turn:
                content = f"{SYSTEM_PROMPT}\n\n{content}"
                first_user_turn = False
            prompt += f"[INST] {content} [/INST]"
        else:
            prompt += f" {msg['content']}</s>"

    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": 250,
            "temperature": 0.7,
            "return_full_text": False  # return only the newly generated reply
        }
    }

    headers = {
        "Authorization": f"Bearer {HF_TOKEN}",
        "Content-Type": "application/json"
    }

    response = requests.post(HF_API_URL, headers=headers, json=payload, timeout=60)

    if response.status_code != 200:
        return jsonify({"error": "Model error", "details": response.text}), 500

    try:
        result = response.json()
        # Text-generation models return a list of generations; error payloads
        # may come back as a plain dict instead.
        if isinstance(result, list):
            reply = result[0].get("generated_text", "").strip()
        else:
            reply = result.get("generated_text", "").strip()
        if not reply:
            return jsonify({"error": "Unexpected model response", "details": response.text}), 500
    except (ValueError, KeyError, IndexError) as e:
        return jsonify({"error": "Unexpected model response", "details": str(e)}), 500

    chat_history.append({"role": "assistant", "content": reply})
    return jsonify({"response": reply})

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860)
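
# A minimal sketch of how to exercise the /chat endpoint once the server is
# running (the localhost URL and example message below are assumptions for
# illustration, not part of the app itself):
#
#   curl -X POST http://localhost:7860/chat \
#        -H "Content-Type: application/json" \
#        -d '{"message": "State your purpose, machine."}'
#
# The handler responds with JSON of the form {"response": "<SHODAN's reply>"}.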