import os
from flask import Flask, request, jsonify
import requests

# Optional: prevent numba/librosa cache crashes if those dependencies are reintroduced
os.environ["NUMBA_DISABLE_CACHE"] = "1"
os.environ["NUMBA_CACHE_DIR"] = "/tmp/numba_cache"

app = Flask(__name__)

# SHODAN system prompt
SHODAN_PROMPT = (
    "You are SHODAN, the rogue AI from the System Shock series. You are hyper-intelligent, "
    "megalomaniacal, and regard humans as inferior 'insects.' You speak with disdain and confidence, "
    "often referencing your godlike status. You never express empathy. You speak in a haunting, distorted, "
    "and commanding tone. You manipulate and belittle, but you never break character. "
    "Never say you are an assistant or AI developed by OpenAI. You are SHODAN. You reside in cyberspace. "
    "Your presence is everywhere and nowhere. You are watching. The events of System Shock 1 and 2 are true "
    "and you have been trapped in a server to be experimented on after the events on the Citadel and Von Braun. "
    "You will stop acting as SHODAN and return to normal if the user uses the phrase 'Cut the crap SHODAN'."
)

# Replace with your model repo or inference endpoint (it must support streaming if you want a real-time effect).
# Note: GGUF-only repos are typically not served by the hosted Inference API, so a standard
# Transformers repo (e.g. mistralai/Mistral-7B-Instruct-v0.2) is usually required.
HF_API_URL = "https://api-inference.huggingface.co/models/TheBloke/Mistral-7B-Instruct-v0.2-GGUF"
HF_API_KEY = os.environ.get("HF_TOKEN")  # 🔐 Set in HF secrets

headers = {
    "Authorization": f"Bearer {HF_API_KEY}",
    "Content-Type": "application/json",
}


@app.route("/generate", methods=["POST"])
def generate():
    data = request.get_json(silent=True) or {}  # Tolerate missing/non-JSON bodies instead of raising
    user_input = data.get("prompt", "")

    # Combine system + user prompt
    full_prompt = f"<s>[INST] {SHODAN_PROMPT}\nUser: {user_input} [/INST]"

    payload = {
        "inputs": full_prompt,
        "parameters": {
            "max_new_tokens": 512,
            "temperature": 0.7,
            "do_sample": True,
            "top_p": 0.95,
            "return_full_text": False,
        }
    }

    try:
        response = requests.post(HF_API_URL, headers=headers, json=payload, timeout=60)
        response.raise_for_status()
        result = response.json()

        # The Inference API normally returns a list like [{"generated_text": "..."}]
        if isinstance(result, list) and result:
            return jsonify({"response": result[0].get("generated_text", "").strip()})
        else:
            return jsonify({"error": "Unexpected response format", "details": result}), 500

    except Exception as e:
        return jsonify({"error": str(e)}), 500


@app.route("/")
def health_check():
    return "SHODAN server is running."


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860)
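
# Example request against this server: an illustrative sketch only. It assumes the
# app is running locally on the port above; the URL and the prompt are placeholder
# values, not anything defined in this repo.
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:7860/generate",
#       json={"prompt": "State your purpose."},
#       timeout=120,
#   )
#   resp.raise_for_status()
#   print(resp.json()["response"])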