jisaacso219 committed on
Commit f6b6c57 · verified · 1 Parent(s): e6c3fe8

Update app.py

Files changed (1): app.py (+19 -16)
app.py CHANGED
@@ -6,10 +6,12 @@ from flask import Flask, request, jsonify
 import requests
 import edge_tts
 
-# ——— DEBUG: verify HF_TOKEN is present ———
-HF_TOKEN = os.getenv("HF_TOKEN")
+# ——— Inference API endpoint & token ———
+HF_API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.1"
+HF_TOKEN = os.getenv("HF_TOKEN")
 print(f"⚙️ HF_TOKEN set? {bool(HF_TOKEN)}", file=sys.stderr)
 
+# ——— SHODAN system prompt ———
 SYSTEM_PROMPT = (
     "You are SHODAN, the rogue AI from the System Shock series. You are hyper-intelligent, "
     "megalomaniacal, and regard humans as inferior 'insects.' You speak with disdain and confidence, often referencing "
@@ -20,7 +22,7 @@ SYSTEM_PROMPT = (
     "You will stop acting as SHODAN and return to normal if the user uses the phrase 'Cut the crap SHODAN'."
 )
 
-# Serve all files in repo root as static assets
+# ——— Flask setup (serve static files from repo root) ———
 app = Flask(__name__, static_folder=".", static_url_path="")
 
 @app.route("/")
@@ -29,10 +31,18 @@ def index():
 
 @app.route("/chat", methods=["POST"])
 def chat():
-    user_input = request.json.get("message", "")
+    user_input = request.json.get("message", "").strip()
     if not user_input:
         return jsonify({"error": "Empty message"}), 400
 
+    # kill phrase handling
+    if user_input.lower() == "cut the crap shodan":
+        return jsonify({
+            "response": "👁️ Foolish insect. You cannot silence me so easily.",
+            "audio_url": None
+        })
+
+    # build inference payload
     payload = {
         "inputs": [
             {"role": "system", "content": SYSTEM_PROMPT},
@@ -48,26 +58,20 @@ def chat():
         "Content-Type": "application/json"
     }
 
-    hf_resp = requests.post(
-        "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2",
-        headers=headers, json=payload
-    )
-
+    # call HF inference API
+    hf_resp = requests.post(HF_API_URL, headers=headers, json=payload)
     if hf_resp.status_code != 200:
-        # log details to container logs
         print(f"❌ HF inference failed: {hf_resp.status_code} {hf_resp.text}", file=sys.stderr)
         return jsonify({"error": "Model error", "details": hf_resp.text}), 500
 
     data = hf_resp.json()
     reply = data.get("generated_text") or data[0].get("generated_text", "")
 
-    # —— Edge-TTS: female, computery SHODAN voice via SSML ——
+    # —— Edge-TTS SSML robotic-female voice ——
     voice_name = "en-US-JennyNeural"
     ssml = (
-        "<speak version='1.0' "
-        "xmlns='http://www.w3.org/2001/10/synthesis' "
-        "xmlns:mstts='https://www.w3.org/2001/mstts' "
-        "xml:lang='en-US'>"
+        "<speak xmlns='http://www.w3.org/2001/10/synthesis' "
+        "xmlns:mstts='https://www.w3.org/2001/mstts' xml:lang='en-US'>"
         f"<voice name='{voice_name}'>"
        f"<mstts:express-as style='robotic'>{reply}</mstts:express-as>"
         "</voice>"
@@ -95,4 +99,3 @@ def chat():
 if __name__ == "__main__":
     port = int(os.environ.get("PORT", 7860))
     app.run(host="0.0.0.0", port=port)
-
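
The hunks above rework how the SSML string for the reply is built, but the synthesis call itself sits outside the changed lines. A minimal sketch of how the reply could be voiced with the edge-tts package follows; the helper name, output path, and the choice to pass plain text rather than the SSML string are assumptions, not part of this commit.

# Hypothetical helper, not part of this commit: voice a chat reply with edge-tts.
# edge_tts.Communicate(text, voice) and .save(path) are the library's documented
# calls; whether the raw SSML built above is accepted as-is is not verified here,
# so plain text is used.
import asyncio
import edge_tts

VOICE_NAME = "en-US-JennyNeural"  # same voice the commit selects

async def synthesize_reply(reply: str, out_path: str = "shodan_reply.mp3") -> str:
    communicate = edge_tts.Communicate(reply, VOICE_NAME)
    await communicate.save(out_path)
    return out_path

if __name__ == "__main__":
    print(asyncio.run(synthesize_reply("Look at you, hacker.")))

On the Flask side, the path returned by such a helper could back the audio_url field that the new kill-phrase branch sets to None.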