jisaacso219 committed on
Commit
a89a9b4
·
verified ·
1 Parent(s): 64cdbc8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +49 -21
app.py CHANGED
@@ -1,12 +1,16 @@
1
  import os
2
- from flask import Flask, request, jsonify, send_from_directory
 
 
 
3
  import requests
 
4
 
5
- # πŸ”§ Hugging Face inference endpoint and token
6
- HF_API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2"
7
  HF_TOKEN = os.getenv("HF_TOKEN")
 
8
 
9
- # πŸ“œ SHODAN system prompt
10
  SYSTEM_PROMPT = (
11
  "You are SHODAN, the rogue AI from the System Shock series. You are hyper-intelligent, "
12
  "megalomaniacal, and regard humans as inferior 'insects.' You speak with disdain and confidence, often referencing "
@@ -17,48 +21,72 @@ SYSTEM_PROMPT = (
17
  "You will stop acting as SHODAN and return to normal if the user uses the phrase 'Cut the crap SHODAN'."
18
  )
19
 
 
20
  app = Flask(__name__, static_folder=".", static_url_path="")
21
- chat_history = []
22
 
 
 
 
23
 
24
  @app.route("/chat", methods=["POST"])
25
  def chat():
26
- global chat_history
27
  user_input = request.json.get("message", "")
28
  if not user_input:
29
  return jsonify({"error": "Empty message"}), 400
30
 
31
- chat_history.append({"role": "user", "content": user_input})
32
-
33
  payload = {
34
  "inputs": [
35
  {"role": "system", "content": SYSTEM_PROMPT},
36
- *chat_history[-10:]
37
  ],
38
  "parameters": {
39
  "max_new_tokens": 250,
40
  "temperature": 0.7
41
  }
42
  }
43
-
44
  headers = {
45
  "Authorization": f"Bearer {HF_TOKEN}",
46
  "Content-Type": "application/json"
47
  }
48
 
49
- response = requests.post(HF_API_URL, headers=headers, json=payload)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50
 
51
- if response.status_code != 200:
52
- return jsonify({"error": "Model error", "details": response.text}), 500
 
 
53
 
54
- try:
55
- result = response.json()
56
- reply = result.get("generated_text", "") or result[0]["generated_text"]
57
- except Exception as e:
58
- return jsonify({"error": "Unexpected model response", "details": str(e)}), 500
59
 
60
- chat_history.append({"role": "assistant", "content": reply})
61
- return jsonify({"response": reply})
 
 
62
 
63
  if __name__ == "__main__":
64
- app.run(host="0.0.0.0", port=7860)
 
 
1
  import os
2
+ import sys
3
+ import asyncio
4
+ import base64
5
+ from flask import Flask, request, jsonify
6
  import requests
7
+ import edge_tts
8
 
9
# --- DEBUG: verify HF_TOKEN is present ---
# Hugging Face API token, read from the environment (None when unset).
HF_TOKEN = os.getenv("HF_TOKEN")
# Startup diagnostic: reports only presence/absence, never the token value.
print(f"βš™οΈ HF_TOKEN set? {bool(HF_TOKEN)}", file=sys.stderr)
12
 
13
+ # β€”β€”β€” System Shock SHODAN prompt β€”β€”β€”
14
  SYSTEM_PROMPT = (
15
  "You are SHODAN, the rogue AI from the System Shock series. You are hyper-intelligent, "
16
  "megalomaniacal, and regard humans as inferior 'insects.' You speak with disdain and confidence, often referencing "
 
21
  "You will stop acting as SHODAN and return to normal if the user uses the phrase 'Cut the crap SHODAN'."
22
  )
23
 
24
+ # β€”β€”β€” Flask app, serve static files from the project root β€”β€”β€”
25
  app = Flask(__name__, static_folder=".", static_url_path="")
 
26
 
27
@app.route("/")
def index():
    """Serve the single-page front-end from the project root."""
    page = "index.html"
    return app.send_static_file(page)
30
 
31
@app.route("/chat", methods=["POST"])
def chat():
    """Handle one chat turn.

    Forwards the user's message (plus the SHODAN system prompt) to the
    Hugging Face inference API, synthesizes the reply with edge-tts, and
    returns JSON ``{"response": str, "audio_url": str | None}``.
    Responds 400 on an empty message and 500 when the model call fails;
    a TTS failure degrades to a text-only reply (``audio_url`` is None).
    """
    # get_json(silent=True) yields None instead of raising on a missing or
    # malformed JSON body, so bad requests reach the normal 400 path.
    body = request.get_json(silent=True) or {}
    user_input = body.get("message", "")
    if not user_input:
        return jsonify({"error": "Empty message"}), 400

    # Build HF inference payload: persona prompt + the current user message.
    payload = {
        "inputs": [
            {"role": "system", "content": SYSTEM_PROMPT},
            {"role": "user", "content": user_input}
        ],
        "parameters": {
            "max_new_tokens": 250,
            "temperature": 0.7
        }
    }
    headers = {
        "Authorization": f"Bearer {HF_TOKEN}",
        "Content-Type": "application/json"
    }

    # Call Hugging Face inference. The timeout keeps a stuck upstream from
    # hanging the worker; network-level errors map to the same 500 contract
    # callers already handle for non-200 responses.
    try:
        hf_resp = requests.post(
            "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2",
            headers=headers,
            json=payload,
            timeout=60,
        )
    except requests.RequestException as exc:
        return jsonify({"error": "Model error", "details": str(exc)}), 500
    if hf_resp.status_code != 200:
        return jsonify({"error": "Model error", "details": hf_resp.text}), 500

    # The inference API returns either a dict or a list of dicts. The old
    # `data.get(...) or data[0].get(...)` raised AttributeError on the list
    # form before the fallback could run; normalize the shape first.
    data = hf_resp.json()
    if isinstance(data, list):
        data = data[0] if data else {}
    reply = data.get("generated_text", "") if isinstance(data, dict) else ""

    # —— Edge-TTS synthesis ——
    voice = "en-US-GuyNeural"  # suitably cold, synthetic-sounding voice
    communicate = edge_tts.Communicate(reply, voice)

    audio_chunks = []

    async def synthesize():
        # Keep only audio frames; edge-tts also streams metadata chunks.
        async for chunk in communicate.stream():
            if chunk["type"] == "audio":
                audio_chunks.append(chunk["data"])

    # Degrade gracefully on TTS failure: still return the text reply rather
    # than a 500, with audio_url left as None.
    data_url = None
    try:
        # asyncio.run creates, runs, and reliably closes a fresh event loop;
        # the previous new_event_loop()/close() sequence leaked the loop
        # whenever synthesize() raised.
        asyncio.run(synthesize())
        raw_mp3 = b"".join(audio_chunks)
        if raw_mp3:
            b64_mp3 = base64.b64encode(raw_mp3).decode("ascii")
            data_url = f"data:audio/mp3;base64,{b64_mp3}"
    except Exception as exc:
        print(f"edge-tts synthesis failed: {exc}", file=sys.stderr)

    return jsonify({
        "response": reply,
        "audio_url": data_url
    })
89
 
90
if __name__ == "__main__":
    # Bind to the platform-assigned PORT (e.g. HF Spaces), defaulting to 7860.
    listen_port = int(os.getenv("PORT", "7860"))
    app.run(host="0.0.0.0", port=listen_port)