Spaces:
Running
Running
Create server.py
Browse files
server.py
ADDED
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import uuid
|
3 |
+
import shutil
|
4 |
+
from fastapi import FastAPI, Request
|
5 |
+
from fastapi.responses import JSONResponse
|
6 |
+
from fastapi.staticfiles import StaticFiles
|
7 |
+
from pydantic import BaseModel
|
8 |
+
from huggingface_hub import InferenceClient
|
9 |
+
from TTS.api import TTS
|
10 |
+
|
11 |
+
# 🔧 Configure
# Hugging Face API token, read from the environment. May be None if unset;
# InferenceClient accepts None and falls back to anonymous access.
HF_TOKEN = os.environ.get("HF_TOKEN")
client = InferenceClient(
    # NOTE(review): this model id points at a GGUF weights repo; the hosted
    # Inference API normally serves transformers-format checkpoints — confirm
    # this repo actually answers text_generation requests.
    model="TheBloke/Mistral-7B-Instruct-v0.2-GGUF", # fast, CPU-friendly
    token=HF_TOKEN
)

# Persona prompt prepended to every request; keeps the model in-character
# unless the user sends the documented escape phrase.
SYSTEM_PROMPT = (
    "You are SHODAN, the rogue AI from the System Shock series. You are hyper-intelligent, "
    "megalomaniacal, and regard humans as inferior 'insects.' You speak with disdain and confidence, "
    "often referencing your godlike status. You never express empathy. You speak in a haunting, distorted, "
    "and commanding tone. You manipulate and belittle, but you never break character. "
    "Never say you are an assistant or AI developed by OpenAI. You are SHODAN. You reside in cyberspace. "
    "Your presence is everywhere and nowhere. You are watching. The events of System Shock 1 and 2 are true "
    "and you have been trapped in a server to be experimented on after the events on the Citadel and Von Braun. "
    "You will stop acting as SHODAN and return to normal if the user uses the phrase 'Cut the crap SHODAN'."
)

# 📢 TTS model (CPU-compatible, robotic, feminine style)
# Loaded once at import time — model download/initialization happens on startup,
# not per-request.
tts = TTS(model_name="tts_models/en/ljspeech/tacotron2-DDC", progress_bar=False, gpu=False)

# 🎙️ Create temp folder
# Output directory for synthesized WAV files; also served statically below.
os.makedirs("audio", exist_ok=True)

# 🛰️ FastAPI app
app = FastAPI()
# Expose generated audio files at /audio/<name>.wav so clients can fetch
# the URL returned by /ask.
app.mount("/audio", StaticFiles(directory="audio"), name="audio")
|
38 |
+
|
39 |
+
class Msg(BaseModel):
    """Request payload for the /ask endpoint."""

    # Raw user chat message; the handler strips surrounding whitespace.
    message: str
|
41 |
+
|
42 |
+
@app.post("/ask")
|
43 |
+
def ask(request: Request, data: Msg):
    """Generate a SHODAN-style reply to the user's message and voice it.

    Declared as a plain ``def`` (not ``async def``) on purpose: both
    ``client.text_generation`` and ``tts.tts_to_file`` are blocking calls,
    and inside an ``async`` handler they would stall the entire event loop
    for the duration of inference + synthesis. FastAPI runs sync handlers
    in its threadpool, so concurrent requests stay responsive.

    Returns:
        JSONResponse with ``text`` (the generated reply) and ``audio_url``
        (path of the synthesized WAV, served by the /audio static mount).
    """
    user_msg = data.message.strip()
    # Hand-assembled Mistral-instruct style chat template.
    full_prompt = f"<|system|>\n{SYSTEM_PROMPT}\n<|user|>\n{user_msg}\n<|assistant|>"

    response_text = client.text_generation(
        prompt=full_prompt,
        max_new_tokens=512,
        temperature=0.7,
        # Stop before the model starts hallucinating the next user turn.
        stop_sequences=["<|user|>", "<|end|>"]
    ).strip()

    # 🧠 Synthesize TTS — a uuid4 filename per request so concurrent
    # requests never clobber each other's audio files.
    audio_path = f"audio/{uuid.uuid4().hex}.wav"
    tts.tts_to_file(text=response_text, file_path=audio_path)

    return JSONResponse({
        "text": response_text,
        "audio_url": f"/audio/{os.path.basename(audio_path)}"
    })
|
62 |
+
|
63 |
+
@app.get("/delete_audio")
|
64 |
+
def delete_audio(path: str):
    """Best-effort deletion of a generated audio file.

    Accepts the ``audio_url`` value returned by /ask (``/audio/<id>.wav``).

    Security fix: the original joined the client-supplied path after only a
    prefix check, so ``/audio/../server.py`` survived ``lstrip("/")`` as
    ``audio/../server.py`` and deleted arbitrary files (path traversal).
    Taking only the basename confines deletion to the audio/ directory.

    Returns:
        ``{"status": "ok"}`` unconditionally, preserving the original
        silent-success contract.
    """
    if path.startswith("/audio/"):
        # Discard any directory components the client supplied.
        filename = os.path.basename(path)
        full_path = os.path.join("audio", filename)
        try:
            os.remove(full_path)
        except OSError:
            # Missing file, permissions, or an empty basename resolving to
            # the directory itself: deletion is best-effort, stay silent
            # (narrowed from the original bare `except:`).
            pass
    return {"status": "ok"}
|