|
|
|
import os |
|
import subprocess |
|
import sys |
|
import time |
|
import json |
|
from pathlib import Path |
|
import signal |
|
import threading |
|
import shutil |
|
import http.server |
|
import socketserver |
|
import urllib.request |
|
import urllib.error |
|
import gradio as gr |
|
|
|
|
|
# Detect whether we are running inside a HuggingFace Space — the platform
# always sets SPACE_ID in the environment.
IS_HF_SPACE = os.environ.get("SPACE_ID") is not None

print(f"Running in HuggingFace Space: {IS_HF_SPACE}")

# Writable scratch area for agent configs and logs (/tmp is writable on Spaces).
TMP_DIR = Path("/tmp/ten_user")
AGENTS_DIR = TMP_DIR / "agents"
LOGS_DIR = TMP_DIR / "logs"

# Server ports, overridable via environment variables.
NEXTJS_PORT = int(os.environ.get("UI_PORT", 3000))      # Next.js Playground UI
API_PORT = int(os.environ.get("API_PORT", 8080))        # TEN API wrapper
GRADIO_PORT = int(os.environ.get("GRADIO_PORT", 7860))  # Gradio landing page

if IS_HF_SPACE:
    # Public URL of the embedded UI as proxied by the Space.
    # NOTE(review): the "/_next/iframe/3000" path looks Space-proxy specific —
    # confirm it matches the Space's actual routing before relying on it.
    UI_URL = f"https://{os.environ.get('SPACE_ID', 'unknown-space')}.hf.space/_next/iframe/3000"
    # Bind on all interfaces so the Space's reverse proxy can reach the servers.
    INTERNAL_HOST = "0.0.0.0"
else:
    # Local development: UI is served directly on localhost.
    UI_URL = f"http://localhost:{NEXTJS_PORT}"
    INTERNAL_HOST = "localhost"
|
|
|
def create_directories():
    """Create the working directory tree and an empty server log file."""
    print("Создание директорий...")

    # Parents first is not required thanks to parents=True, but keep the
    # natural top-down order for readability.
    for directory in (TMP_DIR, AGENTS_DIR, LOGS_DIR):
        directory.mkdir(exist_ok=True, parents=True)

    # Make sure the log file exists before anything tries to tail it.
    (LOGS_DIR / "server.log").touch()
    print(f"Директории созданы в {TMP_DIR}")
|
|
|
def _write_json(path, data):
    """Serialize *data* to *path* as pretty-printed UTF-8 JSON."""
    # FIX: the original opened files with the platform-default encoding;
    # pin UTF-8 so output is identical everywhere.
    with open(path, "w", encoding="utf-8") as f:
        json.dump(data, f, indent=2)


def create_config_files():
    """Write the default agent configuration files into AGENTS_DIR.

    Produces three files:
      - property.json: agent metadata plus the list of available graphs
      - voice_agent.json: voice pipeline graph (ASR -> ChatGPT -> TTS)
      - chat_agent.json: minimal text-only chat graph
    """
    print("Создание конфигурационных файлов...")

    # Top-level agent metadata; "graphs" references the files written below.
    property_data = {
        "name": "TEN Agent Demo",
        "version": "0.0.1",
        "extensions": ["openai_chatgpt", "elevenlabs_tts", "deepgram_asr"],
        "description": "TEN Agent on Hugging Face Space",
        "graphs": [
            {
                "name": "Voice Agent",
                "description": "Basic voice agent with OpenAI and ElevenLabs",
                "file": "voice_agent.json"
            },
            {
                "name": "Chat Agent",
                "description": "Simple chat agent with OpenAI",
                "file": "chat_agent.json"
            }
        ]
    }
    _write_json(AGENTS_DIR / "property.json", property_data)

    # Voice pipeline: speech in (Deepgram) -> LLM -> speech out (ElevenLabs).
    voice_agent = {
        "_ten": {"version": "0.0.1"},
        "nodes": [
            {
                "id": "start",
                "type": "start",
                "data": {"x": 100, "y": 100}
            },
            {
                "id": "openai_chatgpt",
                "type": "openai_chatgpt",
                "data": {
                    "x": 300,
                    "y": 200,
                    "properties": {
                        "model": "gpt-3.5-turbo",
                        "temperature": 0.7,
                        "system_prompt": "You are a helpful assistant."
                    }
                }
            },
            {
                "id": "elevenlabs_tts",
                "type": "elevenlabs_tts",
                "data": {
                    "x": 500,
                    "y": 200,
                    "properties": {
                        "voice_id": "21m00Tcm4TlvDq8ikWAM"
                    }
                }
            },
            {
                "id": "deepgram_asr",
                "type": "deepgram_asr",
                "data": {
                    "x": 300,
                    "y": 300,
                    "properties": {
                        "language": "ru"
                    }
                }
            },
            {
                "id": "end",
                "type": "end",
                "data": {"x": 700, "y": 100}
            }
        ],
        "edges": [
            {"id": "start_to_chatgpt", "source": "start", "target": "openai_chatgpt"},
            {"id": "chatgpt_to_tts", "source": "openai_chatgpt", "target": "elevenlabs_tts"},
            {"id": "tts_to_end", "source": "elevenlabs_tts", "target": "end"},
            {"id": "asr_to_chatgpt", "source": "deepgram_asr", "target": "openai_chatgpt"}
        ],
        "groups": [],
        "templates": [],
        "root": "start"
    }
    _write_json(AGENTS_DIR / "voice_agent.json", voice_agent)

    # Text-only chat graph: start -> LLM -> end.
    chat_agent = {
        "_ten": {"version": "0.0.1"},
        "nodes": [
            {
                "id": "start",
                "type": "start",
                "data": {"x": 100, "y": 100}
            },
            {
                "id": "openai_chatgpt",
                "type": "openai_chatgpt",
                "data": {
                    "x": 300,
                    "y": 200,
                    "properties": {
                        "model": "gpt-3.5-turbo",
                        "temperature": 0.7,
                        "system_prompt": "You are a helpful chat assistant."
                    }
                }
            },
            {
                "id": "end",
                "type": "end",
                "data": {"x": 500, "y": 100}
            }
        ],
        "edges": [
            {"id": "start_to_chatgpt", "source": "start", "target": "openai_chatgpt"},
            {"id": "chatgpt_to_end", "source": "openai_chatgpt", "target": "end"}
        ],
        "groups": [],
        "templates": [],
        "root": "start"
    }
    _write_json(AGENTS_DIR / "chat_agent.json", chat_agent)

    print("Конфигурационные файлы созданы успешно")
|
|
|
def start_api_server():
    """Start the TEN API wrapper server as a subprocess.

    Returns the subprocess.Popen handle on success, or None if the process
    exited during the 2-second startup grace period.
    """
    print("Запуск API сервера...")

    # Inherit the current environment and point the server at our config dir.
    api_env = os.environ.copy()
    api_env["TEN_AGENT_DIR"] = str(AGENTS_DIR)
    api_env["API_PORT"] = str(API_PORT)

    if IS_HF_SPACE:
        print("Configuring API server for HuggingFace Space environment...")
        api_env["USE_WRAPPER"] = "true"
        # File logging is disabled because the Space filesystem is ephemeral.
        api_env["TEN_LOG_DISABLE_FILE"] = "true"
        api_env["TMP_DIR"] = str(TMP_DIR)

    api_cmd = ["python", "api_wrapper.py"]
    print(f"Running API command: {' '.join(api_cmd)}")
    api_process = subprocess.Popen(
        api_cmd,
        env=api_env,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )

    # Give the process a moment to fail fast on startup errors.
    time.sleep(2)

    if api_process.poll() is not None:
        stdout, stderr = api_process.communicate()
        print("API сервер не запустился!")
        print(f"STDOUT: {stdout.decode()}")
        print(f"STDERR: {stderr.decode()}")
        return None

    print(f"API server started and listening on port {API_PORT}")

    # BUG FIX: the original drained stdout to EOF before ever reading stderr
    # in a single thread — if the child filled the stderr pipe buffer it would
    # block forever. Drain each pipe concurrently in its own daemon thread.
    def pump(pipe, prefix):
        for line in iter(pipe.readline, b''):
            print(f"[{prefix}] {line.decode().strip()}")

    for pipe, prefix in ((api_process.stdout, "API"), (api_process.stderr, "API ERROR")):
        threading.Thread(target=pump, args=(pipe, prefix), daemon=True).start()

    return api_process
|
|
|
def start_playground():
    """Start the Next.js Playground UI as a subprocess.

    Returns the subprocess.Popen handle on success, or None if the dev
    server exited during the 5-second startup grace period.
    """
    print("Запуск Playground UI...")

    # Inherit the current environment and configure the UI server.
    ui_env = os.environ.copy()
    ui_env["PORT"] = str(NEXTJS_PORT)
    ui_env["AGENT_SERVER_URL"] = f"http://{INTERNAL_HOST}:{API_PORT}"
    ui_env["NEXT_PUBLIC_EDIT_GRAPH_MODE"] = "true"
    ui_env["NEXT_PUBLIC_DISABLE_CAMERA"] = "false"

    if IS_HF_SPACE:
        print("Configuring for HuggingFace Space environment...")
        ui_env["NEXT_PUBLIC_IS_HF_SPACE"] = "true"
        ui_env["NEXT_PUBLIC_DISABLE_CORS"] = "true"

    # FIX: avoid shell=True with a shell string ("cd playground && npm run
    # dev") — run npm directly with an argv list and cwd= instead, which is
    # both safer and gives us the npm process handle (not a shell wrapper).
    ui_cmd = ["npm", "run", "dev"]
    print(f"Running UI command: {' '.join(ui_cmd)}")
    ui_process = subprocess.Popen(
        ui_cmd,
        cwd="playground",
        env=ui_env,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )

    # Next.js takes a while to boot; wait before checking for early failure.
    time.sleep(5)

    if ui_process.poll() is not None:
        stdout, stderr = ui_process.communicate()
        print("Playground UI не запустился!")
        print(f"STDOUT: {stdout.decode()}")
        print(f"STDERR: {stderr.decode()}")
        return None

    # BUG FIX: the original drained stdout to EOF before ever reading stderr
    # in a single thread — if the child filled the stderr pipe buffer it would
    # block forever. Drain each pipe concurrently in its own daemon thread.
    def pump(pipe, prefix):
        for line in iter(pipe.readline, b''):
            print(f"[{prefix}] {line.decode().strip()}")

    for pipe, prefix in ((ui_process.stdout, "UI"), (ui_process.stderr, "UI ERROR")):
        threading.Thread(target=pump, args=(pipe, prefix), daemon=True).start()

    return ui_process
|
|
|
def create_interface():
    """Build the Gradio landing page that links to / embeds the Playground UI."""
    with gr.Blocks() as demo:
        gr.Markdown("# TEN Agent на Hugging Face Space")
        gr.Markdown("## Загрузка приложения...")

        # Overall status banner; refreshed by the status button at the bottom.
        status_md = gr.Markdown("### Статус: Инициализация...")

        # Two-column layout. The columns are created inside the Row and
        # re-entered as contexts below to attach their children.
        with gr.Row():
            col1, col2 = gr.Column(), gr.Column()

        with col1:
            # Button that reveals the Playground UI embedded in an iframe.
            open_iframe = gr.Button("Показать TEN Agent в iframe")

            def show_iframe():
                # Returns the HTML snippet embedding the UI at UI_URL.
                return f"""
            <div style="border: 1px solid #ccc; padding: 10px; border-radius: 5px;">
                <iframe src="{UI_URL}" width="100%" height="600px" frameborder="0"></iframe>
            </div>
            """

            iframe_area = gr.HTML()
            open_iframe.click(show_iframe, outputs=iframe_area)

        with col2:
            # Direct link to the UI plus a reminder about the API keys the
            # user must configure in the interface.
            gr.Markdown(f"""
            ### Открыть TEN Agent в новой вкладке:

            <a href="{UI_URL}" target="_blank" style="display: inline-block; padding: 10px 15px; background-color: #4CAF50; color: white; text-decoration: none; border-radius: 4px; margin: 10px 0;">Открыть TEN Agent UI</a>

            ### ВАЖНО:

            Настройте API ключи в интерфейсе для полноценной работы:
            - OpenAI API Key
            - ElevenLabs API Key
            - Deepgram API Key
            - Agora App ID и Certificate
            """)

        # Collapsible system-status panel with a manual refresh button.
        with gr.Accordion("Системная информация", open=False):
            api_status = gr.Textbox(label="Статус API сервера", value="Запускается...", interactive=False)
            ui_status = gr.Textbox(label="Статус UI сервера", value="Запускается...", interactive=False)

            def update_status():
                # Probe the API and UI ports and render status strings for
                # the two textboxes and the top banner.
                api_status_msg = "✅ Активен" if is_port_in_use(API_PORT) else "❌ Не активен"
                ui_status_msg = "✅ Активен" if is_port_in_use(NEXTJS_PORT) else "❌ Не активен"
                status_md_msg = f"### Статус: {'✅ Все системы работают' if is_port_in_use(API_PORT) and is_port_in_use(NEXTJS_PORT) else '⚠️ Есть проблемы'}"
                return [api_status_msg, ui_status_msg, status_md_msg]

            status_btn = gr.Button("Обновить статус")
            status_btn.click(update_status, outputs=[api_status, ui_status, status_md])

    return demo
|
|
|
|
|
def is_port_in_use(port):
    """Return True if something is accepting TCP connections on localhost:port."""
    import socket
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        # Bound the probe so a dropped packet can't stall the status check.
        s.settimeout(1.0)
        try:
            s.connect(('localhost', port))
            return True
        except OSError:
            # FIX: the original bare `except:` also swallowed KeyboardInterrupt
            # and SystemExit; all socket failures (refused, timeout,
            # unreachable) are OSError subclasses.
            return False
|
|
|
def main():
    """Entry point: write configs, start the sub-servers, serve the Gradio UI.

    Returns a process exit code: 0 on clean shutdown, 1 if a sub-server
    failed to start (consumed by sys.exit(main())).
    """
    create_directories()
    create_config_files()

    api_process = start_api_server()
    if not api_process:
        print("Не удалось запустить API сервер")
        return 1

    ui_process = start_playground()
    if not ui_process:
        print("Не удалось запустить Playground UI")
        api_process.terminate()
        return 1

    demo = create_interface()

    # BUG FIX: by default demo.launch() blocks the main thread, which made the
    # monitoring loop below unreachable dead code. prevent_thread_lock=True
    # runs the Gradio server in a background thread so we can keep watching
    # the child processes here.
    demo.launch(
        server_port=GRADIO_PORT,
        server_name=INTERNAL_HOST,
        share=False,
        prevent_thread_lock=True,
    )

    # Watchdog: if either sub-server dies, tear down the other and exit.
    try:
        while True:
            if api_process.poll() is not None:
                print("API сервер остановлен")
                ui_process.terminate()
                break

            if ui_process.poll() is not None:
                print("UI остановлен")
                api_process.terminate()
                break

            time.sleep(1)
    except KeyboardInterrupt:
        print("Принудительная остановка...")
        api_process.terminate()
        ui_process.terminate()
    return 0
|
|
|
if __name__ == "__main__":
    # Translate Ctrl-C / termination requests into a clean interpreter exit.
    def _graceful_exit(signum, frame):
        sys.exit(0)

    signal.signal(signal.SIGINT, _graceful_exit)
    signal.signal(signal.SIGTERM, _graceful_exit)

    sys.exit(main())