AGI-Screenplay-Pro / app-backup.py
openfree's picture
Update app-backup.py
aa177b7 verified
raw
history blame
59.6 kB
import gradio as gr
import os
import json
import requests
from datetime import datetime
import time
from typing import List, Dict, Any, Generator, Tuple, Optional
import logging
import re
import tempfile
from pathlib import Path
import sqlite3
import hashlib
import threading
from contextlib import contextmanager
from dataclasses import dataclass, field
from collections import defaultdict
# --- ๋กœ๊น… ์„ค์ • ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# --- Document export imports ---
# python-docx is optional: if it is missing the app still runs, and DOCX
# export is gated elsewhere on the DOCX_AVAILABLE flag set here.
try:
    from docx import Document
    from docx.shared import Inches, Pt, RGBColor
    from docx.enum.text import WD_ALIGN_PARAGRAPH
    from docx.enum.style import WD_STYLE_TYPE
    DOCX_AVAILABLE = True
except ImportError:
    DOCX_AVAILABLE = False
    logger.warning("python-docx not installed. DOCX export will be disabled.")
# --- Environment variables and constants ---
FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "")          # Friendli inference API token
BRAVE_SEARCH_API_KEY = os.getenv("BRAVE_SEARCH_API_KEY", "")  # optional web-search key
API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
MODEL_ID = "dep89a2fld32mcm"   # dedicated Friendli deployment id
DB_PATH = "novel_sessions_v2.db"
# --- Environment validation ---
if not FRIENDLI_TOKEN:
    logger.error("FRIENDLI_TOKEN not set. Application will not work properly.")
    # In a real deployment this should abort; a dummy token is substituted so
    # the demo UI can still start (API calls will fail with auth errors).
    FRIENDLI_TOKEN = "dummy_token_for_testing"
if not BRAVE_SEARCH_API_KEY:
    logger.warning("BRAVE_SEARCH_API_KEY not set. Web search features will be disabled.")
# --- Global state ---
# Single lock serializing all SQLite access from worker threads.
db_lock = threading.Lock()

# Optimized stage pipeline (compressed and hardened into 25 stages):
# 3 planning stages, 10 writer drafts, 1 mid-point consistency review,
# 10 writer revisions, and 1 final review.  Each entry is (role, label);
# a stage's index is its position in this list.
OPTIMIZED_STAGES = [
    ("director", "๐ŸŽฌ ๊ฐ๋…์ž: ์ดˆ๊ธฐ ๊ธฐํš (์›น ๊ฒ€์ƒ‰ ํฌํ•จ)"),
    ("critic", "๐Ÿ“ ๋น„ํ‰๊ฐ€: ๊ธฐํš ๊ฒ€ํ†  (ํ…Œ๋งˆ ๋ฐ ์ผ๊ด€์„ฑ)"),
    ("director", "๐ŸŽฌ ๊ฐ๋…์ž: ์ˆ˜์ •๋œ ๋งˆ์Šคํ„ฐํ”Œ๋žœ"),
] + [
    (f"writer{i}", f"โœ๏ธ ์ž‘๊ฐ€ {i}: ์ดˆ์•ˆ (ํŽ˜์ด์ง€ {(i-1)*3+1}-{i*3})")
    for i in range(1, 11)
] + [
    ("critic", "๐Ÿ“ ๋น„ํ‰๊ฐ€: ์ค‘๊ฐ„ ๊ฒ€ํ†  (์ผ๊ด€์„ฑ ๋ฐ ํ…Œ๋งˆ ์œ ์ง€)"),
] + [
    (f"writer{i}", f"โœ๏ธ ์ž‘๊ฐ€ {i}: ์ˆ˜์ •๋ณธ (ํŽ˜์ด์ง€ {(i-1)*3+1}-{i*3})")
    for i in range(1, 11)
] + [
    # Fix: this label carried a stray f-string prefix with no placeholders.
    ("critic", "๐Ÿ“ ๋น„ํ‰๊ฐ€: ์ตœ์ข… ๊ฒ€ํ†  ๋ฐ ์ข…ํ•ฉ ๋ณด๊ณ ์„œ ์ž‘์„ฑ"),
]
# --- ๋ฐ์ดํ„ฐ ํด๋ž˜์Šค ---
@dataclass
class CharacterState:
    """Mutable snapshot of a single character's current narrative state."""
    name: str
    alive: bool = True          # set False once the character dies in-story
    location: str = ""          # last known location
    injuries: List[str] = field(default_factory=list)   # accumulated injuries, newest last
    emotional_state: str = ""
    relationships: Dict[str, str] = field(default_factory=dict)  # other character name -> relation
    last_seen_chapter: int = 0  # last chapter index the character appeared in
    description: str = ""
    role: str = ""
@dataclass
class PlotPoint:
    """A single plot event recorded for consistency tracking."""
    chapter: int
    event_type: str
    description: str
    characters_involved: List[str]
    impact_level: int       # relative importance (presumably a small integer scale — TODO confirm)
    timestamp: str = ""     # ISO timestamp; filled in by ConsistencyTracker.add_plot_point
@dataclass
class TimelineEvent:
    """A timeline entry tying an in-story time reference to a chapter."""
    chapter: int
    time_reference: str     # in-story time expression (e.g. "next morning")
    event_description: str
    duration: str = ""
    relative_time: str = ""
# --- ํ•ต์‹ฌ ๋กœ์ง ํด๋ž˜์Šค ---
class ConsistencyTracker:
    """In-memory consistency tracker for the generated novel.

    Keeps character states, plot points, timeline events, established facts,
    and MD5 fingerprints of sentences already emitted, so later chapters can
    be checked for dead-character appearances and repeated prose.
    """

    def __init__(self):
        self.character_states: Dict[str, CharacterState] = {}
        self.plot_points: List[PlotPoint] = []
        self.timeline_events: List[TimelineEvent] = []
        self.locations: Dict[str, str] = {}
        self.established_facts: List[str] = []
        # sentence-hash -> chapter number in which that sentence last appeared
        self.content_hashes: Dict[str, int] = {}

    def register_character(self, character: CharacterState):
        """Register (or overwrite) a character by name."""
        self.character_states[character.name] = character
        logger.info(f"Character registered: {character.name}")

    def update_character_state(self, name: str, chapter: int, updates: Dict[str, Any]):
        """Apply attribute updates to a character, auto-registering unknown names."""
        if name not in self.character_states:
            self.register_character(CharacterState(name=name, last_seen_chapter=chapter))
        state = self.character_states[name]
        for attr, new_value in updates.items():
            # silently skip keys that are not CharacterState attributes
            if hasattr(state, attr):
                setattr(state, attr, new_value)
        state.last_seen_chapter = chapter

    def add_plot_point(self, plot_point: PlotPoint):
        """Record a plot point, stamping it with the current wall-clock time."""
        plot_point.timestamp = datetime.now().isoformat()
        self.plot_points.append(plot_point)

    def check_repetition(self, content: str, current_chapter: int) -> Tuple[bool, str]:
        """Detect sentences repeated from chapters at least two back.

        Returns (True, message) on the first offending sentence; otherwise
        registers all sentence hashes for this chapter and returns (False, "").
        """
        fragments = [frag.strip() for frag in re.split(r'[.!?]+', content)]
        # ignore very short fragments; fingerprint the rest
        fingerprints = [
            (frag, hashlib.md5(frag.encode('utf-8')).hexdigest())
            for frag in fragments if len(frag) > 20
        ]
        for text, digest in fingerprints:
            earlier_chapter = self.content_hashes.get(digest)
            # repetition from the immediately preceding chapter is tolerated;
            # only flag when the gap is 2+ chapters
            if earlier_chapter is not None and current_chapter > earlier_chapter + 1:
                return True, f"๋ฌธ์žฅ ๋ฐ˜๋ณต (์ฑ•ํ„ฐ {earlier_chapter}๊ณผ ์œ ์‚ฌ): {text[:50]}..."
        # no repetition found: remember this chapter's sentences
        for _, digest in fingerprints:
            self.content_hashes[digest] = current_chapter
        return False, ""

    def validate_consistency(self, chapter: int, content: str) -> List[str]:
        """Return a list of human-readable consistency problems in *content*."""
        problems: List[str] = []
        lowered = content.lower()
        # dead characters must not reappear (crude substring match)
        for char_name, char_state in self.character_states.items():
            if char_name.lower() in lowered and not char_state.alive:
                problems.append(f"โš ๏ธ ์‚ฌ๋งํ•œ ์บ๋ฆญํ„ฐ '{char_name}'์ด(๊ฐ€) ๋“ฑ์žฅํ–ˆ์Šต๋‹ˆ๋‹ค.")
        repeated, detail = self.check_repetition(content, chapter)
        if repeated:
            problems.append(f"๐Ÿ”„ {detail}")
        return problems

    def get_character_summary(self, chapter: int) -> str:
        """Summarize characters seen in the last two chapters (for prompts)."""
        recent = [
            c for c in self.character_states.values()
            if c.last_seen_chapter >= chapter - 2
        ]
        if not recent:
            return "\n(์•„์ง ์ฃผ์š” ์บ๋ฆญํ„ฐ ์ •๋ณด๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.)\n"
        pieces = ["\n=== ์บ๋ฆญํ„ฐ ํ˜„ํ™ฉ ์š”์•ฝ (์ด์ „ 2๊ฐœ ์ฑ•ํ„ฐ ๊ธฐ์ค€) ===\n"]
        for c in recent:
            entry = f"โ€ข {c.name}: {'์ƒ์กด' if c.alive else '์‚ฌ๋ง'}"
            if c.alive and c.location:
                entry += f" (์œ„์น˜: {c.location})"
            if c.injuries:
                # only the most recent injury is surfaced
                entry += f" (๋ถ€์ƒ: {', '.join(c.injuries[-1:])})"
            pieces.append(entry + "\n")
        return "".join(pieces)

    def get_plot_summary(self, chapter: int) -> str:
        """Summarize up to three recent plot points (for prompts)."""
        recent = [p for p in self.plot_points if p.chapter >= chapter - 2]
        if not recent:
            return "\n(์•„์ง ์ฃผ์š” ์‚ฌ๊ฑด์ด ์—†์Šต๋‹ˆ๋‹ค.)\n"
        pieces = ["\n=== ์ตœ๊ทผ ์ฃผ์š” ์‚ฌ๊ฑด ์š”์•ฝ ===\n"]
        pieces.extend(
            f"โ€ข [์ฑ•ํ„ฐ {event.chapter}] {event.description}\n"
            for event in recent[-3:]
        )
        return "".join(pieces)
class WebSearchIntegration:
    """Brave web-search helper (used only during the director planning stage)."""

    def __init__(self):
        # The feature silently disables itself when no API key is configured.
        self.brave_api_key = BRAVE_SEARCH_API_KEY
        self.search_url = "https://api.search.brave.com/res/v1/web/search"
        self.enabled = bool(self.brave_api_key)

    def search(self, query: str, count: int = 3, language: str = "en") -> List[Dict]:
        """Run a web search; returns [] when disabled or on any request error."""
        if not self.enabled:
            return []
        request_headers = {
            "Accept": "application/json",
            "X-Subscription-Token": self.brave_api_key,
        }
        query_params = {
            "q": query,
            "count": count,
            "search_lang": "ko" if language == "Korean" else "en",
            "text_decorations": False,
            "safesearch": "moderate",
        }
        try:
            resp = requests.get(
                self.search_url,
                headers=request_headers,
                params=query_params,
                timeout=10,
            )
            resp.raise_for_status()
            results = resp.json().get("web", {}).get("results", [])
            logger.info(f"์›น ๊ฒ€์ƒ‰ ์„ฑ๊ณต: '{query}'์— ๋Œ€ํ•ด {len(results)}๊ฐœ ๊ฒฐ๊ณผ ๋ฐœ๊ฒฌ")
            return results
        except requests.exceptions.RequestException as exc:
            logger.error(f"์›น ๊ฒ€์ƒ‰ API ์˜ค๋ฅ˜: {exc}")
            return []

    def extract_relevant_info(self, results: List[Dict], max_chars: int = 1500) -> str:
        """Format up to three results into a compact, size-capped text digest."""
        if not results:
            return ""
        picked = []
        used_chars = 0
        for rank, item in enumerate(results[:3], 1):
            snippet = (
                f"[{rank}] {item.get('title', '')}\n"
                f"{item.get('description', '')}\n"
                f"Source: {item.get('url', '')}\n"
            )
            # stop as soon as the next snippet would reach the budget
            if used_chars + len(snippet) >= max_chars:
                break
            picked.append(snippet)
            used_chars += len(snippet)
        return "\n---\n".join(picked)
class NovelDatabase:
    """SQLite persistence layer for novel-writing sessions.

    All methods are static; each opens a short-lived connection through
    get_db(), which serializes access with the module-level db_lock.
    """
    @staticmethod
    def init_db():
        """Create tables and indexes if absent. Idempotent."""
        with sqlite3.connect(DB_PATH) as conn:
            conn.execute("PRAGMA journal_mode=WAL")  # better read concurrency
            cursor = conn.cursor()
            # One row per generation session.
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS sessions (
                    session_id TEXT PRIMARY KEY,
                    user_query TEXT NOT NULL,
                    language TEXT NOT NULL,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    status TEXT DEFAULT 'active',
                    current_stage INTEGER DEFAULT 0,
                    final_novel TEXT,
                    consistency_report TEXT
                )
            ''')
            # One row per completed pipeline stage; (session_id, stage_number)
            # is unique so save_stage can upsert.
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS stages (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    stage_number INTEGER NOT NULL,
                    stage_name TEXT NOT NULL,
                    role TEXT NOT NULL,
                    content TEXT,
                    word_count INTEGER DEFAULT 0,
                    status TEXT DEFAULT 'pending',
                    consistency_score REAL DEFAULT 0.0,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id),
                    UNIQUE(session_id, stage_number)
                )
            ''')
            # Per-chapter character snapshots (written elsewhere).
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS character_states (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    character_name TEXT NOT NULL,
                    chapter INTEGER NOT NULL,
                    is_alive BOOLEAN DEFAULT TRUE,
                    location TEXT,
                    injuries TEXT,
                    emotional_state TEXT,
                    description TEXT,
                    created_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id)
                )
            ''')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_session_id ON stages(session_id)')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_stage_number ON stages(stage_number)')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_char_session ON character_states(session_id)')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_session_status ON sessions(status)')
            conn.commit()
    @staticmethod
    @contextmanager
    def get_db():
        """Yield a row-factory connection; serialized by db_lock, always closed."""
        with db_lock:
            conn = sqlite3.connect(DB_PATH, timeout=30.0)
            conn.row_factory = sqlite3.Row
            try:
                yield conn
            finally:
                conn.close()
    @staticmethod
    def create_session(user_query: str, language: str) -> str:
        """Insert a new session and return its md5-derived id."""
        # md5(query + timestamp) — uniqueness, not security, is the goal here
        session_id = hashlib.md5(f"{user_query}{datetime.now()}".encode()).hexdigest()
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'INSERT INTO sessions (session_id, user_query, language) VALUES (?, ?, ?)',
                (session_id, user_query, language)
            )
            conn.commit()
        return session_id
    @staticmethod
    def save_stage(session_id: str, stage_number: int, stage_name: str,
                   role: str, content: str, status: str = 'complete',
                   consistency_score: float = 0.0):
        """Upsert one stage row and bump the session's current_stage/updated_at."""
        word_count = len(content.split()) if content else 0
        with NovelDatabase.get_db() as conn:
            cursor = conn.cursor()
            # Upsert keyed on UNIQUE(session_id, stage_number); the second
            # parameter group feeds the DO UPDATE SET clause.
            cursor.execute('''
                INSERT INTO stages (session_id, stage_number, stage_name, role, content, word_count, status, consistency_score)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(session_id, stage_number)
                DO UPDATE SET content=?, word_count=?, status=?, stage_name=?, consistency_score=?, updated_at=datetime('now')
            ''', (session_id, stage_number, stage_name, role, content, word_count, status, consistency_score,
                  content, word_count, status, stage_name, consistency_score))
            cursor.execute(
                "UPDATE sessions SET updated_at = datetime('now'), current_stage = ? WHERE session_id = ?",
                (stage_number, session_id)
            )
            conn.commit()
    @staticmethod
    def get_session(session_id: str) -> Optional[Dict]:
        """Fetch one session row as a dict, or None."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute('SELECT * FROM sessions WHERE session_id = ?', (session_id,)).fetchone()
            return dict(row) if row else None
    @staticmethod
    def get_latest_active_session() -> Optional[Dict]:
        """Most recently updated session still marked 'active', or None."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute("SELECT * FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 1").fetchone()
            return dict(row) if row else None
    @staticmethod
    def get_stages(session_id: str) -> List[Dict]:
        """All stage rows of a session, ordered by stage number."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute('SELECT * FROM stages WHERE session_id = ? ORDER BY stage_number', (session_id,)).fetchall()
            return [dict(row) for row in rows]
    @staticmethod
    def get_writer_content(session_id: str) -> str:
        """Concatenate the latest revision ('์ˆ˜์ •๋ณธ') of each of the 10 writers.

        Falls back to nothing for a writer with no matching revision row.
        """
        with NovelDatabase.get_db() as conn:
            all_content = []
            for writer_num in range(1, 11):
                row = conn.cursor().execute(
                    "SELECT content FROM stages WHERE session_id = ? AND role = ? AND stage_name LIKE '%์ˆ˜์ •๋ณธ%' ORDER BY stage_number DESC LIMIT 1",
                    (session_id, f'writer{writer_num}')
                ).fetchone()
                if row and row['content']:
                    all_content.append(row['content'].strip())
            return '\n\n'.join(all_content)
    @staticmethod
    def update_final_novel(session_id: str, final_novel: str, consistency_report: str = ""):
        """Store the assembled novel + report and mark the session complete."""
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                "UPDATE sessions SET final_novel = ?, status = 'complete', updated_at = datetime('now'), consistency_report = ? WHERE session_id = ?",
                (final_novel, consistency_report, session_id)
            )
            conn.commit()
    @staticmethod
    def get_active_sessions() -> List[Dict]:
        """Up to 10 most recently updated active sessions (summary columns only)."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                "SELECT session_id, user_query, language, created_at, current_stage FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 10"
            ).fetchall()
            return [dict(row) for row in rows]
class NovelWritingSystem:
"""์ตœ์ ํ™”๋œ ์†Œ์„ค ์ƒ์„ฑ ์‹œ์Šคํ…œ"""
def __init__(self):
    """Wire up API credentials, trackers, and ensure the DB schema exists."""
    self.token = FRIENDLI_TOKEN
    self.api_url = API_URL
    self.model_id = MODEL_ID
    self.consistency_tracker = ConsistencyTracker()
    self.web_search = WebSearchIntegration()
    self.current_session_id = None  # set when a session is created or resumed
    NovelDatabase.init_db()  # idempotent: CREATE TABLE IF NOT EXISTS
def create_headers(self):
"""API ํ—ค๋” ์ƒ์„ฑ"""
return {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"}
# --- ํ”„๋กฌํ”„ํŠธ ์ƒ์„ฑ ํ•จ์ˆ˜๋“ค (Thematic Guardian ๊ฐœ๋… ํ†ตํ•ฉ) ---
def create_director_initial_prompt(self, user_query: str, language: str) -> str:
    """Build the director's initial planning prompt.

    Optionally prepends Brave web-search context, then assembles a
    language-specific template ("Korean"/"English") ending in a hard
    thematic-constraint block that keeps the plan on the dark core theme.
    """
    # Optional research context from one web search on the user's theme.
    search_results_str = ""
    if self.web_search.enabled:
        # NOTE(review): only queries[0] is ever searched; the second query is unused.
        queries = [f"{user_query} novel setting", f"{user_query} background information"]
        search_results = self.web_search.search(queries[0], count=2, language=language)
        if search_results:
            search_results_str = self.web_search.extract_relevant_info(search_results)
    # Per-language prompt fragments (runtime strings, kept verbatim).
    lang_prompts = {
        "Korean": {
            "title": "๋‹น์‹ ์€ 30ํŽ˜์ด์ง€ ๋ถ„๋Ÿ‰์˜ ์ค‘ํŽธ ์†Œ์„ค์„ ๊ธฐํšํ•˜๋Š” ๋ฌธํ•™ ๊ฐ๋…์ž์ž…๋‹ˆ๋‹ค.",
            "user_theme": "์‚ฌ์šฉ์ž ์ฃผ์ œ",
            "plan_instruction": "๋‹ค์Œ ์š”์†Œ๋“ค์„ ํฌํ•จํ•œ ์ƒ์„ธํ•œ ์†Œ์„ค ๊ธฐํš์„ ์ž‘์„ฑํ•˜์„ธ์š”:",
            "theme_section": "1. **์ฃผ์ œ์™€ ์žฅ๋ฅด ์„ค์ •**\n - ํ•ต์‹ฌ ์ฃผ์ œ์™€ ๋ฉ”์‹œ์ง€ (์‚ฌ์šฉ์ž ์˜๋„ ๊นŠ์ด ๋ฐ˜์˜)\n - ์žฅ๋ฅด ๋ฐ ๋ถ„์œ„๊ธฐ\n - ๋…์ž์ธต ๊ณ ๋ ค์‚ฌํ•ญ",
            "char_section": "2. **์ฃผ์š” ๋“ฑ์žฅ์ธ๋ฌผ** (3-5๋ช…)\n | ์ด๋ฆ„ | ์—ญํ•  | ์„ฑ๊ฒฉ | ๋ฐฐ๊ฒฝ | ๋ชฉํ‘œ | ๊ฐˆ๋“ฑ |",
            "setting_section": "3. **๋ฐฐ๊ฒฝ ์„ค์ •**\n - ์‹œ๊ณต๊ฐ„์  ๋ฐฐ๊ฒฝ\n - ์‚ฌํšŒ์ /๋ฌธํ™”์  ํ™˜๊ฒฝ\n - ์ฃผ์š” ์žฅ์†Œ๋“ค",
            "plot_section": "4. **ํ”Œ๋กฏ ๊ตฌ์กฐ** (10๊ฐœ ํŒŒํŠธ, ๊ฐ 3ํŽ˜์ด์ง€ ๋ถ„๋Ÿ‰)\n | ํŒŒํŠธ | ํŽ˜์ด์ง€ | ์ฃผ์š” ์‚ฌ๊ฑด | ๊ธด์žฅ๋„ | ์บ๋ฆญํ„ฐ ๋ฐœ์ „ |",
            "guideline_section": "5. **์ž‘๊ฐ€๋ณ„ ์ง€์นจ**\n - ์ผ๊ด€์„ฑ ์œ ์ง€๋ฅผ ์œ„ํ•œ ํ•ต์‹ฌ ์„ค์ •\n - ๋ฌธ์ฒด์™€ ํ†ค ๊ฐ€์ด๋“œ๋ผ์ธ",
            "constraint_title": "โš ๏ธ๋งค์šฐ ์ค‘์š”ํ•œ ์ง€์‹œ์‚ฌํ•ญ: ํ•ต์‹ฌ ์ œ์•ฝ ์กฐ๊ฑดโš ๏ธ",
            "constraint_body": "์ด ์†Œ์„ค์€ **AI๋กœ ์ธํ•ด ๋ชจ๋“  ๊ฒƒ์ด ์‰ฝ๊ฒŒ ํ•ด๊ฒฐ๋˜๋Š” ๊ธ์ •์ ์ด๊ณ  ๋‹จ์ˆœํ•œ ์ด์•ผ๊ธฐ๊ฐ€ ์•„๋‹™๋‹ˆ๋‹ค.**\n๋ฐ˜๋“œ์‹œ ์‚ฌ์šฉ์ž์˜ ์ฃผ์ œ์ธ '{query}'์— ๋‹ด๊ธด **ํ•ต์‹ฌ ๊ฐ์ •(์˜ˆ: ๋ถˆ์•ˆ, ์†Œ์™ธ๊ฐ, ์ƒ์‹ค๊ฐ, ์„ธ๋Œ€ ๊ฐˆ๋“ฑ ๋“ฑ)์„ ์ค‘์‹ฌ์œผ๋กœ ์„œ์‚ฌ๋ฅผ ์ „๊ฐœํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค.**\nAI๋‚˜ ํŠน์ • ๊ธฐ์ˆ ์€ ํŽธ๋ฆฌํ•œ ๋„๊ตฌ๊ฐ€ ์•„๋‹ˆ๋ผ, ์ฃผ์ธ๊ณต์—๊ฒŒ **๊ฐˆ๋“ฑ๊ณผ ์ƒ์‹ค๊ฐ์„ ์•ˆ๊ฒจ์ฃผ๋Š” ํ•ต์‹ฌ ์›์ธ**์œผ๋กœ ์ž‘์šฉํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค.\n์ด ์ œ์•ฝ ์กฐ๊ฑด์„ ์ ˆ๋Œ€ ๋ฒ—์–ด๋‚˜์ง€ ๋งˆ์‹ญ์‹œ์˜ค.",
            "final_instruction": "์ฐฝ์˜์ ์ด๊ณ  ๊นŠ์ด ์žˆ๋Š” ์†Œ์„ค์ด ๋  ์ˆ˜ ์žˆ๋„๋ก ์ƒ์„ธํ•˜๊ฒŒ ๊ธฐํšํ•˜์„ธ์š”."
        },
        "English": {
            "title": "You are a literary director planning a 30-page novella.",
            "user_theme": "User Theme",
            "plan_instruction": "Create a detailed novel plan including:",
            "theme_section": "1. **Theme and Genre**\n - Core theme and message (Deeply reflect user's intent)\n - Genre and atmosphere",
            "char_section": "2. **Main Characters** (3-5)\n | Name | Role | Personality | Background | Goal | Conflict |",
            "setting_section": "3. **Setting**\n - Time and place\n - Social/cultural environment",
            "plot_section": "4. **Plot Structure** (10 parts, ~3 pages each)\n | Part | Pages | Main Events | Tension | Character Development |",
            "guideline_section": "5. **Writer Guidelines**\n - Key settings for consistency\n - Style and tone guidelines",
            "constraint_title": "โš ๏ธCRITICAL INSTRUCTION: CORE CONSTRAINTSโš ๏ธ",
            "constraint_body": "This is **NOT a simple, positive story where AI solves everything.**\nYou must develop the narrative around the core emotions of the user's theme: '{query}' (e.g., anxiety, alienation, loss, generational conflict).\nAI or specific technology should be the **root cause of the protagonist's conflict and loss**, not a convenient tool.\nDo not deviate from this constraint.",
            "final_instruction": "Plan in detail for a creative and profound novel."
        }
    }
    p = lang_prompts[language]
    # constraint_body embeds the user's theme through .format(query=...)
    return f"{p['title']}\n\n{p['user_theme']}: {user_query}\n\n{search_results_str}\n\n{p['plan_instruction']}\n\n{p['theme_section']}\n\n{p['char_section']}\n\n{p['setting_section']}\n\n{p['plot_section']}\n\n{p['guideline_section']}\n\n---\n{p['constraint_title']}\n{p['constraint_body'].format(query=user_query)}\n---\n\n{p['final_instruction']}"
def create_critic_director_prompt(self, director_plan: str, user_query: str, language: str) -> str:
    """Build the critic's review prompt for the director's plan.

    Checks two axes: thematic consistency (staying on the dark core theme)
    and technical consistency (characters, plot logic, timeline).
    """
    lang_prompts = {
        "Korean": {
            "title": "๋‹น์‹ ์€ ๋ฌธํ•™ ๋น„ํ‰๊ฐ€์ž…๋‹ˆ๋‹ค. ๊ฐ๋…์ž์˜ ์†Œ์„ค ๊ธฐํš์„ '์ฃผ์ œ ์ผ๊ด€์„ฑ'๊ณผ '๊ธฐ์ˆ ์  ์ผ๊ด€์„ฑ' ๊ด€์ ์—์„œ ๊ฒ€ํ† ํ•˜์„ธ์š”.",
            "theme_check": f"**1. ์ฃผ์ œ ์ผ๊ด€์„ฑ (๊ฐ€์žฅ ์ค‘์š”)**\n - **์›๋ž˜ ์ฃผ์ œ:** '{user_query}'\n - ๊ธฐํš์•ˆ์ด ์ฃผ์ œ์˜ ํ•ต์‹ฌ ๊ฐ์ •(๋ถˆ์•ˆ, ์ƒ์‹ค๊ฐ ๋“ฑ)์—์„œ ๋ฒ—์–ด๋‚˜ ๊ธ์ •์ ์ด๊ฑฐ๋‚˜ ๋‹จ์ˆœํ•œ ๋ฐฉํ–ฅ์œผ๋กœ ํ๋ฅด์ง€ ์•Š์•˜์Šต๋‹ˆ๊นŒ?\n - AI๋‚˜ ๊ธฐ์ˆ ์ด ๊ฐˆ๋“ฑ์˜ ์›์ธ์ด ์•„๋‹Œ, ๋‹จ์ˆœ ํ•ด๊ฒฐ์‚ฌ๋กœ ๋ฌ˜์‚ฌ๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๊นŒ?",
            "consistency_check": "**2. ๊ธฐ์ˆ ์  ์ผ๊ด€์„ฑ**\n - ์บ๋ฆญํ„ฐ ์„ค์ •์˜ ๋ชจ์ˆœ, ํ”Œ๋กฏ์˜ ๋…ผ๋ฆฌ์  ํ—ˆ์ , ์‹œ๊ฐ„์„ /๊ณต๊ฐ„ ์„ค์ •์˜ ๋ฌธ์ œ์ ์„ ๊ฒ€ํ† ํ•˜์„ธ์š”.",
            "instruction": "์œ„ ํ•ญ๋ชฉ๋“ค์„ ์ค‘์‹ฌ์œผ๋กœ ๊ตฌ์ฒด์ ์ธ ๋ฌธ์ œ์ ๊ณผ ๊ฐœ์„ ์•ˆ์„ ์ œ์‹œํ•˜์„ธ์š”."
        },
        "English": {
            "title": "You are a literary critic. Review the director's plan from the perspectives of 'Thematic Consistency' and 'Technical Consistency'.",
            "theme_check": f"**1. Thematic Consistency (Most Important)**\n - **Original Theme:** '{user_query}'\n - Does the plan drift from the core emotions (e.g., anxiety, loss) towards an overly positive or simplistic narrative?\n - Is AI depicted as a simple problem-solver instead of the root of the conflict?",
            "consistency_check": "**2. Technical Consistency**\n - Review for character contradictions, plot holes, and timeline/setting issues.",
            "instruction": "Provide specific problems and suggestions for improvement based on the above."
        }
    }
    p = lang_prompts[language]
    # NOTE(review): the surrounding section headers below are Korean even when
    # language == "English" — confirm whether that is intentional.
    return f"{p['title']}\n\n**๊ฐ๋…์ž ๊ธฐํš:**\n{director_plan}\n\n---\n**๊ฒ€ํ†  ํ•ญ๋ชฉ:**\n{p['theme_check']}\n\n{p['consistency_check']}\n\n{p['instruction']}"
def create_director_revision_prompt(self, initial_plan: str, critic_feedback: str, user_query: str, language: str) -> str:
    """Build the director's revision prompt (re-emphasizes the core constraints).

    NOTE(review): this prompt is Korean-only; the ``language`` parameter is
    accepted but unused here — confirm whether an English variant is needed.
    """
    return f"""๊ฐ๋…์ž๋กœ์„œ ๋น„ํ‰๊ฐ€์˜ ํ”ผ๋“œ๋ฐฑ์„ ๋ฐ˜์˜ํ•˜์—ฌ ์†Œ์„ค ๊ธฐํš์„ ์ˆ˜์ •ํ•ฉ๋‹ˆ๋‹ค.
**์›๋ž˜ ์ฃผ์ œ:** {user_query}
**์ดˆ๊ธฐ ๊ธฐํš:**\n{initial_plan}
**๋น„ํ‰๊ฐ€ ํ”ผ๋“œ๋ฐฑ:**\n{critic_feedback}
**์ˆ˜์ • ์ง€์นจ:**
- ๋น„ํ‰๊ฐ€๊ฐ€ ์ง€์ ํ•œ ๋ชจ๋“  ์ผ๊ด€์„ฑ ๋ฌธ์ œ์™€ ์ฃผ์ œ ์ดํƒˆ ๋ฌธ์ œ๋ฅผ ํ•ด๊ฒฐํ•˜์„ธ์š”.
- **ํ•ต์‹ฌ ์ œ์•ฝ ์กฐ๊ฑด**์„ ๋‹ค์‹œ ํ•œ๋ฒˆ ์ƒ๊ธฐํ•˜๊ณ , ์†Œ์„ค ์ „์ฒด๊ฐ€ '๋ถˆ์•ˆ'๊ณผ '์ƒ์‹ค๊ฐ'์˜ ํ†ค์„ ์œ ์ง€ํ•˜๋„๋ก ํ”Œ๋กฏ์„ ๊ตฌ์ฒดํ™”ํ•˜์„ธ์š”.
- 10๋ช…์˜ ์ž‘๊ฐ€๊ฐ€ ํ˜ผ๋™ ์—†์ด ์ž‘์—…ํ•  ์ˆ˜ ์žˆ๋„๋ก ๋ช…ํ™•ํ•˜๊ณ  ์ƒ์„ธํ•œ ์ตœ์ข… ๋งˆ์Šคํ„ฐํ”Œ๋žœ์„ ์ž‘์„ฑํ•˜์„ธ์š”.
"""
def create_writer_prompt(self, writer_number: int, director_plan: str, previous_content_summary: str, user_query: str, language: str) -> str:
    """Build a writer's drafting prompt for their 3-page slice.

    Includes the master plan, a summary of preceding content, the tracker's
    character/plot digests, and a theme reminder to prevent tonal drift.
    """
    # Each writer covers 3 consecutive pages: writer 1 -> 1-3, writer 2 -> 4-6, ...
    pages_start = (writer_number - 1) * 3 + 1
    pages_end = writer_number * 3
    lang_prompts = {
        "Korean": {
            "title": f"๋‹น์‹ ์€ ์ž‘๊ฐ€ {writer_number}๋ฒˆ์ž…๋‹ˆ๋‹ค. ์†Œ์„ค์˜ {pages_start}-{pages_end} ํŽ˜์ด์ง€๋ฅผ ์ž‘์„ฑํ•˜์„ธ์š”.",
            "plan": "๊ฐ๋…์ž ๋งˆ์Šคํ„ฐํ”Œ๋žœ",
            "prev_summary": "์ด์ „ ๋‚ด์šฉ ์š”์•ฝ",
            "guidelines": "**์ž‘์„ฑ ์ง€์นจ:**\n1. **๋ถ„๋Ÿ‰**: 1,400-1,500 ๋‹จ์–ด ๋‚ด์™ธ\n2. **์—ฐ๊ฒฐ์„ฑ**: ์š”์•ฝ๋œ ์ด์ „ ๋‚ด์šฉ๊ณผ ์ž์—ฐ์Šค๋Ÿฝ๊ฒŒ ์—ฐ๊ฒฐ\n3. **์ผ๊ด€์„ฑ**: ์บ๋ฆญํ„ฐ ์„ค์ •๊ณผ ์ƒํƒœ, ํ”Œ๋กฏ ๊ตฌ์กฐ๋ฅผ ๋ฐ˜๋“œ์‹œ ๋”ฐ๋ฅผ ๊ฒƒ",
            "reminder_title": "โญ ์žŠ์ง€ ๋งˆ์„ธ์š” (ํ…Œ๋งˆ ๋ฆฌ๋งˆ์ธ๋”)",
            "reminder_body": f"์ด ์†Œ์„ค์˜ ํ•ต์‹ฌ์€ '{user_query}'์— ๋‹ด๊ธด **๋ถˆ์•ˆ, ์†Œ์™ธ, ์ƒ์‹ค๊ฐ**์ž…๋‹ˆ๋‹ค. ๊ธ์ •์ ์ธ ํ•ด๊ฒฐ์„ ์„œ๋‘๋ฅด์ง€ ๋ง๊ณ , ์ฃผ์ธ๊ณต์˜ ๋‚ด๋ฉด ๊ฐˆ๋“ฑ์„ ์‹ฌ๋„ ์žˆ๊ฒŒ ๋ฌ˜์‚ฌํ•˜๋Š” ๋ฐ ์ง‘์ค‘ํ•˜์„ธ์š”.",
            "final_instruction": "์ฐฝ์˜์ ์ด๋ฉด์„œ๋„ ์ฃผ์ œ์™€ ์ผ๊ด€์„ฑ์„ ์ ˆ๋Œ€ ์žƒ์ง€ ๋งˆ์‹ญ์‹œ์˜ค."
        },
        "English": {
            "title": f"You are Writer #{writer_number}. Write pages {pages_start}-{pages_end} of the novella.",
            "plan": "Director's Masterplan",
            "prev_summary": "Previous Content Summary",
            "guidelines": "**Writing Guidelines:**\n1. **Length**: Approx. 1,400-1,500 words\n2. **Connectivity**: Connect naturally with the summarized previous content.\n3. **Consistency**: Strictly follow character settings, states, and plot structure.",
            "reminder_title": "โญ REMINDER (THEME)",
            "reminder_body": f"The core of this novel is the **anxiety, alienation, and loss** from the theme '{user_query}'. Do not rush to a positive resolution; focus on deeply describing the protagonist's internal conflict.",
            "final_instruction": "Be creative, but never lose consistency and the core theme."
        }
    }
    p = lang_prompts[language]
    # The writer number doubles as the "chapter" index for the tracker digests.
    consistency_info = self.consistency_tracker.get_character_summary(writer_number) + self.consistency_tracker.get_plot_summary(writer_number)
    return f"{p['title']}\n\n**{p['plan']}:**\n{director_plan}\n\n{consistency_info}\n\n**{p['prev_summary']}:**\n{previous_content_summary}\n\n---\n{p['guidelines']}\n\n**{p['reminder_title']}**\n{p['reminder_body']}\n---\n\n{p['final_instruction']}"
def create_critic_consistency_prompt(self, all_content: str, user_query: str, language: str) -> str:
    """Build the critic's mid-point review prompt over the drafts so far.

    Only the last 3000 characters of the accumulated text are embedded.
    NOTE(review): Korean-only; ``language`` is accepted but unused here.
    """
    return f"""๋‹น์‹ ์€ ์ผ๊ด€์„ฑ ๊ฒ€ํ†  ์ „๋ฌธ ๋น„ํ‰๊ฐ€์ž…๋‹ˆ๋‹ค. ์ง€๊ธˆ๊นŒ์ง€ ์ž‘์„ฑ๋œ ๋‚ด์šฉ์„ ๊ฒ€ํ† ํ•˜์„ธ์š”.
**์›๋ž˜ ์ฃผ์ œ:** {user_query}
**ํ˜„์žฌ๊นŒ์ง€ ์ž‘์„ฑ๋œ ๋‚ด์šฉ (์ตœ๊ทผ 3000์ž):**\n{all_content[-3000:]}
**๊ฒ€ํ†  ํ•ญ๋ชฉ:**
1. **์ฃผ์ œ ์ผ๊ด€์„ฑ (๊ฐ€์žฅ ์ค‘์š”):** ๋‚ด์šฉ์ด ์›๋ž˜ ์ฃผ์ œ์˜ ์–ด๋‘์šด ๊ฐ์ •์„ ์—์„œ ๋ฒ—์–ด๋‚˜์ง€ ์•Š์•˜๋Š”์ง€ ํ™•์ธํ•˜๊ณ , ๋ฒ—์–ด๋‚ฌ๋‹ค๋ฉด ์ˆ˜์ • ๋ฐฉํ–ฅ์„ ์ œ์‹œํ•˜์„ธ์š”.
2. **๊ธฐ์ˆ ์  ์ผ๊ด€์„ฑ:** ์บ๋ฆญํ„ฐ, ํ”Œ๋กฏ, ์„ค์ •์˜ ์—ฐ์†์„ฑ๊ณผ ๋…ผ๋ฆฌ์  ์˜ค๋ฅ˜๋ฅผ ์ฐพ์•„๋‚ด์„ธ์š”.
3. **๋ฐ˜๋ณต ๋‚ด์šฉ:** ์˜๋ฏธ์ ์œผ๋กœ ์ค‘๋ณต๋˜๋Š” ์žฅ๋ฉด์ด๋‚˜ ํ‘œํ˜„์ด ์—†๋Š”์ง€ ํ™•์ธํ•˜์„ธ์š”.
**๊ฒฐ๊ณผ:** ๋ฐœ๊ฒฌ๋œ ๋ฌธ์ œ์ ๊ณผ ๊ตฌ์ฒด์ ์ธ ์ˆ˜์ • ์ œ์•ˆ์„ ๋ชฉ๋ก์œผ๋กœ ์ œ์‹œํ•˜์„ธ์š”.
"""
def create_writer_revision_prompt(self, writer_number: int, initial_content: str, consistency_feedback: str, language: str) -> str:
    """Build a writer's revision prompt from the mid-point critic feedback.

    NOTE(review): Korean-only; ``language`` is accepted but unused here.
    """
    return f"""์ž‘๊ฐ€ {writer_number}๋ฒˆ์œผ๋กœ์„œ ๋น„ํ‰๊ฐ€์˜ ํ”ผ๋“œ๋ฐฑ์„ ๋ฐ˜์˜ํ•˜์—ฌ ๋‚ด์šฉ์„ ์ˆ˜์ •ํ•˜์„ธ์š”.
**์ดˆ๊ธฐ ์ž‘์„ฑ ๋‚ด์šฉ:**\n{initial_content}
**๋น„ํ‰๊ฐ€ ํ”ผ๋“œ๋ฐฑ:**\n{consistency_feedback}
**์ˆ˜์ • ์ง€์นจ:**
- ์ง€์ ๋œ ๋ชจ๋“  ์ฃผ์ œ ์ดํƒˆ ๋ฐ ์ผ๊ด€์„ฑ ๋ฌธ์ œ๋ฅผ ํ•ด๊ฒฐํ•˜์„ธ์š”.
- ๋ถ„๋Ÿ‰(1,400-1,500 ๋‹จ์–ด)์„ ์œ ์ง€ํ•˜๋ฉด์„œ ๋‚ด์šฉ์˜ ์งˆ์„ ๋†’์ด์„ธ์š”.
- ์ˆ˜์ •๋œ ์ตœ์ข… ๋ฒ„์ „์„ ์ œ์‹œํ•˜์„ธ์š”.
"""
def create_critic_final_prompt(self, complete_novel: str, language: str) -> str:
    """Build the critic's final review/report prompt over the finished novel.

    Only the last 2000 characters of the novel are embedded.
    NOTE(review): Korean-only; ``language`` is accepted but unused here.
    """
    return f"""์™„์„ฑ๋œ ์†Œ์„ค์˜ ์ตœ์ข… ์ผ๊ด€์„ฑ ๋ฐ ์™„์„ฑ๋„์— ๋Œ€ํ•œ ์ข…ํ•ฉ ๋ณด๊ณ ์„œ๋ฅผ ์ž‘์„ฑํ•˜์„ธ์š”.
**์™„์„ฑ๋œ ์†Œ์„ค (๋งˆ์ง€๋ง‰ 2000์ž):**\n{complete_novel[-2000:]}
**๋ณด๊ณ ์„œ ํฌํ•จ ํ•ญ๋ชฉ:**
1. **์ „์ฒด ์ผ๊ด€์„ฑ ํ‰๊ฐ€:** ์บ๋ฆญํ„ฐ, ํ”Œ๋กฏ, ์„ค์ •, ์ฃผ์ œ ์œ ์ง€์— ๋Œ€ํ•œ ์ ์ˆ˜(1-10)์™€ ์ดํ‰.
2. **์ตœ์ข… ๋ฐœ๊ฒฌ๋œ ๋ฌธ์ œ์ :** ๋‚จ์•„์žˆ๋Š” ์‚ฌ์†Œํ•œ ๋ฌธ์ œ์ ๋“ค.
3. **์„ฑ๊ณต ์š”์†Œ:** ํŠนํžˆ ์ž˜ ์œ ์ง€๋œ ์ผ๊ด€์„ฑ ๋ถ€๋ถ„์ด๋‚˜ ์ฃผ์ œ ํ‘œํ˜„์ด ๋›ฐ์–ด๋‚œ ๋ถ€๋ถ„.
4. **์ตœ์ข… ํ‰๊ฐ€:** ์†Œ์„ค์˜ ์ „๋ฐ˜์ ์ธ ์™„์„ฑ๋„์™€ ๋…์ž์—๊ฒŒ ๋ฏธ์น  ์˜ํ–ฅ์— ๋Œ€ํ•œ ํ‰๊ฐ€.
"""
# --- LLM ํ˜ธ์ถœ ํ•จ์ˆ˜๋“ค ---
def call_llm_sync(self, messages: List[Dict[str, str]], role: str, language: str) -> str:
"""LLM ๋™๊ธฐ์‹ ํ˜ธ์ถœ (์š”์•ฝ ๋“ฑ ๋‚ด๋ถ€์šฉ)"""
full_content = ""
for chunk in self.call_llm_streaming(messages, role, language):
full_content += chunk
if full_content.startswith("โŒ"):
raise Exception(f"LLM Sync Call Failed: {full_content}")
return full_content
def call_llm_streaming(self, messages: List[Dict[str, str]], role: str, language: str) -> Generator[str, None, None]:
    """Stream a chat completion from the Friendli API, yielding text chunks.

    Prepends the role's system prompt, POSTs with stream=True, and parses
    the SSE response line by line. Errors are never raised to the caller:
    every failure path yields a single marker string starting with "โŒ".
    Chunks are buffered and flushed every ~100 chars or at a newline to
    throttle UI updates.
    """
    try:
        system_prompts = self.get_system_prompts(language)
        full_messages = [{"role": "system", "content": system_prompts.get(role, "You are a helpful assistant.")}, *messages]
        payload = {
            "model": self.model_id,
            "messages": full_messages,
            "max_tokens": 10000,
            "temperature": 0.75,
            "top_p": 0.9,
            "presence_penalty": 0.3,
            "frequency_penalty": 0.2,
            "stream": True,
            "stream_options": {"include_usage": True}
        }
        logger.info(f"[{role}] API ์ŠคํŠธ๋ฆฌ๋ฐ ์‹œ์ž‘")
        # Issue the streaming API call.
        response = requests.post(
            self.api_url,
            headers=self.create_headers(),
            json=payload,
            stream=True,
            timeout=180
        )
        # Non-200: surface the status code to the UI and stop.
        if response.status_code != 200:
            logger.error(f"API ์‘๋‹ต ์˜ค๋ฅ˜: {response.status_code}")
            logger.error(f"์‘๋‹ต ๋‚ด์šฉ: {response.text[:500]}")
            yield f"โŒ API ์˜ค๋ฅ˜ (์ƒํƒœ ์ฝ”๋“œ: {response.status_code})"
            return
        response.raise_for_status()
        # SSE stream processing state.
        buffer = ""          # pending text not yet yielded
        total_content = ""   # everything received (for logging)
        chunk_count = 0
        error_count = 0
        for line in response.iter_lines():
            if not line:
                continue
            try:
                line_str = line.decode('utf-8').strip()
                # Only SSE data lines are relevant.
                if not line_str.startswith("data: "):
                    continue
                data_str = line_str[6:]  # strip the "data: " prefix
                # End-of-stream sentinel.
                if data_str == "[DONE]":
                    logger.info(f"[{role}] ์ŠคํŠธ๋ฆฌ๋ฐ ์™„๋ฃŒ - ์ด {len(total_content)} ๋ฌธ์ž")
                    break
                # Tolerate malformed JSON events.
                try:
                    data = json.loads(data_str)
                except json.JSONDecodeError:
                    logger.warning(f"JSON ํŒŒ์‹ฑ ์‹คํŒจ: {data_str[:100]}")
                    continue
                # Guard against empty/missing choices (usage or error events).
                choices = data.get("choices", None)
                if not choices or not isinstance(choices, list) or len(choices) == 0:
                    # In-band error payload: report and abort the stream.
                    if "error" in data:
                        error_msg = data.get("error", {}).get("message", "Unknown error")
                        logger.error(f"API ์—๋Ÿฌ: {error_msg}")
                        yield f"โŒ API ์—๋Ÿฌ: {error_msg}"
                        return
                    continue
                # Extract the incremental text from the delta.
                delta = choices[0].get("delta", {})
                content = delta.get("content", "")
                if content:
                    buffer += content
                    total_content += content
                    chunk_count += 1
                    # Flush roughly every 100 chars or on a newline.
                    if len(buffer) >= 100 or '\n' in buffer:
                        yield buffer
                        buffer = ""
                        time.sleep(0.01)  # brief pause so the UI can repaint
            except Exception as e:
                # Per-line failures are counted; too many aborts the stream.
                error_count += 1
                logger.error(f"์ฒญํฌ ์ฒ˜๋ฆฌ ์˜ค๋ฅ˜ #{error_count}: {str(e)}")
                if error_count > 10:
                    yield f"โŒ ์ŠคํŠธ๋ฆฌ๋ฐ ์ค‘ ๊ณผ๋„ํ•œ ์˜ค๋ฅ˜ ๋ฐœ์ƒ"
                    return
                continue
        # Flush whatever remains in the buffer.
        if buffer:
            yield buffer
        # Summarize the outcome; an empty stream is reported as an error.
        if chunk_count == 0:
            logger.error(f"[{role}] ์ฝ˜ํ…์ธ ๊ฐ€ ์ „ํ˜€ ์ˆ˜์‹ ๋˜์ง€ ์•Š์Œ")
            yield "โŒ API๋กœ๋ถ€ํ„ฐ ์‘๋‹ต์„ ๋ฐ›์ง€ ๋ชปํ–ˆ์Šต๋‹ˆ๋‹ค."
        else:
            logger.info(f"[{role}] ์„ฑ๊ณต์ ์œผ๋กœ {chunk_count}๊ฐœ ์ฒญํฌ, ์ด {len(total_content)}์ž ์ˆ˜์‹ ")
    except requests.exceptions.Timeout:
        logger.error("API ์š”์ฒญ ์‹œ๊ฐ„ ์ดˆ๊ณผ")
        yield "โŒ API ์š”์ฒญ ์‹œ๊ฐ„์ด ์ดˆ๊ณผ๋˜์—ˆ์Šต๋‹ˆ๋‹ค."
    except requests.exceptions.ConnectionError:
        logger.error("API ์—ฐ๊ฒฐ ์‹คํŒจ")
        yield "โŒ API ์„œ๋ฒ„์— ์—ฐ๊ฒฐํ•  ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค."
    except Exception as e:
        logger.error(f"์˜ˆ๊ธฐ์น˜ ์•Š์€ ์˜ค๋ฅ˜: {type(e).__name__}: {str(e)}", exc_info=True)
        yield f"โŒ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
def get_system_prompts(self, language: str) -> Dict[str, str]:
"""์—ญํ• ๋ณ„ ์‹œ์Šคํ…œ ํ”„๋กฌํ”„ํŠธ ์ƒ์„ฑ"""
base_prompts = {
"Korean": {
"director": "๋‹น์‹ ์€ ์ฐฝ์˜์ ์ด๊ณ  ์ฒด๊ณ„์ ์ธ ์†Œ์„ค ๊ธฐํš ์ „๋ฌธ๊ฐ€์ž…๋‹ˆ๋‹ค. ํฅ๋ฏธ๋กญ๊ณ  ์ผ๊ด€์„ฑ ์žˆ๋Š” ์Šคํ† ๋ฆฌ๋ฅผ ์„ค๊ณ„ํ•˜์„ธ์š”.",
"critic": "๋‹น์‹ ์€ ์ผ๊ด€์„ฑ ๊ฒ€ํ†  ์ „๋ฌธ ๋น„ํ‰๊ฐ€์ž…๋‹ˆ๋‹ค. ์บ๋ฆญํ„ฐ, ํ”Œ๋กฏ, ์„ค์ •์˜ ์ผ๊ด€์„ฑ์„ ์ฒ ์ €ํžˆ ์ ๊ฒ€ํ•˜๊ณ  ๊ฐœ์„ ๋ฐฉ์•ˆ์„ ์ œ์‹œํ•˜์„ธ์š”.",
"writer_base": "๋‹น์‹ ์€ ์ „๋ฌธ ์†Œ์„ค ์ž‘๊ฐ€์ž…๋‹ˆ๋‹ค. ์ฃผ์–ด์ง„ ์ง€์นจ์— ๋”ฐ๋ผ ๋ชฐ์ž…๊ฐ ์žˆ๊ณ  ์ผ๊ด€์„ฑ ์žˆ๋Š” ๋‚ด์šฉ์„ ์ž‘์„ฑํ•˜์„ธ์š”."
},
"English": {
"director": "You are a creative and systematic novel planning expert. Design engaging and consistent stories.",
"critic": "You are a consistency review specialist critic. Thoroughly check character, plot, and setting consistency and suggest improvements.",
"writer_base": "You are a professional novel writer. Write immersive and consistent content according to the given guidelines."
}
}
prompts = base_prompts[language].copy()
# ์ž‘๊ฐ€๋ณ„ ํŠน์ˆ˜ ํ”„๋กฌํ”„ํŠธ
if language == "Korean":
prompts["writer1"] = "๋‹น์‹ ์€ ์†Œ์„ค์˜ ๋งค๋ ฅ์ ์ธ ์‹œ์ž‘์„ ๋‹ด๋‹นํ•˜๋Š” ์ž‘๊ฐ€์ž…๋‹ˆ๋‹ค. ๋…์ž๋ฅผ ์‚ฌ๋กœ์žก๋Š” ๋„์ž…๋ถ€๋ฅผ ๋งŒ๋“œ์„ธ์š”."
prompts["writer10"] = "๋‹น์‹ ์€ ์™„๋ฒฝํ•œ ๊ฒฐ๋ง์„ ๋งŒ๋“œ๋Š” ์ž‘๊ฐ€์ž…๋‹ˆ๋‹ค. ๋…์ž์—๊ฒŒ ๊นŠ์€ ์—ฌ์šด์„ ๋‚จ๊ธฐ๋Š” ๋งˆ๋ฌด๋ฆฌ๋ฅผ ํ•˜์„ธ์š”."
else:
prompts["writer1"] = "You are a writer responsible for the captivating beginning. Create an opening that hooks readers."
prompts["writer10"] = "You are a writer who creates the perfect ending. Create a conclusion that leaves readers with deep resonance."
# writer2-9๋Š” ๊ธฐ๋ณธ ํ”„๋กฌํ”„ํŠธ ์‚ฌ์šฉ
for i in range(2, 10):
prompts[f"writer{i}"] = prompts["writer_base"]
return prompts
# --- ๋ฉ”์ธ ํ”„๋กœ์„ธ์Šค ---
def process_novel_stream(self, query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, List[Dict[str, Any]], str], None, None]:
    """Run the full 25-stage pipeline, yielding UI updates as it goes.

    Yields (status_message, stages, session_id) tuples; status is "" until
    the final success/error message. When *session_id* is given, the run
    resumes from the stage after the last one persisted for that session.
    """
    try:
        resume_from_stage = 0
        if session_id:
            # Resume: restore the original query/language from the DB and
            # continue after the last saved stage.
            self.current_session_id = session_id
            session = NovelDatabase.get_session(session_id)
            if session:
                query = session['user_query']
                language = session['language']
                resume_from_stage = session['current_stage'] + 1
                logger.info(f"Resuming session {session_id} from stage {resume_from_stage}")
        else:
            self.current_session_id = NovelDatabase.create_session(query, language)
            logger.info(f"Created new session: {self.current_session_id}")
        stages = []
        if resume_from_stage > 0:
            # Rehydrate the UI stage list from persisted rows.
            stages = [{
                "name": s['stage_name'], "status": s['status'], "content": s.get('content', ''),
                "consistency_score": s.get('consistency_score', 0.0)
            } for s in NovelDatabase.get_stages(self.current_session_id)]
        for stage_idx in range(resume_from_stage, len(OPTIMIZED_STAGES)):
            role, stage_name = OPTIMIZED_STAGES[stage_idx]
            if stage_idx >= len(stages):
                stages.append({"name": stage_name, "status": "active", "content": "", "consistency_score": 0.0})
            else:
                stages[stage_idx]["status"] = "active"
            yield "", stages, self.current_session_id
            prompt = self.get_stage_prompt(stage_idx, role, query, language, stages)
            stage_content = ""
            # Stream the stage content, pushing a UI update per chunk.
            for chunk in self.call_llm_streaming([{"role": "user", "content": prompt}], role, language):
                stage_content += chunk
                stages[stage_idx]["content"] = stage_content
                yield "", stages, self.current_session_id
            consistency_score = 0.0
            if role.startswith("writer"):
                # Writer stages get scored: 10 minus 2 per consistency error.
                writer_num = int(re.search(r'\d+', role).group())
                # NOTE(review): all_previous is computed but never used here.
                all_previous = self.get_all_content(stages, stage_idx)
                errors = self.consistency_tracker.validate_consistency(writer_num, stage_content)
                consistency_score = max(0, 10 - len(errors) * 2)
                stages[stage_idx]["consistency_score"] = consistency_score
            stages[stage_idx]["status"] = "complete"
            NovelDatabase.save_stage(
                self.current_session_id, stage_idx, stage_name, role,
                stage_content, "complete", consistency_score
            )
            yield "", stages, self.current_session_id
        # Assemble the final novel from the writers' revisions and persist it.
        final_novel = NovelDatabase.get_writer_content(self.current_session_id)
        final_report = self.generate_consistency_report(final_novel, language)
        NovelDatabase.update_final_novel(self.current_session_id, final_novel, final_report)
        yield f"โœ… ์†Œ์„ค ์™„์„ฑ! ์ด {len(final_novel.split())}๋‹จ์–ด", stages, self.current_session_id
    except Exception as e:
        logger.error(f"์†Œ์„ค ์ƒ์„ฑ ํ”„๋กœ์„ธ์Šค ์˜ค๋ฅ˜: {e}", exc_info=True)
        yield f"โŒ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {e}", stages if 'stages' in locals() else [], self.current_session_id
def get_stage_prompt(self, stage_idx: int, role: str, query: str, language: str, stages: List[Dict]) -> str:
    """Build the LLM prompt for one pipeline stage.

    Stage layout: 0-2 planning (director draft, critic review, revised
    master plan), 3-12 writer first drafts, 13 critic mid-point
    consistency review, 14-23 writer revisions, 24 final critique.
    Returns an empty string for any index outside that range.
    """
    if stage_idx == 0:
        return self.create_director_initial_prompt(query, language)
    elif stage_idx == 1:
        return self.create_critic_director_prompt(stages[0]["content"], query, language)
    elif stage_idx == 2:
        return self.create_director_revision_prompt(
            stages[0]["content"], stages[1]["content"], query, language
        )

    # Every later stage is anchored to the revised master plan.
    plan = stages[2]["content"]

    if 3 <= stage_idx <= 12:  # writer first drafts (writers 1..10)
        writer_no = stage_idx - 2
        story_so_far = self.get_all_content(stages, stage_idx)
        digest = self.create_summary(story_so_far, language)
        return self.create_writer_prompt(writer_no, plan, digest, query, language)

    if stage_idx == 13:  # critic mid-point consistency review
        story_so_far = self.get_all_content(stages, stage_idx)
        return self.create_critic_consistency_prompt(story_so_far, query, language)

    if 14 <= stage_idx <= 23:  # writer revisions (writers 1..10)
        writer_no = stage_idx - 13
        first_draft = stages[2 + writer_no]["content"]
        critique = stages[13]["content"]
        return self.create_writer_revision_prompt(writer_no, first_draft, critique, language)

    if stage_idx == 24:  # final critique of the assembled novel
        assembled = self.get_all_writer_content(stages)
        return self.create_critic_final_prompt(assembled, language)

    return ""
def create_summary(self, content: str, language: str) -> str:
    """Condense prior story text into a short hand-off summary via the LLM.

    Returns a localized placeholder when there is nothing to summarize,
    and falls back to the raw tail of the content if the LLM call fails.
    """
    if not content.strip():
        if language == "Korean":
            return "์ด์ „ ๋‚ด์šฉ์ด ์—†์Šต๋‹ˆ๋‹ค."
        return "No previous content."

    if language == "Korean":
        instruction = "๋‹ค์Œ ์†Œ์„ค ๋‚ด์šฉ์„ 3~5๊ฐœ์˜ ํ•ต์‹ฌ์ ์ธ ๋ฌธ์žฅ์œผ๋กœ ์š”์•ฝํ•ด์ค˜. ๋‹ค์Œ ์ž‘๊ฐ€๊ฐ€ ์ด์•ผ๊ธฐ๋ฅผ ์ด์–ด๊ฐ€๋Š” ๋ฐ ํ•„์š”ํ•œ ํ•ต์‹ฌ ์ •๋ณด(๋“ฑ์žฅ์ธ๋ฌผ์˜ ํ˜„์žฌ ์ƒํ™ฉ, ๊ฐ์ •, ๋งˆ์ง€๋ง‰ ์‚ฌ๊ฑด)๋ฅผ ํฌํ•จํ•ด์•ผ ํ•ด."
    else:
        instruction = "Summarize the following novel content in 3-5 key sentences. Include crucial information for the next writer to continue the story (characters' current situation, emotions, and the last major event)."

    # Only the last 2000 characters are sent, to keep the request small.
    request = f"{instruction}\n\n---\n{content[-2000:]}"
    try:
        return self.call_llm_sync([{"role": "user", "content": request}], "critic", language)
    except Exception as e:
        logger.error(f"์š”์•ฝ ์ƒ์„ฑ ์‹คํŒจ: {e}")
        # Degraded fallback: hand the next writer the raw recent tail.
        return content[-1000:]
def get_all_content(self, stages: List[Dict], current_stage: int) -> str:
    """Concatenate the non-empty content of every stage before `current_stage`."""
    collected = []
    for idx, stage in enumerate(stages):
        if idx >= current_stage:
            break  # only stages strictly before the current one
        if stage["content"]:
            collected.append(stage["content"])
    return "\n\n".join(collected)
def get_all_writer_content(self, stages: List[Dict]) -> str:
    """Assemble the novel from the writer-revision stages (indices 14-23)."""
    revision_stages = stages[14:24]
    return "\n\n".join(stage["content"] for stage in revision_stages if stage["content"])
def generate_consistency_report(self, complete_novel: str, language: str) -> str:
    """Produce the final report by running the critic's final prompt through the LLM.

    Returns a fixed Korean error string if the LLM call raises.
    """
    final_prompt = self.create_critic_final_prompt(complete_novel, language)
    try:
        return self.call_llm_sync([{"role": "user", "content": final_prompt}], "critic", language)
    except Exception as e:
        logger.error(f"์ตœ์ข… ๋ณด๊ณ ์„œ ์ƒ์„ฑ ์‹คํŒจ: {e}")
        return "๋ณด๊ณ ์„œ ์ƒ์„ฑ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ"
# --- ์œ ํ‹ธ๋ฆฌํ‹ฐ ํ•จ์ˆ˜๋“ค ---
def process_query(query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, str, str, str], None, None]:
    """Run the full novel pipeline for a query, yielding UI update tuples.

    Yields (stages_markdown, novel_markdown, status_message, session_id)
    on every streamed update from the writing system.
    """
    if not query.strip():
        yield "", "", "โŒ ์ฃผ์ œ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”.", session_id
        return
    writing_system = NovelWritingSystem()
    progress_md = ""
    novel_md = ""  # persists across updates once the novel is rendered
    for status, stages, active_session in writing_system.process_novel_stream(query, language, session_id):
        progress_md = format_stages_display(stages)
        # Render the novel body once the last 10 stages (writer revisions)
        # have all completed.
        if stages and all(s.get("status") == "complete" for s in stages[-10:]):
            raw_novel = NovelDatabase.get_writer_content(active_session)
            novel_md = format_novel_display(raw_novel)
        yield progress_md, novel_md, status or "๐Ÿ”„ ์ฒ˜๋ฆฌ ์ค‘...", active_session
def get_active_sessions(language: str) -> List[str]:
    """Return human-readable dropdown labels for all active sessions."""
    labels = []
    for record in NovelDatabase.get_active_sessions():
        short_id = record['session_id'][:8]
        short_query = record['user_query'][:50]
        labels.append(f"{short_id}... - {short_query}... ({record['created_at']})")
    return labels
def auto_recover_session(language: str) -> Tuple[Optional[str], str]:
    """Recover the most recent active session.

    Returns (session_id, status message), or (None, message) when there is
    nothing to recover.
    """
    latest = NovelDatabase.get_latest_active_session()
    if not latest:
        return None, "๋ณต๊ตฌํ•  ์„ธ์…˜์ด ์—†์Šต๋‹ˆ๋‹ค."
    return latest['session_id'], f"์„ธ์…˜ {latest['session_id'][:8]}... ๋ณต๊ตฌ๋จ"
def resume_session(session_id: str, language: str) -> Generator[Tuple[str, str, str, str], None, None]:
    """Resume a saved session by id, streaming the same UI update tuples as process_query."""
    if not session_id:
        yield "", "", "โŒ ์„ธ์…˜ ID๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", session_id
        return

    # Dropdown labels embed the truncated id before an ellipsis; strip it.
    if "..." in session_id:
        session_id = session_id.split("...")[0]

    record = NovelDatabase.get_session(session_id)
    if not record:
        yield "", "", "โŒ ์„ธ์…˜์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.", None
        return

    # Delegate to the main pipeline with the stored query/language.
    yield from process_query(record['user_query'], record['language'], session_id)
def download_novel(novel_text: str, format_type: str, language: str, session_id: str) -> Optional[str]:
    """Write the novel to a downloadable file.

    Returns the file path, or None when there is nothing to export or the
    export fails. Falls back to TXT when DOCX support is unavailable.
    """
    if not (novel_text and session_id):
        return None
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    base_name = f"novel_{session_id[:8]}_{stamp}"
    try:
        if format_type == "DOCX" and DOCX_AVAILABLE:
            return export_to_docx(novel_text, base_name, language)
        return export_to_txt(novel_text, base_name)
    except Exception as e:
        logger.error(f"ํŒŒ์ผ ์ƒ์„ฑ ์‹คํŒจ: {e}")
        return None
def format_stages_display(stages: List[Dict]) -> str:
    """Render per-stage progress as markdown: icon, name, score, content preview."""
    parts = ["## ๐ŸŽฌ ์ง„ํ–‰ ์ƒํ™ฉ\n\n"]
    icons = {"complete": "โœ…", "active": "๐Ÿ”„"}  # anything else is pending
    for stage in stages:
        line = f"{icons.get(stage['status'], 'โณ')} **{stage['name']}**"
        score = stage.get('consistency_score', 0)
        if score > 0:
            line += f" (์ผ๊ด€์„ฑ: {score:.1f}/10)"
        parts.append(line + "\n")
        body = stage['content']
        if body:
            # Blockquote preview capped at 200 characters.
            if len(body) > 200:
                preview = body[:200] + "..."
            else:
                preview = body
            parts.append(f"> {preview}\n\n")
    return "".join(parts)
def format_novel_display(novel_text: str) -> str:
    """Render the finished novel as markdown, one "page" per paragraph block."""
    if not novel_text:
        return "์•„์ง ์™„์„ฑ๋œ ๋‚ด์šฉ์ด ์—†์Šต๋‹ˆ๋‹ค."
    rendered = ["# ๐Ÿ“– ์™„์„ฑ๋œ ์†Œ์„ค\n\n"]
    # Page numbers follow the raw split index, so blank blocks leave gaps.
    for idx, block in enumerate(novel_text.split('\n\n')):
        if block.strip():
            rendered.append(f"### ํŽ˜์ด์ง€ {idx+1}\n\n{block}\n\n---\n\n")
    return "".join(rendered)
def export_to_docx(content: str, filename: str, language: str) -> str:
    """Export novel content to a DOCX file.

    Args:
        content: Full novel text; paragraphs separated by blank lines.
        filename: Base file name (no extension).
        language: Display language recorded in the document metadata.

    Returns:
        Path to the saved .docx file.
    """
    doc = Document()
    # Title page
    title = doc.add_heading('AI ํ˜‘์—… ์†Œ์„ค', 0)
    title.alignment = WD_ALIGN_PARAGRAPH.CENTER
    # Metadata
    doc.add_paragraph(f"์ƒ์„ฑ์ผ: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    doc.add_paragraph(f"์–ธ์–ด: {language}")
    doc.add_page_break()
    # Body: one docx paragraph per blank-line-separated block.
    for para in content.split('\n\n'):
        if para.strip():
            doc.add_paragraph(para.strip())
    # Bug fix: the previous version saved to a hard-coded path and ignored
    # `filename`, so every export overwrote the same file. Save under the
    # system temp dir so it also works with a read-only working directory.
    filepath = os.path.join(tempfile.gettempdir(), f"{filename}.docx")
    doc.save(filepath)
    return filepath
def export_to_txt(content: str, filename: str) -> str:
    """Export novel content to a UTF-8 text file.

    Args:
        content: Full novel text.
        filename: Base file name (no extension).

    Returns:
        Path to the saved .txt file.
    """
    # Bug fix: the previous version wrote to a hard-coded "(unknown).txt"
    # path and ignored `filename`, so every export overwrote the same file.
    # Save under the system temp dir so it also works with a read-only
    # working directory.
    filepath = os.path.join(tempfile.gettempdir(), f"{filename}.txt")
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write(content)
    return filepath
# CSS ์Šคํƒ€์ผ
custom_css = """
.gradio-container {
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
min-height: 100vh;
}
.main-header {
background-color: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
padding: 30px;
border-radius: 12px;
margin-bottom: 30px;
text-align: center;
color: white;
}
.input-section {
background-color: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
padding: 20px;
border-radius: 12px;
margin-bottom: 20px;
}
.session-section {
background-color: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
padding: 15px;
border-radius: 8px;
margin-top: 20px;
color: white;
}
#stages-display {
background-color: rgba(255, 255, 255, 0.95);
padding: 20px;
border-radius: 12px;
max-height: 600px;
overflow-y: auto;
}
#novel-output {
background-color: rgba(255, 255, 255, 0.95);
padding: 30px;
border-radius: 12px;
max-height: 400px;
overflow-y: auto;
}
.download-section {
background-color: rgba(255, 255, 255, 0.9);
padding: 15px;
border-radius: 8px;
margin-top: 20px;
}
"""
# Gradio ์ธํ„ฐํŽ˜์ด์Šค ์ƒ์„ฑ
def create_interface():
with gr.Blocks(css=custom_css, title="AI ํ˜‘์—… ์†Œ์„ค ์ƒ์„ฑ ์‹œ์Šคํ…œ") as interface:
gr.HTML("""
<div class="main-header">
<h1 style="font-size: 2.5em; margin-bottom: 10px;">
๐Ÿ“š AI ํ˜‘์—… ์†Œ์„ค ์ƒ์„ฑ ์‹œ์Šคํ…œ
</h1>
<h3 style="color: #ccc; margin-bottom: 20px;">
์ผ๊ด€์„ฑ ์ค‘์‹ฌ์˜ ์ฐฝ์˜์  ์†Œ์„ค ์ƒ์„ฑ
</h3>
<p style="font-size: 1.1em; color: #ddd; max-width: 800px; margin: 0 auto;">
์ฃผ์ œ๋ฅผ ์ž…๋ ฅํ•˜๋ฉด AI ์—์ด์ „ํŠธ๋“ค์ด ํ˜‘์—…ํ•˜์—ฌ 30ํŽ˜์ด์ง€ ๋ถ„๋Ÿ‰์˜ ์™„์„ฑ๋œ ์†Œ์„ค์„ ์ƒ์„ฑํ•ฉ๋‹ˆ๋‹ค.
<br>
๊ฐ๋…์ž 1๋ช…, ๋น„ํ‰๊ฐ€ 1๋ช…, ์ž‘๊ฐ€ 10๋ช…์ด ํ•จ๊ป˜ ์ž‘์—…ํ•˜๋ฉฐ ์ผ๊ด€์„ฑ์„ ์œ ์ง€ํ•ฉ๋‹ˆ๋‹ค.
</p>
</div>
""")
# ์ƒํƒœ ๊ด€๋ฆฌ
current_session_id = gr.State(None)
with gr.Row():
with gr.Column(scale=1):
with gr.Group(elem_classes=["input-section"]):
query_input = gr.Textbox(
label="์†Œ์„ค ์ฃผ์ œ / Novel Theme",
placeholder="์†Œ์„ค์˜ ์ฃผ์ œ๋‚˜ ์ดˆ๊ธฐ ์•„์ด๋””์–ด๋ฅผ ์ž…๋ ฅํ•˜์„ธ์š”...\nEnter your novel theme or initial idea...",
lines=4
)
language_select = gr.Radio(
choices=["English", "Korean"],
value="English",
label="์–ธ์–ด / Language"
)
with gr.Row():
submit_btn = gr.Button("๐Ÿš€ ์†Œ์„ค ์ƒ์„ฑ ์‹œ์ž‘", variant="primary", scale=2)
clear_btn = gr.Button("๐Ÿ—‘๏ธ ์ดˆ๊ธฐํ™”", scale=1)
status_text = gr.Textbox(
label="์ƒํƒœ",
interactive=False,
value="๐Ÿ”„ ์ค€๋น„ ์™„๋ฃŒ"
)
# ์„ธ์…˜ ๊ด€๋ฆฌ
with gr.Group(elem_classes=["session-section"]):
gr.Markdown("### ๐Ÿ’พ ์ด์ „ ์„ธ์…˜ ์žฌ๊ฐœ")
session_dropdown = gr.Dropdown(
label="์„ธ์…˜ ์„ ํƒ",
choices=[],
interactive=True
)
with gr.Row():
refresh_btn = gr.Button("๐Ÿ”„ ๋ชฉ๋ก ์ƒˆ๋กœ๊ณ ์นจ", scale=1)
resume_btn = gr.Button("โ–ถ๏ธ ์„ ํƒ ์žฌ๊ฐœ", variant="secondary", scale=1)
auto_recover_btn = gr.Button("โ™ป๏ธ ์ž๋™ ๋ณต๊ตฌ", scale=1)
with gr.Column(scale=2):
with gr.Tab("๐Ÿ“ ์ž‘์„ฑ ๊ณผ์ •"):
stages_display = gr.Markdown(
value="์ž‘์„ฑ ๊ณผ์ •์ด ์—ฌ๊ธฐ์— ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค...",
elem_id="stages-display"
)
with gr.Tab("๐Ÿ“– ์™„์„ฑ๋œ ์†Œ์„ค"):
novel_output = gr.Markdown(
value="์™„์„ฑ๋œ ์†Œ์„ค์ด ์—ฌ๊ธฐ์— ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค...",
elem_id="novel-output"
)
with gr.Group(elem_classes=["download-section"]):
gr.Markdown("### ๐Ÿ“ฅ ์†Œ์„ค ๋‹ค์šด๋กœ๋“œ")
with gr.Row():
format_select = gr.Radio(
choices=["DOCX", "TXT"],
value="DOCX" if DOCX_AVAILABLE else "TXT",
label="ํ˜•์‹"
)
download_btn = gr.Button("โฌ‡๏ธ ๋‹ค์šด๋กœ๋“œ", variant="secondary")
download_file = gr.File(
label="๋‹ค์šด๋กœ๋“œ๋œ ํŒŒ์ผ",
visible=False
)
# ์ˆจ๊ฒจ์ง„ ์ƒํƒœ
novel_text_state = gr.State("")
# ์˜ˆ์ œ
with gr.Row():
gr.Examples(
examples=[
["๋ฏธ๋ž˜ ๋„์‹œ์—์„œ ๊ธฐ์–ต์„ ๊ฑฐ๋ž˜ํ•˜๋Š” ์ƒ์ธ์˜ ์ด์•ผ๊ธฐ"],
["์‹œ๊ฐ„์ด ๊ฑฐ๊พธ๋กœ ํ๋ฅด๋Š” ๋งˆ์„์˜ ๋ฏธ์Šคํ„ฐ๋ฆฌ"],
["A scientist discovers a portal to parallel universes"],
["In a world where dreams can be traded, a dream thief's story"],
["Two AI entities fall in love while preventing a cyber war"],
["์ฑ… ์†์œผ๋กœ ๋“ค์–ด๊ฐˆ ์ˆ˜ ์žˆ๋Š” ๋Šฅ๋ ฅ์„ ๊ฐ€์ง„ ์‚ฌ์„œ์˜ ๋ชจํ—˜"]
],
inputs=query_input,
label="๐Ÿ’ก ์˜ˆ์ œ ์ฃผ์ œ"
)
# ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ
def refresh_sessions():
try:
sessions = get_active_sessions("English")
return gr.update(choices=sessions)
except Exception as e:
logger.error(f"Error refreshing sessions: {str(e)}")
return gr.update(choices=[])
def handle_auto_recover(language):
session_id, message = auto_recover_session(language)
return session_id
# ์ด๋ฒคํŠธ ์—ฐ๊ฒฐ
submit_btn.click(
fn=process_query,
inputs=[query_input, language_select, current_session_id],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
novel_output.change(
fn=lambda x: x,
inputs=[novel_output],
outputs=[novel_text_state]
)
resume_btn.click(
fn=lambda x: x,
inputs=[session_dropdown],
outputs=[current_session_id]
).then(
fn=resume_session,
inputs=[current_session_id, language_select],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
auto_recover_btn.click(
fn=handle_auto_recover,
inputs=[language_select],
outputs=[current_session_id]
).then(
fn=resume_session,
inputs=[current_session_id, language_select],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
refresh_btn.click(
fn=refresh_sessions,
outputs=[session_dropdown]
)
clear_btn.click(
fn=lambda: ("", "", "๐Ÿ”„ ์ค€๋น„ ์™„๋ฃŒ", "", None),
outputs=[stages_display, novel_output, status_text, novel_text_state, current_session_id]
)
def handle_download(format_type, language, session_id, novel_text):
if not session_id:
return gr.update(visible=False)
file_path = download_novel(novel_text, format_type, language, session_id)
if file_path:
return gr.update(value=file_path, visible=True)
else:
return gr.update(visible=False)
download_btn.click(
fn=handle_download,
inputs=[format_select, language_select, current_session_id, novel_text_state],
outputs=[download_file]
)
# ์‹œ์ž‘ ์‹œ ์„ธ์…˜ ๋กœ๋“œ
interface.load(
fn=refresh_sessions,
outputs=[session_dropdown]
)
return interface
# ๋ฉ”์ธ ์‹คํ–‰
if __name__ == "__main__":
logger.info("AI ํ˜‘์—… ์†Œ์„ค ์ƒ์„ฑ ์‹œ์Šคํ…œ ์‹œ์ž‘...")
logger.info("=" * 60)
# ํ™˜๊ฒฝ ํ™•์ธ
logger.info(f"API ์—”๋“œํฌ์ธํŠธ: {API_URL}")
if BRAVE_SEARCH_API_KEY:
logger.info("์›น ๊ฒ€์ƒ‰์ด ํ™œ์„ฑํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
else:
logger.warning("์›น ๊ฒ€์ƒ‰์ด ๋น„ํ™œ์„ฑํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
if DOCX_AVAILABLE:
logger.info("DOCX ๋‚ด๋ณด๋‚ด๊ธฐ๊ฐ€ ํ™œ์„ฑํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
else:
logger.warning("DOCX ๋‚ด๋ณด๋‚ด๊ธฐ๊ฐ€ ๋น„ํ™œ์„ฑํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
logger.info("=" * 60)
# ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์ดˆ๊ธฐํ™”
logger.info("๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์ดˆ๊ธฐํ™” ์ค‘...")
NovelDatabase.init_db()
logger.info("๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์ดˆ๊ธฐํ™” ์™„๋ฃŒ.")
# ์ธํ„ฐํŽ˜์ด์Šค ์ƒ์„ฑ ๋ฐ ์‹คํ–‰
interface = create_interface()
interface.launch(
server_name="0.0.0.0",
server_port=7860,
share=False,
debug=True
)