import gradio as gr
import os
import json
import requests
from datetime import datetime
import time
from typing import List, Dict, Any, Generator, Tuple, Optional
import logging
import re
import tempfile
from pathlib import Path
import sqlite3
import hashlib
import threading
from contextlib import contextmanager
from dataclasses import dataclass, field
from collections import defaultdict
# --- Logging setup ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# --- Document export imports ---
try:
from docx import Document
from docx.shared import Inches, Pt, RGBColor
from docx.enum.text import WD_ALIGN_PARAGRAPH
from docx.enum.style import WD_STYLE_TYPE
DOCX_AVAILABLE = True
except ImportError:
DOCX_AVAILABLE = False
logger.warning("python-docx not installed. DOCX export will be disabled.")
# --- Environment variables and constants ---
FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "")
BRAVE_SEARCH_API_KEY = os.getenv("BRAVE_SEARCH_API_KEY", "")
API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
MODEL_ID = "dep89a2fld32mcm"
DB_PATH = "novel_sessions_v2.db"
# --- Environment variable validation ---
if not FRIENDLI_TOKEN:
logger.error("FRIENDLI_TOKEN not set. Application will not work properly.")
# In a real deployment the program should exit here; a dummy token is used so the demo can still run.
FRIENDLI_TOKEN = "dummy_token_for_testing"
if not BRAVE_SEARCH_API_KEY:
logger.warning("BRAVE_SEARCH_API_KEY not set. Web search features will be disabled.")
# --- Global variables ---
db_lock = threading.Lock()
# Optimized stage pipeline (condensed and reinforced into 25 stages)
OPTIMIZED_STAGES = [
("director", "🎬 κ°λ…μž: 초기 기획 (μ›Ή 검색 포함)"),
("critic", "πŸ“ 비평가: 기획 κ²€ν†  (ν…Œλ§ˆ 및 일관성)"),
("director", "🎬 κ°λ…μž: μˆ˜μ •λœ λ§ˆμŠ€ν„°ν”Œλžœ"),
] + [
(f"writer{i}", f"✍️ μž‘κ°€ {i}: μ΄ˆμ•ˆ (νŽ˜μ΄μ§€ {(i-1)*3+1}-{i*3})")
for i in range(1, 11)
] + [
("critic", "πŸ“ 비평가: 쀑간 κ²€ν†  (일관성 및 ν…Œλ§ˆ μœ μ§€)"),
] + [
(f"writer{i}", f"✍️ μž‘κ°€ {i}: μˆ˜μ •λ³Έ (νŽ˜μ΄μ§€ {(i-1)*3+1}-{i*3})")
for i in range(1, 11)
] + [
("critic", f"πŸ“ 비평가: μ΅œμ’… κ²€ν†  및 μ’…ν•© λ³΄κ³ μ„œ μž‘μ„±"),
]
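# Stage index layout (get_stage_prompt depends on these offsets):
#   0-2: director plan -> critic review -> revised masterplan
#   3-12: writer drafts, 13: critic mid-review, 14-23: writer revisions, 24: final critic report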
# --- Data classes ---
@dataclass
class CharacterState:
"""μΊλ¦­ν„°μ˜ ν˜„μž¬ μƒνƒœλ₯Ό λ‚˜νƒ€λ‚΄λŠ” 데이터 클래슀"""
name: str
alive: bool = True
location: str = ""
injuries: List[str] = field(default_factory=list)
emotional_state: str = ""
relationships: Dict[str, str] = field(default_factory=dict)
last_seen_chapter: int = 0
description: str = ""
role: str = ""
@dataclass
class PlotPoint:
"""ν”Œλ‘― 포인트λ₯Ό λ‚˜νƒ€λ‚΄λŠ” 데이터 클래슀"""
chapter: int
event_type: str
description: str
characters_involved: List[str]
impact_level: int
timestamp: str = ""
@dataclass
class TimelineEvent:
"""μ‹œκ°„μ„  이벀트λ₯Ό λ‚˜νƒ€λ‚΄λŠ” 데이터 클래슀"""
chapter: int
time_reference: str
event_description: str
duration: str = ""
relative_time: str = ""
# --- Core logic classes ---
class ConsistencyTracker:
"""Consistency tracking system"""
def __init__(self):
self.character_states: Dict[str, CharacterState] = {}
self.plot_points: List[PlotPoint] = []
self.timeline_events: List[TimelineEvent] = []
self.locations: Dict[str, str] = {}
self.established_facts: List[str] = []
self.content_hashes: Dict[str, int] = {}  # maps a sentence hash to the chapter where it first appeared
def register_character(self, character: CharacterState):
"""μƒˆ 캐릭터 등둝"""
self.character_states[character.name] = character
logger.info(f"Character registered: {character.name}")
def update_character_state(self, name: str, chapter: int, updates: Dict[str, Any]):
"""캐릭터 μƒνƒœ μ—…λ°μ΄νŠΈ"""
if name not in self.character_states:
self.register_character(CharacterState(name=name, last_seen_chapter=chapter))
char = self.character_states[name]
for key, value in updates.items():
if hasattr(char, key):
setattr(char, key, value)
char.last_seen_chapter = chapter
def add_plot_point(self, plot_point: PlotPoint):
"""ν”Œλ‘― 포인트 μΆ”κ°€"""
plot_point.timestamp = datetime.now().isoformat()
self.plot_points.append(plot_point)
def check_repetition(self, content: str, current_chapter: int) -> Tuple[bool, str]:
"""ν–₯μƒλœ 반볡 λ‚΄μš© 검사"""
sentences = re.split(r'[.!?]+', content)
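# Hash each sufficiently long sentence so exact repeats across chapters can be detected cheaply.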
for sentence in sentences:
sentence_strip = sentence.strip()
if len(sentence_strip) > 20:  # ignore very short sentences
sentence_hash = hashlib.md5(sentence_strip.encode('utf-8')).hexdigest()
if sentence_hash in self.content_hashes:
previous_chapter = self.content_hashes[sentence_hash]
# Repetition of the immediately preceding chapter is tolerated; treat it as an error only when the gap is two or more chapters.
if current_chapter > previous_chapter + 1:
return True, f"λ¬Έμž₯ 반볡 (챕터 {previous_chapter}κ³Ό μœ μ‚¬): {sentence_strip[:50]}..."
# μƒˆ λ‚΄μš©μ˜ ν•΄μ‹œ μΆ”κ°€
for sentence in sentences:
sentence_strip = sentence.strip()
if len(sentence_strip) > 20:
sentence_hash = hashlib.md5(sentence_strip.encode('utf-8')).hexdigest()
self.content_hashes[sentence_hash] = current_chapter
return False, ""
def validate_consistency(self, chapter: int, content: str) -> List[str]:
"""일관성 검증"""
errors = []
# μ‚¬λ§ν•œ 캐릭터 λ“±μž₯ 검사
for char_name, char_state in self.character_states.items():
if char_name.lower() in content.lower() and not char_state.alive:
errors.append(f"⚠️ μ‚¬λ§ν•œ 캐릭터 '{char_name}'이(κ°€) λ“±μž₯ν–ˆμŠ΅λ‹ˆλ‹€.")
# Check for repeated content
is_repetition, repeat_msg = self.check_repetition(content, chapter)
if is_repetition:
errors.append(f"πŸ”„ {repeat_msg}")
return errors
def get_character_summary(self, chapter: int) -> str:
"""ν˜„μž¬ 챕터 κΈ°μ€€ 캐릭터 μš”μ•½"""
summary = "\n=== 캐릭터 ν˜„ν™© μš”μ•½ (이전 2개 챕터 κΈ°μ€€) ===\n"
active_chars = [char for char in self.character_states.values() if char.last_seen_chapter >= chapter - 2]
if not active_chars:
return "\n(아직 μ£Όμš” 캐릭터 정보가 μ—†μŠ΅λ‹ˆλ‹€.)\n"
for char in active_chars:
status = "생쑴" if char.alive else "사망"
summary += f"β€’ {char.name}: {status}"
if char.alive and char.location: summary += f" (μœ„μΉ˜: {char.location})"
if char.injuries: summary += f" (뢀상: {', '.join(char.injuries[-1:])})"
summary += "\n"
return summary
def get_plot_summary(self, chapter: int) -> str:
"""ν”Œλ‘― μš”μ•½"""
summary = "\n=== 졜근 μ£Όμš” 사건 μš”μ•½ ===\n"
recent_events = [p for p in self.plot_points if p.chapter >= chapter - 2]
if not recent_events:
return "\n(아직 μ£Όμš” 사건이 μ—†μŠ΅λ‹ˆλ‹€.)\n"
for event in recent_events[-3:]:  # show only the three most recent
summary += f"β€’ [챕터 {event.chapter}] {event.description}\n"
return summary
class WebSearchIntegration:
"""μ›Ή 검색 κΈ°λŠ₯ (κ°λ…μž λ‹¨κ³„μ—μ„œλ§Œ μ‚¬μš©)"""
def __init__(self):
self.brave_api_key = BRAVE_SEARCH_API_KEY
self.search_url = "https://api.search.brave.com/res/v1/web/search"
self.enabled = bool(self.brave_api_key)
def search(self, query: str, count: int = 3, language: str = "en") -> List[Dict]:
"""μ›Ή 검색 μˆ˜ν–‰"""
if not self.enabled:
return []
headers = {
"Accept": "application/json",
"X-Subscription-Token": self.brave_api_key
}
params = {
"q": query,
"count": count,
"search_lang": "ko" if language == "Korean" else "en",
"text_decorations": False,
"safesearch": "moderate"
}
try:
response = requests.get(self.search_url, headers=headers, params=params, timeout=10)
response.raise_for_status()
results = response.json().get("web", {}).get("results", [])
logger.info(f"μ›Ή 검색 성곡: '{query}'에 λŒ€ν•΄ {len(results)}개 κ²°κ³Ό 발견")
return results
except requests.exceptions.RequestException as e:
logger.error(f"μ›Ή 검색 API 였λ₯˜: {e}")
return []
def extract_relevant_info(self, results: List[Dict], max_chars: int = 1500) -> str:
"""검색 κ²°κ³Όμ—μ„œ κ΄€λ ¨ 정보 μΆ”μΆœ"""
if not results:
return ""
extracted = []
total_chars = 0
for i, result in enumerate(results[:3], 1):
title = result.get("title", "")
description = result.get("description", "")
url = result.get("url", "")
info = f"[{i}] {title}\n{description}\nSource: {url}\n"
if total_chars + len(info) < max_chars:
extracted.append(info)
total_chars += len(info)
else:
break
return "\n---\n".join(extracted)
class NovelDatabase:
"""μ†Œμ„€ μ„Έμ…˜ 관리 λ°μ΄ν„°λ² μ΄μŠ€"""
@staticmethod
def init_db():
with sqlite3.connect(DB_PATH) as conn:
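# WAL journal mode lets readers proceed while a write is in progress on the same file.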
conn.execute("PRAGMA journal_mode=WAL")
cursor = conn.cursor()
cursor.execute('''
CREATE TABLE IF NOT EXISTS sessions (
session_id TEXT PRIMARY KEY,
user_query TEXT NOT NULL,
language TEXT NOT NULL,
created_at TEXT DEFAULT (datetime('now')),
updated_at TEXT DEFAULT (datetime('now')),
status TEXT DEFAULT 'active',
current_stage INTEGER DEFAULT 0,
final_novel TEXT,
consistency_report TEXT
)
''')
cursor.execute('''
CREATE TABLE IF NOT EXISTS stages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id TEXT NOT NULL,
stage_number INTEGER NOT NULL,
stage_name TEXT NOT NULL,
role TEXT NOT NULL,
content TEXT,
word_count INTEGER DEFAULT 0,
status TEXT DEFAULT 'pending',
consistency_score REAL DEFAULT 0.0,
created_at TEXT DEFAULT (datetime('now')),
updated_at TEXT DEFAULT (datetime('now')),
FOREIGN KEY (session_id) REFERENCES sessions(session_id),
UNIQUE(session_id, stage_number)
)
''')
cursor.execute('''
CREATE TABLE IF NOT EXISTS character_states (
id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id TEXT NOT NULL,
character_name TEXT NOT NULL,
chapter INTEGER NOT NULL,
is_alive BOOLEAN DEFAULT TRUE,
location TEXT,
injuries TEXT,
emotional_state TEXT,
description TEXT,
created_at TEXT DEFAULT (datetime('now')),
FOREIGN KEY (session_id) REFERENCES sessions(session_id)
)
''')
cursor.execute('CREATE INDEX IF NOT EXISTS idx_session_id ON stages(session_id)')
cursor.execute('CREATE INDEX IF NOT EXISTS idx_stage_number ON stages(stage_number)')
cursor.execute('CREATE INDEX IF NOT EXISTS idx_char_session ON character_states(session_id)')
cursor.execute('CREATE INDEX IF NOT EXISTS idx_session_status ON sessions(status)')
conn.commit()
@staticmethod
@contextmanager
def get_db():
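# Serialize all access to the SQLite file through the global lock; each caller gets a fresh connection.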
with db_lock:
conn = sqlite3.connect(DB_PATH, timeout=30.0)
conn.row_factory = sqlite3.Row
try:
yield conn
finally:
conn.close()
@staticmethod
def create_session(user_query: str, language: str) -> str:
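# Derive the session ID from the query plus a timestamp so repeated queries get distinct sessions.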
session_id = hashlib.md5(f"{user_query}{datetime.now()}".encode()).hexdigest()
with NovelDatabase.get_db() as conn:
conn.cursor().execute(
'INSERT INTO sessions (session_id, user_query, language) VALUES (?, ?, ?)',
(session_id, user_query, language)
)
conn.commit()
return session_id
@staticmethod
def save_stage(session_id: str, stage_number: int, stage_name: str,
role: str, content: str, status: str = 'complete',
consistency_score: float = 0.0):
word_count = len(content.split()) if content else 0
with NovelDatabase.get_db() as conn:
cursor = conn.cursor()
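# Upsert: insert the stage row, or update it in place when (session_id, stage_number) already exists.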
cursor.execute('''
INSERT INTO stages (session_id, stage_number, stage_name, role, content, word_count, status, consistency_score)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(session_id, stage_number)
DO UPDATE SET content=?, word_count=?, status=?, stage_name=?, consistency_score=?, updated_at=datetime('now')
''', (session_id, stage_number, stage_name, role, content, word_count, status, consistency_score,
content, word_count, status, stage_name, consistency_score))
cursor.execute(
"UPDATE sessions SET updated_at = datetime('now'), current_stage = ? WHERE session_id = ?",
(stage_number, session_id)
)
conn.commit()
@staticmethod
def get_session(session_id: str) -> Optional[Dict]:
with NovelDatabase.get_db() as conn:
row = conn.cursor().execute('SELECT * FROM sessions WHERE session_id = ?', (session_id,)).fetchone()
return dict(row) if row else None
@staticmethod
def get_latest_active_session() -> Optional[Dict]:
with NovelDatabase.get_db() as conn:
row = conn.cursor().execute("SELECT * FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 1").fetchone()
return dict(row) if row else None
@staticmethod
def get_stages(session_id: str) -> List[Dict]:
with NovelDatabase.get_db() as conn:
rows = conn.cursor().execute('SELECT * FROM stages WHERE session_id = ? ORDER BY stage_number', (session_id,)).fetchall()
return [dict(row) for row in rows]
@staticmethod
def get_writer_content(session_id: str) -> str:
with NovelDatabase.get_db() as conn:
all_content = []
for writer_num in range(1, 11):
row = conn.cursor().execute(
"SELECT content FROM stages WHERE session_id = ? AND role = ? AND stage_name LIKE '%μˆ˜μ •λ³Έ%' ORDER BY stage_number DESC LIMIT 1",
(session_id, f'writer{writer_num}')
).fetchone()
if row and row['content']:
all_content.append(row['content'].strip())
return '\n\n'.join(all_content)
@staticmethod
def update_final_novel(session_id: str, final_novel: str, consistency_report: str = ""):
with NovelDatabase.get_db() as conn:
conn.cursor().execute(
"UPDATE sessions SET final_novel = ?, status = 'complete', updated_at = datetime('now'), consistency_report = ? WHERE session_id = ?",
(final_novel, consistency_report, session_id)
)
conn.commit()
@staticmethod
def get_active_sessions() -> List[Dict]:
with NovelDatabase.get_db() as conn:
rows = conn.cursor().execute(
"SELECT session_id, user_query, language, created_at, current_stage FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 10"
).fetchall()
return [dict(row) for row in rows]
class NovelWritingSystem:
"""μ΅œμ ν™”λœ μ†Œμ„€ 생성 μ‹œμŠ€ν…œ"""
def __init__(self):
self.token = FRIENDLI_TOKEN
self.api_url = API_URL
self.model_id = MODEL_ID
self.consistency_tracker = ConsistencyTracker()
self.web_search = WebSearchIntegration()
self.current_session_id = None
NovelDatabase.init_db()
def create_headers(self):
"""API 헀더 생성"""
return {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"}
# --- ν”„λ‘¬ν”„νŠΈ 생성 ν•¨μˆ˜λ“€ (Thematic Guardian κ°œλ… 톡합) ---
def create_director_initial_prompt(self, user_query: str, language: str) -> str:
"""κ°λ…μž 초기 기획 ν”„λ‘¬ν”„νŠΈ (μ›Ή 검색 및 ν…Œλ§ˆ μ œμ•½ 쑰건 κ°•ν™”)"""
search_results_str = ""
if self.web_search.enabled:
queries = [f"{user_query} novel setting", f"{user_query} background information"]
search_results = self.web_search.search(queries[0], count=2, language=language)
if search_results:
search_results_str = self.web_search.extract_relevant_info(search_results)
lang_prompts = {
"Korean": {
"title": "당신은 30νŽ˜μ΄μ§€ λΆ„λŸ‰μ˜ μ€‘νŽΈ μ†Œμ„€μ„ κΈ°νšν•˜λŠ” λ¬Έν•™ κ°λ…μžμž…λ‹ˆλ‹€.",
"user_theme": "μ‚¬μš©μž 주제",
"plan_instruction": "λ‹€μŒ μš”μ†Œλ“€μ„ ν¬ν•¨ν•œ μƒμ„Έν•œ μ†Œμ„€ κΈ°νšμ„ μž‘μ„±ν•˜μ„Έμš”:",
"theme_section": "1. **μ£Όμ œμ™€ μž₯λ₯΄ μ„€μ •**\n - 핡심 μ£Όμ œμ™€ λ©”μ‹œμ§€ (μ‚¬μš©μž μ˜λ„ 깊이 반영)\n - μž₯λ₯΄ 및 λΆ„μœ„κΈ°\n - λ…μžμΈ΅ 고렀사항",
"char_section": "2. **μ£Όμš” λ“±μž₯인물** (3-5λͺ…)\n | 이름 | μ—­ν•  | 성격 | λ°°κ²½ | λͺ©ν‘œ | κ°ˆλ“± |",
"setting_section": "3. **λ°°κ²½ μ„€μ •**\n - μ‹œκ³΅κ°„μ  λ°°κ²½\n - μ‚¬νšŒμ /문화적 ν™˜κ²½\n - μ£Όμš” μž₯μ†Œλ“€",
"plot_section": "4. **ν”Œλ‘― ꡬ쑰** (10개 파트, 각 3νŽ˜μ΄μ§€ λΆ„λŸ‰)\n | 파트 | νŽ˜μ΄μ§€ | μ£Όμš” 사건 | κΈ΄μž₯도 | 캐릭터 λ°œμ „ |",
"guideline_section": "5. **μž‘κ°€λ³„ μ§€μΉ¨**\n - 일관성 μœ μ§€λ₯Ό μœ„ν•œ 핡심 μ„€μ •\n - 문체와 톀 κ°€μ΄λ“œλΌμΈ",
"constraint_title": "⚠️맀우 μ€‘μš”ν•œ μ§€μ‹œμ‚¬ν•­: 핡심 μ œμ•½ 쑰건⚠️",
"constraint_body": "이 μ†Œμ„€μ€ **AI둜 인해 λͺ¨λ“  것이 μ‰½κ²Œ ν•΄κ²°λ˜λŠ” 긍정적이고 λ‹¨μˆœν•œ 이야기가 μ•„λ‹™λ‹ˆλ‹€.**\nλ°˜λ“œμ‹œ μ‚¬μš©μžμ˜ 주제인 '{query}'에 λ‹΄κΈ΄ **핡심 감정(예: λΆˆμ•ˆ, μ†Œμ™Έκ°, 상싀감, μ„ΈλŒ€ κ°ˆλ“± λ“±)을 μ€‘μ‹¬μœΌλ‘œ μ„œμ‚¬λ₯Ό μ „κ°œν•΄μ•Ό ν•©λ‹ˆλ‹€.**\nAIλ‚˜ νŠΉμ • κΈ°μˆ μ€ νŽΈλ¦¬ν•œ 도ꡬ가 μ•„λ‹ˆλΌ, μ£ΌμΈκ³΅μ—κ²Œ **κ°ˆλ“±κ³Ό 상싀감을 μ•ˆκ²¨μ£ΌλŠ” 핡심 원인**으둜 μž‘μš©ν•΄μ•Ό ν•©λ‹ˆλ‹€.\n이 μ œμ•½ 쑰건을 μ ˆλŒ€ λ²—μ–΄λ‚˜μ§€ λ§ˆμ‹­μ‹œμ˜€.",
"final_instruction": "창의적이고 깊이 μžˆλŠ” μ†Œμ„€μ΄ 될 수 μžˆλ„λ‘ μƒμ„Έν•˜κ²Œ κΈ°νšν•˜μ„Έμš”."
},
"English": {
"title": "You are a literary director planning a 30-page novella.",
"user_theme": "User Theme",
"plan_instruction": "Create a detailed novel plan including:",
"theme_section": "1. **Theme and Genre**\n - Core theme and message (Deeply reflect user's intent)\n - Genre and atmosphere",
"char_section": "2. **Main Characters** (3-5)\n | Name | Role | Personality | Background | Goal | Conflict |",
"setting_section": "3. **Setting**\n - Time and place\n - Social/cultural environment",
"plot_section": "4. **Plot Structure** (10 parts, ~3 pages each)\n | Part | Pages | Main Events | Tension | Character Development |",
"guideline_section": "5. **Writer Guidelines**\n - Key settings for consistency\n - Style and tone guidelines",
"constraint_title": "⚠️CRITICAL INSTRUCTION: CORE CONSTRAINTS⚠️",
"constraint_body": "This is **NOT a simple, positive story where AI solves everything.**\nYou must develop the narrative around the core emotions of the user's theme: '{query}' (e.g., anxiety, alienation, loss, generational conflict).\nAI or specific technology should be the **root cause of the protagonist's conflict and loss**, not a convenient tool.\nDo not deviate from this constraint.",
"final_instruction": "Plan in detail for a creative and profound novel."
}
}
p = lang_prompts[language]
return f"{p['title']}\n\n{p['user_theme']}: {user_query}\n\n{search_results_str}\n\n{p['plan_instruction']}\n\n{p['theme_section']}\n\n{p['char_section']}\n\n{p['setting_section']}\n\n{p['plot_section']}\n\n{p['guideline_section']}\n\n---\n{p['constraint_title']}\n{p['constraint_body'].format(query=user_query)}\n---\n\n{p['final_instruction']}"
def create_critic_director_prompt(self, director_plan: str, user_query: str, language: str) -> str:
"""λΉ„ν‰κ°€μ˜ κ°λ…μž 기획 κ²€ν†  ν”„λ‘¬ν”„νŠΈ (ν…Œλ§ˆ 일관성 κ°•ν™”)"""
lang_prompts = {
"Korean": {
"title": "당신은 λ¬Έν•™ λΉ„ν‰κ°€μž…λ‹ˆλ‹€. κ°λ…μžμ˜ μ†Œμ„€ κΈ°νšμ„ '주제 일관성'κ³Ό '기술적 일관성' κ΄€μ μ—μ„œ κ²€ν† ν•˜μ„Έμš”.",
"theme_check": f"**1. 주제 일관성 (κ°€μž₯ μ€‘μš”)**\n - **μ›λž˜ 주제:** '{user_query}'\n - κΈ°νšμ•ˆμ΄ 주제의 핡심 감정(λΆˆμ•ˆ, 상싀감 λ“±)μ—μ„œ λ²—μ–΄λ‚˜ κΈμ •μ μ΄κ±°λ‚˜ λ‹¨μˆœν•œ λ°©ν–₯으둜 흐λ₯΄μ§€ μ•Šμ•˜μŠ΅λ‹ˆκΉŒ?\n - AIλ‚˜ 기술이 κ°ˆλ“±μ˜ 원인이 μ•„λ‹Œ, λ‹¨μˆœ ν•΄κ²°μ‚¬λ‘œ λ¬˜μ‚¬λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆκΉŒ?",
"consistency_check": "**2. 기술적 일관성**\n - 캐릭터 μ„€μ •μ˜ λͺ¨μˆœ, ν”Œλ‘―μ˜ 논리적 ν—ˆμ , μ‹œκ°„μ„ /곡간 μ„€μ •μ˜ λ¬Έμ œμ μ„ κ²€ν† ν•˜μ„Έμš”.",
"instruction": "μœ„ ν•­λͺ©λ“€μ„ μ€‘μ‹¬μœΌλ‘œ ꡬ체적인 문제점과 κ°œμ„ μ•ˆμ„ μ œμ‹œν•˜μ„Έμš”."
},
"English": {
"title": "You are a literary critic. Review the director's plan from the perspectives of 'Thematic Consistency' and 'Technical Consistency'.",
"theme_check": f"**1. Thematic Consistency (Most Important)**\n - **Original Theme:** '{user_query}'\n - Does the plan drift from the core emotions (e.g., anxiety, loss) towards an overly positive or simplistic narrative?\n - Is AI depicted as a simple problem-solver instead of the root of the conflict?",
"consistency_check": "**2. Technical Consistency**\n - Review for character contradictions, plot holes, and timeline/setting issues.",
"instruction": "Provide specific problems and suggestions for improvement based on the above."
}
}
p = lang_prompts[language]
return f"{p['title']}\n\n**κ°λ…μž 기획:**\n{director_plan}\n\n---\n**κ²€ν†  ν•­λͺ©:**\n{p['theme_check']}\n\n{p['consistency_check']}\n\n{p['instruction']}"
def create_director_revision_prompt(self, initial_plan: str, critic_feedback: str, user_query: str, language: str) -> str:
"""κ°λ…μž μˆ˜μ • ν”„λ‘¬ν”„νŠΈ (ν…Œλ§ˆ μ œμ•½ 쑰건 μž¬κ°•μ‘°)"""
return f"""κ°λ…μžλ‘œμ„œ λΉ„ν‰κ°€μ˜ ν”Όλ“œλ°±μ„ λ°˜μ˜ν•˜μ—¬ μ†Œμ„€ κΈ°νšμ„ μˆ˜μ •ν•©λ‹ˆλ‹€.
**μ›λž˜ 주제:** {user_query}
**초기 기획:**\n{initial_plan}
**비평가 ν”Όλ“œλ°±:**\n{critic_feedback}
**μˆ˜μ • μ§€μΉ¨:**
- 비평가가 μ§€μ ν•œ λͺ¨λ“  일관성 λ¬Έμ œμ™€ 주제 μ΄νƒˆ 문제λ₯Ό ν•΄κ²°ν•˜μ„Έμš”.
- **핡심 μ œμ•½ 쑰건**을 λ‹€μ‹œ ν•œλ²ˆ μƒκΈ°ν•˜κ³ , μ†Œμ„€ 전체가 'λΆˆμ•ˆ'κ³Ό '상싀감'의 톀을 μœ μ§€ν•˜λ„λ‘ ν”Œλ‘―μ„ κ΅¬μ²΄ν™”ν•˜μ„Έμš”.
- 10λͺ…μ˜ μž‘κ°€κ°€ ν˜Όλ™ 없이 μž‘μ—…ν•  수 μžˆλ„λ‘ λͺ…ν™•ν•˜κ³  μƒμ„Έν•œ μ΅œμ’… λ§ˆμŠ€ν„°ν”Œλžœμ„ μž‘μ„±ν•˜μ„Έμš”.
"""
def create_writer_prompt(self, writer_number: int, director_plan: str, previous_content_summary: str, user_query: str, language: str) -> str:
"""μž‘κ°€ ν”„λ‘¬ν”„νŠΈ (ν…Œλ§ˆ λ¦¬λ§ˆμΈλ” 포함)"""
pages_start = (writer_number - 1) * 3 + 1
pages_end = writer_number * 3
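# Each writer covers a fixed three-page window (e.g. writer 4 -> pages 10-12).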
lang_prompts = {
"Korean": {
"title": f"당신은 μž‘κ°€ {writer_number}λ²ˆμž…λ‹ˆλ‹€. μ†Œμ„€μ˜ {pages_start}-{pages_end} νŽ˜μ΄μ§€λ₯Ό μž‘μ„±ν•˜μ„Έμš”.",
"plan": "κ°λ…μž λ§ˆμŠ€ν„°ν”Œλžœ",
"prev_summary": "이전 λ‚΄μš© μš”μ•½",
"guidelines": "**μž‘μ„± μ§€μΉ¨:**\n1. **λΆ„λŸ‰**: 1,400-1,500 단어 λ‚΄μ™Έ\n2. **μ—°κ²°μ„±**: μš”μ•½λœ 이전 λ‚΄μš©κ³Ό μžμ—°μŠ€λŸ½κ²Œ μ—°κ²°\n3. **일관성**: 캐릭터 μ„€μ •κ³Ό μƒνƒœ, ν”Œλ‘― ꡬ쑰λ₯Ό λ°˜λ“œμ‹œ λ”°λ₯Ό 것",
"reminder_title": "⭐ μžŠμ§€ λ§ˆμ„Έμš” (ν…Œλ§ˆ λ¦¬λ§ˆμΈλ”)",
"reminder_body": f"이 μ†Œμ„€μ˜ 핡심은 '{user_query}'에 λ‹΄κΈ΄ **λΆˆμ•ˆ, μ†Œμ™Έ, 상싀감**μž…λ‹ˆλ‹€. 긍정적인 해결을 μ„œλ‘λ₯΄μ§€ 말고, 주인곡의 λ‚΄λ©΄ κ°ˆλ“±μ„ 심도 있게 λ¬˜μ‚¬ν•˜λŠ” 데 μ§‘μ€‘ν•˜μ„Έμš”.",
"final_instruction": "μ°½μ˜μ μ΄λ©΄μ„œλ„ μ£Όμ œμ™€ 일관성을 μ ˆλŒ€ μžƒμ§€ λ§ˆμ‹­μ‹œμ˜€."
},
"English": {
"title": f"You are Writer #{writer_number}. Write pages {pages_start}-{pages_end} of the novella.",
"plan": "Director's Masterplan",
"prev_summary": "Previous Content Summary",
"guidelines": "**Writing Guidelines:**\n1. **Length**: Approx. 1,400-1,500 words\n2. **Connectivity**: Connect naturally with the summarized previous content.\n3. **Consistency**: Strictly follow character settings, states, and plot structure.",
"reminder_title": "⭐ REMINDER (THEME)",
"reminder_body": f"The core of this novel is the **anxiety, alienation, and loss** from the theme '{user_query}'. Do not rush to a positive resolution; focus on deeply describing the protagonist's internal conflict.",
"final_instruction": "Be creative, but never lose consistency and the core theme."
}
}
p = lang_prompts[language]
consistency_info = self.consistency_tracker.get_character_summary(writer_number) + self.consistency_tracker.get_plot_summary(writer_number)
return f"{p['title']}\n\n**{p['plan']}:**\n{director_plan}\n\n{consistency_info}\n\n**{p['prev_summary']}:**\n{previous_content_summary}\n\n---\n{p['guidelines']}\n\n**{p['reminder_title']}**\n{p['reminder_body']}\n---\n\n{p['final_instruction']}"
def create_critic_consistency_prompt(self, all_content: str, user_query: str, language: str) -> str:
"""비평가 쀑간 κ²€ν†  ν”„λ‘¬ν”„νŠΈ (ν…Œλ§ˆ κ²€ν†  κ°•ν™”)"""
return f"""당신은 일관성 κ²€ν†  μ „λ¬Έ λΉ„ν‰κ°€μž…λ‹ˆλ‹€. μ§€κΈˆκΉŒμ§€ μž‘μ„±λœ λ‚΄μš©μ„ κ²€ν† ν•˜μ„Έμš”.
**μ›λž˜ 주제:** {user_query}
**ν˜„μž¬κΉŒμ§€ μž‘μ„±λœ λ‚΄μš© (졜근 3000자):**\n{all_content[-3000:]}
**κ²€ν†  ν•­λͺ©:**
1. **주제 일관성 (κ°€μž₯ μ€‘μš”):** λ‚΄μš©μ΄ μ›λž˜ 주제의 μ–΄λ‘μš΄ κ°μ •μ„ μ—μ„œ λ²—μ–΄λ‚˜μ§€ μ•Šμ•˜λŠ”μ§€ ν™•μΈν•˜κ³ , 벗어났닀면 μˆ˜μ • λ°©ν–₯을 μ œμ‹œν•˜μ„Έμš”.
2. **기술적 일관성:** 캐릭터, ν”Œλ‘―, μ„€μ •μ˜ 연속성과 논리적 였λ₯˜λ₯Ό μ°Ύμ•„λ‚΄μ„Έμš”.
3. **반볡 λ‚΄μš©:** 의미적으둜 μ€‘λ³΅λ˜λŠ” μž₯λ©΄μ΄λ‚˜ ν‘œν˜„μ΄ μ—†λŠ”μ§€ ν™•μΈν•˜μ„Έμš”.
**κ²°κ³Ό:** 발견된 문제점과 ꡬ체적인 μˆ˜μ • μ œμ•ˆμ„ λͺ©λ‘μœΌλ‘œ μ œμ‹œν•˜μ„Έμš”.
"""
def create_writer_revision_prompt(self, writer_number: int, initial_content: str, consistency_feedback: str, language: str) -> str:
"""μž‘κ°€ μˆ˜μ • ν”„λ‘¬ν”„νŠΈ"""
return f"""μž‘κ°€ {writer_number}λ²ˆμœΌλ‘œμ„œ λΉ„ν‰κ°€μ˜ ν”Όλ“œλ°±μ„ λ°˜μ˜ν•˜μ—¬ λ‚΄μš©μ„ μˆ˜μ •ν•˜μ„Έμš”.
**초기 μž‘μ„± λ‚΄μš©:**\n{initial_content}
**비평가 ν”Όλ“œλ°±:**\n{consistency_feedback}
**μˆ˜μ • μ§€μΉ¨:**
- μ§€μ λœ λͺ¨λ“  주제 μ΄νƒˆ 및 일관성 문제λ₯Ό ν•΄κ²°ν•˜μ„Έμš”.
- λΆ„λŸ‰(1,400-1,500 단어)을 μœ μ§€ν•˜λ©΄μ„œ λ‚΄μš©μ˜ μ§ˆμ„ λ†’μ΄μ„Έμš”.
- μˆ˜μ •λœ μ΅œμ’… 버전을 μ œμ‹œν•˜μ„Έμš”.
"""
def create_critic_final_prompt(self, complete_novel: str, language: str) -> str:
"""μ΅œμ’… 비평가 κ²€ν†  및 λ³΄κ³ μ„œ μž‘μ„± ν”„λ‘¬ν”„νŠΈ"""
return f"""μ™„μ„±λœ μ†Œμ„€μ˜ μ΅œμ’… 일관성 및 완성도에 λŒ€ν•œ μ’…ν•© λ³΄κ³ μ„œλ₯Ό μž‘μ„±ν•˜μ„Έμš”.
**μ™„μ„±λœ μ†Œμ„€ (λ§ˆμ§€λ§‰ 2000자):**\n{complete_novel[-2000:]}
**λ³΄κ³ μ„œ 포함 ν•­λͺ©:**
1. **전체 일관성 평가:** 캐릭터, ν”Œλ‘―, μ„€μ •, 주제 μœ μ§€μ— λŒ€ν•œ 점수(1-10)와 총평.
2. **μ΅œμ’… 발견된 문제점:** λ‚¨μ•„μžˆλŠ” μ‚¬μ†Œν•œ λ¬Έμ œμ λ“€.
3. **성곡 μš”μ†Œ:** 특히 잘 μœ μ§€λœ 일관성 λΆ€λΆ„μ΄λ‚˜ 주제 ν‘œν˜„μ΄ λ›°μ–΄λ‚œ λΆ€λΆ„.
4. **μ΅œμ’… 평가:** μ†Œμ„€μ˜ μ „λ°˜μ μΈ 완성도와 λ…μžμ—κ²Œ λ―ΈμΉ  영ν–₯에 λŒ€ν•œ 평가.
"""
# --- LLM call helpers ---
def call_llm_sync(self, messages: List[Dict[str, str]], role: str, language: str) -> str:
"""LLM 동기식 호좜 (μš”μ•½ λ“± λ‚΄λΆ€μš©)"""
full_content = ""
for chunk in self.call_llm_streaming(messages, role, language):
full_content += chunk
if full_content.startswith("❌"):
raise Exception(f"LLM Sync Call Failed: {full_content}")
return full_content
def call_llm_streaming(self, messages: List[Dict[str, str]], role: str, language: str) -> Generator[str, None, None]:
"""LLM 슀트리밍 호좜 (μ™„μ „ν•œ μ—λŸ¬ 처리 및 디버깅)"""
try:
system_prompts = self.get_system_prompts(language)
full_messages = [{"role": "system", "content": system_prompts.get(role, "You are a helpful assistant.")}, *messages]
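# The request/response use an OpenAI-style chat completions format: an SSE stream of "data:" lines
# carrying choices[0].delta.content chunks, terminated by a [DONE] marker (parsed below).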
payload = {
"model": self.model_id,
"messages": full_messages,
"max_tokens": 10000,
"temperature": 0.75,
"top_p": 0.9,
"presence_penalty": 0.3,
"frequency_penalty": 0.2,
"stream": True,
"stream_options": {"include_usage": True}
}
logger.info(f"[{role}] API 슀트리밍 μ‹œμž‘")
# Call the API
response = requests.post(
self.api_url,
headers=self.create_headers(),
json=payload,
stream=True,
timeout=180
)
# μƒνƒœ μ½”λ“œ 확인
if response.status_code != 200:
logger.error(f"API 응닡 였λ₯˜: {response.status_code}")
logger.error(f"응닡 λ‚΄μš©: {response.text[:500]}")
yield f"❌ API 였λ₯˜ (μƒνƒœ μ½”λ“œ: {response.status_code})"
return
response.raise_for_status()
# Process the stream
buffer = ""
total_content = ""
chunk_count = 0
error_count = 0
for line in response.iter_lines():
if not line:
continue
try:
line_str = line.decode('utf-8').strip()
# Only handle SSE data lines
if not line_str.startswith("data: "):
continue
data_str = line_str[6:]  # strip the "data: " prefix
# Check for end of stream
if data_str == "[DONE]":
logger.info(f"[{role}] 슀트리밍 μ™„λ£Œ - 총 {len(total_content)} 문자")
break
# Parse the JSON payload
try:
data = json.loads(data_str)
except json.JSONDecodeError:
logger.warning(f"JSON νŒŒμ‹± μ‹€νŒ¨: {data_str[:100]}")
continue
# Safely check the choices array
choices = data.get("choices", None)
if not choices or not isinstance(choices, list) or len(choices) == 0:
# Check for an error response
if "error" in data:
error_msg = data.get("error", {}).get("message", "Unknown error")
logger.error(f"API μ—λŸ¬: {error_msg}")
yield f"❌ API μ—λŸ¬: {error_msg}"
return
continue
# deltaμ—μ„œ content μΆ”μΆœ
delta = choices[0].get("delta", {})
content = delta.get("content", "")
if content:
buffer += content
total_content += content
chunk_count += 1
# Yield every ~100 characters or at a newline
if len(buffer) >= 100 or '\n' in buffer:
yield buffer
buffer = ""
time.sleep(0.01)  # brief pause to let the UI update
except Exception as e:
error_count += 1
logger.error(f"청크 처리 였λ₯˜ #{error_count}: {str(e)}")
if error_count > 10:  # abort on too many errors
yield f"❌ 슀트리밍 쀑 κ³Όλ„ν•œ 였λ₯˜ λ°œμƒ"
return
continue
# Flush the remaining buffer
if buffer:
yield buffer
# Verify the result
if chunk_count == 0:
logger.error(f"[{role}] μ½˜ν…μΈ κ°€ μ „ν˜€ μˆ˜μ‹ λ˜μ§€ μ•ŠμŒ")
yield "❌ APIλ‘œλΆ€ν„° 응닡을 λ°›μ§€ λͺ»ν–ˆμŠ΅λ‹ˆλ‹€."
else:
logger.info(f"[{role}] μ„±κ³΅μ μœΌλ‘œ {chunk_count}개 청크, 총 {len(total_content)}자 μˆ˜μ‹ ")
except requests.exceptions.Timeout:
logger.error("API μš”μ²­ μ‹œκ°„ 초과")
yield "❌ API μš”μ²­ μ‹œκ°„μ΄ μ΄ˆκ³Όλ˜μ—ˆμŠ΅λ‹ˆλ‹€."
except requests.exceptions.ConnectionError:
logger.error("API μ—°κ²° μ‹€νŒ¨")
yield "❌ API μ„œλ²„μ— μ—°κ²°ν•  수 μ—†μŠ΅λ‹ˆλ‹€."
except Exception as e:
logger.error(f"예기치 μ•Šμ€ 였λ₯˜: {type(e).__name__}: {str(e)}", exc_info=True)
yield f"❌ 였λ₯˜ λ°œμƒ: {str(e)}"
def get_system_prompts(self, language: str) -> Dict[str, str]:
"""역할별 μ‹œμŠ€ν…œ ν”„λ‘¬ν”„νŠΈ 생성"""
base_prompts = {
"Korean": {
"director": "당신은 창의적이고 체계적인 μ†Œμ„€ 기획 μ „λ¬Έκ°€μž…λ‹ˆλ‹€. ν₯λ―Έλ‘­κ³  일관성 μžˆλŠ” μŠ€ν† λ¦¬λ₯Ό μ„€κ³„ν•˜μ„Έμš”.",
"critic": "당신은 일관성 κ²€ν†  μ „λ¬Έ λΉ„ν‰κ°€μž…λ‹ˆλ‹€. 캐릭터, ν”Œλ‘―, μ„€μ •μ˜ 일관성을 μ² μ €νžˆ μ κ²€ν•˜κ³  κ°œμ„ λ°©μ•ˆμ„ μ œμ‹œν•˜μ„Έμš”.",
"writer_base": "당신은 μ „λ¬Έ μ†Œμ„€ μž‘κ°€μž…λ‹ˆλ‹€. μ£Όμ–΄μ§„ 지침에 따라 λͺ°μž…감 있고 일관성 μžˆλŠ” λ‚΄μš©μ„ μž‘μ„±ν•˜μ„Έμš”."
},
"English": {
"director": "You are a creative and systematic novel planning expert. Design engaging and consistent stories.",
"critic": "You are a consistency review specialist critic. Thoroughly check character, plot, and setting consistency and suggest improvements.",
"writer_base": "You are a professional novel writer. Write immersive and consistent content according to the given guidelines."
}
}
prompts = base_prompts[language].copy()
# Writer-specific prompts
if language == "Korean":
prompts["writer1"] = "당신은 μ†Œμ„€μ˜ λ§€λ ₯적인 μ‹œμž‘μ„ λ‹΄λ‹Ήν•˜λŠ” μž‘κ°€μž…λ‹ˆλ‹€. λ…μžλ₯Ό μ‚¬λ‘œμž‘λŠ” λ„μž…λΆ€λ₯Ό λ§Œλ“œμ„Έμš”."
prompts["writer10"] = "당신은 μ™„λ²½ν•œ 결말을 λ§Œλ“œλŠ” μž‘κ°€μž…λ‹ˆλ‹€. λ…μžμ—κ²Œ κΉŠμ€ μ—¬μš΄μ„ λ‚¨κΈ°λŠ” 마무리λ₯Ό ν•˜μ„Έμš”."
else:
prompts["writer1"] = "You are a writer responsible for the captivating beginning. Create an opening that hooks readers."
prompts["writer10"] = "You are a writer who creates the perfect ending. Create a conclusion that leaves readers with deep resonance."
# Writers 2-9 use the base prompt
for i in range(2, 10):
prompts[f"writer{i}"] = prompts["writer_base"]
return prompts
# --- Main process ---
def process_novel_stream(self, query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, List[Dict[str, Any]], str], None, None]:
"""μ†Œμ„€ 생성 슀트리밍 ν”„λ‘œμ„ΈμŠ€ (κ°•ν™”λœ 둜직)"""
try:
resume_from_stage = 0
if session_id:
self.current_session_id = session_id
session = NovelDatabase.get_session(session_id)
if session:
query = session['user_query']
language = session['language']
resume_from_stage = session['current_stage'] + 1
logger.info(f"Resuming session {session_id} from stage {resume_from_stage}")
else:
self.current_session_id = NovelDatabase.create_session(query, language)
logger.info(f"Created new session: {self.current_session_id}")
stages = []
if resume_from_stage > 0:
stages = [{
"name": s['stage_name'], "status": s['status'], "content": s.get('content', ''),
"consistency_score": s.get('consistency_score', 0.0)
} for s in NovelDatabase.get_stages(self.current_session_id)]
for stage_idx in range(resume_from_stage, len(OPTIMIZED_STAGES)):
role, stage_name = OPTIMIZED_STAGES[stage_idx]
if stage_idx >= len(stages):
stages.append({"name": stage_name, "status": "active", "content": "", "consistency_score": 0.0})
else:
stages[stage_idx]["status"] = "active"
yield "", stages, self.current_session_id
prompt = self.get_stage_prompt(stage_idx, role, query, language, stages)
stage_content = ""
for chunk in self.call_llm_streaming([{"role": "user", "content": prompt}], role, language):
stage_content += chunk
stages[stage_idx]["content"] = stage_content
yield "", stages, self.current_session_id
consistency_score = 0.0
if role.startswith("writer"):
writer_num = int(re.search(r'\d+', role).group())
all_previous = self.get_all_content(stages, stage_idx)
errors = self.consistency_tracker.validate_consistency(writer_num, stage_content)
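# Heuristic score: start from 10 and deduct 2 points per detected consistency error.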
consistency_score = max(0, 10 - len(errors) * 2)
stages[stage_idx]["consistency_score"] = consistency_score
stages[stage_idx]["status"] = "complete"
NovelDatabase.save_stage(
self.current_session_id, stage_idx, stage_name, role,
stage_content, "complete", consistency_score
)
yield "", stages, self.current_session_id
final_novel = NovelDatabase.get_writer_content(self.current_session_id)
final_report = self.generate_consistency_report(final_novel, language)
NovelDatabase.update_final_novel(self.current_session_id, final_novel, final_report)
yield f"βœ… μ†Œμ„€ μ™„μ„±! 총 {len(final_novel.split())}단어", stages, self.current_session_id
except Exception as e:
logger.error(f"μ†Œμ„€ 생성 ν”„λ‘œμ„ΈμŠ€ 였λ₯˜: {e}", exc_info=True)
yield f"❌ 였λ₯˜ λ°œμƒ: {e}", stages if 'stages' in locals() else [], self.current_session_id
def get_stage_prompt(self, stage_idx: int, role: str, query: str, language: str, stages: List[Dict]) -> str:
"""단계별 ν”„λ‘¬ν”„νŠΈ 생성 (μš”μ•½ κΈ°λŠ₯ 및 주제 전달 κ°•ν™”)"""
if stage_idx == 0:
return self.create_director_initial_prompt(query, language)
if stage_idx == 1:
return self.create_critic_director_prompt(stages[0]["content"], query, language)
if stage_idx == 2:
return self.create_director_revision_prompt(stages[0]["content"], stages[1]["content"], query, language)
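# Stage 2 holds the director's revised masterplan that all writer prompts build on.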
master_plan = stages[2]["content"]
if 3 <= stage_idx <= 12:  # writer drafts
writer_num = stage_idx - 2
previous_content = self.get_all_content(stages, stage_idx)
summary = self.create_summary(previous_content, language)
return self.create_writer_prompt(writer_num, master_plan, summary, query, language)
if stage_idx == 13:  # critic mid-review
all_content = self.get_all_content(stages, stage_idx)
return self.create_critic_consistency_prompt(all_content, query, language)
if 14 <= stage_idx <= 23:  # writer revisions
writer_num = stage_idx - 13
initial_content = stages[2 + writer_num]["content"]
feedback = stages[13]["content"]
return self.create_writer_revision_prompt(writer_num, initial_content, feedback, language)
if stage_idx == 24:  # final review
complete_novel = self.get_all_writer_content(stages)
return self.create_critic_final_prompt(complete_novel, language)
return ""
def create_summary(self, content: str, language: str) -> str:
"""LLM을 μ΄μš©ν•΄ 이전 λ‚΄μš©μ„ μš”μ•½"""
if not content.strip():
return "이전 λ‚΄μš©μ΄ μ—†μŠ΅λ‹ˆλ‹€." if language == "Korean" else "No previous content."
prompt_text = "λ‹€μŒ μ†Œμ„€ λ‚΄μš©μ„ 3~5개의 핡심적인 λ¬Έμž₯으둜 μš”μ•½ν•΄μ€˜. λ‹€μŒ μž‘κ°€κ°€ 이야기λ₯Ό μ΄μ–΄κ°€λŠ” 데 ν•„μš”ν•œ 핡심 정보(λ“±μž₯인물의 ν˜„μž¬ 상황, 감정, λ§ˆμ§€λ§‰ 사건)λ₯Ό 포함해야 ν•΄."
if language != "Korean":
prompt_text = "Summarize the following novel content in 3-5 key sentences. Include crucial information for the next writer to continue the story (characters' current situation, emotions, and the last major event)."
summary_prompt = f"{prompt_text}\n\n---\n{content[-2000:]}"
try:
summary = self.call_llm_sync([{"role": "user", "content": summary_prompt}], "critic", language)
return summary
except Exception as e:
logger.error(f"μš”μ•½ 생성 μ‹€νŒ¨: {e}")
return content[-1000:]
def get_all_content(self, stages: List[Dict], current_stage: int) -> str:
"""ν˜„μž¬κΉŒμ§€μ˜ λͺ¨λ“  λ‚΄μš© κ°€μ Έμ˜€κΈ°"""
return "\n\n".join(s["content"] for i, s in enumerate(stages) if i < current_stage and s["content"])
def get_all_writer_content(self, stages: List[Dict]) -> str:
"""λͺ¨λ“  μž‘κ°€ μ΅œμ’… μˆ˜μ •λ³Έ λ‚΄μš© κ°€μ Έμ˜€κΈ°"""
return "\n\n".join(s["content"] for i, s in enumerate(stages) if 14 <= i <= 23 and s["content"])
def generate_consistency_report(self, complete_novel: str, language: str) -> str:
"""μ΅œμ’… λ³΄κ³ μ„œ 생성 (LLM 호좜)"""
prompt = self.create_critic_final_prompt(complete_novel, language)
try:
report = self.call_llm_sync([{"role": "user", "content": prompt}], "critic", language)
return report
except Exception as e:
logger.error(f"μ΅œμ’… λ³΄κ³ μ„œ 생성 μ‹€νŒ¨: {e}")
return "λ³΄κ³ μ„œ 생성 쀑 였λ₯˜ λ°œμƒ"
# --- Utility functions ---
def process_query(query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, str, str, str], None, None]:
"""메인 쿼리 처리 ν•¨μˆ˜"""
if not query.strip():
yield "", "", "❌ 주제λ₯Ό μž…λ ₯ν•΄μ£Όμ„Έμš”.", session_id
return
system = NovelWritingSystem()
stages_markdown = ""
novel_content = ""
for status, stages, current_session_id in system.process_novel_stream(query, language, session_id):
stages_markdown = format_stages_display(stages)
# Fetch the final novel content
if stages and all(s.get("status") == "complete" for s in stages[-10:]):
novel_content = NovelDatabase.get_writer_content(current_session_id)
novel_content = format_novel_display(novel_content)
yield stages_markdown, novel_content, status or "πŸ”„ 처리 쀑...", current_session_id
def get_active_sessions(language: str) -> List[str]:
"""ν™œμ„± μ„Έμ…˜ λͺ©λ‘ κ°€μ Έμ˜€κΈ°"""
sessions = NovelDatabase.get_active_sessions()
return [f"{s['session_id'][:8]}... - {s['user_query'][:50]}... ({s['created_at']})"
for s in sessions]
def auto_recover_session(language: str) -> Tuple[Optional[str], str]:
"""κ°€μž₯ 졜근 ν™œμ„± μ„Έμ…˜ μžλ™ 볡ꡬ"""
latest_session = NovelDatabase.get_latest_active_session()
if latest_session:
return latest_session['session_id'], f"μ„Έμ…˜ {latest_session['session_id'][:8]}... 볡ꡬ됨"
return None, "볡ꡬ할 μ„Έμ…˜μ΄ μ—†μŠ΅λ‹ˆλ‹€."
def resume_session(session_id: str, language: str) -> Generator[Tuple[str, str, str, str], None, None]:
"""μ„Έμ…˜ 재개 ν•¨μˆ˜"""
if not session_id:
yield "", "", "❌ μ„Έμ…˜ IDκ°€ μ—†μŠ΅λ‹ˆλ‹€.", session_id
return
# λ“œλ‘­λ‹€μš΄μ—μ„œ μ„Έμ…˜ ID μΆ”μΆœ
if "..." in session_id:
session_id = session_id.split("...")[0]
session = NovelDatabase.get_session(session_id)
if not session:
yield "", "", "❌ μ„Έμ…˜μ„ 찾을 수 μ—†μŠ΅λ‹ˆλ‹€.", None
return
# Resume via process_query
yield from process_query(session['user_query'], session['language'], session_id)
def download_novel(novel_text: str, format_type: str, language: str, session_id: str) -> Optional[str]:
"""μ†Œμ„€ λ‹€μš΄λ‘œλ“œ 파일 생성"""
if not novel_text or not session_id:
return None
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
filename = f"novel_{session_id[:8]}_{timestamp}"
try:
if format_type == "DOCX" and DOCX_AVAILABLE:
return export_to_docx(novel_text, filename, language)
else:
return export_to_txt(novel_text, filename)
except Exception as e:
logger.error(f"파일 생성 μ‹€νŒ¨: {e}")
return None
def format_stages_display(stages: List[Dict]) -> str:
"""단계별 μ§„ν–‰ 상황 λ§ˆν¬λ‹€μš΄ ν¬λ§·νŒ…"""
markdown = "## 🎬 μ§„ν–‰ 상황\n\n"
for i, stage in enumerate(stages):
status_icon = "βœ…" if stage['status'] == 'complete' else "πŸ”„" if stage['status'] == 'active' else "⏳"
markdown += f"{status_icon} **{stage['name']}**"
if stage.get('consistency_score', 0) > 0:
markdown += f" (일관성: {stage['consistency_score']:.1f}/10)"
markdown += "\n"
if stage['content']:
preview = stage['content'][:200] + "..." if len(stage['content']) > 200 else stage['content']
markdown += f"> {preview}\n\n"
return markdown
def format_novel_display(novel_text: str) -> str:
"""μ†Œμ„€ λ‚΄μš© λ§ˆν¬λ‹€μš΄ ν¬λ§·νŒ…"""
if not novel_text:
return "아직 μ™„μ„±λœ λ‚΄μš©μ΄ μ—†μŠ΅λ‹ˆλ‹€."
# νŽ˜μ΄μ§€ ꡬ뢄 μΆ”κ°€
pages = novel_text.split('\n\n')
formatted = "# πŸ“– μ™„μ„±λœ μ†Œμ„€\n\n"
for i, page in enumerate(pages):
if page.strip():
formatted += f"### νŽ˜μ΄μ§€ {i+1}\n\n{page}\n\n---\n\n"
return formatted
def export_to_docx(content: str, filename: str, language: str) -> str:
"""DOCX 파일둜 내보내기"""
doc = Document()
# 제λͺ© μΆ”κ°€
title = doc.add_heading('AI ν˜‘μ—… μ†Œμ„€', 0)
title.alignment = WD_ALIGN_PARAGRAPH.CENTER
# 메타데이터
doc.add_paragraph(f"생성일: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
doc.add_paragraph(f"μ–Έμ–΄: {language}")
doc.add_page_break()
# Add the body text
paragraphs = content.split('\n\n')
for para in paragraphs:
if para.strip():
doc.add_paragraph(para.strip())
# Save the file
filepath = f"{filename}.docx"
doc.save(filepath)
return filepath
def export_to_txt(content: str, filename: str) -> str:
"""TXT 파일둜 내보내기"""
filepath = f"{filename}.txt"
with open(filepath, 'w', encoding='utf-8') as f:
f.write(content)
return filepath
# CSS styles
custom_css = """
.gradio-container {
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
min-height: 100vh;
}
.main-header {
background-color: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
padding: 30px;
border-radius: 12px;
margin-bottom: 30px;
text-align: center;
color: white;
}
.input-section {
background-color: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
padding: 20px;
border-radius: 12px;
margin-bottom: 20px;
}
.session-section {
background-color: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
padding: 15px;
border-radius: 8px;
margin-top: 20px;
color: white;
}
#stages-display {
background-color: rgba(255, 255, 255, 0.95);
padding: 20px;
border-radius: 12px;
max-height: 600px;
overflow-y: auto;
}
#novel-output {
background-color: rgba(255, 255, 255, 0.95);
padding: 30px;
border-radius: 12px;
max-height: 400px;
overflow-y: auto;
}
.download-section {
background-color: rgba(255, 255, 255, 0.9);
padding: 15px;
border-radius: 8px;
margin-top: 20px;
}
"""
# Build the Gradio interface
def create_interface():
with gr.Blocks(css=custom_css, title="AI ν˜‘μ—… μ†Œμ„€ 생성 μ‹œμŠ€ν…œ") as interface:
gr.HTML("""
<div class="main-header">
<h1 style="font-size: 2.5em; margin-bottom: 10px;">
πŸ“š AI ν˜‘μ—… μ†Œμ„€ 생성 μ‹œμŠ€ν…œ
</h1>
<h3 style="color: #ccc; margin-bottom: 20px;">
일관성 μ€‘μ‹¬μ˜ 창의적 μ†Œμ„€ 생성
</h3>
<p style="font-size: 1.1em; color: #ddd; max-width: 800px; margin: 0 auto;">
주제λ₯Ό μž…λ ₯ν•˜λ©΄ AI μ—μ΄μ „νŠΈλ“€μ΄ ν˜‘μ—…ν•˜μ—¬ 30νŽ˜μ΄μ§€ λΆ„λŸ‰μ˜ μ™„μ„±λœ μ†Œμ„€μ„ μƒμ„±ν•©λ‹ˆλ‹€.
<br>
κ°λ…μž 1λͺ…, 비평가 1λͺ…, μž‘κ°€ 10λͺ…이 ν•¨κ»˜ μž‘μ—…ν•˜λ©° 일관성을 μœ μ§€ν•©λ‹ˆλ‹€.
</p>
</div>
""")
# μƒνƒœ 관리
current_session_id = gr.State(None)
with gr.Row():
with gr.Column(scale=1):
with gr.Group(elem_classes=["input-section"]):
query_input = gr.Textbox(
label="μ†Œμ„€ 주제 / Novel Theme",
placeholder="μ†Œμ„€μ˜ μ£Όμ œλ‚˜ 초기 아이디어λ₯Ό μž…λ ₯ν•˜μ„Έμš”...\nEnter your novel theme or initial idea...",
lines=4
)
language_select = gr.Radio(
choices=["English", "Korean"],
value="English",
label="μ–Έμ–΄ / Language"
)
with gr.Row():
submit_btn = gr.Button("πŸš€ μ†Œμ„€ 생성 μ‹œμž‘", variant="primary", scale=2)
clear_btn = gr.Button("πŸ—‘οΈ μ΄ˆκΈ°ν™”", scale=1)
status_text = gr.Textbox(
label="μƒνƒœ",
interactive=False,
value="πŸ”„ μ€€λΉ„ μ™„λ£Œ"
)
# Session management
with gr.Group(elem_classes=["session-section"]):
gr.Markdown("### πŸ’Ύ 이전 μ„Έμ…˜ 재개")
session_dropdown = gr.Dropdown(
label="μ„Έμ…˜ 선택",
choices=[],
interactive=True
)
with gr.Row():
refresh_btn = gr.Button("πŸ”„ λͺ©λ‘ μƒˆλ‘œκ³ μΉ¨", scale=1)
resume_btn = gr.Button("▢️ 선택 재개", variant="secondary", scale=1)
auto_recover_btn = gr.Button("♻️ μžλ™ 볡ꡬ", scale=1)
with gr.Column(scale=2):
with gr.Tab("πŸ“ μž‘μ„± κ³Όμ •"):
stages_display = gr.Markdown(
value="μž‘μ„± 과정이 여기에 ν‘œμ‹œλ©λ‹ˆλ‹€...",
elem_id="stages-display"
)
with gr.Tab("πŸ“– μ™„μ„±λœ μ†Œμ„€"):
novel_output = gr.Markdown(
value="μ™„μ„±λœ μ†Œμ„€μ΄ 여기에 ν‘œμ‹œλ©λ‹ˆλ‹€...",
elem_id="novel-output"
)
with gr.Group(elem_classes=["download-section"]):
gr.Markdown("### πŸ“₯ μ†Œμ„€ λ‹€μš΄λ‘œλ“œ")
with gr.Row():
format_select = gr.Radio(
choices=["DOCX", "TXT"],
value="DOCX" if DOCX_AVAILABLE else "TXT",
label="ν˜•μ‹"
)
download_btn = gr.Button("⬇️ λ‹€μš΄λ‘œλ“œ", variant="secondary")
download_file = gr.File(
label="λ‹€μš΄λ‘œλ“œλœ 파일",
visible=False
)
# Hidden state
novel_text_state = gr.State("")
# Examples
with gr.Row():
gr.Examples(
examples=[
["미래 λ„μ‹œμ—μ„œ 기얡을 κ±°λž˜ν•˜λŠ” μƒμΈμ˜ 이야기"],
["μ‹œκ°„μ΄ 거꾸둜 흐λ₯΄λŠ” λ§ˆμ„μ˜ λ―ΈμŠ€ν„°λ¦¬"],
["A scientist discovers a portal to parallel universes"],
["In a world where dreams can be traded, a dream thief's story"],
["Two AI entities fall in love while preventing a cyber war"],
["μ±… μ†μœΌλ‘œ λ“€μ–΄κ°ˆ 수 μžˆλŠ” λŠ₯λ ₯을 κ°€μ§„ μ‚¬μ„œμ˜ λͺ¨ν—˜"]
],
inputs=query_input,
label="πŸ’‘ 예제 주제"
)
# Event handlers
def refresh_sessions():
try:
sessions = get_active_sessions("English")
return gr.update(choices=sessions)
except Exception as e:
logger.error(f"Error refreshing sessions: {str(e)}")
return gr.update(choices=[])
def handle_auto_recover(language):
session_id, message = auto_recover_session(language)
return session_id
# Wire up events
submit_btn.click(
fn=process_query,
inputs=[query_input, language_select, current_session_id],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
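# Mirror the rendered novel into the hidden state so the download handler can read it.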
novel_output.change(
fn=lambda x: x,
inputs=[novel_output],
outputs=[novel_text_state]
)
resume_btn.click(
fn=lambda x: x,
inputs=[session_dropdown],
outputs=[current_session_id]
).then(
fn=resume_session,
inputs=[current_session_id, language_select],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
auto_recover_btn.click(
fn=handle_auto_recover,
inputs=[language_select],
outputs=[current_session_id]
).then(
fn=resume_session,
inputs=[current_session_id, language_select],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
refresh_btn.click(
fn=refresh_sessions,
outputs=[session_dropdown]
)
clear_btn.click(
fn=lambda: ("", "", "πŸ”„ μ€€λΉ„ μ™„λ£Œ", "", None),
outputs=[stages_display, novel_output, status_text, novel_text_state, current_session_id]
)
def handle_download(format_type, language, session_id, novel_text):
if not session_id:
return gr.update(visible=False)
file_path = download_novel(novel_text, format_type, language, session_id)
if file_path:
return gr.update(value=file_path, visible=True)
else:
return gr.update(visible=False)
download_btn.click(
fn=handle_download,
inputs=[format_select, language_select, current_session_id, novel_text_state],
outputs=[download_file]
)
# Load sessions on startup
interface.load(
fn=refresh_sessions,
outputs=[session_dropdown]
)
return interface
# Main entry point
if __name__ == "__main__":
logger.info("AI ν˜‘μ—… μ†Œμ„€ 생성 μ‹œμŠ€ν…œ μ‹œμž‘...")
logger.info("=" * 60)
# Environment checks
logger.info(f"API μ—”λ“œν¬μΈνŠΈ: {API_URL}")
if BRAVE_SEARCH_API_KEY:
logger.info("μ›Ή 검색이 ν™œμ„±ν™”λ˜μ—ˆμŠ΅λ‹ˆλ‹€.")
else:
logger.warning("μ›Ή 검색이 λΉ„ν™œμ„±ν™”λ˜μ—ˆμŠ΅λ‹ˆλ‹€.")
if DOCX_AVAILABLE:
logger.info("DOCX 내보내기가 ν™œμ„±ν™”λ˜μ—ˆμŠ΅λ‹ˆλ‹€.")
else:
logger.warning("DOCX 내보내기가 λΉ„ν™œμ„±ν™”λ˜μ—ˆμŠ΅λ‹ˆλ‹€.")
logger.info("=" * 60)
# λ°μ΄ν„°λ² μ΄μŠ€ μ΄ˆκΈ°ν™”
logger.info("λ°μ΄ν„°λ² μ΄μŠ€ μ΄ˆκΈ°ν™” 쀑...")
NovelDatabase.init_db()
logger.info("λ°μ΄ν„°λ² μ΄μŠ€ μ΄ˆκΈ°ν™” μ™„λ£Œ.")
# μΈν„°νŽ˜μ΄μŠ€ 생성 및 μ‹€ν–‰
interface = create_interface()
interface.launch(
server_name="0.0.0.0",
server_port=7860,
share=False,
debug=True
)