import gradio as gr
import os
import json
import requests
from datetime import datetime
import time
from typing import List, Dict, Any, Generator, Tuple, Optional
import logging
import re
import tempfile
from pathlib import Path
import sqlite3
import hashlib
import threading
from contextlib import contextmanager
from dataclasses import dataclass, field
from collections import defaultdict
# --- Logging setup ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# --- Document export imports ---
try:
    from docx import Document
    from docx.shared import Inches, Pt, RGBColor
    from docx.enum.text import WD_ALIGN_PARAGRAPH
    from docx.enum.style import WD_STYLE_TYPE
    DOCX_AVAILABLE = True
except ImportError:
    DOCX_AVAILABLE = False
    logger.warning("python-docx not installed. DOCX export will be disabled.")
# --- Environment variables and constants ---
FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "")
BRAVE_SEARCH_API_KEY = os.getenv("BRAVE_SEARCH_API_KEY", "")
API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
MODEL_ID = "dep89a2fld32mcm"
DB_PATH = "novel_sessions_v2.db"

# --- Environment variable validation ---
if not FRIENDLI_TOKEN:
    logger.error("FRIENDLI_TOKEN not set. Application will not work properly.")
    # In production the program should exit here; a dummy token is used for demo purposes.
    FRIENDLI_TOKEN = "dummy_token_for_testing"
if not BRAVE_SEARCH_API_KEY:
    logger.warning("BRAVE_SEARCH_API_KEY not set. Web search features will be disabled.")
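
# Both tokens are plain environment variables; for local runs they can be set
# before launch (the values below are placeholders, not real keys):
#   export FRIENDLI_TOKEN=<your-friendli-token>
#   export BRAVE_SEARCH_API_KEY=<your-brave-key>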
# --- Global variables ---
db_lock = threading.Lock()

# Optimized stage configuration (condensed and strengthened to 25 stages)
OPTIMIZED_STAGES = [
    ("director", "🎬 감독자: 초기 기획 (웹 검색 포함)"),
    ("critic", "📝 비평가: 기획 검토 (테마 및 일관성)"),
    ("director", "🎬 감독자: 수정된 마스터플랜"),
] + [
    (f"writer{i}", f"✍️ 작가 {i}: 초안 (페이지 {(i-1)*3+1}-{i*3})")
    for i in range(1, 11)
] + [
    ("critic", "📝 비평가: 중간 검토 (일관성 및 테마 유지)"),
] + [
    (f"writer{i}", f"✍️ 작가 {i}: 수정본 (페이지 {(i-1)*3+1}-{i*3})")
    for i in range(1, 11)
] + [
    ("critic", "📝 비평가: 최종 검토 및 종합 보고서 작성"),
]
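
# The 25 stages above map to fixed indices that get_stage_prompt() relies on:
#   0-2    director plan -> critic review -> revised masterplan
#   3-12   writers 1-10, initial drafts
#   13     critic mid-point review
#   14-23  writers 1-10, revisions
#   24     critic final report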
# --- Data classes ---
@dataclass
class CharacterState:
    """Data class representing a character's current state."""
    name: str
    alive: bool = True
    location: str = ""
    injuries: List[str] = field(default_factory=list)
    emotional_state: str = ""
    relationships: Dict[str, str] = field(default_factory=dict)
    last_seen_chapter: int = 0
    description: str = ""
    role: str = ""

@dataclass
class PlotPoint:
    """Data class representing a plot point."""
    chapter: int
    event_type: str
    description: str
    characters_involved: List[str]
    impact_level: int
    timestamp: str = ""

@dataclass
class TimelineEvent:
    """Data class representing a timeline event."""
    chapter: int
    time_reference: str
    event_description: str
    duration: str = ""
    relative_time: str = ""
# --- Core logic classes ---
class ConsistencyTracker:
    """Consistency tracking system."""
    def __init__(self):
        self.character_states: Dict[str, CharacterState] = {}
        self.plot_points: List[PlotPoint] = []
        self.timeline_events: List[TimelineEvent] = []
        self.locations: Dict[str, str] = {}
        self.established_facts: List[str] = []
        self.content_hashes: Dict[str, int] = {}  # Maps a sentence hash to the chapter it first appeared in

    def register_character(self, character: CharacterState):
        """Register a new character."""
        self.character_states[character.name] = character
        logger.info(f"Character registered: {character.name}")

    def update_character_state(self, name: str, chapter: int, updates: Dict[str, Any]):
        """Update a character's state."""
        if name not in self.character_states:
            self.register_character(CharacterState(name=name, last_seen_chapter=chapter))
        char = self.character_states[name]
        for key, value in updates.items():
            if hasattr(char, key):
                setattr(char, key, value)
        char.last_seen_chapter = chapter

    def add_plot_point(self, plot_point: PlotPoint):
        """Add a plot point."""
        plot_point.timestamp = datetime.now().isoformat()
        self.plot_points.append(plot_point)
    def check_repetition(self, content: str, current_chapter: int) -> Tuple[bool, str]:
        """Improved check for repeated content."""
        sentences = re.split(r'[.!?]+', content)
        for sentence in sentences:
            sentence_strip = sentence.strip()
            if len(sentence_strip) > 20:  # Ignore very short sentences
                sentence_hash = hashlib.md5(sentence_strip.encode('utf-8')).hexdigest()
                if sentence_hash in self.content_hashes:
                    previous_chapter = self.content_hashes[sentence_hash]
                    # Repetition from the immediately preceding chapter is tolerated;
                    # flag an error only when the chapters are 2 or more apart.
                    if current_chapter > previous_chapter + 1:
                        return True, f"문장 반복 (챕터 {previous_chapter}과 유사): {sentence_strip[:50]}..."
        # Register hashes for the new content
        for sentence in sentences:
            sentence_strip = sentence.strip()
            if len(sentence_strip) > 20:
                sentence_hash = hashlib.md5(sentence_strip.encode('utf-8')).hexdigest()
                self.content_hashes[sentence_hash] = current_chapter
        return False, ""
    def validate_consistency(self, chapter: int, content: str) -> List[str]:
        """Validate the consistency of new content."""
        errors = []
        # Check for appearances of dead characters
        for char_name, char_state in self.character_states.items():
            if char_name.lower() in content.lower() and not char_state.alive:
                errors.append(f"⚠️ 사망한 캐릭터 '{char_name}'이(가) 등장했습니다.")
        # Check for repeated content
        is_repetition, repeat_msg = self.check_repetition(content, chapter)
        if is_repetition:
            errors.append(f"🔄 {repeat_msg}")
        return errors
    def get_character_summary(self, chapter: int) -> str:
        """Summarize character status relative to the current chapter."""
        summary = "\n=== 캐릭터 현황 요약 (이전 2개 챕터 기준) ===\n"
        active_chars = [char for char in self.character_states.values() if char.last_seen_chapter >= chapter - 2]
        if not active_chars:
            return "\n(아직 주요 캐릭터 정보가 없습니다.)\n"
        for char in active_chars:
            status = "생존" if char.alive else "사망"
            summary += f"• {char.name}: {status}"
            if char.alive and char.location: summary += f" (위치: {char.location})"
            if char.injuries: summary += f" (부상: {', '.join(char.injuries[-1:])})"
            summary += "\n"
        return summary

    def get_plot_summary(self, chapter: int) -> str:
        """Summarize recent plot points."""
        summary = "\n=== 최근 주요 사건 요약 ===\n"
        recent_events = [p for p in self.plot_points if p.chapter >= chapter - 2]
        if not recent_events:
            return "\n(아직 주요 사건이 없습니다.)\n"
        for event in recent_events[-3:]:  # Show only the 3 most recent
            summary += f"• [챕터 {event.chapter}] {event.description}\n"
        return summary
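
# A minimal usage sketch of ConsistencyTracker (illustrative only; the name
# "Mira" and the variable draft_text are hypothetical, and the real flow
# happens inside NovelWritingSystem.process_novel_stream):
#
#   tracker = ConsistencyTracker()
#   tracker.register_character(CharacterState(name="Mira", location="Seoul"))
#   errors = tracker.validate_consistency(chapter=3, content=draft_text)
#   if errors:
#       ...  # surface the warnings in the next writer prompt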
class WebSearchIntegration:
    """Web search helper (used only during the director stage)."""
    def __init__(self):
        self.brave_api_key = BRAVE_SEARCH_API_KEY
        self.search_url = "https://api.search.brave.com/res/v1/web/search"
        self.enabled = bool(self.brave_api_key)

    def search(self, query: str, count: int = 3, language: str = "en") -> List[Dict]:
        """Perform a web search."""
        if not self.enabled:
            return []
        headers = {
            "Accept": "application/json",
            "X-Subscription-Token": self.brave_api_key
        }
        params = {
            "q": query,
            "count": count,
            "search_lang": "ko" if language == "Korean" else "en",
            "text_decorations": False,
            "safesearch": "moderate"
        }
        try:
            response = requests.get(self.search_url, headers=headers, params=params, timeout=10)
            response.raise_for_status()
            results = response.json().get("web", {}).get("results", [])
            logger.info(f"Web search succeeded: {len(results)} results for '{query}'")
            return results
        except requests.exceptions.RequestException as e:
            logger.error(f"Web search API error: {e}")
            return []

    def extract_relevant_info(self, results: List[Dict], max_chars: int = 1500) -> str:
        """Extract relevant information from search results."""
        if not results:
            return ""
        extracted = []
        total_chars = 0
        for i, result in enumerate(results[:3], 1):
            title = result.get("title", "")
            description = result.get("description", "")
            url = result.get("url", "")
            info = f"[{i}] {title}\n{description}\nSource: {url}\n"
            if total_chars + len(info) < max_chars:
                extracted.append(info)
                total_chars += len(info)
            else:
                break
        return "\n---\n".join(extracted)
class NovelDatabase:
    """SQLite-backed storage for novel-writing sessions."""

    @staticmethod
    def init_db():
        with sqlite3.connect(DB_PATH) as conn:
            conn.execute("PRAGMA journal_mode=WAL")
            cursor = conn.cursor()
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS sessions (
                    session_id TEXT PRIMARY KEY,
                    user_query TEXT NOT NULL,
                    language TEXT NOT NULL,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    status TEXT DEFAULT 'active',
                    current_stage INTEGER DEFAULT 0,
                    final_novel TEXT,
                    consistency_report TEXT
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS stages (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    stage_number INTEGER NOT NULL,
                    stage_name TEXT NOT NULL,
                    role TEXT NOT NULL,
                    content TEXT,
                    word_count INTEGER DEFAULT 0,
                    status TEXT DEFAULT 'pending',
                    consistency_score REAL DEFAULT 0.0,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id),
                    UNIQUE(session_id, stage_number)
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS character_states (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    character_name TEXT NOT NULL,
                    chapter INTEGER NOT NULL,
                    is_alive BOOLEAN DEFAULT TRUE,
                    location TEXT,
                    injuries TEXT,
                    emotional_state TEXT,
                    description TEXT,
                    created_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id)
                )
            ''')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_session_id ON stages(session_id)')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_stage_number ON stages(stage_number)')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_char_session ON character_states(session_id)')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_session_status ON sessions(status)')
            conn.commit()
    @staticmethod
    @contextmanager
    def get_db():
        with db_lock:
            conn = sqlite3.connect(DB_PATH, timeout=30.0)
            conn.row_factory = sqlite3.Row
            try:
                yield conn
            finally:
                conn.close()
    @staticmethod
    def create_session(user_query: str, language: str) -> str:
        session_id = hashlib.md5(f"{user_query}{datetime.now()}".encode()).hexdigest()
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'INSERT INTO sessions (session_id, user_query, language) VALUES (?, ?, ?)',
                (session_id, user_query, language)
            )
            conn.commit()
        return session_id

    @staticmethod
    def save_stage(session_id: str, stage_number: int, stage_name: str,
                   role: str, content: str, status: str = 'complete',
                   consistency_score: float = 0.0):
        word_count = len(content.split()) if content else 0
        with NovelDatabase.get_db() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                INSERT INTO stages (session_id, stage_number, stage_name, role, content, word_count, status, consistency_score)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(session_id, stage_number)
                DO UPDATE SET content=?, word_count=?, status=?, stage_name=?, consistency_score=?, updated_at=datetime('now')
            ''', (session_id, stage_number, stage_name, role, content, word_count, status, consistency_score,
                  content, word_count, status, stage_name, consistency_score))
            cursor.execute(
                "UPDATE sessions SET updated_at = datetime('now'), current_stage = ? WHERE session_id = ?",
                (stage_number, session_id)
            )
            conn.commit()

    @staticmethod
    def get_session(session_id: str) -> Optional[Dict]:
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute('SELECT * FROM sessions WHERE session_id = ?', (session_id,)).fetchone()
            return dict(row) if row else None

    @staticmethod
    def get_latest_active_session() -> Optional[Dict]:
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute("SELECT * FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 1").fetchone()
            return dict(row) if row else None

    @staticmethod
    def get_stages(session_id: str) -> List[Dict]:
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute('SELECT * FROM stages WHERE session_id = ? ORDER BY stage_number', (session_id,)).fetchall()
            return [dict(row) for row in rows]

    @staticmethod
    def get_writer_content(session_id: str) -> str:
        with NovelDatabase.get_db() as conn:
            all_content = []
            for writer_num in range(1, 11):
                row = conn.cursor().execute(
                    "SELECT content FROM stages WHERE session_id = ? AND role = ? AND stage_name LIKE '%수정본%' ORDER BY stage_number DESC LIMIT 1",
                    (session_id, f'writer{writer_num}')
                ).fetchone()
                if row and row['content']:
                    all_content.append(row['content'].strip())
            return '\n\n'.join(all_content)

    @staticmethod
    def update_final_novel(session_id: str, final_novel: str, consistency_report: str = ""):
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                "UPDATE sessions SET final_novel = ?, status = 'complete', updated_at = datetime('now'), consistency_report = ? WHERE session_id = ?",
                (final_novel, consistency_report, session_id)
            )
            conn.commit()

    @staticmethod
    def get_active_sessions() -> List[Dict]:
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                "SELECT session_id, user_query, language, created_at, current_stage FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 10"
            ).fetchall()
            return [dict(row) for row in rows]
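
# Note: save_stage() relies on SQLite's UPSERT syntax (ON CONFLICT ... DO UPDATE,
# available since SQLite 3.24) together with the UNIQUE(session_id, stage_number)
# constraint, so re-running a stage overwrites the earlier attempt in place.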
class NovelWritingSystem:
    """Optimized novel-writing system orchestrating all agents."""
    def __init__(self):
        self.token = FRIENDLI_TOKEN
        self.api_url = API_URL
        self.model_id = MODEL_ID
        self.consistency_tracker = ConsistencyTracker()
        self.web_search = WebSearchIntegration()
        self.current_session_id = None
        NovelDatabase.init_db()

    def create_headers(self):
        """Create API request headers."""
        return {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"}
    # --- Prompt builders (integrating the "Thematic Guardian" concept) ---
    def create_director_initial_prompt(self, user_query: str, language: str) -> str:
        """Director's initial planning prompt (with web search and reinforced thematic constraints)."""
        search_results_str = ""
        if self.web_search.enabled:
            queries = [f"{user_query} novel setting", f"{user_query} background information"]
            search_results = self.web_search.search(queries[0], count=2, language=language)
            if search_results:
                search_results_str = self.web_search.extract_relevant_info(search_results)
        lang_prompts = {
            "Korean": {
                "title": "당신은 30페이지 분량의 중편 소설을 기획하는 문학 감독자입니다.",
                "user_theme": "사용자 주제",
                "plan_instruction": "다음 요소들을 포함한 상세한 소설 기획을 작성하세요:",
                "theme_section": "1. **주제와 장르 설정**\n - 핵심 주제와 메시지 (사용자 의도 깊이 반영)\n - 장르 및 분위기\n - 독자층 고려사항",
                "char_section": "2. **주요 등장인물** (3-5명)\n | 이름 | 역할 | 성격 | 배경 | 목표 | 갈등 |",
                "setting_section": "3. **배경 설정**\n - 시공간적 배경\n - 사회적/문화적 환경\n - 주요 장소들",
                "plot_section": "4. **플롯 구조** (10개 파트, 각 3페이지 분량)\n | 파트 | 페이지 | 주요 사건 | 긴장도 | 캐릭터 발전 |",
                "guideline_section": "5. **작가별 지침**\n - 일관성 유지를 위한 핵심 설정\n - 문체와 톤 가이드라인",
                "constraint_title": "⚠️매우 중요한 지시사항: 핵심 제약 조건⚠️",
                "constraint_body": "이 소설은 **AI로 인해 모든 것이 쉽게 해결되는 긍정적이고 단순한 이야기가 아닙니다.**\n반드시 사용자의 주제인 '{query}'에 담긴 **핵심 감정(예: 불안, 소외감, 상실감, 세대 갈등 등)을 중심으로 서사를 전개해야 합니다.**\nAI나 특정 기술은 편리한 도구가 아니라, 주인공에게 **갈등과 상실감을 안겨주는 핵심 요인**으로 작용해야 합니다.\n이 제약 조건을 절대 벗어나지 마십시오.",
                "final_instruction": "창의적이고 깊이 있는 소설이 될 수 있도록 상세하게 기획하세요."
            },
            "English": {
                "title": "You are a literary director planning a 30-page novella.",
                "user_theme": "User Theme",
                "plan_instruction": "Create a detailed novel plan including:",
                "theme_section": "1. **Theme and Genre**\n - Core theme and message (Deeply reflect user's intent)\n - Genre and atmosphere",
                "char_section": "2. **Main Characters** (3-5)\n | Name | Role | Personality | Background | Goal | Conflict |",
                "setting_section": "3. **Setting**\n - Time and place\n - Social/cultural environment",
                "plot_section": "4. **Plot Structure** (10 parts, ~3 pages each)\n | Part | Pages | Main Events | Tension | Character Development |",
                "guideline_section": "5. **Writer Guidelines**\n - Key settings for consistency\n - Style and tone guidelines",
                "constraint_title": "⚠️CRITICAL INSTRUCTION: CORE CONSTRAINTS⚠️",
                "constraint_body": "This is **NOT a simple, positive story where AI solves everything.**\nYou must develop the narrative around the core emotions of the user's theme: '{query}' (e.g., anxiety, alienation, loss, generational conflict).\nAI or specific technology should be the **root cause of the protagonist's conflict and loss**, not a convenient tool.\nDo not deviate from this constraint.",
                "final_instruction": "Plan in detail for a creative and profound novel."
            }
        }
        p = lang_prompts[language]
        return f"{p['title']}\n\n{p['user_theme']}: {user_query}\n\n{search_results_str}\n\n{p['plan_instruction']}\n\n{p['theme_section']}\n\n{p['char_section']}\n\n{p['setting_section']}\n\n{p['plot_section']}\n\n{p['guideline_section']}\n\n---\n{p['constraint_title']}\n{p['constraint_body'].format(query=user_query)}\n---\n\n{p['final_instruction']}"
    def create_critic_director_prompt(self, director_plan: str, user_query: str, language: str) -> str:
        """Critic's review prompt for the director's plan (reinforcing thematic consistency)."""
        lang_prompts = {
            "Korean": {
                "title": "당신은 문학 비평가입니다. 감독자의 소설 기획을 '주제 일관성'과 '기술적 일관성' 관점에서 검토하세요.",
                "theme_check": f"**1. 주제 일관성 (가장 중요)**\n - **원래 주제:** '{user_query}'\n - 기획안이 주제의 핵심 감정(불안, 상실감 등)에서 벗어나 긍정적이거나 단순한 방향으로 흐르지 않았습니까?\n - AI나 기술이 갈등의 원인이 아닌, 단순 해결사로 묘사되지 않았습니까?",
                "consistency_check": "**2. 기술적 일관성**\n - 캐릭터 설정의 모순, 플롯의 논리적 허점, 시간적/공간적 설정의 문제점을 검토하세요.",
                "instruction": "위 항목들을 중심으로 구체적인 문제점과 개선안을 제시하세요."
            },
            "English": {
                "title": "You are a literary critic. Review the director's plan from the perspectives of 'Thematic Consistency' and 'Technical Consistency'.",
                "theme_check": f"**1. Thematic Consistency (Most Important)**\n - **Original Theme:** '{user_query}'\n - Does the plan drift from the core emotions (e.g., anxiety, loss) towards an overly positive or simplistic narrative?\n - Is AI depicted as a simple problem-solver instead of the root of the conflict?",
                "consistency_check": "**2. Technical Consistency**\n - Review for character contradictions, plot holes, and timeline/setting issues.",
                "instruction": "Provide specific problems and suggestions for improvement based on the above."
            }
        }
        p = lang_prompts[language]
        return f"{p['title']}\n\n**감독자 기획:**\n{director_plan}\n\n---\n**검토 항목:**\n{p['theme_check']}\n\n{p['consistency_check']}\n\n{p['instruction']}"
    def create_director_revision_prompt(self, initial_plan: str, critic_feedback: str, user_query: str, language: str) -> str:
        """Director's revision prompt (re-emphasizing the thematic constraints)."""
        return f"""감독자로서 비평가의 피드백을 반영하여 소설 기획을 수정합니다.
**원래 주제:** {user_query}
**초기 기획:**\n{initial_plan}
**비평가 피드백:**\n{critic_feedback}
**수정 지침:**
- 비평가가 지적한 모든 일관성 문제와 주제 이탈 문제를 해결하세요.
- **핵심 제약 조건**을 다시 한번 상기하고, 소설 전체가 '불안'과 '상실감'의 톤을 유지하도록 플롯을 구체화하세요.
- 10명의 작가가 혼동 없이 작업할 수 있도록 명확하고 상세한 최종 마스터플랜을 완성하세요.
"""
    def create_writer_prompt(self, writer_number: int, director_plan: str, previous_content_summary: str, user_query: str, language: str) -> str:
        """Writer prompt (including a theme reminder)."""
        pages_start = (writer_number - 1) * 3 + 1
        pages_end = writer_number * 3
        lang_prompts = {
            "Korean": {
                "title": f"당신은 작가 {writer_number}번입니다. 소설의 {pages_start}-{pages_end} 페이지를 작성하세요.",
                "plan": "감독자 마스터플랜",
                "prev_summary": "이전 내용 요약",
                "guidelines": "**작성 지침:**\n1. **분량**: 1,400-1,500 단어 내외\n2. **연결성**: 요약된 이전 내용과 자연스럽게 연결\n3. **일관성**: 캐릭터 설정과 상태, 플롯 구조를 반드시 따를 것",
                "reminder_title": "❗ 잊지 마세요 (테마 리마인더)",
                "reminder_body": f"이 소설의 핵심은 '{user_query}'에 담긴 **불안, 소외, 상실감**입니다. 긍정적인 해결을 서두르지 말고, 주인공의 내면 갈등을 심도 있게 묘사하는 데 집중하세요.",
                "final_instruction": "창의적이면서도 주제와 일관성을 절대 잊지 마십시오."
            },
            "English": {
                "title": f"You are Writer #{writer_number}. Write pages {pages_start}-{pages_end} of the novella.",
                "plan": "Director's Masterplan",
                "prev_summary": "Previous Content Summary",
                "guidelines": "**Writing Guidelines:**\n1. **Length**: Approx. 1,400-1,500 words\n2. **Connectivity**: Connect naturally with the summarized previous content.\n3. **Consistency**: Strictly follow character settings, states, and plot structure.",
                "reminder_title": "❗ REMINDER (THEME)",
                "reminder_body": f"The core of this novel is the **anxiety, alienation, and loss** from the theme '{user_query}'. Do not rush to a positive resolution; focus on deeply describing the protagonist's internal conflict.",
                "final_instruction": "Be creative, but never lose consistency and the core theme."
            }
        }
        p = lang_prompts[language]
        consistency_info = self.consistency_tracker.get_character_summary(writer_number) + self.consistency_tracker.get_plot_summary(writer_number)
        return f"{p['title']}\n\n**{p['plan']}:**\n{director_plan}\n\n{consistency_info}\n\n**{p['prev_summary']}:**\n{previous_content_summary}\n\n---\n{p['guidelines']}\n\n**{p['reminder_title']}**\n{p['reminder_body']}\n---\n\n{p['final_instruction']}"
    def create_critic_consistency_prompt(self, all_content: str, user_query: str, language: str) -> str:
        """Critic's mid-point review prompt (with reinforced theme checks)."""
        return f"""당신은 일관성 검토 전문 비평가입니다. 지금까지 작성된 내용을 검토하세요.
**원래 주제:** {user_query}
**현재까지 작성된 내용 (최근 3000자):**\n{all_content[-3000:]}
**검토 항목:**
1. **주제 일관성 (가장 중요):** 내용이 원래 주제의 어두운 감정선에서 벗어나지 않았는지 확인하고, 벗어났다면 수정 방향을 제시하세요.
2. **기술적 일관성:** 캐릭터, 플롯, 설정의 연속성과 논리적 오류를 찾아내세요.
3. **반복 내용:** 의미적으로 중복되는 장면이나 표현이 있는지 확인하세요.
**결과:** 발견된 문제점과 구체적인 수정 제안을 목록으로 제시하세요.
"""
    def create_writer_revision_prompt(self, writer_number: int, initial_content: str, consistency_feedback: str, language: str) -> str:
        """Writer revision prompt."""
        return f"""작가 {writer_number}번으로서 비평가의 피드백을 반영하여 내용을 수정하세요.
**초기 작성 내용:**\n{initial_content}
**비평가 피드백:**\n{consistency_feedback}
**수정 지침:**
- 지적된 모든 주제 이탈 및 일관성 문제를 해결하세요.
- 분량(1,400-1,500 단어)을 유지하면서 내용의 질을 높이세요.
- 수정된 최종 버전을 제시하세요.
"""
    def create_critic_final_prompt(self, complete_novel: str, language: str) -> str:
        """Final critic review and report prompt."""
        return f"""완성된 소설의 최종 일관성 및 완성도에 대한 종합 보고서를 작성하세요.
**완성된 소설 (마지막 2000자):**\n{complete_novel[-2000:]}
**보고서 포함 항목:**
1. **전체 일관성 평가:** 캐릭터, 플롯, 설정, 주제 유지에 대한 점수(1-10)와 총평.
2. **최종 발견된 문제점:** 남아있는 사소한 문제점들.
3. **성공 요소:** 특히 잘 유지된 일관성 부분이나 주제 표현이 뛰어난 부분.
4. **최종 평가:** 소설의 전반적인 완성도와 독자에게 미칠 영향에 대한 평가.
"""
    # --- LLM call helpers ---
    def call_llm_sync(self, messages: List[Dict[str, str]], role: str, language: str) -> str:
        """Synchronous LLM call (for internal use such as summaries)."""
        full_content = ""
        for chunk in self.call_llm_streaming(messages, role, language):
            full_content += chunk
        if full_content.startswith("❌"):
            raise Exception(f"LLM Sync Call Failed: {full_content}")
        return full_content
    def call_llm_streaming(self, messages: List[Dict[str, str]], role: str, language: str) -> Generator[str, None, None]:
        """Streaming LLM call with defensive error handling and debug logging."""
        try:
            system_prompts = self.get_system_prompts(language)
            full_messages = [{"role": "system", "content": system_prompts.get(role, "You are a helpful assistant.")}, *messages]
            payload = {
                "model": self.model_id,
                "messages": full_messages,
                "max_tokens": 10000,
                "temperature": 0.75,
                "top_p": 0.9,
                "presence_penalty": 0.3,
                "frequency_penalty": 0.2,
                "stream": True,
                "stream_options": {"include_usage": True}
            }
            logger.info(f"[{role}] Starting API streaming")
            # API call
            response = requests.post(
                self.api_url,
                headers=self.create_headers(),
                json=payload,
                stream=True,
                timeout=180
            )
            # Check the status code
            if response.status_code != 200:
                logger.error(f"API response error: {response.status_code}")
                logger.error(f"Response body: {response.text[:500]}")
                yield f"❌ API 오류 (상태 코드: {response.status_code})"
                return
            response.raise_for_status()
            # Stream processing
            buffer = ""
            total_content = ""
            chunk_count = 0
            error_count = 0
            for line in response.iter_lines():
                if not line:
                    continue
                try:
                    line_str = line.decode('utf-8').strip()
                    # Check the SSE format
                    if not line_str.startswith("data: "):
                        continue
                    data_str = line_str[6:]  # Strip the "data: " prefix
                    # Check for end of stream
                    if data_str == "[DONE]":
                        logger.info(f"[{role}] Streaming complete - {len(total_content)} chars total")
                        break
                    # Parse the JSON payload
                    try:
                        data = json.loads(data_str)
                    except json.JSONDecodeError:
                        logger.warning(f"JSON parsing failed: {data_str[:100]}")
                        continue
                    # Safely inspect the choices array
                    choices = data.get("choices", None)
                    if not choices or not isinstance(choices, list) or len(choices) == 0:
                        # Check for an error response
                        if "error" in data:
                            error_msg = data.get("error", {}).get("message", "Unknown error")
                            logger.error(f"API error: {error_msg}")
                            yield f"❌ API 에러: {error_msg}"
                            return
                        continue
                    # Extract content from the delta
                    delta = choices[0].get("delta", {})
                    content = delta.get("content", "")
                    if content:
                        buffer += content
                        total_content += content
                        chunk_count += 1
                        # Yield roughly every 100 chars or on a newline
                        if len(buffer) >= 100 or '\n' in buffer:
                            yield buffer
                            buffer = ""
                            time.sleep(0.01)  # Brief pause so the UI can update
                except Exception as e:
                    error_count += 1
                    logger.error(f"Chunk processing error #{error_count}: {str(e)}")
                    if error_count > 10:  # Abort if errors pile up
                        yield "❌ 스트리밍 중 과도한 오류 발생"
                        return
                    continue
            # Flush any remaining buffer
            if buffer:
                yield buffer
            # Verify the result
            if chunk_count == 0:
                logger.error(f"[{role}] No content received at all")
                yield "❌ API로부터 응답을 받지 못했습니다."
            else:
                logger.info(f"[{role}] Received {chunk_count} chunks, {len(total_content)} chars total")
        except requests.exceptions.Timeout:
            logger.error("API request timed out")
            yield "❌ API 요청 시간이 초과되었습니다."
        except requests.exceptions.ConnectionError:
            logger.error("API connection failed")
            yield "❌ API 서버에 연결할 수 없습니다."
        except Exception as e:
            logger.error(f"Unexpected error: {type(e).__name__}: {str(e)}", exc_info=True)
            yield f"❌ 오류 발생: {str(e)}"
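
    # The parser above expects the OpenAI-compatible SSE convention: each event
    # arrives as a line of the form
    #   data: {"choices": [{"delta": {"content": "..."}}]}
    # and the stream ends with the literal line
    #   data: [DONE]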
    def get_system_prompts(self, language: str) -> Dict[str, str]:
        """Build per-role system prompts."""
        base_prompts = {
            "Korean": {
                "director": "당신은 창의적이고 체계적인 소설 기획 전문가입니다. 흥미롭고 일관성 있는 스토리를 설계하세요.",
                "critic": "당신은 일관성 검토 전문 비평가입니다. 캐릭터, 플롯, 설정의 일관성을 철저히 점검하고 개선 방안을 제시하세요.",
                "writer_base": "당신은 전문 소설 작가입니다. 주어진 지침에 따라 몰입감 있고 일관성 있는 내용을 작성하세요."
            },
            "English": {
                "director": "You are a creative and systematic novel planning expert. Design engaging and consistent stories.",
                "critic": "You are a consistency review specialist critic. Thoroughly check character, plot, and setting consistency and suggest improvements.",
                "writer_base": "You are a professional novel writer. Write immersive and consistent content according to the given guidelines."
            }
        }
        prompts = base_prompts[language].copy()
        # Writer-specific prompts
        if language == "Korean":
            prompts["writer1"] = "당신은 소설의 매력적인 시작을 담당하는 작가입니다. 독자를 사로잡는 도입부를 만드세요."
            prompts["writer10"] = "당신은 완벽한 결말을 만드는 작가입니다. 독자에게 깊은 여운을 남기는 마무리를 하세요."
        else:
            prompts["writer1"] = "You are a writer responsible for the captivating beginning. Create an opening that hooks readers."
            prompts["writer10"] = "You are a writer who creates the perfect ending. Create a conclusion that leaves readers with deep resonance."
        # Writers 2-9 use the base prompt
        for i in range(2, 10):
            prompts[f"writer{i}"] = prompts["writer_base"]
        return prompts
    # --- Main process ---
    def process_novel_stream(self, query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, List[Dict[str, Any]], str], None, None]:
        """Streaming novel-generation process (hardened logic)."""
        try:
            resume_from_stage = 0
            if session_id:
                self.current_session_id = session_id
                session = NovelDatabase.get_session(session_id)
                if session:
                    query = session['user_query']
                    language = session['language']
                    resume_from_stage = session['current_stage'] + 1
                    logger.info(f"Resuming session {session_id} from stage {resume_from_stage}")
            else:
                self.current_session_id = NovelDatabase.create_session(query, language)
                logger.info(f"Created new session: {self.current_session_id}")
            stages = []
            if resume_from_stage > 0:
                stages = [{
                    "name": s['stage_name'], "status": s['status'], "content": s.get('content', ''),
                    "consistency_score": s.get('consistency_score', 0.0)
                } for s in NovelDatabase.get_stages(self.current_session_id)]
            for stage_idx in range(resume_from_stage, len(OPTIMIZED_STAGES)):
                role, stage_name = OPTIMIZED_STAGES[stage_idx]
                if stage_idx >= len(stages):
                    stages.append({"name": stage_name, "status": "active", "content": "", "consistency_score": 0.0})
                else:
                    stages[stage_idx]["status"] = "active"
                yield "", stages, self.current_session_id
                prompt = self.get_stage_prompt(stage_idx, role, query, language, stages)
                stage_content = ""
                for chunk in self.call_llm_streaming([{"role": "user", "content": prompt}], role, language):
                    stage_content += chunk
                    stages[stage_idx]["content"] = stage_content
                    yield "", stages, self.current_session_id
                consistency_score = 0.0
                if role.startswith("writer"):
                    writer_num = int(re.search(r'\d+', role).group())
                    errors = self.consistency_tracker.validate_consistency(writer_num, stage_content)
                    consistency_score = max(0, 10 - len(errors) * 2)
                    stages[stage_idx]["consistency_score"] = consistency_score
                stages[stage_idx]["status"] = "complete"
                NovelDatabase.save_stage(
                    self.current_session_id, stage_idx, stage_name, role,
                    stage_content, "complete", consistency_score
                )
                yield "", stages, self.current_session_id
            final_novel = NovelDatabase.get_writer_content(self.current_session_id)
            final_report = self.generate_consistency_report(final_novel, language)
            NovelDatabase.update_final_novel(self.current_session_id, final_novel, final_report)
            yield f"✅ 소설 완성! 총 {len(final_novel.split())}단어", stages, self.current_session_id
        except Exception as e:
            logger.error(f"Novel generation process error: {e}", exc_info=True)
            yield f"❌ 오류 발생: {e}", stages if 'stages' in locals() else [], self.current_session_id
    def get_stage_prompt(self, stage_idx: int, role: str, query: str, language: str, stages: List[Dict]) -> str:
        """Build the per-stage prompt (with summarization and theme reinforcement)."""
        if stage_idx == 0:
            return self.create_director_initial_prompt(query, language)
        if stage_idx == 1:
            return self.create_critic_director_prompt(stages[0]["content"], query, language)
        if stage_idx == 2:
            return self.create_director_revision_prompt(stages[0]["content"], stages[1]["content"], query, language)
        master_plan = stages[2]["content"]
        if 3 <= stage_idx <= 12:  # Writer initial drafts
            writer_num = stage_idx - 2
            previous_content = self.get_all_content(stages, stage_idx)
            summary = self.create_summary(previous_content, language)
            return self.create_writer_prompt(writer_num, master_plan, summary, query, language)
        if stage_idx == 13:  # Critic mid-point review
            all_content = self.get_all_content(stages, stage_idx)
            return self.create_critic_consistency_prompt(all_content, query, language)
        if 14 <= stage_idx <= 23:  # Writer revisions
            writer_num = stage_idx - 13
            initial_content = stages[2 + writer_num]["content"]
            feedback = stages[13]["content"]
            return self.create_writer_revision_prompt(writer_num, initial_content, feedback, language)
        if stage_idx == 24:  # Final review
            complete_novel = self.get_all_writer_content(stages)
            return self.create_critic_final_prompt(complete_novel, language)
        return ""
    def create_summary(self, content: str, language: str) -> str:
        """Summarize the previous content via the LLM."""
        if not content.strip():
            return "이전 내용이 없습니다." if language == "Korean" else "No previous content."
        prompt_text = "다음 소설 내용을 3~5개의 핵심적인 문장으로 요약해줘. 다음 작가가 이야기를 이어가는 데 필요한 핵심 정보(등장인물의 현재 상황, 감정, 마지막 사건)를 포함해야 해."
        if language != "Korean":
            prompt_text = "Summarize the following novel content in 3-5 key sentences. Include crucial information for the next writer to continue the story (characters' current situation, emotions, and the last major event)."
        summary_prompt = f"{prompt_text}\n\n---\n{content[-2000:]}"
        try:
            summary = self.call_llm_sync([{"role": "user", "content": summary_prompt}], "critic", language)
            return summary
        except Exception as e:
            logger.error(f"Summary generation failed: {e}")
            return content[-1000:]
    def get_all_content(self, stages: List[Dict], current_stage: int) -> str:
        """Concatenate all content produced so far."""
        return "\n\n".join(s["content"] for i, s in enumerate(stages) if i < current_stage and s["content"])

    def get_all_writer_content(self, stages: List[Dict]) -> str:
        """Concatenate all writers' final revised drafts."""
        return "\n\n".join(s["content"] for i, s in enumerate(stages) if 14 <= i <= 23 and s["content"])

    def generate_consistency_report(self, complete_novel: str, language: str) -> str:
        """Generate the final report (LLM call)."""
        prompt = self.create_critic_final_prompt(complete_novel, language)
        try:
            report = self.call_llm_sync([{"role": "user", "content": prompt}], "critic", language)
            return report
        except Exception as e:
            logger.error(f"Final report generation failed: {e}")
            return "보고서 생성 중 오류 발생"
# --- Utility functions ---
def process_query(query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, str, str, str], None, None]:
    """Main query-processing entry point."""
    if not query.strip():
        yield "", "", "❌ 주제를 입력해주세요.", session_id
        return
    system = NovelWritingSystem()
    stages_markdown = ""
    novel_content = ""
    for status, stages, current_session_id in system.process_novel_stream(query, language, session_id):
        stages_markdown = format_stages_display(stages)
        # Fetch the final novel content once the last stages are complete
        if stages and all(s.get("status") == "complete" for s in stages[-10:]):
            novel_content = NovelDatabase.get_writer_content(current_session_id)
            novel_content = format_novel_display(novel_content)
        yield stages_markdown, novel_content, status or "🔄 처리 중...", current_session_id
def get_active_sessions(language: str) -> List[str]:
    """List active sessions for the dropdown."""
    sessions = NovelDatabase.get_active_sessions()
    return [f"{s['session_id'][:8]}... - {s['user_query'][:50]}... ({s['created_at']})"
            for s in sessions]

def auto_recover_session(language: str) -> Tuple[Optional[str], str]:
    """Auto-recover the most recent active session."""
    latest_session = NovelDatabase.get_latest_active_session()
    if latest_session:
        return latest_session['session_id'], f"세션 {latest_session['session_id'][:8]}... 복구됨"
    return None, "복구할 세션이 없습니다."
def resume_session(session_id: str, language: str) -> Generator[Tuple[str, str, str, str], None, None]:
    """Resume an existing session."""
    if not session_id:
        yield "", "", "❌ 세션 ID가 없습니다.", session_id
        return
    # Extract the session ID from the dropdown label
    if "..." in session_id:
        session_id = session_id.split("...")[0]
    session = NovelDatabase.get_session(session_id)
    if not session:
        yield "", "", "❌ 세션을 찾을 수 없습니다.", None
        return
    # Resume via process_query
    yield from process_query(session['user_query'], session['language'], session_id)
def download_novel(novel_text: str, format_type: str, language: str, session_id: str) -> Optional[str]:
    """Create the downloadable novel file."""
    if not novel_text or not session_id:
        return None
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = f"novel_{session_id[:8]}_{timestamp}"
    try:
        if format_type == "DOCX" and DOCX_AVAILABLE:
            return export_to_docx(novel_text, filename, language)
        else:
            return export_to_txt(novel_text, filename)
    except Exception as e:
        logger.error(f"File creation failed: {e}")
        return None
def format_stages_display(stages: List[Dict]) -> str:
    """Format the stage progress as Markdown."""
    markdown = "## 🎬 진행 상황\n\n"
    for i, stage in enumerate(stages):
        status_icon = "✅" if stage['status'] == 'complete' else "🔄" if stage['status'] == 'active' else "⏳"
        markdown += f"{status_icon} **{stage['name']}**"
        if stage.get('consistency_score', 0) > 0:
            markdown += f" (일관성: {stage['consistency_score']:.1f}/10)"
        markdown += "\n"
        if stage['content']:
            preview = stage['content'][:200] + "..." if len(stage['content']) > 200 else stage['content']
            markdown += f"> {preview}\n\n"
    return markdown
def format_novel_display(novel_text: str) -> str:
    """Format the novel text as Markdown."""
    if not novel_text:
        return "아직 작성된 내용이 없습니다."
    # Add page separators
    pages = novel_text.split('\n\n')
    formatted = "# 📖 완성된 소설\n\n"
    for i, page in enumerate(pages):
        if page.strip():
            formatted += f"### 페이지 {i+1}\n\n{page}\n\n---\n\n"
    return formatted
def export_to_docx(content: str, filename: str, language: str) -> str:
    """Export to a DOCX file."""
    doc = Document()
    # Add the title
    title = doc.add_heading('AI 협업 소설', 0)
    title.alignment = WD_ALIGN_PARAGRAPH.CENTER
    # Metadata
    doc.add_paragraph(f"생성일: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    doc.add_paragraph(f"언어: {language}")
    doc.add_page_break()
    # Add the body text
    paragraphs = content.split('\n\n')
    for para in paragraphs:
        if para.strip():
            doc.add_paragraph(para.strip())
    # Save the file
    filepath = f"{filename}.docx"
    doc.save(filepath)
    return filepath
def export_to_txt(content: str, filename: str) -> str:
    """Export to a TXT file."""
    filepath = f"{filename}.txt"
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write(content)
    return filepath
# CSS styles
custom_css = """
.gradio-container {
    background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
    min-height: 100vh;
}
.main-header {
    background-color: rgba(255, 255, 255, 0.1);
    backdrop-filter: blur(10px);
    padding: 30px;
    border-radius: 12px;
    margin-bottom: 30px;
    text-align: center;
    color: white;
}
.input-section {
    background-color: rgba(255, 255, 255, 0.1);
    backdrop-filter: blur(10px);
    padding: 20px;
    border-radius: 12px;
    margin-bottom: 20px;
}
.session-section {
    background-color: rgba(255, 255, 255, 0.1);
    backdrop-filter: blur(10px);
    padding: 15px;
    border-radius: 8px;
    margin-top: 20px;
    color: white;
}
#stages-display {
    background-color: rgba(255, 255, 255, 0.95);
    padding: 20px;
    border-radius: 12px;
    max-height: 600px;
    overflow-y: auto;
}
#novel-output {
    background-color: rgba(255, 255, 255, 0.95);
    padding: 30px;
    border-radius: 12px;
    max-height: 400px;
    overflow-y: auto;
}
.download-section {
    background-color: rgba(255, 255, 255, 0.9);
    padding: 15px;
    border-radius: 8px;
    margin-top: 20px;
}
"""
# Build the Gradio interface
def create_interface():
    with gr.Blocks(css=custom_css, title="AI 협업 소설 생성 시스템") as interface:
        gr.HTML("""
        <div class="main-header">
            <h1 style="font-size: 2.5em; margin-bottom: 10px;">
                📖 AI 협업 소설 생성 시스템
            </h1>
            <h3 style="color: #ccc; margin-bottom: 20px;">
                일관성 중심의 창의적 소설 생성
            </h3>
            <p style="font-size: 1.1em; color: #ddd; max-width: 800px; margin: 0 auto;">
                주제를 입력하면 AI 에이전트들이 협업하여 30페이지 분량의 완성된 소설을 작성합니다.
                <br>
                감독자 1명, 비평가 1명, 작가 10명이 함께 작업하며 일관성을 유지합니다.
            </p>
        </div>
        """)
        # State management
        current_session_id = gr.State(None)
        with gr.Row():
            with gr.Column(scale=1):
                with gr.Group(elem_classes=["input-section"]):
                    query_input = gr.Textbox(
                        label="소설 주제 / Novel Theme",
                        placeholder="소설의 주제나 초기 아이디어를 입력하세요...\nEnter your novel theme or initial idea...",
                        lines=4
                    )
                    language_select = gr.Radio(
                        choices=["English", "Korean"],
                        value="English",
                        label="언어 / Language"
                    )
                    with gr.Row():
                        submit_btn = gr.Button("🚀 소설 생성 시작", variant="primary", scale=2)
                        clear_btn = gr.Button("🗑️ 초기화", scale=1)
                    status_text = gr.Textbox(
                        label="상태",
                        interactive=False,
                        value="🚀 준비 완료"
                    )
                # Session management
                with gr.Group(elem_classes=["session-section"]):
                    gr.Markdown("### 💾 이전 세션 재개")
                    session_dropdown = gr.Dropdown(
                        label="세션 선택",
                        choices=[],
                        interactive=True
                    )
                    with gr.Row():
                        refresh_btn = gr.Button("🔄 목록 새로고침", scale=1)
                        resume_btn = gr.Button("▶️ 선택 재개", variant="secondary", scale=1)
                        auto_recover_btn = gr.Button("♻️ 자동 복구", scale=1)
            with gr.Column(scale=2):
                with gr.Tab("📝 생성 과정"):
                    stages_display = gr.Markdown(
                        value="생성 과정이 여기에 표시됩니다...",
                        elem_id="stages-display"
                    )
                with gr.Tab("📖 완성된 소설"):
                    novel_output = gr.Markdown(
                        value="완성된 소설이 여기에 표시됩니다...",
                        elem_id="novel-output"
                    )
                    with gr.Group(elem_classes=["download-section"]):
                        gr.Markdown("### 📥 소설 다운로드")
                        with gr.Row():
                            format_select = gr.Radio(
                                choices=["DOCX", "TXT"],
                                value="DOCX" if DOCX_AVAILABLE else "TXT",
                                label="형식"
                            )
                            download_btn = gr.Button("⬇️ 다운로드", variant="secondary")
                        download_file = gr.File(
                            label="다운로드된 파일",
                            visible=False
                        )
        # Hidden state
        novel_text_state = gr.State("")
        # Examples
        with gr.Row():
            gr.Examples(
                examples=[
                    ["미래 도시에서 기억을 거래하는 상인의 이야기"],
                    ["시간이 거꾸로 흐르는 마을의 미스터리"],
                    ["A scientist discovers a portal to parallel universes"],
                    ["In a world where dreams can be traded, a dream thief's story"],
                    ["Two AI entities fall in love while preventing a cyber war"],
                    ["책 속으로 들어갈 수 있는 능력을 가진 사서의 모험"]
                ],
                inputs=query_input,
                label="💡 예시 주제"
            )
        # Event handlers
        def refresh_sessions():
            try:
                sessions = get_active_sessions("English")
                return gr.update(choices=sessions)
            except Exception as e:
                logger.error(f"Error refreshing sessions: {str(e)}")
                return gr.update(choices=[])

        def handle_auto_recover(language):
            session_id, message = auto_recover_session(language)
            return session_id

        # Wire up events
        submit_btn.click(
            fn=process_query,
            inputs=[query_input, language_select, current_session_id],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        novel_output.change(
            fn=lambda x: x,
            inputs=[novel_output],
            outputs=[novel_text_state]
        )
        resume_btn.click(
            fn=lambda x: x,
            inputs=[session_dropdown],
            outputs=[current_session_id]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        auto_recover_btn.click(
            fn=handle_auto_recover,
            inputs=[language_select],
            outputs=[current_session_id]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        refresh_btn.click(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )
        clear_btn.click(
            fn=lambda: ("", "", "🚀 준비 완료", "", None),
            outputs=[stages_display, novel_output, status_text, novel_text_state, current_session_id]
        )

        def handle_download(format_type, language, session_id, novel_text):
            if not session_id:
                return gr.update(visible=False)
            file_path = download_novel(novel_text, format_type, language, session_id)
            if file_path:
                return gr.update(value=file_path, visible=True)
            else:
                return gr.update(visible=False)

        download_btn.click(
            fn=handle_download,
            inputs=[format_select, language_select, current_session_id, novel_text_state],
            outputs=[download_file]
        )
        # Load the session list on startup
        interface.load(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )
    return interface
# Main entry point
if __name__ == "__main__":
    logger.info("Starting AI collaborative novel writing system...")
    logger.info("=" * 60)
    # Environment checks
    logger.info(f"API endpoint: {API_URL}")
    if BRAVE_SEARCH_API_KEY:
        logger.info("Web search is enabled.")
    else:
        logger.warning("Web search is disabled.")
    if DOCX_AVAILABLE:
        logger.info("DOCX export is enabled.")
    else:
        logger.warning("DOCX export is disabled.")
    logger.info("=" * 60)
    # Initialize the database
    logger.info("Initializing database...")
    NovelDatabase.init_db()
    logger.info("Database initialization complete.")
    # Create and launch the interface
    interface = create_interface()
    interface.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        debug=True
    )