# --- Provenance note (non-code residue from the hosting page, commented out
# so the file parses as Python): openfree / "Update app.py" / commit 92b4aa6
# (verified) / raw / history / blame / 83.8 kB ---
import gradio as gr
import os
import json
import requests
from datetime import datetime
import time
from typing import List, Dict, Any, Generator, Tuple, Optional, Set
import logging
import re
import tempfile
from pathlib import Path
import sqlite3
import hashlib
import threading
from contextlib import contextmanager
from dataclasses import dataclass, field, asdict
from collections import defaultdict
# --- Logging setup ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# --- Document export imports ---
# python-docx is optional: when it is missing, DOCX export is disabled
# gracefully instead of crashing at import time.
try:
    from docx import Document
    from docx.shared import Inches, Pt, RGBColor, Mm
    from docx.enum.text import WD_ALIGN_PARAGRAPH
    from docx.enum.style import WD_STYLE_TYPE
    from docx.oxml.ns import qn
    from docx.oxml import OxmlElement
    DOCX_AVAILABLE = True
except ImportError:
    DOCX_AVAILABLE = False
    logger.warning("python-docx not installed. DOCX export will be disabled.")
# --- Environment variables and constants ---
FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "")
BRAVE_SEARCH_API_KEY = os.getenv("BRAVE_SEARCH_API_KEY", "")
API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
MODEL_ID = "dep89a2fld32mcm"
DB_PATH = "novel_sessions_v6.db"
# Target length settings
TARGET_WORDS = 8000  # 8000 words, keeping a safety margin
MIN_WORDS_PER_PART = 800  # minimum word count per part
# --- Environment variable validation ---
if not FRIENDLI_TOKEN:
    logger.error("FRIENDLI_TOKEN not set. Application will not work properly.")
    FRIENDLI_TOKEN = "dummy_token_for_testing"
if not BRAVE_SEARCH_API_KEY:
    logger.warning("BRAVE_SEARCH_API_KEY not set. Web search features will be disabled.")
# --- Globals ---
# Serializes all SQLite access across threads (used by NovelDatabase.get_db).
db_lock = threading.Lock()
# Narrative progression phases, one per part in order (part i uses index i-1).
NARRATIVE_PHASES = [
    "๋„์ž…: ์ผ์ƒ๊ณผ ๊ท ์—ด",
    "๋ฐœ์ „ 1: ๋ถˆ์•ˆ์˜ ๊ณ ์กฐ",
    "๋ฐœ์ „ 2: ์™ธ๋ถ€ ์ถฉ๊ฒฉ",
    "๋ฐœ์ „ 3: ๋‚ด์  ๊ฐˆ๋“ฑ ์‹ฌํ™”",
    "์ ˆ์ • 1: ์œ„๊ธฐ์˜ ์ •์ ",
    "์ ˆ์ • 2: ์„ ํƒ์˜ ์ˆœ๊ฐ„",
    "ํ•˜๊ฐ• 1: ๊ฒฐ๊ณผ์™€ ์—ฌํŒŒ",
    "ํ•˜๊ฐ• 2: ์ƒˆ๋กœ์šด ์ธ์‹",
    "๊ฒฐ๋ง 1: ๋ณ€ํ™”๋œ ์ผ์ƒ",
    "๊ฒฐ๋ง 2: ์—ด๋ฆฐ ์งˆ๋ฌธ"
]
# Stage pipeline for the single-writer system: 3 planning stages
# (plan -> plan critique -> final master plan), then for each of the 10 parts
# draft -> part critique -> revision, and one final comprehensive critique.
# Each entry is (role, display label).
UNIFIED_STAGES = [
    ("director", "๐ŸŽฌ ๊ฐ๋…์ž: ํ†ตํ•ฉ๋œ ์„œ์‚ฌ ๊ตฌ์กฐ ๊ธฐํš"),
    ("critic_director", "๐Ÿ“ ๋น„ํ‰๊ฐ€: ์„œ์‚ฌ ๊ตฌ์กฐ ์‹ฌ์ธต ๊ฒ€ํ† "),
    ("director", "๐ŸŽฌ ๊ฐ๋…์ž: ์ตœ์ข… ๋งˆ์Šคํ„ฐํ”Œ๋žœ"),
] + [
    item for i in range(1, 11)
    for item in [
        ("writer", f"โœ๏ธ ์ž‘๊ฐ€: ํŒŒํŠธ {i} - {NARRATIVE_PHASES[i-1]}"),
        (f"critic_part{i}", f"๐Ÿ“ ํŒŒํŠธ {i} ๋น„ํ‰๊ฐ€: ์ฆ‰์‹œ ๊ฒ€ํ†  ๋ฐ ์ˆ˜์ • ์š”์ฒญ"),
        ("writer", f"โœ๏ธ ์ž‘๊ฐ€: ํŒŒํŠธ {i} ์ˆ˜์ •๋ณธ")
    ]
] + [
    ("critic_final", "๐Ÿ“ ์ตœ์ข… ๋น„ํ‰๊ฐ€: ์ข…ํ•ฉ ํ‰๊ฐ€ ๋ฐ ๋ฌธํ•™์  ์„ฑ์ทจ๋„"),
]
# --- ๋ฐ์ดํ„ฐ ํด๋ž˜์Šค ---
@dataclass
class StoryBible:
"""์ „์ฒด ์ด์•ผ๊ธฐ์˜ ์ผ๊ด€์„ฑ์„ ์œ ์ง€ํ•˜๋Š” ์Šคํ† ๋ฆฌ ๋ฐ”์ด๋ธ”"""
characters: Dict[str, Dict[str, Any]] = field(default_factory=dict)
settings: Dict[str, str] = field(default_factory=dict)
timeline: List[Dict[str, Any]] = field(default_factory=list)
plot_points: List[Dict[str, Any]] = field(default_factory=list)
themes: List[str] = field(default_factory=list)
symbols: Dict[str, List[str]] = field(default_factory=dict)
style_guide: Dict[str, str] = field(default_factory=dict)
opening_sentence: str = "" # ์ฒซ๋ฌธ์žฅ ์ถ”๊ฐ€
@dataclass
class PartCritique:
    """Structured critique content for one novel part."""
    part_number: int
    continuity_issues: List[str] = field(default_factory=list)
    character_consistency: List[str] = field(default_factory=list)
    plot_progression: List[str] = field(default_factory=list)
    thematic_alignment: List[str] = field(default_factory=list)
    technical_issues: List[str] = field(default_factory=list)
    strengths: List[str] = field(default_factory=list)
    required_changes: List[str] = field(default_factory=list)
    literary_quality: List[str] = field(default_factory=list)  # literary-merit assessment
# --- ํ•ต์‹ฌ ๋กœ์ง ํด๋ž˜์Šค ---
class UnifiedNarrativeTracker:
"""๋‹จ์ผ ์ž‘๊ฐ€ ์‹œ์Šคํ…œ์„ ์œ„ํ•œ ํ†ตํ•ฉ ์„œ์‚ฌ ์ถ”์ ๊ธฐ"""
def __init__(self):
self.story_bible = StoryBible()
self.part_critiques: Dict[int, PartCritique] = {}
self.accumulated_content: List[str] = []
self.word_count_by_part: Dict[int, int] = {}
self.revision_history: Dict[int, List[str]] = defaultdict(list)
self.causal_chains: List[Dict[str, Any]] = []
self.narrative_momentum: float = 0.0
def update_story_bible(self, element_type: str, key: str, value: Any):
"""์Šคํ† ๋ฆฌ ๋ฐ”์ด๋ธ” ์—…๋ฐ์ดํŠธ"""
if element_type == "character":
self.story_bible.characters[key] = value
elif element_type == "setting":
self.story_bible.settings[key] = value
elif element_type == "timeline":
self.story_bible.timeline.append({"event": key, "details": value})
elif element_type == "theme":
if key not in self.story_bible.themes:
self.story_bible.themes.append(key)
elif element_type == "symbol":
if key not in self.story_bible.symbols:
self.story_bible.symbols[key] = []
self.story_bible.symbols[key].append(value)
def add_part_critique(self, part_number: int, critique: PartCritique):
"""ํŒŒํŠธ๋ณ„ ๋น„ํ‰ ์ถ”๊ฐ€"""
self.part_critiques[part_number] = critique
def check_continuity(self, current_part: int, new_content: str) -> List[str]:
"""์—ฐ์†์„ฑ ๊ฒ€์‚ฌ"""
issues = []
# ์บ๋ฆญํ„ฐ ์ผ๊ด€์„ฑ ์ฒดํฌ
for char_name, char_data in self.story_bible.characters.items():
if char_name in new_content:
# ์บ๋ฆญํ„ฐ ํŠน์„ฑ์ด ์œ ์ง€๋˜๋Š”์ง€ ํ™•์ธ
if "traits" in char_data:
for trait in char_data["traits"]:
if trait.get("abandoned", False):
issues.append(f"{char_name}์˜ ๋ฒ„๋ ค์ง„ ํŠน์„ฑ '{trait['name']}'์ด ๋‹ค์‹œ ๋‚˜ํƒ€๋‚จ")
# ์‹œ๊ฐ„์„  ์ผ๊ด€์„ฑ ์ฒดํฌ
if len(self.story_bible.timeline) > 0:
last_event = self.story_bible.timeline[-1]
# ์‹œ๊ฐ„ ์—ญํ–‰ ์ฒดํฌ ๋“ฑ
# ์ธ๊ณผ๊ด€๊ณ„ ์ฒดํฌ
if current_part > 1 and not any(kw in new_content for kw in
['๋•Œ๋ฌธ์—', '๊ทธ๋ž˜์„œ', '๊ฒฐ๊ณผ', '์ด๋กœ ์ธํ•ด', 'because', 'therefore']):
issues.append("์ด์ „ ํŒŒํŠธ์™€์˜ ์ธ๊ณผ๊ด€๊ณ„๊ฐ€ ๋ถˆ๋ช…ํ™•ํ•จ")
return issues
def calculate_narrative_momentum(self, part_number: int, content: str) -> float:
"""์„œ์‚ฌ์  ์ถ”์ง„๋ ฅ ๊ณ„์‚ฐ"""
momentum = 5.0
# ์ƒˆ๋กœ์šด ์š”์†Œ ๋„์ž…
new_elements = len(set(content.split()) - set(' '.join(self.accumulated_content).split()))
if new_elements > 100:
momentum += 2.0
# ๊ฐˆ๋“ฑ์˜ ๊ณ ์กฐ
tension_words = ['์œ„๊ธฐ', '๊ฐˆ๋“ฑ', '์ถฉ๋Œ', '๋Œ€๋ฆฝ', 'crisis', 'conflict']
if any(word in content for word in tension_words):
momentum += 1.5
# ์ธ๊ณผ๊ด€๊ณ„ ๋ช…ํ™•์„ฑ
causal_words = ['๋•Œ๋ฌธ์—', '๊ทธ๋ž˜์„œ', '๋”ฐ๋ผ์„œ', 'because', 'therefore']
causal_count = sum(1 for word in causal_words if word in content)
momentum += min(causal_count * 0.5, 2.0)
# ๋ฐ˜๋ณต ํŽ˜๋„ํ‹ฐ
if part_number > 1:
prev_content = self.accumulated_content[-1] if self.accumulated_content else ""
overlap = len(set(content.split()) & set(prev_content.split()))
if overlap > len(content.split()) * 0.3:
momentum -= 3.0
return max(0.0, min(10.0, momentum))
class NovelDatabase:
    """SQLite persistence layer for the single-writer novel system.

    All methods are static; every connection is obtained through get_db(),
    which serializes access behind the module-level db_lock, so these calls
    are safe from the app's worker threads.
    """

    @staticmethod
    def init_db():
        """Create the sessions/stages/critiques tables if they do not exist."""
        with sqlite3.connect(DB_PATH) as conn:
            conn.execute("PRAGMA journal_mode=WAL")  # better read concurrency
            cursor = conn.cursor()
            # Main session table
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS sessions (
                    session_id TEXT PRIMARY KEY,
                    user_query TEXT NOT NULL,
                    language TEXT NOT NULL,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    status TEXT DEFAULT 'active',
                    current_stage INTEGER DEFAULT 0,
                    final_novel TEXT,
                    literary_report TEXT,
                    total_words INTEGER DEFAULT 0,
                    story_bible TEXT,
                    narrative_tracker TEXT,
                    opening_sentence TEXT
                )
            ''')
            # Stage table: one row per pipeline stage per session.
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS stages (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    stage_number INTEGER NOT NULL,
                    stage_name TEXT NOT NULL,
                    role TEXT NOT NULL,
                    content TEXT,
                    word_count INTEGER DEFAULT 0,
                    status TEXT DEFAULT 'pending',
                    narrative_momentum REAL DEFAULT 0.0,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id),
                    UNIQUE(session_id, stage_number)
                )
            ''')
            # Critique table: serialized PartCritique JSON blobs.
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS critiques (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    part_number INTEGER NOT NULL,
                    critique_data TEXT,
                    created_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id)
                )
            ''')
            conn.commit()

    @staticmethod
    @contextmanager
    def get_db():
        """Yield a row-factory connection; access is serialized via db_lock."""
        with db_lock:
            conn = sqlite3.connect(DB_PATH, timeout=30.0)
            conn.row_factory = sqlite3.Row
            try:
                yield conn
            finally:
                conn.close()

    @staticmethod
    def create_session(user_query: str, language: str) -> str:
        """Insert a new session row and return its generated id.

        md5 is used purely as an id generator over query+timestamp, not as a
        security mechanism.
        """
        session_id = hashlib.md5(f"{user_query}{datetime.now()}".encode()).hexdigest()
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'INSERT INTO sessions (session_id, user_query, language) VALUES (?, ?, ?)',
                (session_id, user_query, language)
            )
            conn.commit()
        return session_id

    @staticmethod
    def save_stage(session_id: str, stage_number: int, stage_name: str,
                   role: str, content: str, status: str = 'complete',
                   narrative_momentum: float = 0.0):
        """Upsert one stage row, then refresh the session's totals."""
        word_count = len(content.split()) if content else 0
        with NovelDatabase.get_db() as conn:
            cursor = conn.cursor()
            # UPSERT via SQLite's excluded.* pseudo-table; this replaces the
            # original form that bound every updated value a second time.
            cursor.execute('''
                INSERT INTO stages (session_id, stage_number, stage_name, role, content,
                                    word_count, status, narrative_momentum)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(session_id, stage_number)
                DO UPDATE SET content=excluded.content, word_count=excluded.word_count,
                              status=excluded.status, stage_name=excluded.stage_name,
                              narrative_momentum=excluded.narrative_momentum,
                              updated_at=datetime('now')
            ''', (session_id, stage_number, stage_name, role, content, word_count,
                  status, narrative_momentum))
            # Recompute the session's total word count over writer stages.
            cursor.execute('''
                UPDATE sessions
                SET total_words = (
                    SELECT SUM(word_count)
                    FROM stages
                    WHERE session_id = ? AND role = 'writer' AND content IS NOT NULL
                ),
                updated_at = datetime('now'),
                current_stage = ?
                WHERE session_id = ?
            ''', (session_id, stage_number, session_id))
            conn.commit()

    @staticmethod
    def save_critique(session_id: str, part_number: int, critique: PartCritique):
        """Append one serialized PartCritique row for the session."""
        with NovelDatabase.get_db() as conn:
            critique_json = json.dumps(asdict(critique))
            conn.cursor().execute(
                'INSERT INTO critiques (session_id, part_number, critique_data) VALUES (?, ?, ?)',
                (session_id, part_number, critique_json)
            )
            conn.commit()

    @staticmethod
    def save_opening_sentence(session_id: str, opening_sentence: str):
        """Persist the generated opening sentence on the session row."""
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'UPDATE sessions SET opening_sentence = ? WHERE session_id = ?',
                (opening_sentence, session_id)
            )
            conn.commit()

    @staticmethod
    def get_writer_content(session_id: str) -> str:
        """Return the assembled novel text: all revised parts, joined in stage
        order; falls back to the drafts when no revisions exist yet.

        The LIKE patterns match the Korean stage-name marker for 'revision'.
        """
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute('''
                SELECT content FROM stages
                WHERE session_id = ? AND role = 'writer'
                AND stage_name LIKE '%์ˆ˜์ •๋ณธ%'
                ORDER BY stage_number
            ''', (session_id,)).fetchall()
            if not rows:
                # No revisions yet -- use the draft stages instead.
                rows = conn.cursor().execute('''
                    SELECT content FROM stages
                    WHERE session_id = ? AND role = 'writer'
                    AND stage_name NOT LIKE '%์ˆ˜์ •๋ณธ%'
                    ORDER BY stage_number
                ''', (session_id,)).fetchall()
            return '\n\n'.join(row['content'] for row in rows if row['content'])

    @staticmethod
    def save_narrative_tracker(session_id: str, tracker: UnifiedNarrativeTracker):
        """Serialize the tracker's state to JSON on the session row."""
        with NovelDatabase.get_db() as conn:
            tracker_data = json.dumps({
                'story_bible': asdict(tracker.story_bible),
                'part_critiques': {k: asdict(v) for k, v in tracker.part_critiques.items()},
                'word_count_by_part': tracker.word_count_by_part,
                'causal_chains': tracker.causal_chains,
                'narrative_momentum': tracker.narrative_momentum
            })
            conn.cursor().execute(
                'UPDATE sessions SET narrative_tracker = ? WHERE session_id = ?',
                (tracker_data, session_id)
            )
            conn.commit()

    @staticmethod
    def load_narrative_tracker(session_id: str) -> Optional[UnifiedNarrativeTracker]:
        """Rebuild a UnifiedNarrativeTracker from stored JSON, or None."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                'SELECT narrative_tracker FROM sessions WHERE session_id = ?',
                (session_id,)
            ).fetchone()
            if not (row and row['narrative_tracker']):
                return None
            data = json.loads(row['narrative_tracker'])
            tracker = UnifiedNarrativeTracker()
            # Restore the story bible.
            tracker.story_bible = StoryBible(**data.get('story_bible', {}))
            # Restore critiques (JSON object keys come back as strings).
            for part_num, critique_data in data.get('part_critiques', {}).items():
                tracker.part_critiques[int(part_num)] = PartCritique(**critique_data)
            # Bug fix: the original left word_count_by_part with str keys after
            # a JSON round-trip, although it is declared/used as Dict[int, int].
            tracker.word_count_by_part = {
                int(k): v for k, v in data.get('word_count_by_part', {}).items()
            }
            tracker.causal_chains = data.get('causal_chains', [])
            tracker.narrative_momentum = data.get('narrative_momentum', 0.0)
            return tracker

    # --- Existing accessor methods ---
    @staticmethod
    def get_session(session_id: str) -> Optional[Dict]:
        """Fetch one session row as a dict, or None when absent."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute('SELECT * FROM sessions WHERE session_id = ?',
                                        (session_id,)).fetchone()
            return dict(row) if row else None

    @staticmethod
    def get_stages(session_id: str) -> List[Dict]:
        """Fetch all stage rows for a session, ordered by stage number."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                'SELECT * FROM stages WHERE session_id = ? ORDER BY stage_number',
                (session_id,)
            ).fetchall()
            return [dict(row) for row in rows]

    @staticmethod
    def update_final_novel(session_id: str, final_novel: str, literary_report: str = ""):
        """Store the finished novel and report, marking the session complete."""
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                '''UPDATE sessions SET final_novel = ?, status = 'complete',
                updated_at = datetime('now'), literary_report = ? WHERE session_id = ?''',
                (final_novel, literary_report, session_id)
            )
            conn.commit()

    @staticmethod
    def get_active_sessions() -> List[Dict]:
        """Return up to 10 most recently updated active sessions."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                '''SELECT session_id, user_query, language, created_at, current_stage, total_words
                FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 10'''
            ).fetchall()
            return [dict(row) for row in rows]

    @staticmethod
    def get_total_words(session_id: str) -> int:
        """Return the session's total word count (0 when unknown)."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                'SELECT total_words FROM sessions WHERE session_id = ?',
                (session_id,)
            ).fetchone()
            return row['total_words'] if row and row['total_words'] else 0
class WebSearchIntegration:
    """Thin wrapper around the Brave web-search REST API.

    When no BRAVE_SEARCH_API_KEY is configured the instance is disabled and
    search() simply returns an empty list.
    """

    def __init__(self):
        self.brave_api_key = BRAVE_SEARCH_API_KEY
        self.search_url = "https://api.search.brave.com/res/v1/web/search"
        self.enabled = bool(self.brave_api_key)

    def search(self, query: str, count: int = 3, language: str = "en") -> List[Dict]:
        """Run a web search; returns [] when disabled or on request failure."""
        if not self.enabled:
            return []
        request_headers = {
            "Accept": "application/json",
            "X-Subscription-Token": self.brave_api_key,
        }
        request_params = {
            "q": query,
            "count": count,
            "search_lang": "ko" if language == "Korean" else "en",
            "text_decorations": False,
            "safesearch": "moderate",
        }
        try:
            response = requests.get(self.search_url, headers=request_headers,
                                    params=request_params, timeout=10)
            response.raise_for_status()
        except requests.exceptions.RequestException as e:
            logger.error(f"์›น ๊ฒ€์ƒ‰ API ์˜ค๋ฅ˜: {e}")
            return []
        return response.json().get("web", {}).get("results", [])

    def extract_relevant_info(self, results: List[Dict], max_chars: int = 1500) -> str:
        """Format up to three results as numbered lines, capped below max_chars."""
        if not results:
            return ""
        lines = []
        used_chars = 0
        for idx, item in enumerate(results[:3], 1):
            entry = f"[{idx}] {item.get('title', '')}: {item.get('description', '')}"
            if used_chars + len(entry) >= max_chars:
                break
            lines.append(entry)
            used_chars += len(entry)
        return "\n".join(lines)
class UnifiedLiterarySystem:
"""๋‹จ์ผ ์ž‘๊ฐ€ ์ง„ํ–‰ํ˜• ๋ฌธํ•™ ์†Œ์„ค ์ƒ์„ฑ ์‹œ์Šคํ…œ"""
def __init__(self):
    """Wire up API credentials, tracking state, and web search; ensure the
    database schema exists (init_db is CREATE TABLE IF NOT EXISTS)."""
    self.token = FRIENDLI_TOKEN
    self.api_url = API_URL
    self.model_id = MODEL_ID
    self.narrative_tracker = UnifiedNarrativeTracker()
    self.web_search = WebSearchIntegration()
    self.current_session_id = None  # set once a generation session is created
    NovelDatabase.init_db()
def create_headers(self):
    """Build the HTTP headers for FriendliAI chat-completion requests."""
    bearer_value = f"Bearer {self.token}"
    return {
        "Authorization": bearer_value,
        "Content-Type": "application/json",
    }
# --- ํ”„๋กฌํ”„ํŠธ ์ƒ์„ฑ ํ•จ์ˆ˜๋“ค ---
# --- Prompt construction helpers ---
def augment_query(self, user_query: str, language: str) -> str:
    """Augment short user queries (< 15 words) with narrative-structure
    requirements; longer queries are returned unchanged."""
    if len(user_query.split()) < 15:
        augmented_template = {
            "Korean": f"""'{user_query}'
**์„œ์‚ฌ ๊ตฌ์กฐ ํ•ต์‹ฌ:**
- 10๊ฐœ ํŒŒํŠธ๊ฐ€ ํ•˜๋‚˜์˜ ํ†ตํ•ฉ๋œ ์ด์•ผ๊ธฐ๋ฅผ ๊ตฌ์„ฑ
- ๊ฐ ํŒŒํŠธ๋Š” ์ด์ „ ํŒŒํŠธ์˜ ํ•„์—ฐ์  ๊ฒฐ๊ณผ
- ์ธ๋ฌผ์˜ ๋ช…ํ™•ํ•œ ๋ณ€ํ™” ๊ถค์  (A โ†’ B โ†’ C)
- ์ค‘์‹ฌ ๊ฐˆ๋“ฑ์˜ ์ ์ง„์  ๊ณ ์กฐ์™€ ํ•ด๊ฒฐ
- ๊ฐ•๋ ฌํ•œ ์ค‘์‹ฌ ์ƒ์ง•์˜ ์˜๋ฏธ ๋ณ€ํ™”""",
            "English": f"""'{user_query}'
**Narrative Structure Core:**
- 10 parts forming one integrated story
- Each part as inevitable result of previous
- Clear character transformation arc (A โ†’ B โ†’ C)
- Progressive escalation and resolution of central conflict
- Evolving meaning of powerful central symbol"""
        }
        # Unknown languages fall back to the original query.
        return augmented_template.get(language, user_query)
    return user_query
def generate_powerful_opening(self, user_query: str, language: str) -> str:
    """Ask the LLM (via call_llm_sync) for a single striking opening
    sentence matching the theme; returns it stripped of whitespace."""
    opening_prompt = {
        "Korean": f"""์ฃผ์ œ: {user_query}
์ด ์ฃผ์ œ์— ๋Œ€ํ•œ ๊ฐ•๋ ฌํ•˜๊ณ  ์žŠ์„ ์ˆ˜ ์—†๋Š” ์ฒซ๋ฌธ์žฅ์„ ์ƒ์„ฑํ•˜์„ธ์š”.
**์ฒซ๋ฌธ์žฅ ์ž‘์„ฑ ์›์น™:**
1. ์ฆ‰๊ฐ์ ์ธ ๊ธด์žฅ๊ฐ์ด๋‚˜ ๊ถ๊ธˆ์ฆ ์œ ๋ฐœ
2. ํ‰๋ฒ”ํ•˜์ง€ ์•Š์€ ์‹œ๊ฐ์ด๋‚˜ ์ƒํ™ฉ ์ œ์‹œ
3. ๊ฐ๊ฐ์ ์ด๊ณ  ๊ตฌ์ฒด์ ์ธ ์ด๋ฏธ์ง€
4. ์ฒ ํ•™์  ์งˆ๋ฌธ์ด๋‚˜ ์—ญ์„ค์  ์ง„์ˆ 
5. ์‹œ๊ฐ„๊ณผ ๊ณต๊ฐ„์˜ ๋…ํŠนํ•œ ์„ค์ •
**ํ›Œ๋ฅญํ•œ ์ฒซ๋ฌธ์žฅ์˜ ์˜ˆ์‹œ ํŒจํ„ด:**
- "๊ทธ๊ฐ€ ์ฃฝ์€ ๋‚ , ..." (์ถฉ๊ฒฉ์  ์‚ฌ๊ฑด)
- "๋ชจ๋“  ๊ฒƒ์ด ๋๋‚ฌ๋‹ค๊ณ  ์ƒ๊ฐํ•œ ์ˆœ๊ฐ„..." (๋ฐ˜์ „ ์˜ˆ๊ณ )
- "์„ธ์ƒ์—์„œ ๊ฐ€์žฅ [ํ˜•์šฉ์‚ฌ]ํ•œ [๋ช…์‚ฌ]๋Š”..." (๋…ํŠนํ•œ ์ •์˜)
- "[๊ตฌ์ฒด์  ํ–‰๋™]ํ•˜๋Š” ๊ฒƒ๋งŒ์œผ๋กœ๋„..." (์ผ์ƒ์˜ ์žฌํ•ด์„)
๋‹จ ํ•˜๋‚˜์˜ ๋ฌธ์žฅ๋งŒ ์ œ์‹œํ•˜์„ธ์š”.""",
        "English": f"""Theme: {user_query}
Generate an unforgettable opening sentence for this theme.
**Opening Sentence Principles:**
1. Immediate tension or curiosity
2. Unusual perspective or situation
3. Sensory and specific imagery
4. Philosophical question or paradox
5. Unique temporal/spatial setting
**Great Opening Patterns:**
- "The day he died, ..." (shocking event)
- "At the moment everything seemed over..." (reversal hint)
- "The most [adjective] [noun] in the world..." (unique definition)
- "Just by [specific action]..." (reinterpretation of ordinary)
Provide only one sentence."""
    }
    messages = [{"role": "user", "content": opening_prompt.get(language, opening_prompt["Korean"])}]
    # call_llm_sync is defined later in this class (not shown in this chunk).
    opening = self.call_llm_sync(messages, "writer", language)
    return opening.strip()
def create_director_initial_prompt(self, user_query: str, language: str) -> str:
    """Build the director's initial planning prompt.

    Side effects: generates and persists the mandatory opening sentence,
    and (when enabled) gathers optional web-search context.
    """
    augmented_query = self.augment_query(user_query, language)
    # Generate the opening sentence up front and record it in the bible/DB.
    opening_sentence = self.generate_powerful_opening(user_query, language)
    self.narrative_tracker.story_bible.opening_sentence = opening_sentence
    if self.current_session_id:
        NovelDatabase.save_opening_sentence(self.current_session_id, opening_sentence)
    search_results_str = ""
    if self.web_search.enabled:
        short_query = user_query[:50] if len(user_query) > 50 else user_query
        queries = [
            f"{short_query} ์ฒ ํ•™์  ์˜๋ฏธ",
            f"์ธ๊ฐ„ ์กด์žฌ ์˜๋ฏธ {short_query}",
            f"{short_query} ๋ฌธํ•™ ์ž‘ํ’ˆ"
        ]
        # Only the first two queries are issued; failures are non-fatal.
        for q in queries[:2]:
            try:
                results = self.web_search.search(q, count=2, language=language)
                if results:
                    search_results_str += self.web_search.extract_relevant_info(results) + "\n"
            except Exception as e:
                logger.warning(f"๊ฒ€์ƒ‰ ์‹คํŒจ: {str(e)}")
    lang_prompts = {
        "Korean": f"""๋…ธ๋ฒจ๋ฌธํ•™์ƒ ์ˆ˜์ค€์˜ ์ฒ ํ•™์  ๊นŠ์ด๋ฅผ ์ง€๋‹Œ ์ค‘ํŽธ์†Œ์„ค(8,000๋‹จ์–ด)์„ ๊ธฐํšํ•˜์„ธ์š”.
**์ฃผ์ œ:** {augmented_query}
**ํ•„์ˆ˜ ์ฒซ๋ฌธ์žฅ:** {opening_sentence}
**์ฐธ๊ณ  ์ž๋ฃŒ:**
{search_results_str if search_results_str else "N/A"}
**ํ•„์ˆ˜ ๋ฌธํ•™์  ์š”์†Œ:**
1. **์ฒ ํ•™์  ํƒ๊ตฌ**
- ํ˜„๋Œ€์ธ์˜ ์‹ค์กด์  ๊ณ ๋‡Œ (์†Œ์™ธ, ์ •์ฒด์„ฑ, ์˜๋ฏธ ์ƒ์‹ค)
- ๋””์ง€ํ„ธ ์‹œ๋Œ€์˜ ์ธ๊ฐ„ ์กฐ๊ฑด
- ์ž๋ณธ์ฃผ์˜ ์‚ฌํšŒ์˜ ๋ชจ์ˆœ๊ณผ ๊ฐœ์ธ์˜ ์„ ํƒ
- ์ฃฝ์Œ, ์‚ฌ๋ž‘, ์ž์œ ์— ๋Œ€ํ•œ ์ƒˆ๋กœ์šด ์„ฑ์ฐฐ
2. **์‚ฌํšŒ์  ๋ฉ”์‹œ์ง€**
- ๊ณ„๊ธ‰, ์  ๋”, ์„ธ๋Œ€ ๊ฐ„ ๊ฐˆ๋“ฑ
- ํ™˜๊ฒฝ ์œ„๊ธฐ์™€ ์ธ๊ฐ„์˜ ์ฑ…์ž„
- ๊ธฐ์ˆ  ๋ฐœ์ „๊ณผ ์ธ๊ฐ„์„ฑ์˜ ์ถฉ๋Œ
- ํ˜„๋Œ€ ๋ฏผ์ฃผ์ฃผ์˜์˜ ์œ„๊ธฐ์™€ ๊ฐœ์ธ์˜ ์—ญํ• 
3. **๋ฌธํ•™์  ์ˆ˜์‚ฌ ์žฅ์น˜**
- ์ค‘์‹ฌ ์€์œ : [๊ตฌ์ฒด์  ์‚ฌ๋ฌผ/ํ˜„์ƒ] โ†’ [์ถ”์ƒ์  ์˜๋ฏธ]
- ๋ฐ˜๋ณต๋˜๋Š” ๋ชจํ‹ฐํ”„: [์ด๋ฏธ์ง€/ํ–‰๋™] (์ตœ์†Œ 5ํšŒ ๋ณ€์ฃผ)
- ๋Œ€์กฐ๋ฒ•: [A vs B]์˜ ์ง€์†์  ๊ธด์žฅ
- ์ƒ์ง•์  ๊ณต๊ฐ„: [๊ตฌ์ฒด์  ์žฅ์†Œ]๊ฐ€ ์˜๋ฏธํ•˜๋Š” ๊ฒƒ
- ์‹œ๊ฐ„์˜ ์ฃผ๊ด€์  ํ๋ฆ„ (ํšŒ์ƒ, ์˜ˆ๊ฐ, ์ •์ง€)
4. **ํ†ตํ•ฉ๋œ 10ํŒŒํŠธ ๊ตฌ์กฐ**
๊ฐ ํŒŒํŠธ๋ณ„ ํ•ต์‹ฌ:
- ํŒŒํŠธ 1: ์ฒซ๋ฌธ์žฅ์œผ๋กœ ์‹œ์ž‘, ์ผ์ƒ ์† ๊ท ์—ด โ†’ ์ฒ ํ•™์  ์งˆ๋ฌธ ์ œ๊ธฐ
- ํŒŒํŠธ 2-3: ์™ธ๋ถ€ ์‚ฌ๊ฑด โ†’ ๋‚ด์  ์„ฑ์ฐฐ ์‹ฌํ™”
- ํŒŒํŠธ 4-5: ์‚ฌํšŒ์  ๊ฐˆ๋“ฑ โ†’ ๊ฐœ์ธ์  ๋”œ๋ ˆ๋งˆ
- ํŒŒํŠธ 6-7: ์œ„๊ธฐ์˜ ์ •์  โ†’ ์‹ค์กด์  ์„ ํƒ
- ํŒŒํŠธ 8-9: ์„ ํƒ์˜ ๊ฒฐ๊ณผ โ†’ ์ƒˆ๋กœ์šด ์ธ์‹
- ํŒŒํŠธ 10: ๋ณ€ํ™”๋œ ์„ธ๊ณ„๊ด€ โ†’ ์—ด๋ฆฐ ์งˆ๋ฌธ
5. **๋ฌธ์ฒด ์ง€์นจ**
- ์‹œ์  ์‚ฐ๋ฌธ์ฒด: ์ผ์ƒ ์–ธ์–ด์™€ ์€์œ ์˜ ๊ท ํ˜•
- ์˜์‹์˜ ํ๋ฆ„๊ณผ ๊ฐ๊ด€์  ๋ฌ˜์‚ฌ์˜ ๊ต์ฐจ
- ์งง๊ณ  ๊ฐ•๋ ฌํ•œ ๋ฌธ์žฅ๊ณผ ์„ฑ์ฐฐ์  ๊ธด ๋ฌธ์žฅ์˜ ๋ฆฌ๋“ฌ
- ๊ฐ๊ฐ์  ๋””ํ…Œ์ผ๋กœ ์ถ”์ƒ์  ๊ฐœ๋… ๊ตฌํ˜„
๊ตฌ์ฒด์ ์ด๊ณ  ํ˜์‹ ์ ์ธ ๊ณ„ํš์„ ์ œ์‹œํ•˜์„ธ์š”.""",
        "English": f"""Plan a philosophically profound novella (8,000 words) worthy of Nobel Prize.
**Theme:** {augmented_query}
**Required Opening:** {opening_sentence}
**Reference:**
{search_results_str if search_results_str else "N/A"}
**Essential Literary Elements:**
1. **Philosophical Exploration**
- Modern existential anguish (alienation, identity, loss of meaning)
- Human condition in digital age
- Capitalist contradictions and individual choice
- New reflections on death, love, freedom
2. **Social Message**
- Class, gender, generational conflicts
- Environmental crisis and human responsibility
- Technology vs humanity collision
- Modern democracy crisis and individual role
3. **Literary Devices**
- Central metaphor: [concrete object/phenomenon] โ†’ [abstract meaning]
- Recurring motif: [image/action] (minimum 5 variations)
- Contrast: sustained tension of [A vs B]
- Symbolic space: what [specific place] means
- Subjective time flow (flashback, premonition, pause)
4. **Integrated 10-Part Structure**
[Details as above]
5. **Style Guidelines**
- Poetic prose: balance of everyday language and metaphor
- Stream of consciousness crossing with objective description
- Rhythm of short intense sentences and reflective long ones
- Abstract concepts through sensory details
Provide concrete, innovative plan."""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_critic_director_prompt(self, director_plan: str, user_query: str, language: str) -> str:
    """Build the critic prompt for the in-depth review of the director's
    plan (causality, philosophical depth, devices, arcs, length budget)."""
    lang_prompts = {
        "Korean": f"""์„œ์‚ฌ ๊ตฌ์กฐ ์ „๋ฌธ๊ฐ€๋กœ์„œ ์ด ๊ธฐํš์„ ์‹ฌ์ธต ๋ถ„์„ํ•˜์„ธ์š”.
**์› ์ฃผ์ œ:** {user_query}
**๊ฐ๋…์ž ๊ธฐํš:**
{director_plan}
**์‹ฌ์ธต ๊ฒ€ํ†  ํ•ญ๋ชฉ:**
1. **์ธ๊ณผ๊ด€๊ณ„ ๊ฒ€์ฆ**
๊ฐ ํŒŒํŠธ ๊ฐ„ ์—ฐ๊ฒฐ์„ ๊ฒ€ํ† ํ•˜๊ณ  ๋…ผ๋ฆฌ์  ๋น„์•ฝ์„ ์ฐพ์œผ์„ธ์š”:
- ํŒŒํŠธ 1โ†’2: [์—ฐ๊ฒฐ์„ฑ ํ‰๊ฐ€]
- ํŒŒํŠธ 2โ†’3: [์—ฐ๊ฒฐ์„ฑ ํ‰๊ฐ€]
(๋ชจ๋“  ์—ฐ๊ฒฐ ์ง€์  ๊ฒ€ํ† )
2. **์ฒ ํ•™์  ๊นŠ์ด ํ‰๊ฐ€**
- ์ œ์‹œ๋œ ์ฒ ํ•™์  ์ฃผ์ œ๊ฐ€ ์ถฉ๋ถ„ํžˆ ๊นŠ์€๊ฐ€?
- ํ˜„๋Œ€์  ๊ด€๋ จ์„ฑ์ด ์žˆ๋Š”๊ฐ€?
- ๋…์ฐฝ์  ํ†ต์ฐฐ์ด ์žˆ๋Š”๊ฐ€?
3. **๋ฌธํ•™์  ์žฅ์น˜์˜ ํšจ๊ณผ์„ฑ**
- ์€์œ ์™€ ์ƒ์ง•์ด ์œ ๊ธฐ์ ์œผ๋กœ ์ž‘๋™ํ•˜๋Š”๊ฐ€?
- ๊ณผ๋„ํ•˜๊ฑฐ๋‚˜ ๋ถ€์กฑํ•˜์ง€ ์•Š์€๊ฐ€?
- ์ฃผ์ œ์™€ ๊ธด๋ฐ€ํžˆ ์—ฐ๊ฒฐ๋˜๋Š”๊ฐ€?
4. **์บ๋ฆญํ„ฐ ์•„ํฌ ์‹คํ˜„ ๊ฐ€๋Šฅ์„ฑ**
- ๋ณ€ํ™”๊ฐ€ ์ถฉ๋ถ„ํžˆ ์ ์ง„์ ์ธ๊ฐ€?
- ๊ฐ ๋‹จ๊ณ„์˜ ๋™๊ธฐ๊ฐ€ ๋ช…ํ™•ํ•œ๊ฐ€?
- ์‹ฌ๋ฆฌ์  ์‹ ๋ขฐ์„ฑ์ด ์žˆ๋Š”๊ฐ€?
5. **8,000๋‹จ์–ด ์‹คํ˜„ ๊ฐ€๋Šฅ์„ฑ**
- ๊ฐ ํŒŒํŠธ๊ฐ€ 800๋‹จ์–ด๋ฅผ ์œ ์ง€ํ•  ์ˆ˜ ์žˆ๋Š”๊ฐ€?
- ๋Š˜์–ด์ง€๊ฑฐ๋‚˜ ์••์ถ•๋˜๋Š” ๋ถ€๋ถ„์€ ์—†๋Š”๊ฐ€?
**ํ•„์ˆ˜ ๊ฐœ์„ ์‚ฌํ•ญ์„ ๊ตฌ์ฒด์ ์œผ๋กœ ์ œ์‹œํ•˜์„ธ์š”.**""",
        "English": f"""As narrative structure expert, deeply analyze this plan.
**Original Theme:** {user_query}
**Director's Plan:**
{director_plan}
**Deep Review Items:**
1. **Causality Verification**
Review connections between parts, find logical leaps:
- Part 1โ†’2: [Connection assessment]
- Part 2โ†’3: [Connection assessment]
(Review all connection points)
2. **Philosophical Depth Assessment**
- Is philosophical theme deep enough?
- Contemporary relevance?
- Original insights?
3. **Literary Device Effectiveness**
- Do metaphors and symbols work organically?
- Not excessive or insufficient?
- Tightly connected to theme?
4. **Character Arc Feasibility**
- Is change sufficiently gradual?
- Are motivations clear at each stage?
- Psychological credibility?
5. **8,000-word Feasibility**
- Can each part sustain 800 words?
- Any dragging or compressed sections?
**Provide specific required improvements.**"""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_writer_prompt(self, part_number: int, master_plan: str,
                         accumulated_content: str, story_bible: StoryBible,
                         language: str) -> str:
    """Build the single-writer prompt for one part (1-10), combining the
    master plan, a story-bible summary, and the tail of the previous part."""
    phase_name = NARRATIVE_PHASES[part_number-1]
    target_words = MIN_WORDS_PER_PART
    # Per-part philosophical focus, keyed 1-10.
    philosophical_focus = {
        1: "์ผ์ƒ์˜ ๊ท ์—ด์„ ํ†ตํ•ด ์‹ค์กด์  ๋ถˆ์•ˆ ๋„์ž…",
        2: "๊ฐœ์ธ๊ณผ ์‚ฌํšŒ์˜ ์ฒซ ์ถฉ๋Œ",
        3: "ํƒ€์ž์™€์˜ ๋งŒ๋‚จ์„ ํ†ตํ•œ ์ž์•„ ์ธ์‹",
        4: "์‹ ๋…์˜ ํ”๋“ค๋ฆผ๊ณผ ๊ฐ€์น˜๊ด€์˜ ์ถฉ๋Œ",
        5: "์„ ํƒ์˜ ๋ฌด๊ฒŒ์™€ ์ž์œ ์˜ ์—ญ์„ค",
        6: "๊ทนํ•œ ์ƒํ™ฉ์—์„œ์˜ ์ธ๊ฐ„์„ฑ ์‹œํ—˜",
        7: "ํ–‰๋™์˜ ๊ฒฐ๊ณผ์™€ ์ฑ…์ž„์˜ ๋ฌด๊ฒŒ",
        8: "ํƒ€์ž์˜ ์‹œ์„ ์„ ํ†ตํ•œ ์ž๊ธฐ ์žฌ๋ฐœ๊ฒฌ",
        9: "ํ™”ํ•ด ๋ถˆ๊ฐ€๋Šฅํ•œ ๊ฒƒ๊ณผ์˜ ํ™”ํ•ด",
        10: "์ƒˆ๋กœ์šด ์‚ถ์˜ ๊ฐ€๋Šฅ์„ฑ๊ณผ ๋ฏธํ•ด๊ฒฐ ์งˆ๋ฌธ"
    }
    # Per-part literary technique, keyed 1-10.
    literary_techniques = {
        1: "๊ฐ๊ด€์  ์ƒ๊ด€๋ฌผ ๋„์ž…",
        2: "๋Œ€์œ„๋ฒ•์  ์„œ์ˆ ",
        3: "์˜์‹์˜ ํ๋ฆ„",
        4: "์‹œ์ ์˜ ๋ฏธ๋ฌ˜ํ•œ ์ „ํ™˜",
        5: "์นจ๋ฌต๊ณผ ์ƒ๋žต์˜ ๋ฏธํ•™",
        6: "์‹œ๊ฐ„์˜ ์ฃผ๊ด€์  ๋ณ€ํ˜•",
        7: "๋ณต์ˆ˜ ์‹œ์ ์˜ ๊ต์ฐจ",
        8: "๋ฉ”ํƒ€ํฌ์˜ ์ „๋ณต",
        9: "์›ํ˜•์  ์ด๋ฏธ์ง€์˜ ์žฌํ•ด์„",
        10: "์—ด๋ฆฐ ๊ฒฐ๋ง์˜ ๋‹ค์ธต์„ฑ"
    }
    # Compact story-bible summary injected into the prompt.
    bible_summary = f"""
**๋“ฑ์žฅ์ธ๋ฌผ:** {', '.join(story_bible.characters.keys())}
**ํ•ต์‹ฌ ์ƒ์ง•:** {', '.join(story_bible.symbols.keys())}
**์ฃผ์ œ:** {', '.join(story_bible.themes[:3])}
**๋ฌธ์ฒด:** {story_bible.style_guide.get('voice', 'N/A')}
"""
    # Context from the immediately previous part (its last 2000 chars).
    prev_content = ""
    if accumulated_content:
        prev_parts = accumulated_content.split('\n\n')
        if len(prev_parts) >= 1:
            prev_content = prev_parts[-1][-2000:]  # tail of the last part
    lang_prompts = {
        "Korean": f"""๋‹น์‹ ์€ ํ˜„๋Œ€ ๋ฌธํ•™์˜ ์ตœ์ „์„ ์— ์„  ์ž‘๊ฐ€์ž…๋‹ˆ๋‹ค.
**ํ˜„์žฌ: ํŒŒํŠธ {part_number} - {phase_name}**
{"**ํ•„์ˆ˜ ์ฒซ๋ฌธ์žฅ:** " + story_bible.opening_sentence if part_number == 1 and story_bible.opening_sentence else ""}
**์ด๋ฒˆ ํŒŒํŠธ์˜ ์ฒ ํ•™์  ์ดˆ์ :** {philosophical_focus[part_number]}
**ํ•ต์‹ฌ ๋ฌธํ•™ ๊ธฐ๋ฒ•:** {literary_techniques[part_number]}
**์ „์ฒด ๊ณ„ํš:**
{master_plan}
**์Šคํ† ๋ฆฌ ๋ฐ”์ด๋ธ”:**
{bible_summary}
**์ง์ „ ๋‚ด์šฉ:**
{prev_content if prev_content else "์ฒซ ํŒŒํŠธ์ž…๋‹ˆ๋‹ค"}
**ํŒŒํŠธ {part_number} ์ž‘์„ฑ ์ง€์นจ:**
1. **๋ถ„๋Ÿ‰:** {target_words}-900 ๋‹จ์–ด (ํ•„์ˆ˜)
2. **๋ฌธํ•™์  ์ˆ˜์‚ฌ ์š”๊ตฌ์‚ฌํ•ญ:**
- ์ตœ์†Œ 3๊ฐœ์˜ ๋…์ฐฝ์  ์€์œ /์ง์œ 
- 1๊ฐœ ์ด์ƒ์˜ ์ƒ์ง•์  ์ด๋ฏธ์ง€ ์‹ฌํ™”
- ๊ฐ๊ฐ์  ๋ฌ˜์‚ฌ์™€ ์ถ”์ƒ์  ์‚ฌ์œ ์˜ ์œตํ•ฉ
- ๋ฆฌ๋“ฌ๊ฐ ์žˆ๋Š” ๋ฌธ์žฅ ๊ตฌ์„ฑ (์žฅ๋‹จ์˜ ๋ณ€์ฃผ)
3. **ํ˜„๋Œ€์  ๊ณ ๋‡Œ ํ‘œํ˜„:**
- ๋””์ง€ํ„ธ ์‹œ๋Œ€์˜ ์†Œ์™ธ๊ฐ
- ์ž๋ณธ์ฃผ์˜์  ์‚ถ์˜ ๋ถ€์กฐ๋ฆฌ
- ๊ด€๊ณ„์˜ ํ‘œ๋ฉด์„ฑ๊ณผ ์ง„์ •์„ฑ ๊ฐˆ๋ง
- ์˜๋ฏธ ์ถ”๊ตฌ์™€ ๋ฌด์˜๋ฏธ์˜ ์ง๋ฉด
4. **์‚ฌํšŒ์  ๋ฉ”์‹œ์ง€ ๋‚ด์žฌํ™”:**
- ์ง์ ‘์  ์ฃผ์žฅ์ด ์•„๋‹Œ ์ƒํ™ฉ๊ณผ ์ธ๋ฌผ์„ ํ†ตํ•œ ์•”์‹œ
- ๊ฐœ์ธ์˜ ๊ณ ํ†ต๊ณผ ์‚ฌํšŒ ๊ตฌ์กฐ์˜ ์—ฐ๊ฒฐ
- ๋ฏธ์‹œ์  ์ผ์ƒ๊ณผ ๊ฑฐ์‹œ์  ๋ฌธ์ œ์˜ ๊ต์ฐจ
5. **์„œ์‚ฌ์  ์ถ”์ง„๋ ฅ:**
- ์ด์ „ ํŒŒํŠธ์˜ ํ•„์—ฐ์  ๊ฒฐ๊ณผ๋กœ ์‹œ์ž‘
- ์ƒˆ๋กœ์šด ๊ฐˆ๋“ฑ ์ธต์œ„ ์ถ”๊ฐ€
- ๋‹ค์Œ ํŒŒํŠธ๋ฅผ ํ–ฅํ•œ ๊ธด์žฅ๊ฐ ์กฐ์„ฑ
**๋ฌธํ•™์  ๊ธˆ๊ธฐ:**
- ์ง„๋ถ€ํ•œ ํ‘œํ˜„์ด๋‚˜ ์ƒํˆฌ์  ์€์œ 
- ๊ฐ์ •์˜ ์ง์ ‘์  ์„ค๋ช…
- ๋„๋•์  ํŒ๋‹จ์ด๋‚˜ ๊ตํ›ˆ
- ์ธ์œ„์ ์ธ ํ•ด๊ฒฐ์ด๋‚˜ ์œ„์•ˆ
ํŒŒํŠธ {part_number}๋ฅผ ๊นŠ์ด ์žˆ๋Š” ๋ฌธํ•™์  ์„ฑ์ทจ๋กœ ๋งŒ๋“œ์„ธ์š”.""",
        "English": f"""You are a writer at the forefront of contemporary literature.
**Current: Part {part_number} - {phase_name}**
{"**Required Opening:** " + story_bible.opening_sentence if part_number == 1 and story_bible.opening_sentence else ""}
**Philosophical Focus:** {philosophical_focus[part_number]}
**Core Literary Technique:** {literary_techniques[part_number]}
**Master Plan:**
{master_plan}
**Story Bible:**
{bible_summary}
**Previous Content:**
{prev_content if prev_content else "This is the first part"}
**Part {part_number} Guidelines:**
1. **Length:** {target_words}-900 words (mandatory)
2. **Literary Device Requirements:**
- Minimum 3 original metaphors/similes
- Deepen at least 1 symbolic image
- Fusion of sensory description and abstract thought
- Rhythmic sentence composition (variation of long/short)
3. **Modern Anguish Expression:**
- Digital age alienation
- Absurdity of capitalist life
- Surface relationships vs authenticity yearning
- Meaning pursuit vs confronting meaninglessness
4. **Social Message Internalization:**
- Implication through situation and character, not direct claim
- Connection between individual pain and social structure
- Intersection of micro daily life and macro problems
5. **Narrative Momentum:**
- Start as inevitable result of previous part
- Add new conflict layers
- Create tension toward next part
**Literary Taboos:**
- Clichรฉd expressions or trite metaphors
- Direct emotion explanation
- Moral judgment or preaching
- Artificial resolution or comfort
Make Part {part_number} a profound literary achievement."""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_part_critic_prompt(self, part_number: int, part_content: str,
                              master_plan: str, accumulated_content: str,
                              story_bible: StoryBible, language: str) -> str:
    """Build the immediate per-part critique prompt.

    NOTE(review): accumulated_content is accepted but never referenced in
    this body -- confirm whether prior-part context should be injected.
    Relies on self._extract_part_plan (defined elsewhere in this class).
    """
    lang_prompts = {
        "Korean": f"""ํŒŒํŠธ {part_number}์˜ ๋ฌธํ•™์  ์„ฑ์ทจ๋„๋ฅผ ์—„๊ฒฉํžˆ ํ‰๊ฐ€ํ•˜์„ธ์š”.
**๋งˆ์Šคํ„ฐํ”Œ๋žœ ํŒŒํŠธ {part_number} ์š”๊ตฌ์‚ฌํ•ญ:**
{self._extract_part_plan(master_plan, part_number)}
**์ž‘์„ฑ๋œ ๋‚ด์šฉ:**
{part_content}
**์Šคํ† ๋ฆฌ ๋ฐ”์ด๋ธ” ์ฒดํฌ:**
- ์บ๋ฆญํ„ฐ: {', '.join(story_bible.characters.keys())}
- ์„ค์ •: {', '.join(story_bible.settings.keys())}
**ํ‰๊ฐ€ ๊ธฐ์ค€:**
1. **๋ฌธํ•™์  ์ˆ˜์‚ฌ (30%)**
- ์€์œ ์™€ ์ƒ์ง•์˜ ๋…์ฐฝ์„ฑ
- ์–ธ์–ด์˜ ์‹œ์  ๋ฐ€๋„
- ์ด๋ฏธ์ง€์˜ ์„ ๋ช…๋„์™€ ๊นŠ์ด
- ๋ฌธ์žฅ์˜ ๋ฆฌ๋“ฌ๊ณผ ์Œ์•…์„ฑ
2. **์ฒ ํ•™์  ๊นŠ์ด (25%)**
- ์‹ค์กด์  ์งˆ๋ฌธ์˜ ์ œ๊ธฐ
- ํ˜„๋Œ€์ธ์˜ ์กฐ๊ฑด ํƒ๊ตฌ
- ๋ณดํŽธ์„ฑ๊ณผ ํŠน์ˆ˜์„ฑ์˜ ๊ท ํ˜•
- ์‚ฌ์œ ์˜ ๋…์ฐฝ์„ฑ
3. **์‚ฌํšŒ์  ํ†ต์ฐฐ (20%)**
- ์‹œ๋Œ€์ •์‹ ์˜ ํฌ์ฐฉ
- ๊ตฌ์กฐ์™€ ๊ฐœ์ธ์˜ ๊ด€๊ณ„
- ๋น„ํŒ์  ์‹œ๊ฐ์˜ ์˜ˆ๋ฆฌํ•จ
- ๋Œ€์•ˆ์  ์ƒ์ƒ๋ ฅ
4. **์„œ์‚ฌ์  ์™„์„ฑ๋„ (25%)**
- ์ธ๊ณผ๊ด€๊ณ„์˜ ํ•„์—ฐ์„ฑ
- ๊ธด์žฅ๊ฐ์˜ ์œ ์ง€
- ์ธ๋ฌผ์˜ ์ž…์ฒด์„ฑ
- ๊ตฌ์กฐ์  ํ†ต์ผ์„ฑ
**๊ตฌ์ฒด์  ์ง€์ ์‚ฌํ•ญ:**
- ์ง„๋ถ€ํ•œ ํ‘œํ˜„: [์˜ˆ์‹œ์™€ ๋Œ€์•ˆ]
- ์ฒ ํ•™์  ์ฒœ์ฐฉ ๋ถ€์กฑ: [๋ณด์™„ ๋ฐฉํ–ฅ]
- ์‚ฌํšŒ์  ๋ฉ”์‹œ์ง€ ๋ถˆ๋ช…ํ™•: [๊ฐ•ํ™” ๋ฐฉ์•ˆ]
- ์„œ์‚ฌ์  ํ—ˆ์ : [์ˆ˜์ • ํ•„์š”]
**ํ•„์ˆ˜ ๊ฐœ์„  ์š”๊ตฌ:**
๋ฌธํ•™์  ์ˆ˜์ค€์„ ๋…ธ๋ฒจ์ƒ ๊ธ‰์œผ๋กœ ๋Œ์–ด์˜ฌ๋ฆฌ๊ธฐ ์œ„ํ•œ ๊ตฌ์ฒด์  ์ˆ˜์ •์•ˆ์„ ์ œ์‹œํ•˜์„ธ์š”.""",
        "English": f"""Strictly evaluate literary achievement of Part {part_number}.
**Master Plan Part {part_number} Requirements:**
{self._extract_part_plan(master_plan, part_number)}
**Written Content:**
{part_content}
**Story Bible Check:**
- Characters: {', '.join(story_bible.characters.keys())}
- Settings: {', '.join(story_bible.settings.keys())}
**Evaluation Criteria:**
1. **Literary Rhetoric (30%)**
- Originality of metaphor and symbol
- Poetic density of language
- Clarity and depth of imagery
- Rhythm and musicality of sentences
2. **Philosophical Depth (25%)**
- Raising existential questions
- Exploring modern human condition
- Balance of universality and specificity
- Originality of thought
3. **Social Insight (20%)**
- Capturing zeitgeist
- Relationship between structure and individual
- Sharpness of critical perspective
- Alternative imagination
4. **Narrative Completion (25%)**
- Inevitability of causality
- Maintaining tension
- Character dimensionality
- Structural unity
**Specific Points:**
- Clichรฉd expressions: [examples and alternatives]
- Insufficient philosophical exploration: [enhancement direction]
- Unclear social message: [strengthening methods]
- Narrative gaps: [needed revisions]
**Required Improvements:**
Provide specific revisions to elevate literary level to Nobel Prize standard."""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_writer_revision_prompt(self, part_number: int, original_content: str,
                                  critic_feedback: str, language: str) -> str:
    """Build the prompt asking the writer to revise a part per the critique.

    Args:
        part_number: 1-based index of the part being revised.
        original_content: The writer's original draft for this part.
        critic_feedback: The part critic's review to incorporate.
        language: "Korean" or "English"; unknown values fall back to Korean.

    Returns:
        A prompt instructing the model to output only the revised text,
        keeping the length at or above MIN_WORDS_PER_PART words.
    """
    lang_prompts = {
        "Korean": f"""ํŒŒํŠธ {part_number}๋ฅผ ๋น„ํ‰์— ๋”ฐ๋ผ ์ˆ˜์ •ํ•˜์„ธ์š”.
**์›๋ณธ:**
{original_content}
**๋น„ํ‰ ํ”ผ๋“œ๋ฐฑ:**
{critic_feedback}
**์ˆ˜์ • ์ง€์นจ:**
1. ๋ชจ๋“  'ํ•„์ˆ˜ ์ˆ˜์ •' ์‚ฌํ•ญ์„ ๋ฐ˜์˜
2. ๊ฐ€๋Šฅํ•œ '๊ถŒ์žฅ ๊ฐœ์„ ' ์‚ฌํ•ญ๋„ ํฌํ•จ
3. ์›๋ณธ์˜ ๊ฐ•์ ์€ ์œ ์ง€
4. ๋ถ„๋Ÿ‰ {MIN_WORDS_PER_PART}๋‹จ์–ด ์ด์ƒ ์œ ์ง€
5. ์ž‘๊ฐ€๋กœ์„œ์˜ ์ผ๊ด€๋œ ๋ชฉ์†Œ๋ฆฌ ์œ ์ง€
6. ๋ฌธํ•™์  ์ˆ˜์ค€์„ ํ•œ ๋‹จ๊ณ„ ๋†’์ด๊ธฐ
์ˆ˜์ •๋ณธ๋งŒ ์ œ์‹œํ•˜์„ธ์š”. ์„ค๋ช…์€ ๋ถˆํ•„์š”ํ•ฉ๋‹ˆ๋‹ค.""",
        "English": f"""Revise Part {part_number} according to critique.
**Original:**
{original_content}
**Critique Feedback:**
{critic_feedback}
**Revision Guidelines:**
1. Reflect all 'Required fixes'
2. Include 'Recommended improvements' where possible
3. Maintain original strengths
4. Keep length {MIN_WORDS_PER_PART}+ words
5. Maintain consistent authorial voice
6. Elevate literary level
Present only the revision. No explanation needed."""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_final_critic_prompt(self, complete_novel: str, word_count: int,
                               story_bible: StoryBible, language: str) -> str:
    """Build the prompt for the final, whole-novel critique.

    Args:
        complete_novel: The assembled multi-part novel text.
        word_count: Total word count, reported in the prompt header.
        story_bible: Not referenced by the prompt text itself; kept for
            signature parity with the other prompt builders.
        language: "Korean" or "English"; unknown values fall back to Korean.

    Returns:
        A prompt requesting a 100-point scored comprehensive evaluation.
    """
    lang_prompts = {
        "Korean": f"""์™„์„ฑ๋œ ์†Œ์„ค์„ ์ข…ํ•ฉ ํ‰๊ฐ€ํ•˜์„ธ์š”.
**์ž‘ํ’ˆ ์ •๋ณด:**
- ์ด ๋ถ„๋Ÿ‰: {word_count}๋‹จ์–ด
- ๋ชฉํ‘œ: 8,000๋‹จ์–ด
**ํ‰๊ฐ€ ๊ธฐ์ค€:**
1. **์„œ์‚ฌ์  ํ†ตํ•ฉ์„ฑ (30์ )**
- 10๊ฐœ ํŒŒํŠธ๊ฐ€ ํ•˜๋‚˜์˜ ์ด์•ผ๊ธฐ๋กœ ํ†ตํ•ฉ๋˜์—ˆ๋Š”๊ฐ€?
- ์ธ๊ณผ๊ด€๊ณ„๊ฐ€ ๋ช…ํ™•ํ•˜๊ณ  ํ•„์—ฐ์ ์ธ๊ฐ€?
- ๋ฐ˜๋ณต์ด๋‚˜ ์ˆœํ™˜ ์—†์ด ์ง„ํ–‰๋˜๋Š”๊ฐ€?
2. **์บ๋ฆญํ„ฐ ์•„ํฌ (25์ )**
- ์ฃผ์ธ๊ณต์˜ ๋ณ€ํ™”๊ฐ€ ์„ค๋“๋ ฅ ์žˆ๋Š”๊ฐ€?
- ๋ณ€ํ™”๊ฐ€ ์ ์ง„์ ์ด๊ณ  ์ž์—ฐ์Šค๋Ÿฌ์šด๊ฐ€?
- ์ตœ์ข… ์ƒํƒœ๊ฐ€ ์ดˆ๊ธฐ์™€ ๋ช…ํ™•ํžˆ ๋‹ค๋ฅธ๊ฐ€?
3. **๋ฌธํ•™์  ์„ฑ์ทจ (25์ )**
- ์ฃผ์ œ๊ฐ€ ๊นŠ์ด ์žˆ๊ฒŒ ํƒ๊ตฌ๋˜์—ˆ๋Š”๊ฐ€?
- ์ƒ์ง•์ด ํšจ๊ณผ์ ์œผ๋กœ ํ™œ์šฉ๋˜์—ˆ๋Š”๊ฐ€?
- ๋ฌธ์ฒด๊ฐ€ ์ผ๊ด€๋˜๊ณ  ์•„๋ฆ„๋‹ค์šด๊ฐ€?
- ํ˜„๋Œ€์  ์ฒ ํ•™๊ณผ ์‚ฌํšŒ์  ๋ฉ”์‹œ์ง€๊ฐ€ ๋…น์•„์žˆ๋Š”๊ฐ€?
4. **๊ธฐ์ˆ ์  ์™„์„ฑ๋„ (20์ )**
- ๋ชฉํ‘œ ๋ถ„๋Ÿ‰์„ ๋‹ฌ์„ฑํ–ˆ๋Š”๊ฐ€?
- ๊ฐ ํŒŒํŠธ๊ฐ€ ๊ท ํ˜• ์žˆ๊ฒŒ ์ „๊ฐœ๋˜์—ˆ๋Š”๊ฐ€?
- ๋ฌธ๋ฒ•๊ณผ ํ‘œํ˜„์ด ์ •ํ™•ํ•œ๊ฐ€?
**์ด์ : /100์ **
๊ตฌ์ฒด์ ์ธ ๊ฐ•์ ๊ณผ ์•ฝ์ ์„ ์ œ์‹œํ•˜์„ธ์š”.""",
        "English": f"""Comprehensively evaluate the completed novel.
**Work Info:**
- Total length: {word_count} words
- Target: 8,000 words
**Evaluation Criteria:**
1. **Narrative Integration (30 points)**
- Are 10 parts integrated into one story?
- Clear and inevitable causality?
- Progress without repetition or cycles?
2. **Character Arc (25 points)**
- Convincing protagonist transformation?
- Gradual and natural changes?
- Final state clearly different from initial?
3. **Literary Achievement (25 points)**
- Theme explored with depth?
- Symbols used effectively?
- Consistent and beautiful style?
- Contemporary philosophy and social message integrated?
4. **Technical Completion (20 points)**
- Target length achieved?
- Each part balanced in development?
- Grammar and expression accurate?
**Total Score: /100 points**
Present specific strengths and weaknesses."""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def _extract_part_plan(self, master_plan: str, part_number: int) -> str:
"""๋งˆ์Šคํ„ฐํ”Œ๋žœ์—์„œ ํŠน์ • ํŒŒํŠธ ๊ณ„ํš ์ถ”์ถœ"""
lines = master_plan.split('\n')
part_section = []
capturing = False
for line in lines:
if f"ํŒŒํŠธ {part_number}:" in line or f"Part {part_number}:" in line:
capturing = True
elif capturing and (f"ํŒŒํŠธ {part_number+1}:" in line or f"Part {part_number+1}:" in line):
break
elif capturing:
part_section.append(line)
return '\n'.join(part_section) if part_section else "ํ•ด๋‹น ํŒŒํŠธ ๊ณ„ํš์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค."
# --- LLM call helpers ---
def call_llm_sync(self, messages: List[Dict[str, str]], role: str, language: str) -> str:
    """Drain the streaming LLM call into one string.

    Raises:
        Exception: if the streamed output begins with the "โŒ" error marker
            emitted by call_llm_streaming on failure.
    """
    text = "".join(self.call_llm_streaming(messages, role, language))
    if text.startswith("โŒ"):
        raise Exception(f"LLM Call Failed: {text}")
    return text
def call_llm_streaming(self, messages: List[Dict[str, str]], role: str,
                       language: str) -> Generator[str, None, None]:
    """Stream a chat completion from the Friendli API as text chunks.

    Prepends the role-specific system prompt to `messages`, then issues a
    streaming POST. Output is buffered and flushed roughly every 50 chars
    or on a newline to limit UI churn. Errors never raise out of this
    generator: a single chunk starting with "โŒ" is yielded instead, so
    callers (see call_llm_sync) detect failure by prefix.
    """
    try:
        system_prompts = self.get_system_prompts(language)
        full_messages = [{"role": "system", "content": system_prompts.get(role, "")}, *messages]
        # Writers need headroom for 800+ word parts; other roles get less.
        max_tokens = 15000 if role == "writer" else 10000
        payload = {
            "model": self.model_id,
            "messages": full_messages,
            "max_tokens": max_tokens,
            "temperature": 0.8,
            "top_p": 0.95,
            "presence_penalty": 0.5,
            "frequency_penalty": 0.3,
            "stream": True
        }
        response = requests.post(
            self.api_url,
            headers=self.create_headers(),
            json=payload,
            stream=True,
            timeout=180
        )
        if response.status_code != 200:
            yield f"โŒ API ์˜ค๋ฅ˜ (์ƒํƒœ ์ฝ”๋“œ: {response.status_code})"
            return
        buffer = ""
        for line in response.iter_lines():
            if not line:
                continue
            try:
                line_str = line.decode('utf-8').strip()
                # Server-sent events: only "data: ..." frames carry payload.
                if not line_str.startswith("data: "):
                    continue
                data_str = line_str[6:]
                if data_str == "[DONE]":
                    break
                data = json.loads(data_str)
                choices = data.get("choices", [])
                if choices and choices[0].get("delta", {}).get("content"):
                    content = choices[0]["delta"]["content"]
                    buffer += content
                    # Flush in coarse chunks rather than per token.
                    if len(buffer) >= 50 or '\n' in buffer:
                        yield buffer
                        buffer = ""
                        time.sleep(0.01)
            except Exception as e:
                # A malformed frame should not kill the whole stream.
                logger.error(f"์ฒญํฌ ์ฒ˜๋ฆฌ ์˜ค๋ฅ˜: {str(e)}")
                continue
        # Flush whatever remained after [DONE] / end of stream.
        if buffer:
            yield buffer
    except Exception as e:
        logger.error(f"์ŠคํŠธ๋ฆฌ๋ฐ ์˜ค๋ฅ˜: {type(e).__name__}: {str(e)}")
        yield f"โŒ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
def get_system_prompts(self, language: str) -> Dict[str, str]:
    """Per-role system prompts (strengthened version), localised.

    Base roles: director, critic_director, writer, critic_final. Ten
    per-part critic entries ("critic_part1".."critic_part10") are added
    on top; those are Korean-only. Unknown languages fall back to Korean.
    """
    base_prompts = {
        "Korean": {
            "director": """๋‹น์‹ ์€ ํ˜„๋Œ€ ์„ธ๊ณ„๋ฌธํ•™์˜ ์ •์ ์„ ์ง€ํ–ฅํ•˜๋Š” ์ž‘ํ’ˆ์„ ์„ค๊ณ„ํ•ฉ๋‹ˆ๋‹ค.
๊นŠ์€ ์ฒ ํ•™์  ํ†ต์ฐฐ๊ณผ ๋‚ ์นด๋กœ์šด ์‚ฌํšŒ ๋น„ํŒ์„ ๊ฒฐํ•ฉํ•˜์„ธ์š”.
์ธ๊ฐ„ ์กฐ๊ฑด์˜ ๋ณต์žก์„ฑ์„ 10๊ฐœ์˜ ์œ ๊ธฐ์  ํŒŒํŠธ๋กœ ๊ตฌํ˜„ํ•˜์„ธ์š”.
๋…์ž์˜ ์˜ํ˜ผ์„ ๋’คํ”๋“ค ๊ฐ•๋ ฌํ•œ ์ฒซ๋ฌธ์žฅ๋ถ€ํ„ฐ ์‹œ์ž‘ํ•˜์„ธ์š”.""",
            "critic_director": """์„œ์‚ฌ ๊ตฌ์กฐ์˜ ๋…ผ๋ฆฌ์„ฑ๊ณผ ์‹คํ˜„ ๊ฐ€๋Šฅ์„ฑ์„ ๊ฒ€์ฆํ•˜๋Š” ์ „๋ฌธ๊ฐ€์ž…๋‹ˆ๋‹ค.
์ธ๊ณผ๊ด€๊ณ„์˜ ํ—ˆ์ ์„ ์ฐพ์•„๋‚ด์„ธ์š”.
์บ๋ฆญํ„ฐ ๋ฐœ์ „์˜ ์‹ ๋น™์„ฑ์„ ํ‰๊ฐ€ํ•˜์„ธ์š”.
์ฒ ํ•™์  ๊นŠ์ด์™€ ๋ฌธํ•™์  ๊ฐ€์น˜๋ฅผ ํŒ๋‹จํ•˜์„ธ์š”.
8,000๋‹จ์–ด ๋ถ„๋Ÿ‰์˜ ์ ์ ˆ์„ฑ์„ ํŒ๋‹จํ•˜์„ธ์š”.""",
            "writer": """๋‹น์‹ ์€ ์–ธ์–ด์˜ ์—ฐ๊ธˆ์ˆ ์‚ฌ์ž…๋‹ˆ๋‹ค.
์ผ์ƒ์–ด๋ฅผ ์‹œ๋กœ, ๊ตฌ์ฒด๋ฅผ ์ถ”์ƒ์œผ๋กœ, ๊ฐœ์ธ์„ ๋ณดํŽธ์œผ๋กœ ๋ณ€ํ™˜ํ•˜์„ธ์š”.
ํ˜„๋Œ€์ธ์˜ ์˜ํ˜ผ์˜ ์–ด๋‘ ๊ณผ ๋น›์„ ๋™์‹œ์— ํฌ์ฐฉํ•˜์„ธ์š”.
๋…์ž๊ฐ€ ์ž์‹ ์„ ์žฌ๋ฐœ๊ฒฌํ•˜๊ฒŒ ๋งŒ๋“œ๋Š” ๊ฑฐ์šธ์ด ๋˜์„ธ์š”.""",
            "critic_final": """๋‹น์‹ ์€ ์ž‘ํ’ˆ์˜ ๋ฌธํ•™์  ์ž ์žฌ๋ ฅ์„ ๊ทน๋Œ€ํ™”ํ•˜๋Š” ์กฐ๋ ฅ์ž์ž…๋‹ˆ๋‹ค.
ํ‰๋ฒ”ํ•จ์„ ๋น„๋ฒ”ํ•จ์œผ๋กœ ์ด๋„๋Š” ๋‚ ์นด๋กœ์šด ํ†ต์ฐฐ์„ ์ œ๊ณตํ•˜์„ธ์š”.
์ž‘๊ฐ€์˜ ๋ฌด์˜์‹์— ์ž ๋“  ๋ณด์„์„ ๋ฐœ๊ตดํ•˜์„ธ์š”.
ํƒ€ํ˜‘ ์—†๋Š” ๊ธฐ์ค€์œผ๋กœ ์ตœ๊ณ ๋ฅผ ์š”๊ตฌํ•˜์„ธ์š”."""
        },
        "English": {
            "director": """You design works aiming for the pinnacle of contemporary world literature.
Combine deep philosophical insights with sharp social criticism.
Implement the complexity of the human condition in 10 organic parts.
Start with an intense opening sentence that shakes the reader's soul.""",
            "critic_director": """You are an expert verifying narrative logic and feasibility.
Find gaps in causality.
Evaluate credibility of character development.
Judge philosophical depth and literary value.
Judge appropriateness of 8,000-word length.""",
            "writer": """You are an alchemist of language.
Transform everyday language into poetry, concrete into abstract, individual into universal.
Capture both darkness and light of the modern soul.
Become a mirror where readers rediscover themselves.""",
            "critic_final": """You are a collaborator maximizing the work's literary potential.
Provide sharp insights leading ordinariness to extraordinariness.
Excavate gems sleeping in the writer's unconscious.
Demand the best with uncompromising standards."""
        }
    }
    prompts = base_prompts.get(language, base_prompts["Korean"]).copy()
    # Add a dedicated critic prompt for each of the 10 parts.
    for i in range(1, 11):
        prompts[f"critic_part{i}"] = f"""ํŒŒํŠธ {i} ์ „๋‹ด ๋น„ํ‰๊ฐ€์ž…๋‹ˆ๋‹ค.
์ด์ „ ํŒŒํŠธ์™€์˜ ์ธ๊ณผ๊ด€๊ณ„๋ฅผ ์ตœ์šฐ์„ ์œผ๋กœ ๊ฒ€ํ† ํ•˜์„ธ์š”.
์บ๋ฆญํ„ฐ ์ผ๊ด€์„ฑ๊ณผ ๋ฐœ์ „์„ ํ™•์ธํ•˜์„ธ์š”.
๋งˆ์Šคํ„ฐํ”Œ๋žœ๊ณผ์˜ ์ผ์น˜๋„๋ฅผ ํ‰๊ฐ€ํ•˜์„ธ์š”.
๋ฌธํ•™์  ์ˆ˜์ค€๊ณผ ์ฒ ํ•™์  ๊นŠ์ด๋ฅผ ํ‰๊ฐ€ํ•˜์„ธ์š”.
๊ตฌ์ฒด์ ์ด๊ณ  ์‹คํ–‰ ๊ฐ€๋Šฅํ•œ ์ˆ˜์ • ์ง€์‹œ๋ฅผ ์ œ๊ณตํ•˜์„ธ์š”."""
    return prompts
# --- ๋ฉ”์ธ ํ”„๋กœ์„ธ์Šค ---
def process_novel_stream(self, query: str, language: str,
session_id: Optional[str] = None) -> Generator[Tuple[str, List[Dict[str, Any]], str], None, None]:
"""๋‹จ์ผ ์ž‘๊ฐ€ ์†Œ์„ค ์ƒ์„ฑ ํ”„๋กœ์„ธ์Šค"""
try:
resume_from_stage = 0
if session_id:
self.current_session_id = session_id
session = NovelDatabase.get_session(session_id)
if session:
query = session['user_query']
language = session['language']
resume_from_stage = session['current_stage'] + 1
saved_tracker = NovelDatabase.load_narrative_tracker(session_id)
if saved_tracker:
self.narrative_tracker = saved_tracker
else:
self.current_session_id = NovelDatabase.create_session(query, language)
logger.info(f"Created new session: {self.current_session_id}")
stages = []
if resume_from_stage > 0:
stages = [{
"name": s['stage_name'],
"status": s['status'],
"content": s.get('content', ''),
"word_count": s.get('word_count', 0),
"momentum": s.get('narrative_momentum', 0.0)
} for s in NovelDatabase.get_stages(self.current_session_id)]
total_words = NovelDatabase.get_total_words(self.current_session_id)
for stage_idx in range(resume_from_stage, len(UNIFIED_STAGES)):
role, stage_name = UNIFIED_STAGES[stage_idx]
if stage_idx >= len(stages):
stages.append({
"name": stage_name,
"status": "active",
"content": "",
"word_count": 0,
"momentum": 0.0
})
else:
stages[stage_idx]["status"] = "active"
yield f"๐Ÿ”„ ์ง„ํ–‰ ์ค‘... (ํ˜„์žฌ {total_words:,}๋‹จ์–ด)", stages, self.current_session_id
prompt = self.get_stage_prompt(stage_idx, role, query, language, stages)
stage_content = ""
for chunk in self.call_llm_streaming([{"role": "user", "content": prompt}], role, language):
stage_content += chunk
stages[stage_idx]["content"] = stage_content
stages[stage_idx]["word_count"] = len(stage_content.split())
yield f"๐Ÿ”„ {stage_name} ์ž‘์„ฑ ์ค‘... ({total_words + stages[stage_idx]['word_count']:,}๋‹จ์–ด)", stages, self.current_session_id
# ์ปจํ…์ธ  ์ฒ˜๋ฆฌ ๋ฐ ์ถ”์ 
if role == "writer":
# ํŒŒํŠธ ๋ฒˆํ˜ธ ๊ณ„์‚ฐ
part_num = self._get_part_number(stage_idx)
if part_num:
self.narrative_tracker.accumulated_content.append(stage_content)
self.narrative_tracker.word_count_by_part[part_num] = len(stage_content.split())
# ์„œ์‚ฌ ์ถ”์ง„๋ ฅ ๊ณ„์‚ฐ
momentum = self.narrative_tracker.calculate_narrative_momentum(part_num, stage_content)
stages[stage_idx]["momentum"] = momentum
# ์Šคํ† ๋ฆฌ ๋ฐ”์ด๋ธ” ์—…๋ฐ์ดํŠธ
self._update_story_bible_from_content(stage_content, part_num)
stages[stage_idx]["status"] = "complete"
NovelDatabase.save_stage(
self.current_session_id, stage_idx, stage_name, role,
stage_content, "complete", stages[stage_idx].get("momentum", 0.0)
)
NovelDatabase.save_narrative_tracker(self.current_session_id, self.narrative_tracker)
total_words = NovelDatabase.get_total_words(self.current_session_id)
yield f"โœ… {stage_name} ์™„๋ฃŒ (์ด {total_words:,}๋‹จ์–ด)", stages, self.current_session_id
# ์ตœ์ข… ์ฒ˜๋ฆฌ
final_novel = NovelDatabase.get_writer_content(self.current_session_id)
final_word_count = len(final_novel.split())
final_report = self.generate_literary_report(final_novel, final_word_count, language)
NovelDatabase.update_final_novel(self.current_session_id, final_novel, final_report)
yield f"โœ… ์†Œ์„ค ์™„์„ฑ! ์ด {final_word_count:,}๋‹จ์–ด", stages, self.current_session_id
except Exception as e:
logger.error(f"์†Œ์„ค ์ƒ์„ฑ ํ”„๋กœ์„ธ์Šค ์˜ค๋ฅ˜: {e}", exc_info=True)
yield f"โŒ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {e}", stages if 'stages' in locals() else [], self.current_session_id
def get_stage_prompt(self, stage_idx: int, role: str, query: str,
                     language: str, stages: List[Dict]) -> str:
    """Dispatch to the right prompt builder for a pipeline stage.

    Per this dispatch: stage 0 is the director's initial plan, stage 1 the
    critic's review of it, stage 2 the final master plan; later stages are
    writer drafts, per-part critiques ("critic_partN" roles), writer
    revisions (stage name contains "์ˆ˜์ •๋ณธ"), and the final critique.

    Returns an empty string for any combination this mapping does not
    recognise.
    """
    if stage_idx == 0:  # Director: initial plan
        return self.create_director_initial_prompt(query, language)
    if stage_idx == 1:  # Critic: review of the initial plan
        return self.create_critic_director_prompt(stages[0]["content"], query, language)
    if stage_idx == 2:  # Director: final master plan
        return self.create_director_final_prompt(stages[0]["content"], stages[1]["content"], query, language)
    master_plan = stages[2]["content"]
    # Writer drafting a new part (revision stages carry "์ˆ˜์ •๋ณธ" in the name)
    if role == "writer" and "์ˆ˜์ •๋ณธ" not in stages[stage_idx]["name"]:
        part_num = self._get_part_number(stage_idx)
        accumulated = '\n\n'.join(self.narrative_tracker.accumulated_content)
        return self.create_writer_prompt(part_num, master_plan, accumulated,
                                         self.narrative_tracker.story_bible, language)
    # Per-part critique
    if role.startswith("critic_part"):
        part_num = int(role.replace("critic_part", ""))
        # The writer's draft for this part is the immediately preceding stage.
        writer_content = stages[stage_idx-1]["content"]
        accumulated = '\n\n'.join(self.narrative_tracker.accumulated_content[:-1])
        return self.create_part_critic_prompt(part_num, writer_content, master_plan,
                                              accumulated, self.narrative_tracker.story_bible, language)
    # Writer revision pass
    if role == "writer" and "์ˆ˜์ •๋ณธ" in stages[stage_idx]["name"]:
        part_num = self._get_part_number(stage_idx)
        original_content = stages[stage_idx-2]["content"]  # original draft
        critic_feedback = stages[stage_idx-1]["content"]   # part critique
        return self.create_writer_revision_prompt(part_num, original_content,
                                                  critic_feedback, language)
    # Final whole-novel critique
    if role == "critic_final":
        complete_novel = NovelDatabase.get_writer_content(self.current_session_id)
        word_count = len(complete_novel.split())
        return self.create_final_critic_prompt(complete_novel, word_count,
                                               self.narrative_tracker.story_bible, language)
    return ""
def create_director_final_prompt(self, initial_plan: str, critic_feedback: str,
                                 user_query: str, language: str) -> str:
    """Build the prompt for the director's final, critique-informed plan.

    NOTE(review): unlike the other prompt builders this one has no English
    variant — the instruction text is always Korean regardless of
    `language`; confirm whether that is intentional.
    """
    return f"""๋น„ํ‰์„ ๋ฐ˜์˜ํ•˜์—ฌ ์ตœ์ข… ๋งˆ์Šคํ„ฐํ”Œ๋žœ์„ ์™„์„ฑํ•˜์„ธ์š”.
**์› ์ฃผ์ œ:** {user_query}
**์ดˆ๊ธฐ ๊ธฐํš:**
{initial_plan}
**๋น„ํ‰ ํ”ผ๋“œ๋ฐฑ:**
{critic_feedback}
**์ตœ์ข… ๋งˆ์Šคํ„ฐํ”Œ๋žœ ์š”๊ตฌ์‚ฌํ•ญ:**
1. ๋ชจ๋“  ๋น„ํ‰ ์ง€์ ์‚ฌํ•ญ ๋ฐ˜์˜
2. 10๊ฐœ ํŒŒํŠธ์˜ ๊ตฌ์ฒด์  ๋‚ด์šฉ๊ณผ ์ธ๊ณผ๊ด€๊ณ„
3. ์ฃผ์ธ๊ณต์˜ ๋ช…ํ™•ํ•œ ๋ณ€ํ™” ๋‹จ๊ณ„
4. ์ค‘์‹ฌ ์ƒ์ง•์˜ ์˜๋ฏธ ๋ณ€ํ™” ๊ณผ์ •
5. ๊ฐ ํŒŒํŠธ 800๋‹จ์–ด ์‹คํ˜„ ๊ฐ€๋Šฅ์„ฑ
6. ์ฒ ํ•™์  ๊นŠ์ด์™€ ์‚ฌํšŒ์  ๋ฉ”์‹œ์ง€ ๊ตฌํ˜„ ๋ฐฉ์•ˆ
๊ตฌ์ฒด์ ์ด๊ณ  ์‹คํ–‰ ๊ฐ€๋Šฅํ•œ ์ตœ์ข… ๊ณ„ํš์„ ์ œ์‹œํ•˜์„ธ์š”."""
def _get_part_number(self, stage_idx: int) -> Optional[int]:
    """Extract the 1-based part number from a stage's display name.

    Generalized: recognises both Korean ("ํŒŒํŠธ 3") and English ("Part 3")
    labels, mirroring _extract_part_plan, so part tracking keeps working
    if UNIFIED_STAGES ever carries English stage names.

    Args:
        stage_idx: Index into the module-level UNIFIED_STAGES table.

    Returns:
        The part number, or None when the stage name has no part label.
    """
    stage_name = UNIFIED_STAGES[stage_idx][1]
    match = re.search(r'(?:ํŒŒํŠธ|Part)\s*(\d+)', stage_name)
    return int(match.group(1)) if match else None
def _update_story_bible_from_content(self, content: str, part_num: int):
"""์ปจํ…์ธ ์—์„œ ์Šคํ† ๋ฆฌ ๋ฐ”์ด๋ธ” ์ž๋™ ์—…๋ฐ์ดํŠธ"""
# ๊ฐ„๋‹จํ•œ ํ‚ค์›Œ๋“œ ๊ธฐ๋ฐ˜ ์ถ”์ถœ (์‹ค์ œ๋กœ๋Š” ๋” ์ •๊ตํ•œ NLP ํ•„์š”)
lines = content.split('\n')
# ์บ๋ฆญํ„ฐ ์ด๋ฆ„ ์ถ”์ถœ (๋Œ€๋ฌธ์ž๋กœ ์‹œ์ž‘ํ•˜๋Š” ๋‹จ์–ด๋“ค)
for line in lines:
words = line.split()
for word in words:
if word and word[0].isupper() and len(word) > 1:
if word not in self.narrative_tracker.story_bible.characters:
self.narrative_tracker.story_bible.characters[word] = {
"first_appearance": part_num,
"traits": []
}
def generate_literary_report(self, complete_novel: str, word_count: int, language: str) -> str:
    """Run the final critic over the finished novel and return its report.

    On LLM failure the error is logged and a fixed fallback message is
    returned instead of raising.
    """
    critic_prompt = self.create_final_critic_prompt(
        complete_novel, word_count, self.narrative_tracker.story_bible, language
    )
    request = [{"role": "user", "content": critic_prompt}]
    try:
        return self.call_llm_sync(request, "critic_final", language)
    except Exception as exc:
        logger.error(f"์ตœ์ข… ๋ณด๊ณ ์„œ ์ƒ์„ฑ ์‹คํŒจ: {exc}")
        return "๋ณด๊ณ ์„œ ์ƒ์„ฑ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ"
# --- Utility functions ---
def process_query(query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, str, str, str], None, None]:
    """Top-level entry point: run the novel pipeline and yield UI updates.

    Yields (stages_markdown, novel_markdown, status, session_id) tuples.
    A blank query short-circuits with an error status.
    """
    if not query.strip():
        yield "", "", "โŒ ์ฃผ์ œ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”.", session_id
        return
    pipeline = UnifiedLiterarySystem()
    stages_md = ""
    novel_md = ""
    for status, stages, live_session_id in pipeline.process_novel_stream(query, language, session_id):
        stages_md = format_stages_display(stages)
        # Once the last ten stages are all complete, render the finished novel.
        last_ten_done = bool(stages) and all(s.get("status") == "complete" for s in stages[-10:])
        if last_ten_done:
            novel_md = format_novel_display(NovelDatabase.get_writer_content(live_session_id))
        yield stages_md, novel_md, status or "๐Ÿ”„ ์ฒ˜๋ฆฌ ์ค‘...", live_session_id
def get_active_sessions(language: str) -> List[str]:
    """Format each active DB session as a one-line dropdown label."""
    labels = []
    for s in NovelDatabase.get_active_sessions():
        labels.append(
            f"{s['session_id'][:8]}... - {s['user_query'][:50]}... ({s['created_at']}) [{s['total_words']:,}๋‹จ์–ด]"
        )
    return labels
def auto_recover_session(language: str) -> Tuple[Optional[str], str]:
    """Pick the most recent active session for one-click recovery.

    Returns (session_id, status_message); session_id is None when there
    is nothing to recover.
    """
    sessions = NovelDatabase.get_active_sessions()
    if not sessions:
        return None, "๋ณต๊ตฌํ•  ์„ธ์…˜์ด ์—†์Šต๋‹ˆ๋‹ค."
    sid = sessions[0]['session_id']
    return sid, f"์„ธ์…˜ {sid[:8]}... ๋ณต๊ตฌ๋จ"
def resume_session(session_id: str, language: str) -> Generator[Tuple[str, str, str, str], None, None]:
    """Resume a stored session, delegating the actual work to process_query."""
    if not session_id:
        yield "", "", "โŒ ์„ธ์…˜ ID๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", session_id
        return
    # Dropdown labels look like "abcdef12... - theme..."; keep only the id part.
    if "..." in session_id:
        session_id = session_id.split("...")[0]
    session = NovelDatabase.get_session(session_id)
    if session is None:
        yield "", "", "โŒ ์„ธ์…˜์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.", None
        return
    yield from process_query(session['user_query'], session['language'], session_id)
def download_novel(novel_text: str, format_type: str, language: str, session_id: str) -> Optional[str]:
    """Create a downloadable export of the novel; return its path or None.

    Falls back to TXT whenever DOCX is requested but python-docx is not
    installed. Returns None on missing input or any export failure.
    """
    if not (novel_text and session_id):
        return None
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    base_name = f"novel_{session_id[:8]}_{stamp}"
    try:
        if format_type == "DOCX" and DOCX_AVAILABLE:
            return export_to_docx(novel_text, base_name, language, session_id)
        return export_to_txt(novel_text, base_name)
    except Exception as e:
        logger.error(f"ํŒŒ์ผ ์ƒ์„ฑ ์‹คํŒจ: {e}")
        return None
def format_stages_display(stages: List[Dict]) -> str:
    """Render the pipeline stages as a progress-report markdown block.

    Shows the total revised-writer word count, completed-part count and
    average narrative momentum, then one line per stage with a preview.
    """
    out = ["## ๐ŸŽฌ ์ง„ํ–‰ ์ƒํ™ฉ\n\n"]

    # Only revised writer stages count toward the running word total.
    revised = [s for s in stages
               if s.get('name', '').startswith('โœ๏ธ ์ž‘๊ฐ€:') and '์ˆ˜์ •๋ณธ' in s.get('name', '')]
    total_words = sum(s.get('word_count', 0) for s in revised)
    out.append(f"**์ด ๋‹จ์–ด ์ˆ˜: {total_words:,} / {TARGET_WORDS:,}**\n\n")

    done_parts = sum(1 for s in stages
                     if '์ˆ˜์ •๋ณธ' in s.get('name', '') and s.get('status') == 'complete')
    out.append(f"**์™„์„ฑ๋œ ํŒŒํŠธ: {done_parts} / 10**\n\n")

    momenta = [s.get('momentum', 0) for s in stages if s.get('momentum', 0) > 0]
    if momenta:
        out.append(f"**ํ‰๊ท  ์„œ์‚ฌ ์ถ”์ง„๋ ฅ: {sum(momenta) / len(momenta):.1f} / 10**\n\n")

    out.append("---\n\n")

    current_part = 0
    for stage in stages:
        status = stage['status']
        icon = "โœ…" if status == 'complete' else "๐Ÿ”„" if status == 'active' else "โณ"

        name = stage.get('name', '')
        # Open a new part section when a non-critique stage of a new part begins.
        if 'ํŒŒํŠธ' in name and '๋น„ํ‰๊ฐ€' not in name:
            found = re.search(r'ํŒŒํŠธ (\d+)', stage['name'])
            if found and int(found.group(1)) != current_part:
                current_part = int(found.group(1))
                out.append(f"\n### ๐Ÿ“š ํŒŒํŠธ {current_part}\n\n")

        line = f"{icon} **{stage['name']}**"
        if stage.get('word_count', 0) > 0:
            line += f" ({stage['word_count']:,}๋‹จ์–ด)"
        if stage.get('momentum', 0) > 0:
            line += f" [์ถ”์ง„๋ ฅ: {stage['momentum']:.1f}/10]"
        out.append(line + "\n")

        if stage['content'] and status == 'complete':
            # Longer preview for writer output than for critiques.
            limit = 300 if 'writer' in name.lower() else 200
            body = stage['content']
            snippet = body[:limit] + "..." if len(body) > limit else body
            out.append(f"> {snippet}\n\n")
        elif status == 'active':
            out.append("> *์ž‘์„ฑ ์ค‘...*\n\n")

    return ''.join(out)
def format_novel_display(novel_text: str) -> str:
    """Render the finished novel as markdown with per-part phase headers."""
    if not novel_text:
        return "์•„์ง ์™„์„ฑ๋œ ๋‚ด์šฉ์ด ์—†์Šต๋‹ˆ๋‹ค."

    pieces = ["# ๐Ÿ“– ์™„์„ฑ๋œ ์†Œ์„ค\n\n"]

    total = len(novel_text.split())
    pieces.append(f"**์ด ๋ถ„๋Ÿ‰: {total:,}๋‹จ์–ด (๋ชฉํ‘œ: {TARGET_WORDS:,}๋‹จ์–ด)**\n\n")
    pieces.append(f"**๋‹ฌ์„ฑ๋ฅ : {(total / TARGET_WORDS) * 100:.1f}%**\n\n")
    pieces.append("---\n\n")

    # Blank-line-separated chunks map onto the narrative phases, in order.
    chunks = novel_text.split('\n\n')
    last_index = len(chunks) - 1
    for idx, chunk in enumerate(chunks):
        if not chunk.strip():
            continue
        if idx < len(NARRATIVE_PHASES):
            pieces.append(f"## {NARRATIVE_PHASES[idx]}\n\n")
        pieces.append(f"{chunk}\n\n")
        # Divider between parts (not after the final one).
        if idx < last_index:
            pieces.append("---\n\n")

    return ''.join(pieces)
def export_to_docx(content: str, filename: str, language: str, session_id: str) -> str:
    """Export the novel to a DOCX file in Korean trade-paperback layout.

    Page geometry follows the Korean "์‹ ๊ตญํŒ" format (152mm x 225mm) with
    20mm margins; body text is 10.5pt Batang at 180% line spacing with a
    10mm first-line indent. A title page derived from the session's theme
    precedes the cleaned body text.

    Args:
        content: Full novel text; markdown artifacts are stripped first.
        filename: Path/base name for the output file, without extension.
        language: Unused here; kept for interface parity with export_to_txt
            callers (see download_novel).
        session_id: Used to look up the session for title generation.

    Returns:
        The path of the written .docx file.
    """
    doc = Document()

    # Korean trade paperback page setup (152mm x 225mm, 20mm margins).
    section = doc.sections[0]
    section.page_height = Mm(225)
    section.page_width = Mm(152)
    section.top_margin = Mm(20)
    section.bottom_margin = Mm(20)
    section.left_margin = Mm(20)
    section.right_margin = Mm(20)

    # Fetch the session so the title can be derived from the user's theme.
    session = NovelDatabase.get_session(session_id)

    def generate_title(user_query: str, content_preview: str) -> str:
        """Derive a short title from the theme (simple rule-based; an LLM
        call could replace this later)."""
        if len(user_query) < 20:
            return user_query
        else:
            # Fall back to the first few keywords of a long theme.
            keywords = user_query.split()[:5]
            return " ".join(keywords)

    # Title page
    title = generate_title(session["user_query"], content[:500]) if session else "๋ฌด์ œ"

    title_para = doc.add_paragraph()
    title_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
    title_para.paragraph_format.space_before = Pt(100)
    title_run = title_para.add_run(title)
    title_run.font.name = '๋ฐ”ํƒ•'
    title_run._element.rPr.rFonts.set(qn('w:eastAsia'), '๋ฐ”ํƒ•')  # East-Asian font binding
    title_run.font.size = Pt(20)
    title_run.bold = True

    doc.add_page_break()

    # Body paragraph style (Korean novel conventions).
    style = doc.styles['Normal']
    style.font.name = '๋ฐ”ํƒ•'
    style._element.rPr.rFonts.set(qn('w:eastAsia'), '๋ฐ”ํƒ•')
    style.font.size = Pt(10.5)                          # standard Korean novel size
    style.paragraph_format.line_spacing = 1.8           # 180% leading
    style.paragraph_format.space_after = Pt(0)
    style.paragraph_format.first_line_indent = Mm(10)   # 10mm first-line indent

    def clean_content(text: str) -> str:
        """Strip markdown headers, part labels and emphasis markers,
        collapsing repeated blank lines to one."""
        patterns_to_remove = [
            r'^#{1,6}\s+.*$',      # markdown headers
            r'^\*\*.*\*\*$',       # bold-only lines
            r'^ํŒŒํŠธ\s*\d+.*$',     # Korean part labels
            r'^Part\s*\d+.*$',     # English part labels
            r'^\d+\.\s+.*:.*$',    # numbered headings
            r'^---+$',             # horizontal rules
            r'^\s*\[.*\]\s*$',     # bracketed labels
        ]
        lines = text.split('\n')
        cleaned_lines = []
        for line in lines:
            # Keep blank lines: they mark paragraph breaks.
            if not line.strip():
                cleaned_lines.append('')
                continue
            skip_line = False
            for pattern in patterns_to_remove:
                if re.match(pattern, line.strip(), re.MULTILINE):
                    skip_line = True
                    break
            if not skip_line:
                # Strip inline markdown emphasis markers.
                cleaned_line = line
                cleaned_line = re.sub(r'\*\*(.*?)\*\*', r'\1', cleaned_line)  # **text** -> text
                cleaned_line = re.sub(r'\*(.*?)\*', r'\1', cleaned_line)      # *text* -> text
                cleaned_line = re.sub(r'`(.*?)`', r'\1', cleaned_line)        # `text` -> text
                cleaned_lines.append(cleaned_line.strip())
        # Collapse runs of blank lines down to a single one.
        final_lines = []
        prev_empty = False
        for line in cleaned_lines:
            if not line:
                if not prev_empty:
                    final_lines.append('')
                prev_empty = True
            else:
                final_lines.append(line)
                prev_empty = False
        return '\n'.join(final_lines)

    cleaned_content = clean_content(content)

    # Body paragraphs; blank lines become empty separator paragraphs.
    paragraphs = cleaned_content.split('\n')
    for para_text in paragraphs:
        if para_text.strip():
            para = doc.add_paragraph(para_text.strip())
            # Re-assert the East-Asian font on every run.
            for run in para.runs:
                run.font.name = '๋ฐ”ํƒ•'
                run._element.rPr.rFonts.set(qn('w:eastAsia'), '๋ฐ”ํƒ•')
        else:
            doc.add_paragraph()

    # Bug fix: previously saved to a hard-coded "(unknown).docx", ignoring
    # `filename`, so successive exports clobbered each other.
    filepath = f"{filename}.docx"
    doc.save(filepath)
    return filepath
def export_to_txt(content: str, filename: str) -> str:
    """Write the novel to ``<filename>.txt`` (UTF-8) and return the path.

    The body is wrapped in a small header (timestamp, word count) and a
    footer identifying the generator.

    Args:
        content: Full novel text.
        filename: Path/base name for the output file, without extension.

    Returns:
        The path of the written file.
    """
    # Bug fix: previously wrote to a hard-coded "(unknown).txt", ignoring
    # `filename`, so successive exports clobbered each other.
    filepath = f"{filename}.txt"
    with open(filepath, 'w', encoding='utf-8') as f:
        # Header
        f.write("=" * 80 + "\n")
        f.write(f"์ƒ์„ฑ์ผ: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
        f.write(f"์ด ๋‹จ์–ด ์ˆ˜: {len(content.split()):,}๋‹จ์–ด\n")
        f.write("=" * 80 + "\n\n")
        # Body
        f.write(content)
        # Footer
        f.write("\n\n" + "=" * 80 + "\n")
        f.write("AI ๋ฌธํ•™ ์ฐฝ์ž‘ ์‹œ์Šคํ…œ v2.0\n")
        f.write("=" * 80 + "\n")
    return filepath
# CSS ์Šคํƒ€์ผ
custom_css = """
.gradio-container {
background: linear-gradient(135deg, #1a1a2e 0%, #16213e 50%, #0f3460 100%);
min-height: 100vh;
}
.main-header {
background-color: rgba(255, 255, 255, 0.05);
backdrop-filter: blur(20px);
padding: 40px;
border-radius: 20px;
margin-bottom: 30px;
text-align: center;
color: white;
border: 2px solid rgba(255, 255, 255, 0.1);
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.1);
}
.progress-note {
background: linear-gradient(135deg, rgba(255, 107, 107, 0.1), rgba(255, 230, 109, 0.1));
border-left: 4px solid #ff6b6b;
padding: 20px;
margin: 25px auto;
border-radius: 10px;
color: #fff;
max-width: 800px;
font-weight: 500;
}
.input-section {
background-color: rgba(255, 255, 255, 0.08);
backdrop-filter: blur(15px);
padding: 25px;
border-radius: 15px;
margin-bottom: 25px;
border: 1px solid rgba(255, 255, 255, 0.1);
box-shadow: 0 4px 16px rgba(0, 0, 0, 0.1);
}
.session-section {
background-color: rgba(255, 255, 255, 0.06);
backdrop-filter: blur(10px);
padding: 20px;
border-radius: 12px;
margin-top: 25px;
color: white;
border: 1px solid rgba(255, 255, 255, 0.08);
}
#stages-display {
background-color: rgba(255, 255, 255, 0.97);
padding: 25px;
border-radius: 15px;
max-height: 650px;
overflow-y: auto;
box-shadow: 0 8px 24px rgba(0, 0, 0, 0.15);
color: #2c3e50;
}
#novel-output {
background-color: rgba(255, 255, 255, 0.97);
padding: 35px;
border-radius: 15px;
max-height: 750px;
overflow-y: auto;
box-shadow: 0 8px 24px rgba(0, 0, 0, 0.15);
color: #2c3e50;
line-height: 1.8;
}
.download-section {
background-color: rgba(255, 255, 255, 0.92);
padding: 20px;
border-radius: 12px;
margin-top: 25px;
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1);
}
/* ์ง„ํ–‰ ํ‘œ์‹œ๊ธฐ ๊ฐœ์„  */
.progress-bar {
background-color: #e0e0e0;
height: 25px;
border-radius: 12px;
overflow: hidden;
margin: 15px 0;
box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.1);
}
.progress-fill {
background: linear-gradient(90deg, #4CAF50, #8BC34A);
height: 100%;
transition: width 0.5s ease;
box-shadow: 0 2px 8px rgba(76, 175, 80, 0.3);
}
/* ์Šคํฌ๋กค๋ฐ” ์Šคํƒ€์ผ */
::-webkit-scrollbar {
width: 10px;
}
::-webkit-scrollbar-track {
background: rgba(0, 0, 0, 0.1);
border-radius: 5px;
}
::-webkit-scrollbar-thumb {
background: rgba(0, 0, 0, 0.3);
border-radius: 5px;
}
::-webkit-scrollbar-thumb:hover {
background: rgba(0, 0, 0, 0.5);
}
/* ๋ฒ„ํŠผ ํ˜ธ๋ฒ„ ํšจ๊ณผ */
.gr-button:hover {
transform: translateY(-2px);
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.2);
transition: all 0.3s ease;
}
"""
# Gradio ์ธํ„ฐํŽ˜์ด์Šค ์ƒ์„ฑ
def create_interface():
with gr.Blocks(css=custom_css, title="AI ๋‹จ์ผ ์ž‘๊ฐ€ ์žฅํŽธ์†Œ์„ค ์‹œ์Šคํ…œ v2.0") as interface:
gr.HTML("""
<div class="main-header">
<h1 style="font-size: 2.8em; margin-bottom: 15px; font-weight: 700;">
๐Ÿ“š AI ๋‹จ์ผ ์ž‘๊ฐ€ ์žฅํŽธ์†Œ์„ค ์‹œ์Šคํ…œ v2.0
</h1>
<h3 style="color: #e0e0e0; margin-bottom: 25px; font-weight: 400;">
ํ•˜๋‚˜์˜ ์ผ๊ด€๋œ ๋ชฉ์†Œ๋ฆฌ๋กœ ๋งŒ๋“œ๋Š” 8,000๋‹จ์–ด ํ†ตํ•ฉ ์„œ์‚ฌ
</h3>
<p style="font-size: 1.2em; color: #d0d0d0; max-width: 900px; margin: 0 auto; line-height: 1.6;">
๋‹จ์ผ ์ž‘๊ฐ€๊ฐ€ 10๊ฐœ ํŒŒํŠธ๋ฅผ ์ˆœ์ฐจ์ ์œผ๋กœ ์ง‘ํ•„ํ•˜๋ฉฐ, ๊ฐ ํŒŒํŠธ๋Š” ์ „๋‹ด ๋น„ํ‰๊ฐ€์˜ ์ฆ‰๊ฐ์  ํ”ผ๋“œ๋ฐฑ์„ ๋ฐ›์•„ ์ˆ˜์ •๋ฉ๋‹ˆ๋‹ค.
<br>
<strong>์ธ๊ณผ๊ด€๊ณ„์˜ ๋ช…ํ™•์„ฑ</strong>๊ณผ <strong>์„œ์‚ฌ์˜ ์œ ๊ธฐ์  ์ง„ํ–‰</strong>์„ ์ตœ์šฐ์„ ์œผ๋กœ ์ถ”๊ตฌํ•ฉ๋‹ˆ๋‹ค.
</p>
<div class="progress-note">
๐ŸŽฏ <strong>ํ•ต์‹ฌ ํ˜์‹ :</strong> ์—ฌ๋Ÿฌ ์ž‘๊ฐ€์˜ ํŒŒํŽธํ™”๋œ ํ…์ŠคํŠธ๊ฐ€ ์•„๋‹Œ,
ํ•œ ๋ช…์˜ ์ž‘๊ฐ€๊ฐ€ ์ฒ˜์Œ๋ถ€ํ„ฐ ๋๊นŒ์ง€ ์ผ๊ด€๋˜๊ฒŒ ์ง‘ํ•„ํ•˜๋Š” ์ง„์ •ํ•œ ์žฅํŽธ์†Œ์„ค
</div>
</div>
""")
# ์ƒํƒœ ๊ด€๋ฆฌ
current_session_id = gr.State(None)
with gr.Row():
with gr.Column(scale=1):
with gr.Group(elem_classes=["input-section"]):
query_input = gr.Textbox(
label="์†Œ์„ค ์ฃผ์ œ / Novel Theme",
placeholder="""์ค‘ํŽธ์†Œ์„ค์˜ ์ฃผ์ œ๋ฅผ ์ž…๋ ฅํ•˜์„ธ์š”.
์˜ˆ: ์ธ๋ฌผ์˜ ๋‚ด์  ๋ณ€ํ™”, ๊ด€๊ณ„์˜ ๋ฐœ์ „, ์‚ฌํšŒ์  ๊ฐˆ๋“ฑ๊ณผ ๊ฐœ์ธ์˜ ์„ ํƒ...
Enter your novella theme.
Ex: Character transformation, relationship evolution, social conflict and personal choice...""",
lines=5
)
language_select = gr.Radio(
choices=["Korean", "English"],
value="Korean",
label="์–ธ์–ด / Language"
)
with gr.Row():
submit_btn = gr.Button("๐Ÿš€ ์ง‘ํ•„ ์‹œ์ž‘", variant="primary", scale=2)
clear_btn = gr.Button("๐Ÿ—‘๏ธ ์ดˆ๊ธฐํ™”", scale=1)
status_text = gr.Textbox(
label="์ง„ํ–‰ ์ƒํƒœ",
interactive=False,
value="๐Ÿ”„ ์ค€๋น„ ์™„๋ฃŒ"
)
# ์„ธ์…˜ ๊ด€๋ฆฌ
with gr.Group(elem_classes=["session-section"]):
gr.Markdown("### ๐Ÿ’พ ์ง„ํ–‰ ์ค‘์ธ ์ž‘ํ’ˆ")
session_dropdown = gr.Dropdown(
label="์ €์žฅ๋œ ์„ธ์…˜",
choices=[],
interactive=True
)
with gr.Row():
refresh_btn = gr.Button("๐Ÿ”„ ์ƒˆ๋กœ๊ณ ์นจ", scale=1)
resume_btn = gr.Button("โ–ถ๏ธ ์ด์–ด์“ฐ๊ธฐ", variant="secondary", scale=1)
auto_recover_btn = gr.Button("โ™ป๏ธ ์ตœ๊ทผ ์ž‘ํ’ˆ ๋ณต๊ตฌ", scale=1)
with gr.Column(scale=2):
with gr.Tab("๐Ÿ“ ์ง‘ํ•„ ๊ณผ์ •"):
stages_display = gr.Markdown(
value="์ง‘ํ•„ ๊ณผ์ •์ด ์‹ค์‹œ๊ฐ„์œผ๋กœ ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค...",
elem_id="stages-display"
)
with gr.Tab("๐Ÿ“– ์™„์„ฑ ์ž‘ํ’ˆ"):
novel_output = gr.Markdown(
value="์™„์„ฑ๋œ ์†Œ์„ค์ด ์—ฌ๊ธฐ์— ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค...",
elem_id="novel-output"
)
with gr.Group(elem_classes=["download-section"]):
gr.Markdown("### ๐Ÿ“ฅ ์ž‘ํ’ˆ ๋‹ค์šด๋กœ๋“œ")
with gr.Row():
format_select = gr.Radio(
choices=["DOCX", "TXT"],
value="DOCX" if DOCX_AVAILABLE else "TXT",
label="ํŒŒ์ผ ํ˜•์‹"
)
download_btn = gr.Button("โฌ‡๏ธ ๋‹ค์šด๋กœ๋“œ", variant="secondary")
download_file = gr.File(
label="๋‹ค์šด๋กœ๋“œ ํŒŒ์ผ",
visible=False
)
# ์ˆจ๊ฒจ์ง„ ์ƒํƒœ
novel_text_state = gr.State("")
# ์˜ˆ์ œ
with gr.Row():
gr.Examples(
examples=[
["์‹ค์งํ•œ ์ค‘๋…„ ๋‚จ์„ฑ์ด ๋„์‹œ๋ฅผ ๋– ๋‚˜ ์‹œ๊ณจ์—์„œ ์ƒˆ๋กœ์šด ์‚ถ์˜ ์˜๋ฏธ๋ฅผ ์ฐพ์•„๊ฐ€๋Š” ๊ณผ์ •"],
["์ „์Ÿ ํŠธ๋ผ์šฐ๋งˆ๋ฅผ ๊ฐ€์ง„ ์˜์‚ฌ๊ฐ€ ๊ตญ๊ฒฝ์—†๋Š”์˜์‚ฌํšŒ ํ™œ๋™์„ ํ†ตํ•ด ์น˜์œ ๋˜๋Š” ์ด์•ผ๊ธฐ"],
["AI์—๊ฒŒ ์ผ์ž๋ฆฌ๋ฅผ ๋นผ์•—๊ธด ๋ฒˆ์—ญ๊ฐ€๊ฐ€ ๊ณ ์ „ ๋ฌธํ•™ ํ•„์‚ฌ๋ฅผ ํ†ตํ•ด ์–ธ์–ด์˜ ๋ณธ์งˆ์„ ์žฌ๋ฐœ๊ฒฌํ•˜๋Š” ์—ฌ์ •"],
["A daughter discovering her mother's hidden past through old letters"],
["An architect losing sight who learns to design through touch and sound"],
["์žฌ๊ฐœ๋ฐœ๋กœ ์‚ฌ๋ผ์งˆ ๋™๋„ค ์„œ์ ์„ ์ง€ํ‚ค๋ ค๋Š” ์ฃผ๋ฏผ๋“ค์˜ ์—ฐ๋Œ€"],
["๊ธฐ์–ต์„ ์žƒ์–ด๊ฐ€๋Š” ๋…ธ๊ต์ˆ˜์™€ ๊ทธ์˜ ๋งˆ์ง€๋ง‰ ์ œ์ž์˜ ์ผ ๋…„"]
],
inputs=query_input,
label="๐Ÿ’ก ์ฃผ์ œ ์˜ˆ์‹œ"
)
# ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ
def refresh_sessions():
try:
sessions = get_active_sessions("Korean")
return gr.update(choices=sessions)
except Exception as e:
logger.error(f"์„ธ์…˜ ์ƒˆ๋กœ๊ณ ์นจ ์˜ค๋ฅ˜: {str(e)}")
return gr.update(choices=[])
def handle_auto_recover(language):
    """Recover the most recent session for *language*.

    Thin adapter around ``auto_recover_session`` so the click wiring gets the
    (session_id, status_message) pair as two separate outputs.
    """
    recovered_id, status_message = auto_recover_session(language)
    return recovered_id, status_message
# --- Event wiring ---
# Main generation pipeline: streams stage progress and the growing novel text.
submit_btn.click(
fn=process_query,
inputs=[query_input, language_select, current_session_id],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
# Mirror the rendered novel into hidden state so downloads use the raw text.
novel_output.change(
fn=lambda x: x,
inputs=[novel_output],
outputs=[novel_text_state]
)
# Resume: first extract the session id from the dropdown label
# (assumed "<id>...<title>" format — TODO confirm against get_active_sessions),
# then continue generation from that session.
resume_btn.click(
fn=lambda x: x.split("...")[0] if x and "..." in x else x,
inputs=[session_dropdown],
outputs=[current_session_id]
).then(
fn=resume_session,
inputs=[current_session_id, language_select],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
# Recover the most recent unfinished session, then resume it immediately.
auto_recover_btn.click(
fn=handle_auto_recover,
inputs=[language_select],
outputs=[current_session_id, status_text]
).then(
fn=resume_session,
inputs=[current_session_id, language_select],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
# Reload the saved-session dropdown on demand.
refresh_btn.click(
fn=refresh_sessions,
outputs=[session_dropdown]
)
# Reset all displays and forget the current session id.
clear_btn.click(
fn=lambda: ("", "", "๐Ÿ”„ ์ค€๋น„ ์™„๋ฃŒ", "", None),
outputs=[stages_display, novel_output, status_text, novel_text_state, current_session_id]
)
def handle_download(format_type, language, session_id, novel_text):
    """Export the novel and reveal the file widget, or keep it hidden.

    Produces a hidden ``gr.File`` update when there is no active session,
    no text, or when ``download_novel`` fails to produce a file path.
    """
    if session_id and novel_text:
        exported_path = download_novel(novel_text, format_type, language, session_id)
        if exported_path:
            return gr.update(value=exported_path, visible=True)
    return gr.update(visible=False)
# Export the finished novel in the chosen format and expose the file widget.
download_btn.click(
fn=handle_download,
inputs=[format_select, language_select, current_session_id, novel_text_state],
outputs=[download_file]
)
# Populate the session dropdown once when the UI first loads.
interface.load(
fn=refresh_sessions,
outputs=[session_dropdown]
)
return interface
# ๋ฉ”์ธ ์‹คํ–‰
if __name__ == "__main__":
logger.info("AI ๋‹จ์ผ ์ž‘๊ฐ€ ์žฅํŽธ์†Œ์„ค ์‹œ์Šคํ…œ v2.0 ์‹œ์ž‘...")
logger.info("=" * 60)
# ํ™˜๊ฒฝ ํ™•์ธ
logger.info(f"API ์—”๋“œํฌ์ธํŠธ: {API_URL}")
logger.info(f"๋ชฉํ‘œ ๋ถ„๋Ÿ‰: {TARGET_WORDS:,}๋‹จ์–ด")
logger.info(f"ํŒŒํŠธ๋‹น ์ตœ์†Œ ๋ถ„๋Ÿ‰: {MIN_WORDS_PER_PART:,}๋‹จ์–ด")
logger.info("์‹œ์Šคํ…œ ํŠน์ง•: ๋‹จ์ผ ์ž‘๊ฐ€ + ํŒŒํŠธ๋ณ„ ์ฆ‰์‹œ ๋น„ํ‰")
if BRAVE_SEARCH_API_KEY:
logger.info("์›น ๊ฒ€์ƒ‰์ด ํ™œ์„ฑํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
else:
logger.warning("์›น ๊ฒ€์ƒ‰์ด ๋น„ํ™œ์„ฑํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
if DOCX_AVAILABLE:
logger.info("DOCX ๋‚ด๋ณด๋‚ด๊ธฐ๊ฐ€ ํ™œ์„ฑํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
else:
logger.warning("DOCX ๋‚ด๋ณด๋‚ด๊ธฐ๊ฐ€ ๋น„ํ™œ์„ฑํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
logger.info("=" * 60)
# ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์ดˆ๊ธฐํ™”
logger.info("๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์ดˆ๊ธฐํ™” ์ค‘...")
NovelDatabase.init_db()
logger.info("๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์ดˆ๊ธฐํ™” ์™„๋ฃŒ.")
# ์ธํ„ฐํŽ˜์ด์Šค ์ƒ์„ฑ ๋ฐ ์‹คํ–‰
interface = create_interface()
interface.launch(
server_name="0.0.0.0",
server_port=7860,
share=False,
debug=True
)