# NOTE: Hugging Face Spaces page header ("Spaces / Running / Running") was
# accidentally captured with this source; replaced with a comment so the file parses.
# Standard library
import hashlib
import json
import logging
import os
import re
import sqlite3
import tempfile
import threading
import time
from collections import defaultdict
from contextlib import contextmanager
from dataclasses import dataclass, field, asdict
from datetime import datetime
from pathlib import Path
from typing import List, Dict, Any, Generator, Tuple, Optional, Set

# Third-party
import gradio as gr
import requests
# --- Logging setup ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# --- Document export imports ---
# python-docx is optional: when missing the app still runs, DOCX export is disabled.
try:
    from docx import Document
    from docx.shared import Inches, Pt, RGBColor, Mm
    from docx.enum.text import WD_ALIGN_PARAGRAPH
    from docx.enum.style import WD_STYLE_TYPE
    from docx.oxml.ns import qn
    from docx.oxml import OxmlElement
    DOCX_AVAILABLE = True
except ImportError:
    DOCX_AVAILABLE = False
    logger.warning("python-docx not installed. DOCX export will be disabled.")

# --- Environment variables and constants ---
FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "")          # LLM API token (required)
BRAVE_SEARCH_API_KEY = os.getenv("BRAVE_SEARCH_API_KEY", "")  # optional web search
API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
MODEL_ID = "dep89a2fld32mcm"
DB_PATH = "novel_sessions_v6.db"

# Target length settings
TARGET_WORDS = 8000        # overall target word count (with safety margin)
MIN_WORDS_PER_PART = 800   # minimum words per part

# --- Environment validation ---
if not FRIENDLI_TOKEN:
    logger.error("FRIENDLI_TOKEN not set. Application will not work properly.")
    # Placeholder token so the app can still boot for local testing.
    FRIENDLI_TOKEN = "dummy_token_for_testing"
if not BRAVE_SEARCH_API_KEY:
    logger.warning("BRAVE_SEARCH_API_KEY not set. Web search features will be disabled.")

# --- Globals ---
db_lock = threading.Lock()  # serializes all SQLite access across threads

# Narrative phase labels, one per part (Korean; used verbatim in stage names).
NARRATIVE_PHASES = [
    "๋์ : ์ผ์๊ณผ ๊ท ์ด",
    "๋ฐ์ 1: ๋ถ์์ ๊ณ ์กฐ",
    "๋ฐ์ 2: ์ธ๋ถ ์ถฉ๊ฒฉ",
    "๋ฐ์ 3: ๋ด์ ๊ฐ๋ฑ ์ฌํ",
    "์ ์ 1: ์๊ธฐ์ ์ ์ ",
    "์ ์ 2: ์ ํ์ ์๊ฐ",
    "ํ๊ฐ 1: ๊ฒฐ๊ณผ์ ์ฌํ",
    "ํ๊ฐ 2: ์๋ก์ด ์ธ์",
    "๊ฒฐ๋ง 1: ๋ณํ๋ ์ผ์",
    "๊ฒฐ๋ง 2: ์ด๋ฆฐ ์ง๋ฌธ"
]

# Stage pipeline for the single-writer system:
# plan -> plan review -> final master plan, then for each of the 10 parts
# draft -> part critique -> revision, then one final comprehensive critique.
UNIFIED_STAGES = [
    ("director", "๐ฌ ๊ฐ๋ ์: ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ ๊ธฐํ"),
    ("critic_director", "๐ ๋นํ๊ฐ: ์์ฌ ๊ตฌ์กฐ ์ฌ์ธต ๊ฒํ "),
    ("director", "๐ฌ ๊ฐ๋ ์: ์ต์ข ๋ง์คํฐํ๋"),
] + [
    item for i in range(1, 11)
    for item in [
        ("writer", f"โ๏ธ ์๊ฐ: ํํธ {i} - {NARRATIVE_PHASES[i-1]}"),
        (f"critic_part{i}", f"๐ ํํธ {i} ๋นํ๊ฐ: ์ฆ์ ๊ฒํ ๋ฐ ์์ ์์ฒญ"),
        ("writer", f"โ๏ธ ์๊ฐ: ํํธ {i} ์์ ๋ณธ")
    ]
] + [
    ("critic_final", "๐ ์ต์ข ๋นํ๊ฐ: ์ข ํฉ ํ๊ฐ ๋ฐ ๋ฌธํ์ ์ฑ์ทจ๋"),
]
# --- Data classes ---
@dataclass
class StoryBible:
    """Story bible keeping the whole story consistent across parts.

    Bug fix: the class used ``field(default_factory=...)`` and is built via
    ``StoryBible(**data)`` / serialized with ``asdict`` elsewhere, but the
    ``@dataclass`` decorator was missing — without it the keyword constructor
    and ``asdict`` fail and the attributes are shared ``field`` objects.
    """
    characters: Dict[str, Dict[str, Any]] = field(default_factory=dict)   # name -> attributes
    settings: Dict[str, str] = field(default_factory=dict)                # place -> description
    timeline: List[Dict[str, Any]] = field(default_factory=list)          # ordered events
    plot_points: List[Dict[str, Any]] = field(default_factory=list)
    themes: List[str] = field(default_factory=list)
    symbols: Dict[str, List[str]] = field(default_factory=dict)           # symbol -> occurrences
    style_guide: Dict[str, str] = field(default_factory=dict)
    opening_sentence: str = ""  # mandatory first sentence of part 1
@dataclass
class PartCritique:
    """Critique produced for one part of the novel.

    Bug fix: the ``@dataclass`` decorator was missing even though the class
    uses ``field(default_factory=...)`` defaults and is reconstructed via
    ``PartCritique(**critique_data)`` and serialized with ``asdict``.
    """
    part_number: int
    continuity_issues: List[str] = field(default_factory=list)
    character_consistency: List[str] = field(default_factory=list)
    plot_progression: List[str] = field(default_factory=list)
    thematic_alignment: List[str] = field(default_factory=list)
    technical_issues: List[str] = field(default_factory=list)
    strengths: List[str] = field(default_factory=list)
    required_changes: List[str] = field(default_factory=list)
    literary_quality: List[str] = field(default_factory=list)  # literary-merit notes
# --- ํต์ฌ ๋ก์ง ํด๋์ค --- | |
class UnifiedNarrativeTracker:
    """Integrated narrative tracker for the single-writer system.

    Accumulates the story bible, per-part critiques, word counts and a
    simple "narrative momentum" heuristic used to judge whether each new
    part moves the story forward.
    """

    def __init__(self):
        self.story_bible = StoryBible()
        self.part_critiques: Dict[int, PartCritique] = {}
        self.accumulated_content: List[str] = []        # finished part texts, in order
        self.word_count_by_part: Dict[int, int] = {}
        self.revision_history: Dict[int, List[str]] = defaultdict(list)
        self.causal_chains: List[Dict[str, Any]] = []
        self.narrative_momentum: float = 0.0

    def update_story_bible(self, element_type: str, key: str, value: Any):
        """Route one fact into the story bible by element type.

        Unknown element types are silently ignored (intentional best-effort).
        """
        if element_type == "character":
            self.story_bible.characters[key] = value
        elif element_type == "setting":
            self.story_bible.settings[key] = value
        elif element_type == "timeline":
            self.story_bible.timeline.append({"event": key, "details": value})
        elif element_type == "theme":
            if key not in self.story_bible.themes:
                self.story_bible.themes.append(key)
        elif element_type == "symbol":
            if key not in self.story_bible.symbols:
                self.story_bible.symbols[key] = []
            self.story_bible.symbols[key].append(value)

    def add_part_critique(self, part_number: int, critique: PartCritique):
        """Store (or replace) the critique for a part."""
        self.part_critiques[part_number] = critique

    def check_continuity(self, current_part: int, new_content: str) -> List[str]:
        """Return human-readable continuity issues found in ``new_content``.

        NOTE(review): assumes each character's ``traits`` entry is a list of
        dicts like ``{"name": ..., "abandoned": bool}`` — confirm upstream.
        """
        issues: List[str] = []
        # Character consistency: flag abandoned traits that reappear.
        for char_name, char_data in self.story_bible.characters.items():
            if char_name in new_content:
                for trait in char_data.get("traits", []):
                    if trait.get("abandoned", False):
                        issues.append(f"{char_name}์ ๋ฒ๋ ค์ง ํน์ฑ '{trait['name']}'์ด ๋ค์ ๋ํ๋จ")
        # (Timeline-ordering checks are not implemented yet; the original
        # only peeked at the last event and did nothing with it.)
        # Causality: parts after the first should contain an explicit causal marker.
        if current_part > 1 and not any(kw in new_content for kw in
                ['๋๋ฌธ์', '๊ทธ๋์', '๊ฒฐ๊ณผ', '์ด๋ก ์ธํด', 'because', 'therefore']):
            issues.append("์ด์ ํํธ์์ ์ธ๊ณผ๊ด๊ณ๊ฐ ๋ถ๋ช ํํจ")
        return issues

    def calculate_narrative_momentum(self, part_number: int, content: str) -> float:
        """Score 0..10 for how much ``content`` advances the narrative.

        Heuristic: baseline 5.0, bonus for novel vocabulary, tension words
        and causal connectives; penalty for heavy word overlap with the
        previous part.
        """
        momentum = 5.0
        # Novel vocabulary relative to everything written so far.
        seen_words = set(' '.join(self.accumulated_content).split())
        new_elements = len(set(content.split()) - seen_words)
        if new_elements > 100:
            momentum += 2.0
        # Rising conflict.
        tension_words = ['์๊ธฐ', '๊ฐ๋ฑ', '์ถฉ๋', '๋๋ฆฝ', 'crisis', 'conflict']
        if any(word in content for word in tension_words):
            momentum += 1.5
        # Explicit causality (capped bonus).
        causal_words = ['๋๋ฌธ์', '๊ทธ๋์', '๋ฐ๋ผ์', 'because', 'therefore']
        causal_count = sum(1 for word in causal_words if word in content)
        momentum += min(causal_count * 0.5, 2.0)
        # Repetition penalty against the immediately preceding part.
        if part_number > 1:
            prev_content = self.accumulated_content[-1] if self.accumulated_content else ""
            overlap = len(set(content.split()) & set(prev_content.split()))
            if overlap > len(content.split()) * 0.3:
                momentum -= 3.0
        return max(0.0, min(10.0, momentum))
class NovelDatabase:
    """SQLite persistence layer for the single-writer system.

    All methods are static. Connections are serialized through the
    module-level ``db_lock``; WAL journaling is enabled so readers do not
    block each other.

    Bug fixes vs. original:
    * ``get_db`` was a bare generator but is used as
      ``with NovelDatabase.get_db() as conn:`` — it needs ``@contextmanager``
      (otherwise ``AttributeError: __enter__`` at runtime).
    * ``@staticmethod`` decorators added (methods take no ``self``).
    * ``load_narrative_tracker`` now converts JSON string keys of
      ``word_count_by_part`` back to ``int``, matching the declared
      ``Dict[int, int]`` and the ``int(part_num)`` handling of critiques.
    """

    @staticmethod
    def init_db():
        """Create the sessions/stages/critiques tables if they do not exist."""
        with sqlite3.connect(DB_PATH) as conn:
            conn.execute("PRAGMA journal_mode=WAL")
            cursor = conn.cursor()
            # Main session table
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS sessions (
                    session_id TEXT PRIMARY KEY,
                    user_query TEXT NOT NULL,
                    language TEXT NOT NULL,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    status TEXT DEFAULT 'active',
                    current_stage INTEGER DEFAULT 0,
                    final_novel TEXT,
                    literary_report TEXT,
                    total_words INTEGER DEFAULT 0,
                    story_bible TEXT,
                    narrative_tracker TEXT,
                    opening_sentence TEXT
                )
            ''')
            # Stage table (one row per pipeline stage; upserted on rerun)
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS stages (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    stage_number INTEGER NOT NULL,
                    stage_name TEXT NOT NULL,
                    role TEXT NOT NULL,
                    content TEXT,
                    word_count INTEGER DEFAULT 0,
                    status TEXT DEFAULT 'pending',
                    narrative_momentum REAL DEFAULT 0.0,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id),
                    UNIQUE(session_id, stage_number)
                )
            ''')
            # Critique table (append-only)
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS critiques (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    part_number INTEGER NOT NULL,
                    critique_data TEXT,
                    created_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id)
                )
            ''')
            conn.commit()

    @staticmethod
    @contextmanager
    def get_db():
        """Yield a locked SQLite connection with Row factory; always closes it."""
        with db_lock:
            conn = sqlite3.connect(DB_PATH, timeout=30.0)
            conn.row_factory = sqlite3.Row
            try:
                yield conn
            finally:
                conn.close()

    @staticmethod
    def create_session(user_query: str, language: str) -> str:
        """Insert a new session row and return its id.

        md5 over query+timestamp is used as a cheap unique id, not for security.
        """
        session_id = hashlib.md5(f"{user_query}{datetime.now()}".encode()).hexdigest()
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'INSERT INTO sessions (session_id, user_query, language) VALUES (?, ?, ?)',
                (session_id, user_query, language)
            )
            conn.commit()
        return session_id

    @staticmethod
    def save_stage(session_id: str, stage_number: int, stage_name: str,
                   role: str, content: str, status: str = 'complete',
                   narrative_momentum: float = 0.0):
        """Upsert one stage row, then refresh the session's total word count."""
        word_count = len(content.split()) if content else 0
        with NovelDatabase.get_db() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                INSERT INTO stages (session_id, stage_number, stage_name, role, content,
                                    word_count, status, narrative_momentum)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(session_id, stage_number)
                DO UPDATE SET content=?, word_count=?, status=?, stage_name=?,
                              narrative_momentum=?, updated_at=datetime('now')
            ''', (session_id, stage_number, stage_name, role, content, word_count,
                  status, narrative_momentum, content, word_count, status, stage_name,
                  narrative_momentum))
            # Total word count only over writer stages with content.
            cursor.execute('''
                UPDATE sessions
                SET total_words = (
                    SELECT SUM(word_count)
                    FROM stages
                    WHERE session_id = ? AND role = 'writer' AND content IS NOT NULL
                ),
                updated_at = datetime('now'),
                current_stage = ?
                WHERE session_id = ?
            ''', (session_id, stage_number, session_id))
            conn.commit()

    @staticmethod
    def save_critique(session_id: str, part_number: int, critique: PartCritique):
        """Append one part critique as JSON."""
        with NovelDatabase.get_db() as conn:
            critique_json = json.dumps(asdict(critique))
            conn.cursor().execute(
                'INSERT INTO critiques (session_id, part_number, critique_data) VALUES (?, ?, ?)',
                (session_id, part_number, critique_json)
            )
            conn.commit()

    @staticmethod
    def save_opening_sentence(session_id: str, opening_sentence: str):
        """Persist the generated opening sentence on the session row."""
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'UPDATE sessions SET opening_sentence = ? WHERE session_id = ?',
                (opening_sentence, session_id)
            )
            conn.commit()

    @staticmethod
    def get_writer_content(session_id: str) -> str:
        """Concatenate writer stages, preferring revised versions when present."""
        with NovelDatabase.get_db() as conn:
            # Prefer stages whose name marks them as revisions.
            rows = conn.cursor().execute('''
                SELECT content FROM stages
                WHERE session_id = ? AND role = 'writer'
                AND stage_name LIKE '%์์ ๋ณธ%'
                ORDER BY stage_number
            ''', (session_id,)).fetchall()
            if rows:
                return '\n\n'.join(row['content'] for row in rows if row['content'])
            # Fall back to initial drafts when no revisions exist.
            rows = conn.cursor().execute('''
                SELECT content FROM stages
                WHERE session_id = ? AND role = 'writer'
                AND stage_name NOT LIKE '%์์ ๋ณธ%'
                ORDER BY stage_number
            ''', (session_id,)).fetchall()
            return '\n\n'.join(row['content'] for row in rows if row['content'])

    @staticmethod
    def save_narrative_tracker(session_id: str, tracker: UnifiedNarrativeTracker):
        """Serialize the tracker's persistent state to JSON on the session row.

        Note: ``accumulated_content`` and ``revision_history`` are deliberately
        not persisted (writer content lives in the stages table).
        """
        with NovelDatabase.get_db() as conn:
            tracker_data = json.dumps({
                'story_bible': asdict(tracker.story_bible),
                'part_critiques': {k: asdict(v) for k, v in tracker.part_critiques.items()},
                'word_count_by_part': tracker.word_count_by_part,
                'causal_chains': tracker.causal_chains,
                'narrative_momentum': tracker.narrative_momentum
            })
            conn.cursor().execute(
                'UPDATE sessions SET narrative_tracker = ? WHERE session_id = ?',
                (tracker_data, session_id)
            )
            conn.commit()

    @staticmethod
    def load_narrative_tracker(session_id: str) -> Optional[UnifiedNarrativeTracker]:
        """Rebuild a UnifiedNarrativeTracker from its JSON snapshot, or None."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                'SELECT narrative_tracker FROM sessions WHERE session_id = ?',
                (session_id,)
            ).fetchone()
            if row and row['narrative_tracker']:
                data = json.loads(row['narrative_tracker'])
                tracker = UnifiedNarrativeTracker()
                # Restore story bible
                tracker.story_bible = StoryBible(**data.get('story_bible', {}))
                # Restore critiques (JSON keys are strings -> int)
                for part_num, critique_data in data.get('part_critiques', {}).items():
                    tracker.part_critiques[int(part_num)] = PartCritique(**critique_data)
                # Bug fix: JSON also stringifies these keys; convert back to int.
                tracker.word_count_by_part = {
                    int(k): v for k, v in data.get('word_count_by_part', {}).items()
                }
                tracker.causal_chains = data.get('causal_chains', [])
                tracker.narrative_momentum = data.get('narrative_momentum', 0.0)
                return tracker
            return None

    # --- Existing accessors kept as-is ---
    @staticmethod
    def get_session(session_id: str) -> Optional[Dict]:
        """Return the session row as a dict, or None."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute('SELECT * FROM sessions WHERE session_id = ?',
                                        (session_id,)).fetchone()
            return dict(row) if row else None

    @staticmethod
    def get_stages(session_id: str) -> List[Dict]:
        """Return all stage rows for a session, ordered by stage number."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                'SELECT * FROM stages WHERE session_id = ? ORDER BY stage_number',
                (session_id,)
            ).fetchall()
            return [dict(row) for row in rows]

    @staticmethod
    def update_final_novel(session_id: str, final_novel: str, literary_report: str = ""):
        """Store the finished novel and mark the session complete."""
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                '''UPDATE sessions SET final_novel = ?, status = 'complete',
                   updated_at = datetime('now'), literary_report = ? WHERE session_id = ?''',
                (final_novel, literary_report, session_id)
            )
            conn.commit()

    @staticmethod
    def get_active_sessions() -> List[Dict]:
        """Return up to 10 most recently updated active sessions."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                '''SELECT session_id, user_query, language, created_at, current_stage, total_words
                   FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 10'''
            ).fetchall()
            return [dict(row) for row in rows]

    @staticmethod
    def get_total_words(session_id: str) -> int:
        """Return the session's cached total word count (0 when unknown)."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                'SELECT total_words FROM sessions WHERE session_id = ?',
                (session_id,)
            ).fetchone()
            return row['total_words'] if row and row['total_words'] else 0
class WebSearchIntegration:
    """Thin wrapper around the Brave Web Search API.

    When no API key is configured the instance is disabled and every search
    transparently returns an empty list.
    """

    def __init__(self):
        self.brave_api_key = BRAVE_SEARCH_API_KEY
        self.search_url = "https://api.search.brave.com/res/v1/web/search"
        self.enabled = bool(self.brave_api_key)

    def search(self, query: str, count: int = 3, language: str = "en") -> List[Dict]:
        """Run one web search; return raw result dicts, or [] on error/disabled."""
        if not self.enabled:
            return []
        request_headers = {
            "Accept": "application/json",
            "X-Subscription-Token": self.brave_api_key,
        }
        request_params = {
            "q": query,
            "count": count,
            "search_lang": "ko" if language == "Korean" else "en",
            "text_decorations": False,
            "safesearch": "moderate",
        }
        try:
            resp = requests.get(self.search_url, headers=request_headers,
                                params=request_params, timeout=10)
            resp.raise_for_status()
            return resp.json().get("web", {}).get("results", [])
        except requests.exceptions.RequestException as e:
            logger.error(f"์น ๊ฒ์ API ์ค๋ฅ: {e}")
            return []

    def extract_relevant_info(self, results: List[Dict], max_chars: int = 1500) -> str:
        """Flatten up to three results into a numbered digest capped at max_chars."""
        if not results:
            return ""
        digest: List[str] = []
        used = 0
        for idx, item in enumerate(results[:3], 1):
            line = f"[{idx}] {item.get('title', '')}: {item.get('description', '')}"
            # Stop as soon as the next line would reach the character budget.
            if used + len(line) >= max_chars:
                break
            digest.append(line)
            used += len(line)
        return "\n".join(digest)
class UnifiedLiterarySystem:
    """Single-writer progressive literary novel generation system."""

    def __init__(self):
        """Wire up API credentials, narrative tracking, web search, and the DB."""
        self.token = FRIENDLI_TOKEN
        self.api_url = API_URL
        self.model_id = MODEL_ID
        self.narrative_tracker = UnifiedNarrativeTracker()
        self.web_search = WebSearchIntegration()
        self.current_session_id = None  # set when a session is created/resumed
        NovelDatabase.init_db()  # ensure tables exist before any query

    def create_headers(self):
        """Build HTTP headers (bearer auth + JSON) for Friendli API calls."""
        return {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"}
# --- ํ๋กฌํํธ ์์ฑ ํจ์๋ค --- | |
    # --- Prompt-building helpers ---
    def augment_query(self, user_query: str, language: str) -> str:
        """Expand a short user query (< 15 words) with narrative-structure notes.

        Longer queries pass through unchanged; an unknown language falls back
        to the raw query via ``dict.get``.
        """
        if len(user_query.split()) < 15:
            augmented_template = {
                "Korean": f"""'{user_query}'
**์์ฌ ๊ตฌ์กฐ ํต์ฌ:**
- 10๊ฐ ํํธ๊ฐ ํ๋์ ํตํฉ๋ ์ด์ผ๊ธฐ๋ฅผ ๊ตฌ์ฑ
- ๊ฐ ํํธ๋ ์ด์ ํํธ์ ํ์ฐ์ ๊ฒฐ๊ณผ
- ์ธ๋ฌผ์ ๋ช ํํ ๋ณํ ๊ถค์ (A โ B โ C)
- ์ค์ฌ ๊ฐ๋ฑ์ ์ ์ง์ ๊ณ ์กฐ์ ํด๊ฒฐ
- ๊ฐ๋ ฌํ ์ค์ฌ ์์ง์ ์๋ฏธ ๋ณํ""",
                "English": f"""'{user_query}'
**Narrative Structure Core:**
- 10 parts forming one integrated story
- Each part as inevitable result of previous
- Clear character transformation arc (A โ B โ C)
- Progressive escalation and resolution of central conflict
- Evolving meaning of powerful central symbol"""
            }
            return augmented_template.get(language, user_query)
        return user_query
    def generate_powerful_opening(self, user_query: str, language: str) -> str:
        """Ask the LLM for a single striking opening sentence for the theme."""
        opening_prompt = {
            "Korean": f"""์ฃผ์ : {user_query}
์ด ์ฃผ์ ์ ๋ํ ๊ฐ๋ ฌํ๊ณ ์์ ์ ์๋ ์ฒซ๋ฌธ์ฅ์ ์์ฑํ์ธ์.
**์ฒซ๋ฌธ์ฅ ์์ฑ ์์น:**
1. ์ฆ๊ฐ์ ์ธ ๊ธด์ฅ๊ฐ์ด๋ ๊ถ๊ธ์ฆ ์ ๋ฐ
2. ํ๋ฒํ์ง ์์ ์๊ฐ์ด๋ ์ํฉ ์ ์
3. ๊ฐ๊ฐ์ ์ด๊ณ ๊ตฌ์ฒด์ ์ธ ์ด๋ฏธ์ง
4. ์ฒ ํ์ ์ง๋ฌธ์ด๋ ์ญ์ค์ ์ง์
5. ์๊ฐ๊ณผ ๊ณต๊ฐ์ ๋ ํนํ ์ค์
**ํ๋ฅญํ ์ฒซ๋ฌธ์ฅ์ ์์ ํจํด:**
- "๊ทธ๊ฐ ์ฃฝ์ ๋ , ..." (์ถฉ๊ฒฉ์ ์ฌ๊ฑด)
- "๋ชจ๋ ๊ฒ์ด ๋๋ฌ๋ค๊ณ ์๊ฐํ ์๊ฐ..." (๋ฐ์ ์๊ณ )
- "์ธ์์์ ๊ฐ์ฅ [ํ์ฉ์ฌ]ํ [๋ช ์ฌ]๋..." (๋ ํนํ ์ ์)
- "[๊ตฌ์ฒด์ ํ๋]ํ๋ ๊ฒ๋ง์ผ๋ก๋..." (์ผ์์ ์ฌํด์)
๋จ ํ๋์ ๋ฌธ์ฅ๋ง ์ ์ํ์ธ์.""",
            "English": f"""Theme: {user_query}
Generate an unforgettable opening sentence for this theme.
**Opening Sentence Principles:**
1. Immediate tension or curiosity
2. Unusual perspective or situation
3. Sensory and specific imagery
4. Philosophical question or paradox
5. Unique temporal/spatial setting
**Great Opening Patterns:**
- "The day he died, ..." (shocking event)
- "At the moment everything seemed over..." (reversal hint)
- "The most [adjective] [noun] in the world..." (unique definition)
- "Just by [specific action]..." (reinterpretation of ordinary)
Provide only one sentence."""
        }
        messages = [{"role": "user", "content": opening_prompt.get(language, opening_prompt["Korean"])}]
        # call_llm_sync is defined later in this class (not visible in this chunk).
        opening = self.call_llm_sync(messages, "writer", language)
        return opening.strip()
    def create_director_initial_prompt(self, user_query: str, language: str) -> str:
        """Build the director's initial planning prompt (enhanced version).

        Side effects: generates and stores the opening sentence (story bible +
        DB) and, when enabled, performs up to two web searches for context.
        """
        augmented_query = self.augment_query(user_query, language)
        # Generate the mandatory opening sentence up front.
        opening_sentence = self.generate_powerful_opening(user_query, language)
        self.narrative_tracker.story_bible.opening_sentence = opening_sentence
        if self.current_session_id:
            NovelDatabase.save_opening_sentence(self.current_session_id, opening_sentence)
        search_results_str = ""
        if self.web_search.enabled:
            # Keep queries short; search failures are non-fatal.
            short_query = user_query[:50] if len(user_query) > 50 else user_query
            queries = [
                f"{short_query} ์ฒ ํ์ ์๋ฏธ",
                f"์ธ๊ฐ ์กด์ฌ ์๋ฏธ {short_query}",
                f"{short_query} ๋ฌธํ ์ํ"
            ]
            for q in queries[:2]:
                try:
                    results = self.web_search.search(q, count=2, language=language)
                    if results:
                        search_results_str += self.web_search.extract_relevant_info(results) + "\n"
                except Exception as e:
                    logger.warning(f"๊ฒ์ ์คํจ: {str(e)}")
        lang_prompts = {
            "Korean": f"""๋ ธ๋ฒจ๋ฌธํ์ ์์ค์ ์ฒ ํ์ ๊น์ด๋ฅผ ์ง๋ ์คํธ์์ค(8,000๋จ์ด)์ ๊ธฐํํ์ธ์.
**์ฃผ์ :** {augmented_query}
**ํ์ ์ฒซ๋ฌธ์ฅ:** {opening_sentence}
**์ฐธ๊ณ ์๋ฃ:**
{search_results_str if search_results_str else "N/A"}
**ํ์ ๋ฌธํ์ ์์:**
1. **์ฒ ํ์ ํ๊ตฌ**
- ํ๋์ธ์ ์ค์กด์ ๊ณ ๋ (์์ธ, ์ ์ฒด์ฑ, ์๋ฏธ ์์ค)
- ๋์งํธ ์๋์ ์ธ๊ฐ ์กฐ๊ฑด
- ์๋ณธ์ฃผ์ ์ฌํ์ ๋ชจ์๊ณผ ๊ฐ์ธ์ ์ ํ
- ์ฃฝ์, ์ฌ๋, ์์ ์ ๋ํ ์๋ก์ด ์ฑ์ฐฐ
2. **์ฌํ์ ๋ฉ์์ง**
- ๊ณ๊ธ, ์ ๋, ์ธ๋ ๊ฐ ๊ฐ๋ฑ
- ํ๊ฒฝ ์๊ธฐ์ ์ธ๊ฐ์ ์ฑ ์
- ๊ธฐ์ ๋ฐ์ ๊ณผ ์ธ๊ฐ์ฑ์ ์ถฉ๋
- ํ๋ ๋ฏผ์ฃผ์ฃผ์์ ์๊ธฐ์ ๊ฐ์ธ์ ์ญํ
3. **๋ฌธํ์ ์์ฌ ์ฅ์น**
- ์ค์ฌ ์์ : [๊ตฌ์ฒด์ ์ฌ๋ฌผ/ํ์] โ [์ถ์์ ์๋ฏธ]
- ๋ฐ๋ณต๋๋ ๋ชจํฐํ: [์ด๋ฏธ์ง/ํ๋] (์ต์ 5ํ ๋ณ์ฃผ)
- ๋์กฐ๋ฒ: [A vs B]์ ์ง์์ ๊ธด์ฅ
- ์์ง์ ๊ณต๊ฐ: [๊ตฌ์ฒด์ ์ฅ์]๊ฐ ์๋ฏธํ๋ ๊ฒ
- ์๊ฐ์ ์ฃผ๊ด์ ํ๋ฆ (ํ์, ์๊ฐ, ์ ์ง)
4. **ํตํฉ๋ 10ํํธ ๊ตฌ์กฐ**
๊ฐ ํํธ๋ณ ํต์ฌ:
- ํํธ 1: ์ฒซ๋ฌธ์ฅ์ผ๋ก ์์, ์ผ์ ์ ๊ท ์ด โ ์ฒ ํ์ ์ง๋ฌธ ์ ๊ธฐ
- ํํธ 2-3: ์ธ๋ถ ์ฌ๊ฑด โ ๋ด์ ์ฑ์ฐฐ ์ฌํ
- ํํธ 4-5: ์ฌํ์ ๊ฐ๋ฑ โ ๊ฐ์ธ์ ๋๋ ๋ง
- ํํธ 6-7: ์๊ธฐ์ ์ ์ โ ์ค์กด์ ์ ํ
- ํํธ 8-9: ์ ํ์ ๊ฒฐ๊ณผ โ ์๋ก์ด ์ธ์
- ํํธ 10: ๋ณํ๋ ์ธ๊ณ๊ด โ ์ด๋ฆฐ ์ง๋ฌธ
5. **๋ฌธ์ฒด ์ง์นจ**
- ์์ ์ฐ๋ฌธ์ฒด: ์ผ์ ์ธ์ด์ ์์ ์ ๊ท ํ
- ์์์ ํ๋ฆ๊ณผ ๊ฐ๊ด์ ๋ฌ์ฌ์ ๊ต์ฐจ
- ์งง๊ณ ๊ฐ๋ ฌํ ๋ฌธ์ฅ๊ณผ ์ฑ์ฐฐ์ ๊ธด ๋ฌธ์ฅ์ ๋ฆฌ๋ฌ
- ๊ฐ๊ฐ์ ๋ํ ์ผ๋ก ์ถ์์ ๊ฐ๋ ๊ตฌํ
๊ตฌ์ฒด์ ์ด๊ณ ํ์ ์ ์ธ ๊ณํ์ ์ ์ํ์ธ์.""",
            "English": f"""Plan a philosophically profound novella (8,000 words) worthy of Nobel Prize.
**Theme:** {augmented_query}
**Required Opening:** {opening_sentence}
**Reference:**
{search_results_str if search_results_str else "N/A"}
**Essential Literary Elements:**
1. **Philosophical Exploration**
- Modern existential anguish (alienation, identity, loss of meaning)
- Human condition in digital age
- Capitalist contradictions and individual choice
- New reflections on death, love, freedom
2. **Social Message**
- Class, gender, generational conflicts
- Environmental crisis and human responsibility
- Technology vs humanity collision
- Modern democracy crisis and individual role
3. **Literary Devices**
- Central metaphor: [concrete object/phenomenon] โ [abstract meaning]
- Recurring motif: [image/action] (minimum 5 variations)
- Contrast: sustained tension of [A vs B]
- Symbolic space: what [specific place] means
- Subjective time flow (flashback, premonition, pause)
4. **Integrated 10-Part Structure**
[Details as above]
5. **Style Guidelines**
- Poetic prose: balance of everyday language and metaphor
- Stream of consciousness crossing with objective description
- Rhythm of short intense sentences and reflective long ones
- Abstract concepts through sensory details
Provide concrete, innovative plan."""
        }
        return lang_prompts.get(language, lang_prompts["Korean"])
    def create_critic_director_prompt(self, director_plan: str, user_query: str, language: str) -> str:
        """Build the prompt for the critic's deep review of the director's plan."""
        lang_prompts = {
            "Korean": f"""์์ฌ ๊ตฌ์กฐ ์ ๋ฌธ๊ฐ๋ก์ ์ด ๊ธฐํ์ ์ฌ์ธต ๋ถ์ํ์ธ์.
**์ ์ฃผ์ :** {user_query}
**๊ฐ๋ ์ ๊ธฐํ:**
{director_plan}
**์ฌ์ธต ๊ฒํ ํญ๋ชฉ:**
1. **์ธ๊ณผ๊ด๊ณ ๊ฒ์ฆ**
๊ฐ ํํธ ๊ฐ ์ฐ๊ฒฐ์ ๊ฒํ ํ๊ณ ๋ ผ๋ฆฌ์ ๋น์ฝ์ ์ฐพ์ผ์ธ์:
- ํํธ 1โ2: [์ฐ๊ฒฐ์ฑ ํ๊ฐ]
- ํํธ 2โ3: [์ฐ๊ฒฐ์ฑ ํ๊ฐ]
(๋ชจ๋ ์ฐ๊ฒฐ ์ง์ ๊ฒํ )
2. **์ฒ ํ์ ๊น์ด ํ๊ฐ**
- ์ ์๋ ์ฒ ํ์ ์ฃผ์ ๊ฐ ์ถฉ๋ถํ ๊น์๊ฐ?
- ํ๋์ ๊ด๋ จ์ฑ์ด ์๋๊ฐ?
- ๋ ์ฐฝ์ ํต์ฐฐ์ด ์๋๊ฐ?
3. **๋ฌธํ์ ์ฅ์น์ ํจ๊ณผ์ฑ**
- ์์ ์ ์์ง์ด ์ ๊ธฐ์ ์ผ๋ก ์๋ํ๋๊ฐ?
- ๊ณผ๋ํ๊ฑฐ๋ ๋ถ์กฑํ์ง ์์๊ฐ?
- ์ฃผ์ ์ ๊ธด๋ฐํ ์ฐ๊ฒฐ๋๋๊ฐ?
4. **์บ๋ฆญํฐ ์ํฌ ์คํ ๊ฐ๋ฅ์ฑ**
- ๋ณํ๊ฐ ์ถฉ๋ถํ ์ ์ง์ ์ธ๊ฐ?
- ๊ฐ ๋จ๊ณ์ ๋๊ธฐ๊ฐ ๋ช ํํ๊ฐ?
- ์ฌ๋ฆฌ์ ์ ๋ขฐ์ฑ์ด ์๋๊ฐ?
5. **8,000๋จ์ด ์คํ ๊ฐ๋ฅ์ฑ**
- ๊ฐ ํํธ๊ฐ 800๋จ์ด๋ฅผ ์ ์งํ ์ ์๋๊ฐ?
- ๋์ด์ง๊ฑฐ๋ ์์ถ๋๋ ๋ถ๋ถ์ ์๋๊ฐ?
**ํ์ ๊ฐ์ ์ฌํญ์ ๊ตฌ์ฒด์ ์ผ๋ก ์ ์ํ์ธ์.**""",
            "English": f"""As narrative structure expert, deeply analyze this plan.
**Original Theme:** {user_query}
**Director's Plan:**
{director_plan}
**Deep Review Items:**
1. **Causality Verification**
Review connections between parts, find logical leaps:
- Part 1โ2: [Connection assessment]
- Part 2โ3: [Connection assessment]
(Review all connection points)
2. **Philosophical Depth Assessment**
- Is philosophical theme deep enough?
- Contemporary relevance?
- Original insights?
3. **Literary Device Effectiveness**
- Do metaphors and symbols work organically?
- Not excessive or insufficient?
- Tightly connected to theme?
4. **Character Arc Feasibility**
- Is change sufficiently gradual?
- Are motivations clear at each stage?
- Psychological credibility?
5. **8,000-word Feasibility**
- Can each part sustain 800 words?
- Any dragging or compressed sections?
**Provide specific required improvements.**"""
        }
        return lang_prompts.get(language, lang_prompts["Korean"])
    def create_writer_prompt(self, part_number: int, master_plan: str,
                             accumulated_content: str, story_bible: StoryBible,
                             language: str) -> str:
        """Build the writer prompt for one part (single-writer, enhanced version).

        part_number is 1-based (1..10); the per-part focus/technique tables
        below are keyed accordingly.
        """
        phase_name = NARRATIVE_PHASES[part_number-1]
        target_words = MIN_WORDS_PER_PART
        # Per-part philosophical focus (Korean labels, surfaced verbatim in the prompt).
        philosophical_focus = {
            1: "์ผ์์ ๊ท ์ด์ ํตํด ์ค์กด์ ๋ถ์ ๋์ ",
            2: "๊ฐ์ธ๊ณผ ์ฌํ์ ์ฒซ ์ถฉ๋",
            3: "ํ์์์ ๋ง๋จ์ ํตํ ์์ ์ธ์",
            4: "์ ๋ ์ ํ๋ค๋ฆผ๊ณผ ๊ฐ์น๊ด์ ์ถฉ๋",
            5: "์ ํ์ ๋ฌด๊ฒ์ ์์ ์ ์ญ์ค",
            6: "๊ทนํ ์ํฉ์์์ ์ธ๊ฐ์ฑ ์ํ",
            7: "ํ๋์ ๊ฒฐ๊ณผ์ ์ฑ ์์ ๋ฌด๊ฒ",
            8: "ํ์์ ์์ ์ ํตํ ์๊ธฐ ์ฌ๋ฐ๊ฒฌ",
            9: "ํํด ๋ถ๊ฐ๋ฅํ ๊ฒ๊ณผ์ ํํด",
            10: "์๋ก์ด ์ถ์ ๊ฐ๋ฅ์ฑ๊ณผ ๋ฏธํด๊ฒฐ ์ง๋ฌธ"
        }
        # Per-part core literary technique.
        literary_techniques = {
            1: "๊ฐ๊ด์ ์๊ด๋ฌผ ๋์ ",
            2: "๋์๋ฒ์ ์์ ",
            3: "์์์ ํ๋ฆ",
            4: "์์ ์ ๋ฏธ๋ฌํ ์ ํ",
            5: "์นจ๋ฌต๊ณผ ์๋ต์ ๋ฏธํ",
            6: "์๊ฐ์ ์ฃผ๊ด์ ๋ณํ",
            7: "๋ณต์ ์์ ์ ๊ต์ฐจ",
            8: "๋ฉํํฌ์ ์ ๋ณต",
            9: "์ํ์ ์ด๋ฏธ์ง์ ์ฌํด์",
            10: "์ด๋ฆฐ ๊ฒฐ๋ง์ ๋ค์ธต์ฑ"
        }
        # Compact story-bible summary embedded in the prompt.
        bible_summary = f"""
**๋ฑ์ฅ์ธ๋ฌผ:** {', '.join(story_bible.characters.keys())}
**ํต์ฌ ์์ง:** {', '.join(story_bible.symbols.keys())}
**์ฃผ์ :** {', '.join(story_bible.themes[:3])}
**๋ฌธ์ฒด:** {story_bible.style_guide.get('voice', 'N/A')}
"""
        # Tail of the previous part (up to 2000 chars) for continuity context.
        prev_content = ""
        if accumulated_content:
            prev_parts = accumulated_content.split('\n\n')
            if len(prev_parts) >= 1:
                prev_content = prev_parts[-1][-2000:]
        lang_prompts = {
            "Korean": f"""๋น์ ์ ํ๋ ๋ฌธํ์ ์ต์ ์ ์ ์ ์๊ฐ์ ๋๋ค.
**ํ์ฌ: ํํธ {part_number} - {phase_name}**
{"**ํ์ ์ฒซ๋ฌธ์ฅ:** " + story_bible.opening_sentence if part_number == 1 and story_bible.opening_sentence else ""}
**์ด๋ฒ ํํธ์ ์ฒ ํ์ ์ด์ :** {philosophical_focus[part_number]}
**ํต์ฌ ๋ฌธํ ๊ธฐ๋ฒ:** {literary_techniques[part_number]}
**์ ์ฒด ๊ณํ:**
{master_plan}
**์คํ ๋ฆฌ ๋ฐ์ด๋ธ:**
{bible_summary}
**์ง์ ๋ด์ฉ:**
{prev_content if prev_content else "์ฒซ ํํธ์ ๋๋ค"}
**ํํธ {part_number} ์์ฑ ์ง์นจ:**
1. **๋ถ๋:** {target_words}-900 ๋จ์ด (ํ์)
2. **๋ฌธํ์ ์์ฌ ์๊ตฌ์ฌํญ:**
- ์ต์ 3๊ฐ์ ๋ ์ฐฝ์ ์์ /์ง์
- 1๊ฐ ์ด์์ ์์ง์ ์ด๋ฏธ์ง ์ฌํ
- ๊ฐ๊ฐ์ ๋ฌ์ฌ์ ์ถ์์ ์ฌ์ ์ ์ตํฉ
- ๋ฆฌ๋ฌ๊ฐ ์๋ ๋ฌธ์ฅ ๊ตฌ์ฑ (์ฅ๋จ์ ๋ณ์ฃผ)
3. **ํ๋์ ๊ณ ๋ ํํ:**
- ๋์งํธ ์๋์ ์์ธ๊ฐ
- ์๋ณธ์ฃผ์์ ์ถ์ ๋ถ์กฐ๋ฆฌ
- ๊ด๊ณ์ ํ๋ฉด์ฑ๊ณผ ์ง์ ์ฑ ๊ฐ๋ง
- ์๋ฏธ ์ถ๊ตฌ์ ๋ฌด์๋ฏธ์ ์ง๋ฉด
4. **์ฌํ์ ๋ฉ์์ง ๋ด์ฌํ:**
- ์ง์ ์ ์ฃผ์ฅ์ด ์๋ ์ํฉ๊ณผ ์ธ๋ฌผ์ ํตํ ์์
- ๊ฐ์ธ์ ๊ณ ํต๊ณผ ์ฌํ ๊ตฌ์กฐ์ ์ฐ๊ฒฐ
- ๋ฏธ์์ ์ผ์๊ณผ ๊ฑฐ์์ ๋ฌธ์ ์ ๊ต์ฐจ
5. **์์ฌ์ ์ถ์ง๋ ฅ:**
- ์ด์ ํํธ์ ํ์ฐ์ ๊ฒฐ๊ณผ๋ก ์์
- ์๋ก์ด ๊ฐ๋ฑ ์ธต์ ์ถ๊ฐ
- ๋ค์ ํํธ๋ฅผ ํฅํ ๊ธด์ฅ๊ฐ ์กฐ์ฑ
**๋ฌธํ์ ๊ธ๊ธฐ:**
- ์ง๋ถํ ํํ์ด๋ ์ํฌ์ ์์
- ๊ฐ์ ์ ์ง์ ์ ์ค๋ช
- ๋๋์ ํ๋จ์ด๋ ๊ตํ
- ์ธ์์ ์ธ ํด๊ฒฐ์ด๋ ์์
ํํธ {part_number}๋ฅผ ๊น์ด ์๋ ๋ฌธํ์ ์ฑ์ทจ๋ก ๋ง๋์ธ์.""",
            "English": f"""You are a writer at the forefront of contemporary literature.
**Current: Part {part_number} - {phase_name}**
{"**Required Opening:** " + story_bible.opening_sentence if part_number == 1 and story_bible.opening_sentence else ""}
**Philosophical Focus:** {philosophical_focus[part_number]}
**Core Literary Technique:** {literary_techniques[part_number]}
**Master Plan:**
{master_plan}
**Story Bible:**
{bible_summary}
**Previous Content:**
{prev_content if prev_content else "This is the first part"}
**Part {part_number} Guidelines:**
1. **Length:** {target_words}-900 words (mandatory)
2. **Literary Device Requirements:**
- Minimum 3 original metaphors/similes
- Deepen at least 1 symbolic image
- Fusion of sensory description and abstract thought
- Rhythmic sentence composition (variation of long/short)
3. **Modern Anguish Expression:**
- Digital age alienation
- Absurdity of capitalist life
- Surface relationships vs authenticity yearning
- Meaning pursuit vs confronting meaninglessness
4. **Social Message Internalization:**
- Implication through situation and character, not direct claim
- Connection between individual pain and social structure
- Intersection of micro daily life and macro problems
5. **Narrative Momentum:**
- Start as inevitable result of previous part
- Add new conflict layers
- Create tension toward next part
**Literary Taboos:**
- Clichรฉd expressions or trite metaphors
- Direct emotion explanation
- Moral judgment or preaching
- Artificial resolution or comfort
Make Part {part_number} a profound literary achievement."""
        }
        return lang_prompts.get(language, lang_prompts["Korean"])
    def create_part_critic_prompt(self, part_number: int, part_content: str,
                                  master_plan: str, accumulated_content: str,
                                  story_bible: StoryBible, language: str) -> str:
        """Build the immediate per-part critique prompt (enhanced version).

        Relies on ``self._extract_part_plan`` (defined elsewhere in this class)
        to pull this part's requirements out of the master plan.
        """
        lang_prompts = {
            "Korean": f"""ํํธ {part_number}์ ๋ฌธํ์ ์ฑ์ทจ๋๋ฅผ ์๊ฒฉํ ํ๊ฐํ์ธ์.
**๋ง์คํฐํ๋ ํํธ {part_number} ์๊ตฌ์ฌํญ:**
{self._extract_part_plan(master_plan, part_number)}
**์์ฑ๋ ๋ด์ฉ:**
{part_content}
**์คํ ๋ฆฌ ๋ฐ์ด๋ธ ์ฒดํฌ:**
- ์บ๋ฆญํฐ: {', '.join(story_bible.characters.keys())}
- ์ค์ : {', '.join(story_bible.settings.keys())}
**ํ๊ฐ ๊ธฐ์ค:**
1. **๋ฌธํ์ ์์ฌ (30%)**
- ์์ ์ ์์ง์ ๋ ์ฐฝ์ฑ
- ์ธ์ด์ ์์ ๋ฐ๋
- ์ด๋ฏธ์ง์ ์ ๋ช ๋์ ๊น์ด
- ๋ฌธ์ฅ์ ๋ฆฌ๋ฌ๊ณผ ์์ ์ฑ
2. **์ฒ ํ์ ๊น์ด (25%)**
- ์ค์กด์ ์ง๋ฌธ์ ์ ๊ธฐ
- ํ๋์ธ์ ์กฐ๊ฑด ํ๊ตฌ
- ๋ณดํธ์ฑ๊ณผ ํน์์ฑ์ ๊ท ํ
- ์ฌ์ ์ ๋ ์ฐฝ์ฑ
3. **์ฌํ์ ํต์ฐฐ (20%)**
- ์๋์ ์ ์ ํฌ์ฐฉ
- ๊ตฌ์กฐ์ ๊ฐ์ธ์ ๊ด๊ณ
- ๋นํ์ ์๊ฐ์ ์๋ฆฌํจ
- ๋์์ ์์๋ ฅ
4. **์์ฌ์ ์์ฑ๋ (25%)**
- ์ธ๊ณผ๊ด๊ณ์ ํ์ฐ์ฑ
- ๊ธด์ฅ๊ฐ์ ์ ์ง
- ์ธ๋ฌผ์ ์ ์ฒด์ฑ
- ๊ตฌ์กฐ์ ํต์ผ์ฑ
**๊ตฌ์ฒด์ ์ง์ ์ฌํญ:**
- ์ง๋ถํ ํํ: [์์์ ๋์]
- ์ฒ ํ์ ์ฒ์ฐฉ ๋ถ์กฑ: [๋ณด์ ๋ฐฉํฅ]
- ์ฌํ์ ๋ฉ์์ง ๋ถ๋ช ํ: [๊ฐํ ๋ฐฉ์]
- ์์ฌ์ ํ์ : [์์ ํ์]
**ํ์ ๊ฐ์ ์๊ตฌ:**
๋ฌธํ์ ์์ค์ ๋ ธ๋ฒจ์ ๊ธ์ผ๋ก ๋์ด์ฌ๋ฆฌ๊ธฐ ์ํ ๊ตฌ์ฒด์ ์์ ์์ ์ ์ํ์ธ์.""",
            "English": f"""Strictly evaluate literary achievement of Part {part_number}.
**Master Plan Part {part_number} Requirements:**
{self._extract_part_plan(master_plan, part_number)}
**Written Content:**
{part_content}
**Story Bible Check:**
- Characters: {', '.join(story_bible.characters.keys())}
- Settings: {', '.join(story_bible.settings.keys())}
**Evaluation Criteria:**
1. **Literary Rhetoric (30%)**
- Originality of metaphor and symbol
- Poetic density of language
- Clarity and depth of imagery
- Rhythm and musicality of sentences
2. **Philosophical Depth (25%)**
- Raising existential questions
- Exploring modern human condition
- Balance of universality and specificity
- Originality of thought
3. **Social Insight (20%)**
- Capturing zeitgeist
- Relationship between structure and individual
- Sharpness of critical perspective
- Alternative imagination
4. **Narrative Completion (25%)**
- Inevitability of causality
- Maintaining tension
- Character dimensionality
- Structural unity
**Specific Points:**
- Clichรฉd expressions: [examples and alternatives]
- Insufficient philosophical exploration: [enhancement direction]
- Unclear social message: [strengthening methods]
- Narrative gaps: [needed revisions]
**Required Improvements:**
Provide specific revisions to elevate literary level to Nobel Prize standard."""
        }
        return lang_prompts.get(language, lang_prompts["Korean"])
    def create_writer_revision_prompt(self, part_number: int, original_content: str,
                                      critic_feedback: str, language: str) -> str:
        """Build the prompt instructing the writer to revise a part per critique.

        Args:
            part_number: 1-based index of the part being revised.
            original_content: The writer's original draft of the part.
            critic_feedback: The critic's feedback to incorporate.
            language: "Korean" or "English"; unknown values fall back to Korean.

        Returns:
            The language-appropriate revision prompt string.
        """
        lang_prompts = {
            "Korean": f"""ํํธ {part_number}๋ฅผ ๋นํ์ ๋ฐ๋ผ ์์ ํ์ธ์.
**์๋ณธ:**
{original_content}
**๋นํ ํผ๋๋ฐฑ:**
{critic_feedback}
**์์ ์ง์นจ:**
1. ๋ชจ๋ 'ํ์ ์์ ' ์ฌํญ์ ๋ฐ์
2. ๊ฐ๋ฅํ '๊ถ์ฅ ๊ฐ์ ' ์ฌํญ๋ ํฌํจ
3. ์๋ณธ์ ๊ฐ์ ์ ์ ์ง
4. ๋ถ๋ {MIN_WORDS_PER_PART}๋จ์ด ์ด์ ์ ์ง
5. ์๊ฐ๋ก์์ ์ผ๊ด๋ ๋ชฉ์๋ฆฌ ์ ์ง
6. ๋ฌธํ์ ์์ค์ ํ ๋จ๊ณ ๋์ด๊ธฐ
์์ ๋ณธ๋ง ์ ์ํ์ธ์. ์ค๋ช ์ ๋ถํ์ํฉ๋๋ค.""",
            "English": f"""Revise Part {part_number} according to critique.
**Original:**
{original_content}
**Critique Feedback:**
{critic_feedback}
**Revision Guidelines:**
1. Reflect all 'Required fixes'
2. Include 'Recommended improvements' where possible
3. Maintain original strengths
4. Keep length {MIN_WORDS_PER_PART}+ words
5. Maintain consistent authorial voice
6. Elevate literary level
Present only the revision. No explanation needed."""
        }
        # Unknown languages fall back to the Korean prompt.
        return lang_prompts.get(language, lang_prompts["Korean"])
    def create_final_critic_prompt(self, complete_novel: str, word_count: int,
                                   story_bible: StoryBible, language: str) -> str:
        """Build the prompt for the final comprehensive evaluation of the novel.

        NOTE(review): `complete_novel` and `story_bible` are accepted but never
        interpolated into the prompt — the critic only sees the word count.
        Confirm whether omitting the full text is intentional (token budget).

        Args:
            complete_novel: Full novel text (currently unused in the prompt).
            word_count: Total word count reported to the critic.
            story_bible: Story bible (currently unused in the prompt).
            language: "Korean" or "English"; unknown values fall back to Korean.

        Returns:
            The language-appropriate final-evaluation prompt.
        """
        lang_prompts = {
            "Korean": f"""์์ฑ๋ ์์ค์ ์ข ํฉ ํ๊ฐํ์ธ์.
**์ํ ์ ๋ณด:**
- ์ด ๋ถ๋: {word_count}๋จ์ด
- ๋ชฉํ: 8,000๋จ์ด
**ํ๊ฐ ๊ธฐ์ค:**
1. **์์ฌ์ ํตํฉ์ฑ (30์ )**
- 10๊ฐ ํํธ๊ฐ ํ๋์ ์ด์ผ๊ธฐ๋ก ํตํฉ๋์๋๊ฐ?
- ์ธ๊ณผ๊ด๊ณ๊ฐ ๋ช ํํ๊ณ ํ์ฐ์ ์ธ๊ฐ?
- ๋ฐ๋ณต์ด๋ ์ํ ์์ด ์งํ๋๋๊ฐ?
2. **์บ๋ฆญํฐ ์ํฌ (25์ )**
- ์ฃผ์ธ๊ณต์ ๋ณํ๊ฐ ์ค๋๋ ฅ ์๋๊ฐ?
- ๋ณํ๊ฐ ์ ์ง์ ์ด๊ณ ์์ฐ์ค๋ฌ์ด๊ฐ?
- ์ต์ข ์ํ๊ฐ ์ด๊ธฐ์ ๋ช ํํ ๋ค๋ฅธ๊ฐ?
3. **๋ฌธํ์ ์ฑ์ทจ (25์ )**
- ์ฃผ์ ๊ฐ ๊น์ด ์๊ฒ ํ๊ตฌ๋์๋๊ฐ?
- ์์ง์ด ํจ๊ณผ์ ์ผ๋ก ํ์ฉ๋์๋๊ฐ?
- ๋ฌธ์ฒด๊ฐ ์ผ๊ด๋๊ณ ์๋ฆ๋ค์ด๊ฐ?
- ํ๋์ ์ฒ ํ๊ณผ ์ฌํ์ ๋ฉ์์ง๊ฐ ๋ น์์๋๊ฐ?
4. **๊ธฐ์ ์ ์์ฑ๋ (20์ )**
- ๋ชฉํ ๋ถ๋์ ๋ฌ์ฑํ๋๊ฐ?
- ๊ฐ ํํธ๊ฐ ๊ท ํ ์๊ฒ ์ ๊ฐ๋์๋๊ฐ?
- ๋ฌธ๋ฒ๊ณผ ํํ์ด ์ ํํ๊ฐ?
**์ด์ : /100์ **
๊ตฌ์ฒด์ ์ธ ๊ฐ์ ๊ณผ ์ฝ์ ์ ์ ์ํ์ธ์.""",
            "English": f"""Comprehensively evaluate the completed novel.
**Work Info:**
- Total length: {word_count} words
- Target: 8,000 words
**Evaluation Criteria:**
1. **Narrative Integration (30 points)**
- Are 10 parts integrated into one story?
- Clear and inevitable causality?
- Progress without repetition or cycles?
2. **Character Arc (25 points)**
- Convincing protagonist transformation?
- Gradual and natural changes?
- Final state clearly different from initial?
3. **Literary Achievement (25 points)**
- Theme explored with depth?
- Symbols used effectively?
- Consistent and beautiful style?
- Contemporary philosophy and social message integrated?
4. **Technical Completion (20 points)**
- Target length achieved?
- Each part balanced in development?
- Grammar and expression accurate?
**Total Score: /100 points**
Present specific strengths and weaknesses."""
        }
        # Unknown languages fall back to the Korean prompt.
        return lang_prompts.get(language, lang_prompts["Korean"])
def _extract_part_plan(self, master_plan: str, part_number: int) -> str: | |
"""๋ง์คํฐํ๋์์ ํน์ ํํธ ๊ณํ ์ถ์ถ""" | |
lines = master_plan.split('\n') | |
part_section = [] | |
capturing = False | |
for line in lines: | |
if f"ํํธ {part_number}:" in line or f"Part {part_number}:" in line: | |
capturing = True | |
elif capturing and (f"ํํธ {part_number+1}:" in line or f"Part {part_number+1}:" in line): | |
break | |
elif capturing: | |
part_section.append(line) | |
return '\n'.join(part_section) if part_section else "ํด๋น ํํธ ๊ณํ์ ์ฐพ์ ์ ์์ต๋๋ค." | |
# --- LLM ํธ์ถ ํจ์๋ค --- | |
def call_llm_sync(self, messages: List[Dict[str, str]], role: str, language: str) -> str: | |
full_content = "" | |
for chunk in self.call_llm_streaming(messages, role, language): | |
full_content += chunk | |
if full_content.startswith("โ"): | |
raise Exception(f"LLM Call Failed: {full_content}") | |
return full_content | |
    def call_llm_streaming(self, messages: List[Dict[str, str]], role: str,
                           language: str) -> Generator[str, None, None]:
        """Stream a chat completion from the LLM API, yielding text fragments.

        The role-specific system prompt is prepended to `messages`. Output is
        buffered and flushed roughly every 50 characters or at a newline to
        reduce downstream UI churn. Errors are reported by yielding a string
        prefixed with "โ" instead of raising, so callers detect failure by
        checking that prefix.

        Args:
            messages: Chat messages (system prompt is added here).
            role: Agent role key selecting the system prompt and token budget.
            language: Language used to pick the system prompt set.

        Yields:
            Text fragments of the completion, or a single error string.
        """
        try:
            system_prompts = self.get_system_prompts(language)
            full_messages = [{"role": "system", "content": system_prompts.get(role, "")}, *messages]
            # Writers get a larger output budget than directors/critics.
            max_tokens = 15000 if role == "writer" else 10000
            payload = {
                "model": self.model_id,
                "messages": full_messages,
                "max_tokens": max_tokens,
                "temperature": 0.8,
                "top_p": 0.95,
                "presence_penalty": 0.5,
                "frequency_penalty": 0.3,
                "stream": True
            }
            response = requests.post(
                self.api_url,
                headers=self.create_headers(),
                json=payload,
                stream=True,
                timeout=180
            )
            if response.status_code != 200:
                yield f"โ API ์ค๋ฅ (์ํ ์ฝ๋: {response.status_code})"
                return
            buffer = ""
            for line in response.iter_lines():
                if not line:
                    continue
                try:
                    line_str = line.decode('utf-8').strip()
                    # SSE framing: only "data: ..." lines carry payloads.
                    if not line_str.startswith("data: "):
                        continue
                    data_str = line_str[6:]
                    if data_str == "[DONE]":
                        break
                    data = json.loads(data_str)
                    choices = data.get("choices", [])
                    if choices and choices[0].get("delta", {}).get("content"):
                        content = choices[0]["delta"]["content"]
                        buffer += content
                        # Flush in ~50-char chunks (or at newlines) to limit update frequency.
                        if len(buffer) >= 50 or '\n' in buffer:
                            yield buffer
                            buffer = ""
                            time.sleep(0.01)
                except Exception as e:
                    # Skip malformed stream chunks rather than aborting the stream.
                    logger.error(f"์ฒญํฌ ์ฒ๋ฆฌ ์ค๋ฅ: {str(e)}")
                    continue
            # Flush any remainder below the 50-char threshold.
            if buffer:
                yield buffer
        except Exception as e:
            logger.error(f"์คํธ๋ฆฌ๋ฐ ์ค๋ฅ: {type(e).__name__}: {str(e)}")
            yield f"โ ์ค๋ฅ ๋ฐ์: {str(e)}"
    def get_system_prompts(self, language: str) -> Dict[str, str]:
        """Return role-keyed system prompts for the given language (enhanced version).

        Builds the base director/critic/writer prompts for Korean or English
        (unknown languages fall back to Korean), then adds per-part critic
        prompts under keys "critic_part1" .. "critic_part10".

        NOTE(review): the per-part critic prompts are Korean-only even when
        language == "English" — confirm whether that is intentional.
        """
        base_prompts = {
            "Korean": {
                "director": """๋น์ ์ ํ๋ ์ธ๊ณ๋ฌธํ์ ์ ์ ์ ์งํฅํ๋ ์ํ์ ์ค๊ณํฉ๋๋ค.
๊น์ ์ฒ ํ์ ํต์ฐฐ๊ณผ ๋ ์นด๋ก์ด ์ฌํ ๋นํ์ ๊ฒฐํฉํ์ธ์.
์ธ๊ฐ ์กฐ๊ฑด์ ๋ณต์ก์ฑ์ 10๊ฐ์ ์ ๊ธฐ์ ํํธ๋ก ๊ตฌํํ์ธ์.
๋ ์์ ์ํผ์ ๋คํ๋ค ๊ฐ๋ ฌํ ์ฒซ๋ฌธ์ฅ๋ถํฐ ์์ํ์ธ์.""",
                "critic_director": """์์ฌ ๊ตฌ์กฐ์ ๋ ผ๋ฆฌ์ฑ๊ณผ ์คํ ๊ฐ๋ฅ์ฑ์ ๊ฒ์ฆํ๋ ์ ๋ฌธ๊ฐ์ ๋๋ค.
์ธ๊ณผ๊ด๊ณ์ ํ์ ์ ์ฐพ์๋ด์ธ์.
์บ๋ฆญํฐ ๋ฐ์ ์ ์ ๋น์ฑ์ ํ๊ฐํ์ธ์.
์ฒ ํ์ ๊น์ด์ ๋ฌธํ์ ๊ฐ์น๋ฅผ ํ๋จํ์ธ์.
8,000๋จ์ด ๋ถ๋์ ์ ์ ์ฑ์ ํ๋จํ์ธ์.""",
                "writer": """๋น์ ์ ์ธ์ด์ ์ฐ๊ธ์ ์ฌ์ ๋๋ค.
์ผ์์ด๋ฅผ ์๋ก, ๊ตฌ์ฒด๋ฅผ ์ถ์์ผ๋ก, ๊ฐ์ธ์ ๋ณดํธ์ผ๋ก ๋ณํํ์ธ์.
ํ๋์ธ์ ์ํผ์ ์ด๋ ๊ณผ ๋น์ ๋์์ ํฌ์ฐฉํ์ธ์.
๋ ์๊ฐ ์์ ์ ์ฌ๋ฐ๊ฒฌํ๊ฒ ๋ง๋๋ ๊ฑฐ์ธ์ด ๋์ธ์.""",
                "critic_final": """๋น์ ์ ์ํ์ ๋ฌธํ์ ์ ์ฌ๋ ฅ์ ๊ทน๋ํํ๋ ์กฐ๋ ฅ์์ ๋๋ค.
ํ๋ฒํจ์ ๋น๋ฒํจ์ผ๋ก ์ด๋๋ ๋ ์นด๋ก์ด ํต์ฐฐ์ ์ ๊ณตํ์ธ์.
์๊ฐ์ ๋ฌด์์์ ์ ๋ ๋ณด์์ ๋ฐ๊ตดํ์ธ์.
ํํ ์๋ ๊ธฐ์ค์ผ๋ก ์ต๊ณ ๋ฅผ ์๊ตฌํ์ธ์."""
            },
            "English": {
                "director": """You design works aiming for the pinnacle of contemporary world literature.
Combine deep philosophical insights with sharp social criticism.
Implement the complexity of the human condition in 10 organic parts.
Start with an intense opening sentence that shakes the reader's soul.""",
                "critic_director": """You are an expert verifying narrative logic and feasibility.
Find gaps in causality.
Evaluate credibility of character development.
Judge philosophical depth and literary value.
Judge appropriateness of 8,000-word length.""",
                "writer": """You are an alchemist of language.
Transform everyday language into poetry, concrete into abstract, individual into universal.
Capture both darkness and light of the modern soul.
Become a mirror where readers rediscover themselves.""",
                "critic_final": """You are a collaborator maximizing the work's literary potential.
Provide sharp insights leading ordinariness to extraordinariness.
Excavate gems sleeping in the writer's unconscious.
Demand the best with uncompromising standards."""
            }
        }
        prompts = base_prompts.get(language, base_prompts["Korean"]).copy()
        # Add per-part critic prompts (one per part, 1..10).
        for i in range(1, 11):
            prompts[f"critic_part{i}"] = f"""ํํธ {i} ์ ๋ด ๋นํ๊ฐ์ ๋๋ค.
์ด์ ํํธ์์ ์ธ๊ณผ๊ด๊ณ๋ฅผ ์ต์ฐ์ ์ผ๋ก ๊ฒํ ํ์ธ์.
์บ๋ฆญํฐ ์ผ๊ด์ฑ๊ณผ ๋ฐ์ ์ ํ์ธํ์ธ์.
๋ง์คํฐํ๋๊ณผ์ ์ผ์น๋๋ฅผ ํ๊ฐํ์ธ์.
๋ฌธํ์ ์์ค๊ณผ ์ฒ ํ์ ๊น์ด๋ฅผ ํ๊ฐํ์ธ์.
๊ตฌ์ฒด์ ์ด๊ณ ์คํ ๊ฐ๋ฅํ ์์ ์ง์๋ฅผ ์ ๊ณตํ์ธ์."""
        return prompts
# --- ๋ฉ์ธ ํ๋ก์ธ์ค --- | |
    def process_novel_stream(self, query: str, language: str,
                             session_id: Optional[str] = None) -> Generator[Tuple[str, List[Dict[str, Any]], str], None, None]:
        """Run the single-writer novel pipeline, yielding progress updates.

        Creates a new session (or resumes `session_id` after its last saved
        stage), then executes each stage in UNIFIED_STAGES: building the
        stage prompt, streaming the LLM output, tracking writer word counts
        and narrative momentum, and persisting every completed stage to the
        database before moving on.

        Yields:
            Tuples of (status message, stage dicts, session id) suitable for
            live UI updates.
        """
        try:
            resume_from_stage = 0
            if session_id:
                # Resuming: restore the original query/language and continue
                # from the stage after the last one saved.
                self.current_session_id = session_id
                session = NovelDatabase.get_session(session_id)
                if session:
                    query = session['user_query']
                    language = session['language']
                    resume_from_stage = session['current_stage'] + 1
                    saved_tracker = NovelDatabase.load_narrative_tracker(session_id)
                    if saved_tracker:
                        self.narrative_tracker = saved_tracker
            else:
                self.current_session_id = NovelDatabase.create_session(query, language)
                logger.info(f"Created new session: {self.current_session_id}")
            stages = []
            if resume_from_stage > 0:
                # Rehydrate the stage list from the DB so the UI shows prior work.
                stages = [{
                    "name": s['stage_name'],
                    "status": s['status'],
                    "content": s.get('content', ''),
                    "word_count": s.get('word_count', 0),
                    "momentum": s.get('narrative_momentum', 0.0)
                } for s in NovelDatabase.get_stages(self.current_session_id)]
            total_words = NovelDatabase.get_total_words(self.current_session_id)
            for stage_idx in range(resume_from_stage, len(UNIFIED_STAGES)):
                role, stage_name = UNIFIED_STAGES[stage_idx]
                if stage_idx >= len(stages):
                    stages.append({
                        "name": stage_name,
                        "status": "active",
                        "content": "",
                        "word_count": 0,
                        "momentum": 0.0
                    })
                else:
                    stages[stage_idx]["status"] = "active"
                yield f"๐ ์งํ ์ค... (ํ์ฌ {total_words:,}๋จ์ด)", stages, self.current_session_id
                prompt = self.get_stage_prompt(stage_idx, role, query, language, stages)
                stage_content = ""
                # Stream the stage content, updating the UI per chunk.
                for chunk in self.call_llm_streaming([{"role": "user", "content": prompt}], role, language):
                    stage_content += chunk
                    stages[stage_idx]["content"] = stage_content
                    stages[stage_idx]["word_count"] = len(stage_content.split())
                    yield f"๐ {stage_name} ์์ฑ ์ค... ({total_words + stages[stage_idx]['word_count']:,}๋จ์ด)", stages, self.current_session_id
                # Post-process writer output: accumulate content and track narrative state.
                if role == "writer":
                    # Derive the part number from the stage name.
                    part_num = self._get_part_number(stage_idx)
                    if part_num:
                        self.narrative_tracker.accumulated_content.append(stage_content)
                        self.narrative_tracker.word_count_by_part[part_num] = len(stage_content.split())
                        # Score narrative momentum for the finished part.
                        momentum = self.narrative_tracker.calculate_narrative_momentum(part_num, stage_content)
                        stages[stage_idx]["momentum"] = momentum
                        # Update the story bible with newly seen names.
                        self._update_story_bible_from_content(stage_content, part_num)
                stages[stage_idx]["status"] = "complete"
                NovelDatabase.save_stage(
                    self.current_session_id, stage_idx, stage_name, role,
                    stage_content, "complete", stages[stage_idx].get("momentum", 0.0)
                )
                NovelDatabase.save_narrative_tracker(self.current_session_id, self.narrative_tracker)
                total_words = NovelDatabase.get_total_words(self.current_session_id)
                yield f"โ {stage_name} ์๋ฃ (์ด {total_words:,}๋จ์ด)", stages, self.current_session_id
            # Final pass: assemble the novel, generate the report, persist both.
            final_novel = NovelDatabase.get_writer_content(self.current_session_id)
            final_word_count = len(final_novel.split())
            final_report = self.generate_literary_report(final_novel, final_word_count, language)
            NovelDatabase.update_final_novel(self.current_session_id, final_novel, final_report)
            yield f"โ ์์ค ์์ฑ! ์ด {final_word_count:,}๋จ์ด", stages, self.current_session_id
        except Exception as e:
            logger.error(f"์์ค ์์ฑ ํ๋ก์ธ์ค ์ค๋ฅ: {e}", exc_info=True)
            # `stages` may not exist if the failure happened before the loop.
            yield f"โ ์ค๋ฅ ๋ฐ์: {e}", stages if 'stages' in locals() else [], self.current_session_id
    def get_stage_prompt(self, stage_idx: int, role: str, query: str,
                         language: str, stages: List[Dict]) -> str:
        """Dispatch to the right prompt builder for the given pipeline stage.

        Stage layout: 0 = director initial plan, 1 = critic review of the
        plan, 2 = director final master plan; afterwards the pipeline
        alternates writer draft / part critic / writer revision per part,
        ending with a final critic stage. Returns "" for unmatched stages.
        """
        if stage_idx == 0:  # Director: initial plan
            return self.create_director_initial_prompt(query, language)
        if stage_idx == 1:  # Critic: review of the initial plan
            return self.create_critic_director_prompt(stages[0]["content"], query, language)
        if stage_idx == 2:  # Director: final master plan
            return self.create_director_final_prompt(stages[0]["content"], stages[1]["content"], query, language)
        master_plan = stages[2]["content"]
        # Writer drafting a part ("์์ ๋ณธ" in the name marks a revision stage).
        if role == "writer" and "์์ ๋ณธ" not in stages[stage_idx]["name"]:
            part_num = self._get_part_number(stage_idx)
            accumulated = '\n\n'.join(self.narrative_tracker.accumulated_content)
            return self.create_writer_prompt(part_num, master_plan, accumulated,
                                             self.narrative_tracker.story_bible, language)
        # Per-part critique: part number is encoded in the role key.
        if role.startswith("critic_part"):
            part_num = int(role.replace("critic_part", ""))
            # The writer's draft for this part is the immediately preceding stage.
            writer_content = stages[stage_idx-1]["content"]
            accumulated = '\n\n'.join(self.narrative_tracker.accumulated_content[:-1])
            return self.create_part_critic_prompt(part_num, writer_content, master_plan,
                                                  accumulated, self.narrative_tracker.story_bible, language)
        # Writer revision of a part.
        if role == "writer" and "์์ ๋ณธ" in stages[stage_idx]["name"]:
            part_num = self._get_part_number(stage_idx)
            original_content = stages[stage_idx-2]["content"]  # the original draft
            critic_feedback = stages[stage_idx-1]["content"]  # the critique
            return self.create_writer_revision_prompt(part_num, original_content,
                                                      critic_feedback, language)
        # Final critique over the whole assembled novel.
        if role == "critic_final":
            complete_novel = NovelDatabase.get_writer_content(self.current_session_id)
            word_count = len(complete_novel.split())
            return self.create_final_critic_prompt(complete_novel, word_count,
                                                   self.narrative_tracker.story_bible, language)
        return ""
    def create_director_final_prompt(self, initial_plan: str, critic_feedback: str,
                                     user_query: str, language: str) -> str:
        """Build the director's prompt for the final master plan.

        NOTE(review): unlike the other prompt builders, this one is
        Korean-only regardless of `language` — confirm intentional.

        Args:
            initial_plan: The director's initial plan (stage 0 output).
            critic_feedback: The critic's review of that plan (stage 1 output).
            user_query: The original novel theme from the user.
            language: Target language (currently unused here).
        """
        return f"""๋นํ์ ๋ฐ์ํ์ฌ ์ต์ข ๋ง์คํฐํ๋์ ์์ฑํ์ธ์.
**์ ์ฃผ์ :** {user_query}
**์ด๊ธฐ ๊ธฐํ:**
{initial_plan}
**๋นํ ํผ๋๋ฐฑ:**
{critic_feedback}
**์ต์ข ๋ง์คํฐํ๋ ์๊ตฌ์ฌํญ:**
1. ๋ชจ๋ ๋นํ ์ง์ ์ฌํญ ๋ฐ์
2. 10๊ฐ ํํธ์ ๊ตฌ์ฒด์ ๋ด์ฉ๊ณผ ์ธ๊ณผ๊ด๊ณ
3. ์ฃผ์ธ๊ณต์ ๋ช ํํ ๋ณํ ๋จ๊ณ
4. ์ค์ฌ ์์ง์ ์๋ฏธ ๋ณํ ๊ณผ์ 
5. ๊ฐ ํํธ 800๋จ์ด ์คํ ๊ฐ๋ฅ์ฑ
6. ์ฒ ํ์ ๊น์ด์ ์ฌํ์ ๋ฉ์์ง ๊ตฌํ ๋ฐฉ์
๊ตฌ์ฒด์ ์ด๊ณ ์คํ ๊ฐ๋ฅํ ์ต์ข ๊ณํ์ ์ ์ํ์ธ์."""
def _get_part_number(self, stage_idx: int) -> Optional[int]: | |
"""์คํ ์ด์ง ์ธ๋ฑ์ค์์ ํํธ ๋ฒํธ ์ถ์ถ""" | |
stage_name = UNIFIED_STAGES[stage_idx][1] | |
match = re.search(r'ํํธ (\d+)', stage_name) | |
if match: | |
return int(match.group(1)) | |
return None | |
def _update_story_bible_from_content(self, content: str, part_num: int): | |
"""์ปจํ ์ธ ์์ ์คํ ๋ฆฌ ๋ฐ์ด๋ธ ์๋ ์ ๋ฐ์ดํธ""" | |
# ๊ฐ๋จํ ํค์๋ ๊ธฐ๋ฐ ์ถ์ถ (์ค์ ๋ก๋ ๋ ์ ๊ตํ NLP ํ์) | |
lines = content.split('\n') | |
# ์บ๋ฆญํฐ ์ด๋ฆ ์ถ์ถ (๋๋ฌธ์๋ก ์์ํ๋ ๋จ์ด๋ค) | |
for line in lines: | |
words = line.split() | |
for word in words: | |
if word and word[0].isupper() and len(word) > 1: | |
if word not in self.narrative_tracker.story_bible.characters: | |
self.narrative_tracker.story_bible.characters[word] = { | |
"first_appearance": part_num, | |
"traits": [] | |
} | |
    def generate_literary_report(self, complete_novel: str, word_count: int, language: str) -> str:
        """Generate the final literary evaluation report via the final critic.

        Args:
            complete_novel: The full assembled novel text.
            word_count: Total word count of the novel.
            language: Report language.

        Returns:
            The critic's report text, or a Korean error message on failure
            (this method never raises).
        """
        prompt = self.create_final_critic_prompt(complete_novel, word_count,
                                                 self.narrative_tracker.story_bible, language)
        try:
            report = self.call_llm_sync([{"role": "user", "content": prompt}],
                                        "critic_final", language)
            return report
        except Exception as e:
            logger.error(f"์ต์ข ๋ณด๊ณ ์ ์์ฑ ์คํจ: {e}")
            return "๋ณด๊ณ ์ ์์ฑ ์ค ์ค๋ฅ ๋ฐ์"
# --- ์ ํธ๋ฆฌํฐ ํจ์๋ค --- | |
def process_query(query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, str, str, str], None, None]:
    """Top-level handler: run the novel pipeline and yield UI updates.

    Args:
        query: Novel theme entered by the user.
        language: "Korean" or "English".
        session_id: Existing session to resume, or None for a new one.

    Yields:
        Tuples of (stages markdown, novel markdown, status text, session id).
    """
    if not query.strip():
        yield "", "", "โ ์ฃผ์ ๋ฅผ ์ ๋ ฅํด์ฃผ์ธ์.", session_id
        return
    system = UnifiedLiterarySystem()
    stages_markdown = ""
    novel_content = ""
    for status, stages, current_session_id in system.process_novel_stream(query, language, session_id):
        stages_markdown = format_stages_display(stages)
        # Render the full novel only once the last ten stages are complete.
        if stages and all(s.get("status") == "complete" for s in stages[-10:]):
            novel_content = NovelDatabase.get_writer_content(current_session_id)
            novel_content = format_novel_display(novel_content)
        yield stages_markdown, novel_content, status or "๐ ์ฒ๋ฆฌ ์ค...", current_session_id
def get_active_sessions(language: str) -> List[str]:
    """Format the active sessions as dropdown labels.

    Each label embeds the session id prefix, a truncated query, the creation
    time, and the current word count. `language` is accepted for interface
    compatibility but unused.
    """
    labels = []
    for s in NovelDatabase.get_active_sessions():
        labels.append(
            f"{s['session_id'][:8]}... - {s['user_query'][:50]}... ({s['created_at']}) [{s['total_words']:,}๋จ์ด]"
        )
    return labels
def auto_recover_session(language: str) -> Tuple[Optional[str], str]:
    """Recover the most recent active session, if one exists.

    Returns:
        (session_id, status message); session_id is None when there is
        nothing to recover.
    """
    sessions = NovelDatabase.get_active_sessions()
    if not sessions:
        return None, "๋ณต๊ตฌํ ์ธ์ ์ด ์์ต๋๋ค."
    latest_session = sessions[0]
    return latest_session['session_id'], f"์ธ์ {latest_session['session_id'][:8]}... ๋ณต๊ตฌ๋จ"
def resume_session(session_id: str, language: str) -> Generator[Tuple[str, str, str, str], None, None]:
    """Resume a saved session by id, delegating to process_query.

    Accepts either a raw session id or a dropdown label of the form
    "abcd1234... - ..." (the id prefix before the ellipsis is extracted).
    """
    if not session_id:
        yield "", "", "โ ์ธ์ ID๊ฐ ์์ต๋๋ค.", session_id
        return
    if "..." in session_id:
        # Dropdown label: keep only the id prefix.
        session_id = session_id.split("...")[0]
    session = NovelDatabase.get_session(session_id)
    if not session:
        yield "", "", "โ ์ธ์ ์ ์ฐพ์ ์ ์์ต๋๋ค.", None
        return
    yield from process_query(session['user_query'], session['language'], session_id)
def download_novel(novel_text: str, format_type: str, language: str, session_id: str) -> Optional[str]:
    """Create a downloadable file for the novel.

    Falls back to TXT when DOCX is requested but python-docx is unavailable.

    Returns:
        Path of the created file, or None on missing input / export failure.
    """
    if not (novel_text and session_id):
        return None
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    base_name = f"novel_{session_id[:8]}_{stamp}"
    try:
        if format_type == "DOCX" and DOCX_AVAILABLE:
            return export_to_docx(novel_text, base_name, language, session_id)
        return export_to_txt(novel_text, base_name)
    except Exception as e:
        logger.error(f"ํ์ผ ์์ฑ ์คํจ: {e}")
        return None
def format_stages_display(stages: List[Dict]) -> str:
    """Render pipeline progress as markdown (single-writer system).

    Shows the total revised word count, completed part count, average
    narrative momentum, then a per-stage list with status icons and short
    content previews.
    """
    markdown = "## ๐ฌ ์งํ ์ํฉ\n\n"
    # Total word count — counts only writer REVISION stages, so each part
    # is counted once (final version) rather than draft + revision.
    total_words = sum(s.get('word_count', 0) for s in stages
                      if s.get('name', '').startswith('โ๏ธ ์๊ฐ:') and '์์ ๋ณธ' in s.get('name', ''))
    markdown += f"**์ด ๋จ์ด ์: {total_words:,} / {TARGET_WORDS:,}**\n\n"
    # Progress summary: a part is "done" when its revision stage is complete.
    completed_parts = sum(1 for s in stages
                          if '์์ ๋ณธ' in s.get('name', '') and s.get('status') == 'complete')
    markdown += f"**์์ฑ๋ ํํธ: {completed_parts} / 10**\n\n"
    # Average narrative momentum over stages that have a score.
    momentum_scores = [s.get('momentum', 0) for s in stages if s.get('momentum', 0) > 0]
    if momentum_scores:
        avg_momentum = sum(momentum_scores) / len(momentum_scores)
        markdown += f"**ํ๊ท ์์ฌ ์ถ์ง๋ ฅ: {avg_momentum:.1f} / 10**\n\n"
    markdown += "---\n\n"
    # Render each stage with a status icon.
    current_part = 0
    for i, stage in enumerate(stages):
        status_icon = "โ " if stage['status'] == 'complete' else "๐" if stage['status'] == 'active' else "โณ"
        # Insert a part heading whenever a new part begins.
        if 'ํํธ' in stage.get('name', '') and '๋นํ๊ฐ' not in stage.get('name', ''):
            part_match = re.search(r'ํํธ (\d+)', stage['name'])
            if part_match:
                new_part = int(part_match.group(1))
                if new_part != current_part:
                    current_part = new_part
                    markdown += f"\n### ๐ ํํธ {current_part}\n\n"
        markdown += f"{status_icon} **{stage['name']}**"
        if stage.get('word_count', 0) > 0:
            markdown += f" ({stage['word_count']:,}๋จ์ด)"
        if stage.get('momentum', 0) > 0:
            markdown += f" [์ถ์ง๋ ฅ: {stage['momentum']:.1f}/10]"
        markdown += "\n"
        if stage['content'] and stage['status'] == 'complete':
            # Longer preview for writer stages than for critic stages.
            preview_length = 300 if 'writer' in stage.get('name', '').lower() else 200
            preview = stage['content'][:preview_length] + "..." if len(stage['content']) > preview_length else stage['content']
            markdown += f"> {preview}\n\n"
        elif stage['status'] == 'active':
            markdown += "> *์์ฑ ์ค...*\n\n"
    return markdown
def format_novel_display(novel_text: str) -> str:
    """Render the assembled novel as markdown with per-part headings.

    NOTE(review): parts are split on blank lines and paired with
    NARRATIVE_PHASES by index, which assumes one paragraph block per part —
    confirm against how writer content is actually joined upstream.
    """
    if not novel_text:
        return "์์ง ์์ฑ๋ ๋ด์ฉ์ด ์์ต๋๋ค."
    formatted = "# ๐ ์์ฑ๋ ์์ค\n\n"
    # Word count vs. target.
    word_count = len(novel_text.split())
    formatted += f"**์ด ๋ถ๋: {word_count:,}๋จ์ด (๋ชฉํ: {TARGET_WORDS:,}๋จ์ด)**\n\n"
    # Achievement percentage relative to TARGET_WORDS.
    achievement = (word_count / TARGET_WORDS) * 100
    formatted += f"**๋ฌ์ฑ๋ฅ : {achievement:.1f}%**\n\n"
    formatted += "---\n\n"
    # Display each part under its narrative-phase heading.
    parts = novel_text.split('\n\n')
    for i, part in enumerate(parts):
        if part.strip():
            # Phase heading only while phase labels remain.
            if i < len(NARRATIVE_PHASES):
                formatted += f"## {NARRATIVE_PHASES[i]}\n\n"
            formatted += f"{part}\n\n"
            # Divider between parts (not after the last one).
            if i < len(parts) - 1:
                formatted += "---\n\n"
    return formatted
def export_to_docx(content: str, filename: str, language: str, session_id: str) -> str:
    """Export the novel to a DOCX file in Korean trade-paperback format.

    Page size 152mm x 225mm with 20mm margins, Batang font at 10.5pt,
    180% line spacing, 10mm first-line indent — standard Korean fiction
    layout. Markdown artifacts and part labels are stripped from the body.

    BUG FIX: previously saved to a hard-coded path, ignoring `filename`;
    now saves to "{filename}.docx".

    Args:
        content: Full novel text (may contain markdown; cleaned here).
        filename: Output file name without extension.
        language: UI language (currently unused here).
        session_id: Session id used to derive the title-page text.

    Returns:
        Path of the saved .docx file.
    """
    doc = Document()
    # Korean trade-paperback page geometry (152mm x 225mm, 20mm margins).
    section = doc.sections[0]
    section.page_height = Mm(225)
    section.page_width = Mm(152)
    section.top_margin = Mm(20)
    section.bottom_margin = Mm(20)
    section.left_margin = Mm(20)
    section.right_margin = Mm(20)
    # Look up the session to derive a title from the user's theme.
    session = NovelDatabase.get_session(session_id)
    def generate_title(user_query: str, content_preview: str) -> str:
        """Derive a short title from the theme (simple rule-based heuristic)."""
        if len(user_query) < 20:
            return user_query
        else:
            # Use the first few words of the theme as the title.
            keywords = user_query.split()[:5]
            return " ".join(keywords)
    # Title page (falls back to a default when the session is missing).
    title = generate_title(session["user_query"], content[:500]) if session else "๋ฌด์ "
    title_para = doc.add_paragraph()
    title_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
    title_para.paragraph_format.space_before = Pt(100)
    title_run = title_para.add_run(title)
    title_run.font.name = '๋ฐํ'
    # east-Asian font must be set explicitly on the XML element.
    title_run._element.rPr.rFonts.set(qn('w:eastAsia'), '๋ฐํ')
    title_run.font.size = Pt(20)
    title_run.bold = True
    doc.add_page_break()
    # Body style: Batang 10.5pt, 180% line spacing, 10mm first-line indent.
    style = doc.styles['Normal']
    style.font.name = '๋ฐํ'
    style._element.rPr.rFonts.set(qn('w:eastAsia'), '๋ฐํ')
    style.font.size = Pt(10.5)
    style.paragraph_format.line_spacing = 1.8
    style.paragraph_format.space_after = Pt(0)
    style.paragraph_format.first_line_indent = Mm(10)
    def clean_content(text: str) -> str:
        """Strip markdown artifacts and part labels, keeping only prose."""
        patterns_to_remove = [
            r'^#{1,6}\s+.*$',  # markdown headers
            r'^\*\*.*\*\*$',  # bold-only lines
            r'^ํํธ\s*\d+.*$',  # part numbers (Korean)
            r'^Part\s*\d+.*$',  # part numbers (English)
            r'^\d+\.\s+.*:.*$',  # numbered titles
            r'^---+$',  # dividers
            r'^\s*\[.*\]\s*$',  # bracketed labels
        ]
        lines = text.split('\n')
        cleaned_lines = []
        for line in lines:
            # Keep blank lines — they separate paragraphs.
            if not line.strip():
                cleaned_lines.append('')
                continue
            # Drop lines matching any removal pattern.
            skip_line = False
            for pattern in patterns_to_remove:
                if re.match(pattern, line.strip(), re.MULTILINE):
                    skip_line = True
                    break
            if not skip_line:
                # Strip inline markdown emphasis markers.
                cleaned_line = line
                cleaned_line = re.sub(r'\*\*(.*?)\*\*', r'\1', cleaned_line)  # **text** -> text
                cleaned_line = re.sub(r'\*(.*?)\*', r'\1', cleaned_line)  # *text* -> text
                cleaned_line = re.sub(r'`(.*?)`', r'\1', cleaned_line)  # `text` -> text
                cleaned_lines.append(cleaned_line.strip())
        # Collapse runs of blank lines to a single one.
        final_lines = []
        prev_empty = False
        for line in cleaned_lines:
            if not line:
                if not prev_empty:
                    final_lines.append('')
                prev_empty = True
            else:
                final_lines.append(line)
                prev_empty = False
        return '\n'.join(final_lines)
    cleaned_content = clean_content(content)
    # Body paragraphs.
    paragraphs = cleaned_content.split('\n')
    for para_text in paragraphs:
        if para_text.strip():
            para = doc.add_paragraph(para_text.strip())
            # Re-apply the Korean font per run (the style alone does not
            # cover east-Asian glyph runs).
            for run in para.runs:
                run.font.name = '๋ฐํ'
                run._element.rPr.rFonts.set(qn('w:eastAsia'), '๋ฐํ')
        else:
            # Blank paragraph as a separator.
            doc.add_paragraph()
    # Save under the caller-supplied name (bug fix: was a hard-coded path).
    filepath = f"{filename}.docx"
    doc.save(filepath)
    return filepath
def export_to_txt(content: str, filename: str) -> str:
    """Export the novel as a UTF-8 text file with a header and footer.

    BUG FIX: previously wrote to a hard-coded path, ignoring `filename`;
    now writes to "{filename}.txt".

    Args:
        content: Full novel text.
        filename: Output file name without extension.

    Returns:
        Path of the written .txt file.
    """
    filepath = f"{filename}.txt"
    with open(filepath, 'w', encoding='utf-8') as f:
        # Header: timestamp and total word count.
        f.write("=" * 80 + "\n")
        f.write(f"์์ฑ์ผ: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
        f.write(f"์ด ๋จ์ด ์: {len(content.split()):,}๋จ์ด\n")
        f.write("=" * 80 + "\n\n")
        # Body
        f.write(content)
        # Footer
        f.write("\n\n" + "=" * 80 + "\n")
        f.write("AI ๋ฌธํ ์ฐฝ์ ์์คํ v2.0\n")
        f.write("=" * 80 + "\n")
    return filepath
# Custom CSS for the Gradio UI: dark gradient background, glassmorphism
# panels, scrollable output areas, and button hover effects. Injected via
# gr.Blocks(css=custom_css).
custom_css = """
.gradio-container {
    background: linear-gradient(135deg, #1a1a2e 0%, #16213e 50%, #0f3460 100%);
    min-height: 100vh;
}
.main-header {
    background-color: rgba(255, 255, 255, 0.05);
    backdrop-filter: blur(20px);
    padding: 40px;
    border-radius: 20px;
    margin-bottom: 30px;
    text-align: center;
    color: white;
    border: 2px solid rgba(255, 255, 255, 0.1);
    box-shadow: 0 8px 32px rgba(0, 0, 0, 0.1);
}
.progress-note {
    background: linear-gradient(135deg, rgba(255, 107, 107, 0.1), rgba(255, 230, 109, 0.1));
    border-left: 4px solid #ff6b6b;
    padding: 20px;
    margin: 25px auto;
    border-radius: 10px;
    color: #fff;
    max-width: 800px;
    font-weight: 500;
}
.input-section {
    background-color: rgba(255, 255, 255, 0.08);
    backdrop-filter: blur(15px);
    padding: 25px;
    border-radius: 15px;
    margin-bottom: 25px;
    border: 1px solid rgba(255, 255, 255, 0.1);
    box-shadow: 0 4px 16px rgba(0, 0, 0, 0.1);
}
.session-section {
    background-color: rgba(255, 255, 255, 0.06);
    backdrop-filter: blur(10px);
    padding: 20px;
    border-radius: 12px;
    margin-top: 25px;
    color: white;
    border: 1px solid rgba(255, 255, 255, 0.08);
}
#stages-display {
    background-color: rgba(255, 255, 255, 0.97);
    padding: 25px;
    border-radius: 15px;
    max-height: 650px;
    overflow-y: auto;
    box-shadow: 0 8px 24px rgba(0, 0, 0, 0.15);
    color: #2c3e50;
}
#novel-output {
    background-color: rgba(255, 255, 255, 0.97);
    padding: 35px;
    border-radius: 15px;
    max-height: 750px;
    overflow-y: auto;
    box-shadow: 0 8px 24px rgba(0, 0, 0, 0.15);
    color: #2c3e50;
    line-height: 1.8;
}
.download-section {
    background-color: rgba(255, 255, 255, 0.92);
    padding: 20px;
    border-radius: 12px;
    margin-top: 25px;
    box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1);
}
/* ์งํ ํ์๊ธฐ ๊ฐ์  */
.progress-bar {
    background-color: #e0e0e0;
    height: 25px;
    border-radius: 12px;
    overflow: hidden;
    margin: 15px 0;
    box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.1);
}
.progress-fill {
    background: linear-gradient(90deg, #4CAF50, #8BC34A);
    height: 100%;
    transition: width 0.5s ease;
    box-shadow: 0 2px 8px rgba(76, 175, 80, 0.3);
}
/* ์คํฌ๋กค๋ฐ ์คํ์ผ */
::-webkit-scrollbar {
    width: 10px;
}
::-webkit-scrollbar-track {
    background: rgba(0, 0, 0, 0.1);
    border-radius: 5px;
}
::-webkit-scrollbar-thumb {
    background: rgba(0, 0, 0, 0.3);
    border-radius: 5px;
}
::-webkit-scrollbar-thumb:hover {
    background: rgba(0, 0, 0, 0.5);
}
/* ๋ฒํผ ํธ๋ฒ ํจ๊ณผ */
.gr-button:hover {
    transform: translateY(-2px);
    box-shadow: 0 4px 12px rgba(0, 0, 0, 0.2);
    transition: all 0.3s ease;
}
"""
# Gradio ์ธํฐํ์ด์ค ์์ฑ | |
def create_interface(): | |
with gr.Blocks(css=custom_css, title="AI ๋จ์ผ ์๊ฐ ์ฅํธ์์ค ์์คํ v2.0") as interface: | |
gr.HTML(""" | |
<div class="main-header"> | |
<h1 style="font-size: 2.8em; margin-bottom: 15px; font-weight: 700;"> | |
๐ AI ๋จ์ผ ์๊ฐ ์ฅํธ์์ค ์์คํ v2.0 | |
</h1> | |
<h3 style="color: #e0e0e0; margin-bottom: 25px; font-weight: 400;"> | |
ํ๋์ ์ผ๊ด๋ ๋ชฉ์๋ฆฌ๋ก ๋ง๋๋ 8,000๋จ์ด ํตํฉ ์์ฌ | |
</h3> | |
<p style="font-size: 1.2em; color: #d0d0d0; max-width: 900px; margin: 0 auto; line-height: 1.6;"> | |
๋จ์ผ ์๊ฐ๊ฐ 10๊ฐ ํํธ๋ฅผ ์์ฐจ์ ์ผ๋ก ์งํํ๋ฉฐ, ๊ฐ ํํธ๋ ์ ๋ด ๋นํ๊ฐ์ ์ฆ๊ฐ์ ํผ๋๋ฐฑ์ ๋ฐ์ ์์ ๋ฉ๋๋ค. | |
<br> | |
<strong>์ธ๊ณผ๊ด๊ณ์ ๋ช ํ์ฑ</strong>๊ณผ <strong>์์ฌ์ ์ ๊ธฐ์ ์งํ</strong>์ ์ต์ฐ์ ์ผ๋ก ์ถ๊ตฌํฉ๋๋ค. | |
</p> | |
<div class="progress-note"> | |
๐ฏ <strong>ํต์ฌ ํ์ :</strong> ์ฌ๋ฌ ์๊ฐ์ ํํธํ๋ ํ ์คํธ๊ฐ ์๋, | |
ํ ๋ช ์ ์๊ฐ๊ฐ ์ฒ์๋ถํฐ ๋๊น์ง ์ผ๊ด๋๊ฒ ์งํํ๋ ์ง์ ํ ์ฅํธ์์ค | |
</div> | |
</div> | |
""") | |
# ์ํ ๊ด๋ฆฌ | |
current_session_id = gr.State(None) | |
with gr.Row(): | |
with gr.Column(scale=1): | |
with gr.Group(elem_classes=["input-section"]): | |
query_input = gr.Textbox( | |
label="์์ค ์ฃผ์ / Novel Theme", | |
placeholder="""์คํธ์์ค์ ์ฃผ์ ๋ฅผ ์ ๋ ฅํ์ธ์. | |
์: ์ธ๋ฌผ์ ๋ด์ ๋ณํ, ๊ด๊ณ์ ๋ฐ์ , ์ฌํ์ ๊ฐ๋ฑ๊ณผ ๊ฐ์ธ์ ์ ํ... | |
Enter your novella theme. | |
Ex: Character transformation, relationship evolution, social conflict and personal choice...""", | |
lines=5 | |
) | |
language_select = gr.Radio( | |
choices=["Korean", "English"], | |
value="Korean", | |
label="์ธ์ด / Language" | |
) | |
with gr.Row(): | |
submit_btn = gr.Button("๐ ์งํ ์์", variant="primary", scale=2) | |
clear_btn = gr.Button("๐๏ธ ์ด๊ธฐํ", scale=1) | |
status_text = gr.Textbox( | |
label="์งํ ์ํ", | |
interactive=False, | |
value="๐ ์ค๋น ์๋ฃ" | |
) | |
# ์ธ์ ๊ด๋ฆฌ | |
with gr.Group(elem_classes=["session-section"]): | |
gr.Markdown("### ๐พ ์งํ ์ค์ธ ์ํ") | |
session_dropdown = gr.Dropdown( | |
label="์ ์ฅ๋ ์ธ์ ", | |
choices=[], | |
interactive=True | |
) | |
with gr.Row(): | |
refresh_btn = gr.Button("๐ ์๋ก๊ณ ์นจ", scale=1) | |
resume_btn = gr.Button("โถ๏ธ ์ด์ด์ฐ๊ธฐ", variant="secondary", scale=1) | |
auto_recover_btn = gr.Button("โป๏ธ ์ต๊ทผ ์ํ ๋ณต๊ตฌ", scale=1) | |
with gr.Column(scale=2): | |
with gr.Tab("๐ ์งํ ๊ณผ์ "): | |
stages_display = gr.Markdown( | |
value="์งํ ๊ณผ์ ์ด ์ค์๊ฐ์ผ๋ก ํ์๋ฉ๋๋ค...", | |
elem_id="stages-display" | |
) | |
with gr.Tab("๐ ์์ฑ ์ํ"): | |
novel_output = gr.Markdown( | |
value="์์ฑ๋ ์์ค์ด ์ฌ๊ธฐ์ ํ์๋ฉ๋๋ค...", | |
elem_id="novel-output" | |
) | |
with gr.Group(elem_classes=["download-section"]): | |
gr.Markdown("### ๐ฅ ์ํ ๋ค์ด๋ก๋") | |
with gr.Row(): | |
format_select = gr.Radio( | |
choices=["DOCX", "TXT"], | |
value="DOCX" if DOCX_AVAILABLE else "TXT", | |
label="ํ์ผ ํ์" | |
) | |
download_btn = gr.Button("โฌ๏ธ ๋ค์ด๋ก๋", variant="secondary") | |
download_file = gr.File( | |
label="๋ค์ด๋ก๋ ํ์ผ", | |
visible=False | |
) | |
# ์จ๊ฒจ์ง ์ํ | |
novel_text_state = gr.State("") | |
# ์์ | |
with gr.Row(): | |
gr.Examples( | |
examples=[ | |
["์ค์งํ ์ค๋ ๋จ์ฑ์ด ๋์๋ฅผ ๋ ๋ ์๊ณจ์์ ์๋ก์ด ์ถ์ ์๋ฏธ๋ฅผ ์ฐพ์๊ฐ๋ ๊ณผ์ "], | |
["์ ์ ํธ๋ผ์ฐ๋ง๋ฅผ ๊ฐ์ง ์์ฌ๊ฐ ๊ตญ๊ฒฝ์๋์์ฌํ ํ๋์ ํตํด ์น์ ๋๋ ์ด์ผ๊ธฐ"], | |
["AI์๊ฒ ์ผ์๋ฆฌ๋ฅผ ๋นผ์๊ธด ๋ฒ์ญ๊ฐ๊ฐ ๊ณ ์ ๋ฌธํ ํ์ฌ๋ฅผ ํตํด ์ธ์ด์ ๋ณธ์ง์ ์ฌ๋ฐ๊ฒฌํ๋ ์ฌ์ "], | |
["A daughter discovering her mother's hidden past through old letters"], | |
["An architect losing sight who learns to design through touch and sound"], | |
["์ฌ๊ฐ๋ฐ๋ก ์ฌ๋ผ์ง ๋๋ค ์์ ์ ์งํค๋ ค๋ ์ฃผ๋ฏผ๋ค์ ์ฐ๋"], | |
["๊ธฐ์ต์ ์์ด๊ฐ๋ ๋ ธ๊ต์์ ๊ทธ์ ๋ง์ง๋ง ์ ์์ ์ผ ๋ "] | |
], | |
inputs=query_input, | |
label="๐ก ์ฃผ์ ์์" | |
) | |
# ์ด๋ฒคํธ ํธ๋ค๋ฌ | |
def refresh_sessions():
    """Reload the saved-session dropdown; fall back to an empty list on error."""
    try:
        # Dropdown labels come from the database layer (Korean-language sessions).
        active = get_active_sessions("Korean")
        return gr.update(choices=active)
    except Exception as e:
        # Never let a DB hiccup break the UI — log and show nothing.
        logger.error(f"์ธ์ ์๋ก๊ณ ์นจ ์ค๋ฅ: {str(e)}")
        return gr.update(choices=[])
def handle_auto_recover(language):
    """Recover the most recent unfinished session for *language*.

    Returns a (session_id, status_message) pair for the UI outputs.
    """
    recovered_id, status_msg = auto_recover_session(language)
    return recovered_id, status_msg
# Wire UI events to handlers (process_query, resume_session, etc. are
# defined earlier in this file, outside this chunk).
submit_btn.click(
fn=process_query,
inputs=[query_input, language_select, current_session_id],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
# Mirror the rendered novel markdown into plain State so the download
# handler can read the text without re-querying the component.
novel_output.change(
fn=lambda x: x,
inputs=[novel_output],
outputs=[novel_text_state]
)
# Resume flow: first extract the session id from the dropdown label
# (entries appear to be "session_id...description" — TODO confirm the
# format produced by get_active_sessions), then chain into resume_session
# using the freshly written state value.
resume_btn.click(
fn=lambda x: x.split("...")[0] if x and "..." in x else x,
inputs=[session_dropdown],
outputs=[current_session_id]
).then(
fn=resume_session,
inputs=[current_session_id, language_select],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
# Auto-recover: locate the latest session, then resume it in a second step.
auto_recover_btn.click(
fn=handle_auto_recover,
inputs=[language_select],
outputs=[current_session_id, status_text]
).then(
fn=resume_session,
inputs=[current_session_id, language_select],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
refresh_btn.click(
fn=refresh_sessions,
outputs=[session_dropdown]
)
# Reset both displays, the status line, cached novel text, and the
# active session id (None clears the session).
clear_btn.click(
fn=lambda: ("", "", "๐ ์ค๋น ์๋ฃ", "", None),
outputs=[stages_display, novel_output, status_text, novel_text_state, current_session_id]
)
def handle_download(format_type, language, session_id, novel_text):
    """Export the finished novel and reveal the file component.

    Hides the component when there is nothing to export or the export fails.
    """
    # Nothing to export without both an active session and generated text.
    if not (session_id and novel_text):
        return gr.update(visible=False)
    exported_path = download_novel(novel_text, format_type, language, session_id)
    if not exported_path:
        return gr.update(visible=False)
    return gr.update(value=exported_path, visible=True)
download_btn.click(
fn=handle_download,
inputs=[format_select, language_select, current_session_id, novel_text_state],
outputs=[download_file]
)
# Populate the session dropdown once when the app first loads.
interface.load(
fn=refresh_sessions,
outputs=[session_dropdown]
)
return interface
# Main entry point
if __name__ == "__main__":
logger.info("AI ๋จ์ผ ์๊ฐ ์ฅํธ์์ค ์์คํ v2.0 ์์...")
logger.info("=" * 60)
# Log the effective configuration so deployments are easy to audit.
logger.info(f"API ์๋ํฌ์ธํธ: {API_URL}")
logger.info(f"๋ชฉํ ๋ถ๋: {TARGET_WORDS:,}๋จ์ด")
logger.info(f"ํํธ๋น ์ต์ ๋ถ๋: {MIN_WORDS_PER_PART:,}๋จ์ด")
logger.info("์์คํ ํน์ง: ๋จ์ผ ์๊ฐ + ํํธ๋ณ ์ฆ์ ๋นํ")
# Optional features depend on env vars / installed packages checked at import.
if BRAVE_SEARCH_API_KEY:
logger.info("์น ๊ฒ์์ด ํ์ฑํ๋์์ต๋๋ค.")
else:
logger.warning("์น ๊ฒ์์ด ๋นํ์ฑํ๋์์ต๋๋ค.")
if DOCX_AVAILABLE:
logger.info("DOCX ๋ด๋ณด๋ด๊ธฐ๊ฐ ํ์ฑํ๋์์ต๋๋ค.")
else:
logger.warning("DOCX ๋ด๋ณด๋ด๊ธฐ๊ฐ ๋นํ์ฑํ๋์์ต๋๋ค.")
logger.info("=" * 60)
# Database initialization — presumably creates tables if missing;
# verify in NovelDatabase.init_db (defined elsewhere in this file).
logger.info("๋ฐ์ดํฐ๋ฒ ์ด์ค ์ด๊ธฐํ ์ค...")
NovelDatabase.init_db()
logger.info("๋ฐ์ดํฐ๋ฒ ์ด์ค ์ด๊ธฐํ ์๋ฃ.")
# Build and launch the Gradio app on all interfaces, port 7860.
# NOTE(review): debug=True enables verbose error output — confirm this
# is intended for the deployed environment.
interface = create_interface()
interface.launch(
server_name="0.0.0.0",
server_port=7860,
share=False,
debug=True
)