Spaces:
Running
Running
import gradio as gr | |
import os | |
import json | |
import requests | |
from datetime import datetime | |
import time | |
from typing import List, Dict, Any, Generator, Tuple, Optional, Set | |
import logging | |
import re | |
import tempfile | |
from pathlib import Path | |
import sqlite3 | |
import hashlib | |
import threading | |
from contextlib import contextmanager | |
from dataclasses import dataclass, field, asdict | |
from collections import defaultdict | |
# --- Logging setup ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# --- Document export imports --- | |
try: | |
from docx import Document | |
from docx.shared import Inches, Pt, RGBColor | |
from docx.enum.text import WD_ALIGN_PARAGRAPH | |
from docx.enum.style import WD_STYLE_TYPE | |
from docx.oxml.ns import qn | |
from docx.oxml import OxmlElement | |
DOCX_AVAILABLE = True | |
except ImportError: | |
DOCX_AVAILABLE = False | |
logger.warning("python-docx not installed. DOCX export will be disabled.") | |
# --- Environment variables and constants ---
FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "")
BRAVE_SEARCH_API_KEY = os.getenv("BRAVE_SEARCH_API_KEY", "")
API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
MODEL_ID = "dep89a2fld32mcm"
DB_PATH = "novel_sessions_v5.db"
# Target length settings
TARGET_WORDS = 8000  # 8,000 words total, leaving a safety margin
MIN_WORDS_PER_WRITER = 800  # minimum word count for each writer phase
# --- Environment variable validation ---
if not FRIENDLI_TOKEN:
    logger.error("FRIENDLI_TOKEN not set. Application will not work properly.")
    # Placeholder so the app can still start for local testing
    FRIENDLI_TOKEN = "dummy_token_for_testing"
if not BRAVE_SEARCH_API_KEY:
    logger.warning("BRAVE_SEARCH_API_KEY not set. Web search features will be disabled.")
# --- Global state ---
db_lock = threading.Lock()  # serializes all SQLite access across threads
# Narrative phase definitions (10 phases; strings are fed into prompts verbatim)
NARRATIVE_PHASES = [
    "๋์ : ์ผ์๊ณผ ๊ท ์ด",
    "๋ฐ์ 1: ๋ถ์์ ๊ณ ์กฐ",
    "๋ฐ์ 2: ์ธ๋ถ ์ถฉ๊ฒฉ",
    "๋ฐ์ 3: ๋ด์ ๊ฐ๋ฑ ์ฌํ",
    "์ ์ 1: ์๊ธฐ์ ์ ์ ",
    "์ ์ 2: ์ ํ์ ์๊ฐ",
    "ํ๊ฐ 1: ๊ฒฐ๊ณผ์ ์ฌํ",
    "ํ๊ฐ 2: ์๋ก์ด ์ธ์",
    "๊ฒฐ๋ง 1: ๋ณํ๋ ์ผ์",
    "๊ฒฐ๋ง 2: ์ด๋ฆฐ ์ง๋ฌธ"
]
# Pipeline stage list: plan -> review -> revised plan -> 10 drafts -> mid review
# -> 10 revisions -> final review. Each entry is (role, display_name).
PROGRESSIVE_STAGES = [
    ("director", "๐ฌ ๊ฐ๋ ์: ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ ๊ธฐํ"),
    ("critic", "๐ ๋นํ๊ฐ: ์์ฌ ์งํ์ฑ๊ณผ ๊น์ด ๊ฒํ "),
    ("director", "๐ฌ ๊ฐ๋ ์: ์์ ๋ ๋ง์คํฐํ๋"),
] + [
    (f"writer{i}", f"โ๏ธ ์๊ฐ {i}: ์ด์ - {NARRATIVE_PHASES[i-1]}")
    for i in range(1, 11)
] + [
    ("critic", "๐ ๋นํ๊ฐ: ์ค๊ฐ ๊ฒํ  (์์ฌ ๋์ ์ฑ๊ณผ ๋ณํ)"),
] + [
    (f"writer{i}", f"โ๏ธ ์๊ฐ {i}: ์์ ๋ณธ - {NARRATIVE_PHASES[i-1]}")
    for i in range(1, 11)
] + [
    ("critic", f"๐ ๋นํ๊ฐ: ์ต์ข ๊ฒํ  ๋ฐ ๋ฌธํ์  ํ๊ฐ"),
]
# --- ๋ฐ์ดํฐ ํด๋์ค --- | |
@dataclass
class CharacterArc:
    """Tracks a character's transformation trajectory across narrative phases.

    Without @dataclass the ``field(default_factory=...)`` defaults were inert
    and keyword construction (``CharacterArc(name=..., initial_state=...)``)
    as well as ``asdict`` serialization failed.
    """
    name: str
    initial_state: Dict[str, Any]  # state at the start of the story
    phase_states: Dict[int, Dict[str, Any]] = field(default_factory=dict)  # per-phase states
    transformations: List[str] = field(default_factory=list)  # major changes recorded
    relationships_evolution: Dict[str, List[str]] = field(default_factory=dict)  # relationship shifts
@dataclass
class PlotThread:
    """Tracks a single plot line from introduction to resolution.

    ``resolution_phase`` defaults to None: the only constructor call in this
    file (ProgressiveNarrativeTracker.add_plot_thread) does not supply it.
    """
    thread_id: str
    description: str
    introduction_phase: int
    development_phases: List[int]
    resolution_phase: Optional[int] = None
    status: str = "active"  # active, resolved, suspended
@dataclass
class SymbolicEvolution:
    """Tracks how a symbol's meaning evolves across narrative phases.

    @dataclass is required so ``field(default_factory=dict)`` works and the
    class can round-trip through ``asdict``/keyword construction.
    """
    symbol: str
    initial_meaning: str
    phase_meanings: Dict[int, str] = field(default_factory=dict)  # phase -> meaning at that phase
    transformation_complete: bool = False  # True once the symbol's arc has resolved
# --- ํต์ฌ ๋ก์ง ํด๋์ค --- | |
class ProgressiveNarrativeTracker:
    """Tracks narrative progression and accumulation across the 10 phases.

    Holds character arcs, plot threads, symbol evolutions, per-phase
    summaries, and derives progression warnings and per-phase requirements.
    """
    def __init__(self):
        self.character_arcs: Dict[str, CharacterArc] = {}
        self.plot_threads: Dict[str, PlotThread] = {}
        self.symbolic_evolutions: Dict[str, SymbolicEvolution] = {}
        self.phase_summaries: Dict[int, str] = {}
        self.accumulated_events: List[Dict[str, Any]] = []
        self.thematic_deepening: List[str] = []

    def register_character_arc(self, name: str, initial_state: Dict[str, Any]):
        """Register a character arc with its initial state."""
        self.character_arcs[name] = CharacterArc(name=name, initial_state=initial_state)
        logger.info(f"Character arc registered: {name}")

    def update_character_state(self, name: str, phase: int, new_state: Dict[str, Any], transformation: str):
        """Record a character's new state and transformation for a phase."""
        if name in self.character_arcs:
            arc = self.character_arcs[name]
            arc.phase_states[phase] = new_state
            arc.transformations.append(f"Phase {phase}: {transformation}")
            logger.info(f"Character {name} transformed in phase {phase}: {transformation}")

    def add_plot_thread(self, thread_id: str, description: str, intro_phase: int):
        """Add a new plot thread introduced at the given phase."""
        self.plot_threads[thread_id] = PlotThread(
            thread_id=thread_id,
            description=description,
            introduction_phase=intro_phase,
            development_phases=[]
        )

    def develop_plot_thread(self, thread_id: str, phase: int):
        """Mark a plot thread as developed in the given phase."""
        if thread_id in self.plot_threads:
            self.plot_threads[thread_id].development_phases.append(phase)

    def check_narrative_progression(self, current_phase: int) -> Tuple[bool, List[str]]:
        """Check whether the narrative is actually moving forward.

        Returns (ok, issues): ok is True when no issues were detected;
        issues lists stalled characters, undeveloped plot threads and
        static symbols (messages are Korean, fed into critic prompts).
        """
        issues = []
        # 1. Character change check
        static_characters = []
        for name, arc in self.character_arcs.items():
            if len(arc.transformations) < current_phase // 3:  # expect a change at least every 3 phases
                static_characters.append(name)
        if static_characters:
            issues.append(f"๋ค์ ์ธ๋ฌผ๋ค์ ๋ณํ๊ฐ ๋ถ์กฑํฉ๋๋ค: {', '.join(static_characters)}")
        # 2. Plot progression check: active threads need >= 2 development phases
        unresolved_threads = []
        for thread_id, thread in self.plot_threads.items():
            if thread.status == "active" and len(thread.development_phases) < 2:
                unresolved_threads.append(thread.description)
        if unresolved_threads:
            issues.append(f"์ง์ ๋์ง ์์ ํ๋กฏ: {', '.join(unresolved_threads)}")
        # 3. Symbol evolution check: meaning should shift roughly every 4 phases
        static_symbols = []
        for symbol, evolution in self.symbolic_evolutions.items():
            if len(evolution.phase_meanings) < current_phase // 4:
                static_symbols.append(symbol)
        if static_symbols:
            issues.append(f"์๋ฏธ๊ฐ ๋ฐ์ ํ์ง ์์ ์์ง: {', '.join(static_symbols)}")
        return len(issues) == 0, issues

    def generate_phase_requirements(self, phase: int) -> str:
        """Build the mandatory requirement text injected into a writer prompt."""
        requirements = []
        # Carry over the previous phase's summary, if recorded
        if phase > 1 and (phase-1) in self.phase_summaries:
            requirements.append(f"์ด์  ๋จ๊ณ ํต์ฌ: {self.phase_summaries[phase-1]}")
        # Phase-specific requirements keyed off the Korean phase-name prefix
        phase_name = NARRATIVE_PHASES[phase-1] if phase <= 10 else "์์ "
        if "๋์" in phase_name:
            requirements.append("- ์ผ์์ ๊ท ์ด์ ๋ณด์ฌ์ฃผ๋, ํฐ ์ฌ๊ฑด์ด ์๋ ๋ฏธ๋ฌํ ๋ณํ๋ก ์์")
            requirements.append("- ์ฃผ์ ์ธ๋ฌผ๋ค์ ์ด๊ธฐ ์ํ์ ๊ด๊ณ ์ค์ ")
            requirements.append("- ํต์ฌ ์์ง ๋์ (์์ฐ์ค๋ฝ๊ฒ)")
        elif "๋ฐ์ " in phase_name:
            requirements.append("- ์ด์  ๋จ๊ณ์ ๊ท ์ด/๊ฐ๋ฑ์ด ๊ตฌ์ฒดํ๋๊ณ  ์ฌํ")
            requirements.append("- ์๋ก์ด ์ฌ๊ฑด์ด๋ ์ธ์์ด ์ถ๊ฐ๋์ด ๋ณต์ก์ฑ ์ฆ๊ฐ")
            requirements.append("- ์ธ๋ฌผ ๊ฐ ๊ด๊ณ์ ๋ฏธ๋ฌํ ๋ณํ")
        elif "์ ์ " in phase_name:
            requirements.append("- ์ถ์ ๋ ๊ฐ๋ฑ์ด ์๊ณ์ ์ ๋๋ฌ")
            requirements.append("- ์ธ๋ฌผ์ ๋ด์  ์ ํ์ด๋ ์ธ์์  ์ ํ์ ")
            requirements.append("- ์์ง์ ์๋ฏธ๊ฐ ์ ๋ณต๋๊ฑฐ๋ ์ฌํ")
        elif "ํ๊ฐ" in phase_name:
            requirements.append("- ์ ์ ์ ์ฌํ์ ๊ทธ๋ก ์ธํ ๋ณํ")
            requirements.append("- ์๋ก์ด ๊ท ํ์ ์ ์ฐพ์๊ฐ๋ ๊ณผ์ ")
            requirements.append("- ์ธ๋ฌผ๋ค์ ๋ณํ๋ ๊ด๊ณ์ ์ธ์")
        elif "๊ฒฐ๋ง" in phase_name:
            requirements.append("- ๋ณํ๋ ์ผ์์ ๋ชจ์ต")
            requirements.append("- ํด๊ฒฐ๋์ง ์์ ์ง๋ฌธ๋ค")
            requirements.append("- ์ฌ์ด๊ณผ ์ฑ์ฐฐ์ ์ฌ์ง")
        # Anti-repetition requirements appended to every phase
        requirements.append("\nโ ๏ธ ์ ๋ ๊ธ์ง์ฌํญ:")
        requirements.append("- ์ด์  ๋จ๊ณ์ ๋์ผํ ์ฌ๊ฑด์ด๋ ๊ฐ๋ฑ ๋ฐ๋ณต")
        requirements.append("- ์ธ๋ฌผ์ด ๊ฐ์ ์๊ฐ์ด๋ ๊ฐ์ ์ ๋จธ๋ฌด๋ฅด๊ธฐ")
        requirements.append("- ํ๋กฏ์ด ์ ์๋ฆฌ๊ฑธ์ํ๊ธฐ")
        return "\n".join(requirements)
class NovelDatabase:
    """SQLite persistence for sessions, stages and narrative trackers.

    The class is a namespace of static methods; all access goes through the
    module-level ``db_lock`` and short-lived connections.

    Fixes over the previous version:
    - ``get_db`` is now decorated with ``@contextmanager`` — it is used as
      ``with NovelDatabase.get_db() as conn:`` everywhere, which fails on a
      bare generator function.
    - Methods are explicitly ``@staticmethod`` (they take no self/cls).
    - ``load_narrative_tracker`` converts JSON-string dict keys back to int
      (``phase_summaries`` / ``phase_states`` are int-keyed in memory).
    - ``symbolic_evolutions`` is now persisted alongside the other tracker
      state (older rows without the key load fine via ``.get``).
    """

    @staticmethod
    def init_db():
        """Create the schema if missing and enable WAL journaling."""
        with sqlite3.connect(DB_PATH) as conn:
            conn.execute("PRAGMA journal_mode=WAL")
            cursor = conn.cursor()
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS sessions (
                    session_id TEXT PRIMARY KEY,
                    user_query TEXT NOT NULL,
                    language TEXT NOT NULL,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    status TEXT DEFAULT 'active',
                    current_stage INTEGER DEFAULT 0,
                    final_novel TEXT,
                    literary_report TEXT,
                    total_words INTEGER DEFAULT 0,
                    narrative_tracker TEXT
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS stages (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    stage_number INTEGER NOT NULL,
                    stage_name TEXT NOT NULL,
                    role TEXT NOT NULL,
                    content TEXT,
                    word_count INTEGER DEFAULT 0,
                    status TEXT DEFAULT 'pending',
                    progression_score REAL DEFAULT 0.0,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id),
                    UNIQUE(session_id, stage_number)
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS plot_threads (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    thread_id TEXT NOT NULL,
                    description TEXT,
                    introduction_phase INTEGER,
                    status TEXT DEFAULT 'active',
                    created_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id)
                )
            ''')
            conn.commit()

    @staticmethod
    @contextmanager
    def get_db():
        """Yield a row-factory connection, serialized by db_lock, always closed."""
        with db_lock:
            conn = sqlite3.connect(DB_PATH, timeout=30.0)
            conn.row_factory = sqlite3.Row
            try:
                yield conn
            finally:
                conn.close()

    @staticmethod
    def create_session(user_query: str, language: str) -> str:
        """Create a session row and return its id (md5 used as an id, not for security)."""
        session_id = hashlib.md5(f"{user_query}{datetime.now()}".encode()).hexdigest()
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'INSERT INTO sessions (session_id, user_query, language) VALUES (?, ?, ?)',
                (session_id, user_query, language)
            )
            conn.commit()
        return session_id

    @staticmethod
    def save_stage(session_id: str, stage_number: int, stage_name: str,
                   role: str, content: str, status: str = 'complete',
                   progression_score: float = 0.0):
        """Upsert a stage row and refresh the session's word total / current stage."""
        word_count = len(content.split()) if content else 0
        with NovelDatabase.get_db() as conn:
            cursor = conn.cursor()
            # Upsert via the `excluded` pseudo-table to avoid duplicating params
            cursor.execute('''
                INSERT INTO stages (session_id, stage_number, stage_name, role, content, word_count, status, progression_score)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(session_id, stage_number)
                DO UPDATE SET content=excluded.content, word_count=excluded.word_count,
                              status=excluded.status, stage_name=excluded.stage_name,
                              progression_score=excluded.progression_score,
                              updated_at=datetime('now')
            ''', (session_id, stage_number, stage_name, role, content, word_count, status, progression_score))
            # Recompute the total word count from writer stages only
            cursor.execute('''
                UPDATE sessions
                SET total_words = (
                    SELECT SUM(word_count)
                    FROM stages
                    WHERE session_id = ? AND role LIKE 'writer%' AND content IS NOT NULL
                ),
                updated_at = datetime('now'),
                current_stage = ?
                WHERE session_id = ?
            ''', (session_id, stage_number, session_id))
            conn.commit()

    @staticmethod
    def get_writer_content(session_id: str) -> str:
        """Collect writer content per phase, preferring revisions over drafts."""
        with NovelDatabase.get_db() as conn:
            all_content = []
            for writer_num in range(1, 11):
                # Prefer the revision ('์์ ๋ณธ'); fall back to the draft ('์ด์')
                row = conn.cursor().execute('''
                    SELECT content FROM stages
                    WHERE session_id = ? AND role = ?
                    AND stage_name LIKE '%์์ ๋ณธ%'
                    ORDER BY stage_number DESC LIMIT 1
                ''', (session_id, f'writer{writer_num}')).fetchone()
                if not row or not row['content']:
                    row = conn.cursor().execute('''
                        SELECT content FROM stages
                        WHERE session_id = ? AND role = ?
                        AND stage_name LIKE '%์ด์%'
                        ORDER BY stage_number DESC LIMIT 1
                    ''', (session_id, f'writer{writer_num}')).fetchone()
                if row and row['content']:
                    all_content.append(row['content'].strip())
            return '\n\n'.join(all_content)

    @staticmethod
    def get_total_words(session_id: str) -> int:
        """Return the session's cached total word count (0 when unknown)."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                'SELECT total_words FROM sessions WHERE session_id = ?',
                (session_id,)
            ).fetchone()
            return row['total_words'] if row and row['total_words'] else 0

    @staticmethod
    def save_narrative_tracker(session_id: str, tracker: ProgressiveNarrativeTracker):
        """Serialize the tracker state into the session row as JSON."""
        with NovelDatabase.get_db() as conn:
            tracker_data = json.dumps({
                'character_arcs': {k: asdict(v) for k, v in tracker.character_arcs.items()},
                'plot_threads': {k: asdict(v) for k, v in tracker.plot_threads.items()},
                'symbolic_evolutions': {k: asdict(v) for k, v in tracker.symbolic_evolutions.items()},
                'phase_summaries': tracker.phase_summaries,
                'thematic_deepening': tracker.thematic_deepening
            })
            conn.cursor().execute(
                'UPDATE sessions SET narrative_tracker = ? WHERE session_id = ?',
                (tracker_data, session_id)
            )
            conn.commit()

    @staticmethod
    def load_narrative_tracker(session_id: str) -> Optional[ProgressiveNarrativeTracker]:
        """Rebuild a tracker from the stored JSON, or None when absent.

        JSON turns int dict keys into strings, so int-keyed maps are
        converted back on load.
        """
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                'SELECT narrative_tracker FROM sessions WHERE session_id = ?',
                (session_id,)
            ).fetchone()
            if not row or not row['narrative_tracker']:
                return None
            data = json.loads(row['narrative_tracker'])
            tracker = ProgressiveNarrativeTracker()
            for name, arc_data in data.get('character_arcs', {}).items():
                arc_data = dict(arc_data)
                arc_data['phase_states'] = {
                    int(k): v for k, v in arc_data.get('phase_states', {}).items()
                }
                tracker.character_arcs[name] = CharacterArc(**arc_data)
            for thread_id, thread_data in data.get('plot_threads', {}).items():
                tracker.plot_threads[thread_id] = PlotThread(**thread_data)
            for symbol, ev_data in data.get('symbolic_evolutions', {}).items():
                ev_data = dict(ev_data)
                ev_data['phase_meanings'] = {
                    int(k): v for k, v in ev_data.get('phase_meanings', {}).items()
                }
                tracker.symbolic_evolutions[symbol] = SymbolicEvolution(**ev_data)
            tracker.phase_summaries = {
                int(k): v for k, v in data.get('phase_summaries', {}).items()
            }
            tracker.thematic_deepening = data.get('thematic_deepening', [])
            return tracker

    @staticmethod
    def get_session(session_id: str) -> Optional[Dict]:
        """Return the session row as a dict, or None when not found."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute('SELECT * FROM sessions WHERE session_id = ?', (session_id,)).fetchone()
            return dict(row) if row else None

    @staticmethod
    def get_stages(session_id: str) -> List[Dict]:
        """Return all stage rows for a session in stage order."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute('SELECT * FROM stages WHERE session_id = ? ORDER BY stage_number', (session_id,)).fetchall()
            return [dict(row) for row in rows]

    @staticmethod
    def update_final_novel(session_id: str, final_novel: str, literary_report: str = ""):
        """Store the finished novel and report; marks the session complete."""
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                "UPDATE sessions SET final_novel = ?, status = 'complete', updated_at = datetime('now'), literary_report = ? WHERE session_id = ?",
                (final_novel, literary_report, session_id)
            )
            conn.commit()

    @staticmethod
    def get_active_sessions() -> List[Dict]:
        """Return up to 10 most recently updated active sessions."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                "SELECT session_id, user_query, language, created_at, current_stage, total_words FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 10"
            ).fetchall()
            return [dict(row) for row in rows]
class WebSearchIntegration:
    """Thin wrapper around the Brave web-search REST API.

    Degrades gracefully: when no API key is configured, every search
    simply returns an empty result list.
    """

    def __init__(self):
        self.brave_api_key = BRAVE_SEARCH_API_KEY
        self.search_url = "https://api.search.brave.com/res/v1/web/search"
        self.enabled = bool(self.brave_api_key)

    def search(self, query: str, count: int = 3, language: str = "en") -> List[Dict]:
        """Run a web search; returns [] when disabled or on request errors."""
        if not self.enabled:
            return []
        request_headers = {
            "Accept": "application/json",
            "X-Subscription-Token": self.brave_api_key,
        }
        request_params = {
            "q": query,
            "count": count,
            "search_lang": "ko" if language == "Korean" else "en",
            "text_decorations": False,
            "safesearch": "moderate",
        }
        try:
            resp = requests.get(
                self.search_url,
                headers=request_headers,
                params=request_params,
                timeout=10,
            )
            resp.raise_for_status()
            return resp.json().get("web", {}).get("results", [])
        except requests.exceptions.RequestException as e:
            logger.error(f"์น ๊ฒ์ API ์ค๋ฅ: {e}")
            return []

    def extract_relevant_info(self, results: List[Dict], max_chars: int = 1500) -> str:
        """Flatten up to three results into a numbered, length-capped digest."""
        if not results:
            return ""
        digest: List[str] = []
        used = 0
        for idx, item in enumerate(results[:3], start=1):
            entry = f"[{idx}] {item.get('title', '')}: {item.get('description', '')}"
            if used + len(entry) >= max_chars:
                break
            digest.append(entry)
            used += len(entry)
        return "\n".join(digest)
class ProgressiveLiterarySystem:
    """Progressive literary novel generation system (director/critic/writer pipeline)."""
    def __init__(self):
        # Friendli.ai API credentials and endpoint
        self.token = FRIENDLI_TOKEN
        self.api_url = API_URL
        self.model_id = MODEL_ID
        self.narrative_tracker = ProgressiveNarrativeTracker()
        self.web_search = WebSearchIntegration()
        self.current_session_id: Optional[str] = None
        NovelDatabase.init_db()  # ensure the schema exists before first use
def create_headers(self):
    """Build the HTTP headers for Friendli API requests."""
    auth_value = f"Bearer {self.token}"
    return {
        "Authorization": auth_value,
        "Content-Type": "application/json",
    }
# --- Prompt-builder methods ---
def create_director_initial_prompt(self, user_query: str, language: str) -> str:
    """Director's initial plan prompt: an integrated 10-phase narrative structure.

    Optionally prepends Brave web-search snippets as reference material.
    """
    search_results_str = ""
    if self.web_search.enabled:
        # Only the first query is actually issued (cost control)
        queries = [f"{user_query} ์ฌํ ๋ฌธ์ ", f"{user_query} ํ๋ ํ๊ตญ"]
        for q in queries[:1]:
            results = self.web_search.search(q, count=2, language=language)
            if results:
                search_results_str += self.web_search.extract_relevant_info(results) + "\n"
    lang_prompts = {
        "Korean": f"""๋น์ ์ ํ๋ ํ๊ตญ ๋ฌธํ์ ๊ฑฐ์ฅ์
๋๋ค.
๋จํธ์ด ์๋ ์คํธ ์์ค(8,000๋จ์ด ์ด์)์ ์ํ ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ๋ฅผ ๊ธฐํํ์ธ์.
**์ฃผ์ :** {user_query}
**์ฐธ๊ณ  ์๋ฃ:**
{search_results_str if search_results_str else "N/A"}
**ํ์ ์๊ตฌ์ฌํญ:**
1. **ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ (๊ฐ์ฅ ์ค์)**
- 10๊ฐ ๋จ๊ณ๊ฐ ์ ๊ธฐ์ ์ผ๋ก ์ฐ๊ฒฐ๋ ๋จ์ผ ์์ฌ
- ๊ฐ ๋จ๊ณ๋ ์ด์  ๋จ๊ณ์ ๊ฒฐ๊ณผ๋ก ์์ฐ์ค๋ฝ๊ฒ ์ด์ด์ง
- ๋ฐ๋ณต์ด ์๋ ์ถ์ ๊ณผ ๋ฐ์ 
๋จ๊ณ๋ณ ์์ฌ ์งํ:
1) ๋์: ์ผ์๊ณผ ๊ท ์ด - ํ๋ฒํ ์ผ์ ์ ์ฒซ ๊ท ์ด
2) ๋ฐ์  1: ๋ถ์์ ๊ณ ์กฐ - ๊ท ์ด์ด ํ๋๋๋ฉฐ ๋ถ์ ์ฆํญ
3) ๋ฐ์  2: ์ธ๋ถ ์ถฉ๊ฒฉ - ์์์น ๋ชปํ ์ธ๋ถ ์ฌ๊ฑด
4) ๋ฐ์  3: ๋ด์  ๊ฐ๋ฑ ์ฌํ - ๊ฐ์น๊ด์ ์ถฉ๋
5) ์ ์  1: ์๊ธฐ์ ์ ์  - ๋ชจ๋  ๊ฐ๋ฑ์ด ๊ทน๋ํ
6) ์ ์  2: ์ ํ์ ์๊ฐ - ๊ฒฐ์ ์  ์ ํ
7) ํ๊ฐ 1: ๊ฒฐ๊ณผ์ ์ฌํ - ์ ํ์ ์ง์ ์  ๊ฒฐ๊ณผ
8) ํ๊ฐ 2: ์๋ก์ด ์ธ์ - ๋ณํ๋ ์ธ๊ณ๊ด
9) ๊ฒฐ๋ง 1: ๋ณํ๋ ์ผ์ - ์๋ก์ด ๊ท ํ
10) ๊ฒฐ๋ง 2: ์ด๋ฆฐ ์ง๋ฌธ - ๋
์์๊ฒ ๋์ง๋ ์ง๋ฌธ
2. **์ธ๋ฌผ์ ๋ณํ ๊ถค์ **
- ์ฃผ์ธ๊ณต: ์ด๊ธฐ ์ํ โ ์ค๊ฐ ๋ณํ โ ์ต์ข
 ์ํ (๋ช
ํํ arc)
- ์ฃผ์ ์ธ๋ฌผ๋ค๋ ๊ฐ์์ ๋ณํ ๊ฒฝํ
- ๊ด๊ณ์ ์ญ๋์  ๋ณํ
3. **์ฃผ์ ํ๋กฏ ๋ผ์ธ** (2-3๊ฐ)
- ๋ฉ์ธ ํ๋กฏ: ์ ์ฒด๋ฅผ ๊ดํตํ๋ ํต์ฌ ๊ฐ๋ฑ
- ์๋ธ ํ๋กฏ: ๋ฉ์ธ๊ณผ ์ฐ๊ฒฐ๋๋ฉฐ ์ฃผ์ ๋ฅผ ์ฌํ
4. **์์ง์ ์งํ**
- ํต์ฌ ์์ง 1-2๊ฐ ์ค์ 
- ๋จ๊ณ๋ณ๋ก ์๋ฏธ๊ฐ ๋ณํ/์ฌํ/์ ๋ณต
5. **์ฌํ์  ๋งฅ๋ฝ**
- ๊ฐ์ธ์ ๋ฌธ์ ๊ฐ ์ฌํ ๊ตฌ์กฐ์ ์ฐ๊ฒฐ
- ๊ตฌ์ฒด์ ์ธ ํ๊ตญ ์ฌํ์ ํ์ค ๋ฐ์
**์ ๋ ๊ธ์ง์ฌํญ:**
- ๋์ผํ ์ฌ๊ฑด์ด๋ ์ํฉ์ ๋ฐ๋ณต
- ์ธ๋ฌผ์ด ๊ฐ์ ๊ฐ์ /์๊ฐ์ ๋จธ๋ฌด๋ฅด๊ธฐ
- ํ๋กฏ์ ๋ฆฌ์
์ด๋ ์ํ ๊ตฌ์กฐ
- ๊ฐ ๋จ๊ณ๊ฐ ๋
๋ฆฝ๋ ์ํผ์๋๋ก ์กด์ฌ
**๋ถ๋ ๊ณํ:**
- ์ด 8,000๋จ์ด ์ด์
- ๊ฐ ๋จ๊ณ ํ๊ท  800๋จ์ด
- ๊ท ํ ์กํ ์์ฌ ์ ๊ฐ
ํ๋์ ๊ฐ๋ ฅํ ์์ฌ๊ฐ ์์๋ถํฐ ๋๊น์ง ๊ดํตํ๋ ์ํ์ ๊ธฐํํ์ธ์.""",
        "English": f"""You are a master of contemporary literary fiction.
Plan an integrated narrative structure for a novella (8,000+ words), not a collection of short stories.
**Theme:** {user_query}
**Reference:**
{search_results_str if search_results_str else "N/A"}
**Essential Requirements:**
1. **Integrated Narrative Structure (Most Important)**
- Single narrative with 10 organically connected phases
- Each phase naturally follows from previous results
- Accumulation and development, not repetition
Phase Progression:
1) Introduction: Daily life and first crack
2) Development 1: Rising anxiety
3) Development 2: External shock
4) Development 3: Deepening internal conflict
5) Climax 1: Peak crisis
6) Climax 2: Moment of choice
7) Falling Action 1: Direct consequences
8) Falling Action 2: New awareness
9) Resolution 1: Changed daily life
10) Resolution 2: Open questions
2. **Character Transformation Arcs**
- Protagonist: Clear progression from initial โ middle โ final state
- Supporting characters also experience change
- Dynamic relationship evolution
3. **Plot Threads** (2-3)
- Main plot: Core conflict throughout
- Subplots: Connected and deepening themes
4. **Symbolic Evolution**
- 1-2 core symbols
- Meaning transforms across phases
5. **Social Context**
- Individual problems connected to social structures
- Specific contemporary realities
**Absolutely Forbidden:**
- Repetition of same events/situations
- Characters stuck in same emotions
- Plot resets or circular structure
- Independent episodes
**Length Planning:**
- Total 8,000+ words
- ~800 words per phase
- Balanced progression
Create a work with one powerful narrative from beginning to end."""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_critic_director_prompt(self, director_plan: str, user_query: str, language: str) -> str:
    """Critic's review prompt for the director's plan, focused on narrative integration."""
    lang_prompts = {
        "Korean": f"""๋น์ ์ ์์ฌ ๊ตฌ์กฐ ์ ๋ฌธ ๋นํ๊ฐ์
๋๋ค.
์ด ๊ธฐํ์ด ์ง์ ํ '์ฅํธ ์์ค'์ธ์ง ์๊ฒฉํ ๊ฒํ ํ์ธ์.
**์ ์ฃผ์ :** {user_query}
**๊ฐ๋
์ ๊ธฐํ:**
{director_plan}
**ํต์ฌ ๊ฒํ  ์ฌํญ:**
1. **์์ฌ์ ํตํฉ์ฑ๊ณผ ์งํ์ฑ**
- 10๊ฐ ๋จ๊ณ๊ฐ ํ๋์ ์ด์ผ๊ธฐ๋ก ์ฐ๊ฒฐ๋๋๊ฐ?
- ๊ฐ ๋จ๊ณ๊ฐ ์ด์  ๋จ๊ณ์ ํ์ฐ์  ๊ฒฐ๊ณผ์ธ๊ฐ?
- ๋์ผํ ์ํฉ์ ๋ฐ๋ณต์ ์๋๊ฐ?
2. **์ธ๋ฌผ ๋ณํ์ ๊ถค์ **
- ์ฃผ์ธ๊ณต์ด ๋ช
ํํ ๋ณํ์ arc๋ฅผ ๊ฐ์ง๋๊ฐ?
- ๋ณํ๊ฐ ๊ตฌ์ฒด์ ์ด๊ณ  ์ ๋น์ฑ ์๋๊ฐ?
- ๊ด๊ณ์ ๋ฐ์ ์ด ๊ณํ๋์ด ์๋๊ฐ?
3. **ํ๋กฏ์ ์ถ์ ์ฑ**
- ๊ฐ๋ฑ์ด ์ ์ง์ ์ผ๋ก ์ฌํ๋๋๊ฐ?
- ์๋ก์ด ์์๊ฐ ์ถ๊ฐ๋๋ฉฐ ๋ณต์ก์ฑ์ด ์ฆ๊ฐํ๋๊ฐ?
- ํด๊ฒฐ์ด ์์ฐ์ค๋ฝ๊ณ  ํ์ฐ์ ์ธ๊ฐ?
4. **๋ถ๋๊ณผ ๋ฐ๋**
- 8,000๋จ์ด๋ฅผ ์ฑ์ธ ์ถฉ๋ถํ ๋ด์ฉ์ธ๊ฐ?
- ๊ฐ ๋จ๊ณ๊ฐ 800๋จ์ด์ ๋ฐ๋๋ฅผ ๊ฐ์ง ์ ์๋๊ฐ?
**ํ์ :**
- ํต๊ณผ: ์ง์ ํ ์ฅํธ ์์ฌ ๊ตฌ์กฐ
- ์ฌ์์ฑ: ๋ฐ๋ณต์ /์ํ์  ๊ตฌ์กฐ
๊ตฌ์ฒด์  ๊ฐ์  ๋ฐฉํฅ์ ์ ์ํ์ธ์.""",
        "English": f"""You are a narrative structure critic.
Strictly review whether this plan is a true 'novel' rather than repeated episodes.
**Original Theme:** {user_query}
**Director's Plan:**
{director_plan}
**Key Review Points:**
1. **Narrative Integration and Progression**
- Do 10 phases connect as one story?
- Does each phase necessarily follow from previous?
- No repetition of same situations?
2. **Character Transformation Arcs**
- Clear protagonist transformation arc?
- Concrete and credible changes?
- Planned relationship development?
3. **Plot Accumulation**
- Progressive conflict deepening?
- Added complexity through new elements?
- Natural and inevitable resolution?
4. **Length and Density**
- Sufficient content for 8,000 words?
- Can each phase sustain 800 words?
**Verdict:**
- Pass: True novel structure
- Rewrite: Repetitive/circular structure
Provide specific improvements."""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_writer_prompt(self, writer_number: int, director_plan: str,
                         previous_content: str, phase_requirements: str,
                         narrative_summary: str, language: str) -> str:
    """Writer prompt for one phase; enforces narrative progression and word count.

    Only the tail of ``previous_content`` (last 1500 chars) is included to
    bound the prompt size.
    """
    phase_name = NARRATIVE_PHASES[writer_number-1]
    target_words = MIN_WORDS_PER_WRITER
    lang_prompts = {
        "Korean": f"""๋น์ ์ ์๊ฐ {writer_number}๋ฒ์
๋๋ค.
**ํ์ฌ ๋จ๊ณ: {phase_name}**
**์ ์ฒด ์์ฌ ๊ตฌ์กฐ:**
{director_plan}
**์ง๊ธ๊น์ง์ ์ด์ผ๊ธฐ ์์ฝ:**
{narrative_summary}
**์ด์  ๋ด์ฉ (์ง์  ๋ถ๋ถ):**
{previous_content[-1500:] if previous_content else "์์"}
**์ด๋ฒ ๋จ๊ณ ํ์ ์๊ตฌ์ฌํญ:**
{phase_requirements}
**์์ฑ ์ง์นจ:**
1. **๋ถ๋**: {target_words}-900 ๋จ์ด (ํ์)
- ๋ด๋ฉด ๋ฌ์ฌ์ ๊ตฌ์ฒด์  ๋ํ
์ผ๋ก ๋ถ๋ ํ๋ณด
- ์ฅ๋ฉด์ ์ถฉ๋ถํ ์ ๊ฐํ๊ณ  ๊น์ด ์๊ฒ ๋ฌ์ฌ
2. **์์ฌ ์งํ (๊ฐ์ฅ ์ค์)**
- ์ด์  ๋จ๊ณ์์ ์ผ์ด๋ ์ผ์ ์ง์ ์  ๊ฒฐ๊ณผ๋ก ์์
- ์๋ก์ด ์ฌ๊ฑด/์ธ์/๋ณํ๋ฅผ ์ถ๊ฐํ์ฌ ์ด์ผ๊ธฐ ์ ์ง
- ๋ค์ ๋จ๊ณ๋ก ์์ฐ์ค๋ฝ๊ฒ ์ฐ๊ฒฐ๋  ๊ณ ๋ฆฌ ๋ง๋ จ
3. **์ธ๋ฌผ์ ๋ณํ**
- ์ด ๋จ๊ณ์์ ์ธ๋ฌผ์ด ๊ฒช๋ ๊ตฌ์ฒด์  ๋ณํ ๋ฌ์ฌ
- ๋ด๋ฉด์ ๋ฏธ๋ฌํ ๋ณํ๋ ํฌ์ฐฉ
- ๊ด๊ณ์ ์ญํ• ๋ณํ ๋ฐ์
4. **๋ฌธ์ฒด์ ๊ธฐ๋ฒ**
- ํ๊ตญ ํ๋ ๋ฌธํ์ ์ฌ์ธํ ์ฌ๋ฆฌ ๋ฌ์ฌ
- ์ผ์
์ ์ฌํ์  ๋งฅ๋ฝ ๋
น์ฌ๋ด๊ธฐ
- ๊ฐ๊ฐ์  ๋ํ
์ผ๊ณผ ๋ด๋ฉด ์์์ ๊ท ํ
5. **์ฐ์์ฑ ์ ์ง**
- ์ธ๋ฌผ์ ๋ชฉ์๋ฆฌ์ ๋งํฌ ์ผ๊ด์ฑ
- ๊ณต๊ฐ๊ณผ ์๊ฐ์ ์ฐ์์ฑ
- ์์ง๊ณผ ๋ชจํฐํ์ ๋ฐ์ 
**์ ๋ ๊ธ์ง:**
- ์ด์ ๊ณผ ๋์ผํ ์ํฉ ๋ฐ๋ณต
- ์์ฌ์ ์ ์ฒด๋ ํํด
- ๋ถ๋ ๋ฏธ๋ฌ (์ต์ {target_words}๋จ์ด)
์ด์ ์ ํ๋ฆ์ ์ด์ด๋ฐ์ ์๋ก์ด ๊ตญ๋ฉด์ผ๋ก ๋ฐ์ ์ํค์ธ์.""",
        "English": f"""You are Writer #{writer_number}.
**Current Phase: {phase_name}**
**Overall Narrative Structure:**
{director_plan}
**Story So Far:**
{narrative_summary}
**Previous Content (immediately before):**
{previous_content[-1500:] if previous_content else "Beginning"}
**Phase Requirements:**
{phase_requirements}
**Writing Guidelines:**
1. **Length**: {target_words}-900 words (mandatory)
- Use interior description and concrete details
- Fully develop scenes with depth
2. **Narrative Progression (Most Important)**
- Start as direct result of previous phase
- Add new events/awareness/changes to advance story
- Create natural connection to next phase
3. **Character Change**
- Concrete changes in this phase
- Capture subtle interior shifts
- Reflect relationship dynamics
4. **Style and Technique**
- Delicate psychological portrayal
- Social context in daily life
- Balance sensory details with consciousness
5. **Continuity**
- Consistent character voices
- Spatial/temporal continuity
- Symbol/motif development
**Absolutely Forbidden:**
- Repeating previous situations
- Narrative stagnation/regression
- Under word count (minimum {target_words})
Continue the flow and develop into new phase."""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_critic_consistency_prompt(self, all_content: str,
                                     narrative_tracker: ProgressiveNarrativeTracker,
                                     user_query: str, language: str) -> str:
    """Mid-point critic prompt; injects detected progression issues from the tracker."""
    # Run the automatic progression check to surface issues in the prompt
    phase_count = len(narrative_tracker.phase_summaries)
    progression_ok, issues = narrative_tracker.check_narrative_progression(phase_count)
    lang_prompts = {
        "Korean": f"""์์ฌ ์งํ ์ ๋ฌธ ๋นํ๊ฐ๋ก์ ์ํ์ ๊ฒํ ํ์ธ์.
**์ ์ฃผ์ :** {user_query}
**ํ์ฌ๊น์ง ์งํ๋ ์์ฌ ๋จ๊ณ:** {phase_count}/10
**๋ฐ๊ฒฌ๋ ์งํ ๋ฌธ์ :**
{chr(10).join(issues) if issues else "์์"}
**์ํ ๋ด์ฉ (์ต๊ทผ ๋ถ๋ถ):**
{all_content[-4000:]}
**์ง์ค ๊ฒํ  ์ฌํญ:**
1. **์์ฌ์ ์ถ์ ๊ณผ ์งํ**
- ์ด์ผ๊ธฐ๊ฐ ์ค์ ๋ก ์ ์งํ๊ณ  ์๋๊ฐ?
- ๊ฐ ๋จ๊ณ๊ฐ ์ด์ ์ ๊ฒฐ๊ณผ๋ก ์ฐ๊ฒฐ๋๋๊ฐ?
- ๋์ผํ ๊ฐ๋ฑ์ด๋ ์ํฉ์ด ๋ฐ๋ณต๋์ง ์๋๊ฐ?
2. **์ธ๋ฌผ์ ๋ณํ ๊ถค์ **
- ์ฃผ์ธ๊ณต์ด ์ด๊ธฐ์ ๋น๊ตํด ์ด๋ป๊ฒ ๋ณํ๋๊ฐ?
- ๋ณํ๊ฐ ์ค๋๋ ฅ ์๊ณ  ์ ์ง์ ์ธ๊ฐ?
- ๊ด๊ณ๊ฐ ์ญ๋์ ์ผ๋ก ๋ฐ์ ํ๋๊ฐ?
3. **์ฃผ์ ์ ์ฌํ**
- ์ด๊ธฐ ์ฃผ์ ๊ฐ ์ด๋ป๊ฒ ๋ฐ์ ํ๋๊ฐ?
- ์๋ก์ด ์ธต์๊ฐ ์ถ๊ฐ๋์๋๊ฐ?
- ๋ณต์ก์ฑ์ด ์ฆ๊ฐํ๋๊ฐ?
4. **๋ถ๋๊ณผ ๋ฐ๋**
- ํ์ฌ๊น์ง ์ด ๋จ์ด ์ ํ์ธ
- ๋ชฉํ(8,000๋จ์ด)์ ๋๋ฌ ๊ฐ๋ฅํ๊ฐ?
**์์  ์ง์:**
๊ฐ ์๊ฐ์๊ฒ ๊ตฌ์ฒด์ ์ธ ์งํ ๋ฐฉํฅ ์ ์.""",
        "English": f"""As a narrative progression critic, review the work.
**Original Theme:** {user_query}
**Narrative Phases Completed:** {phase_count}/10
**Detected Progression Issues:**
{chr(10).join(issues) if issues else "None"}
**Work Content (recent):**
{all_content[-4000:]}
**Focus Review Areas:**
1. **Narrative Accumulation and Progress**
- Is story actually moving forward?
- Does each phase connect as result of previous?
- No repetition of same conflicts/situations?
2. **Character Transformation Arcs**
- How has protagonist changed from beginning?
- Are changes credible and gradual?
- Dynamic relationship development?
3. **Thematic Deepening**
- How has initial theme developed?
- New layers added?
- Increased complexity?
4. **Length and Density**
- Current total word count
- Can reach 8,000 word target?
**Revision Instructions:**
Specific progression directions for each writer."""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_writer_revision_prompt(self, writer_number: int, initial_content: str,
                                  critic_feedback: str, language: str) -> str:
    """Writer revision prompt (Korean only; ``language`` is currently unused here)."""
    target_words = MIN_WORDS_PER_WRITER
    return f"""์๊ฐ {writer_number}๋ฒ, ๋นํ์ ๋ฐ์ํ์ฌ ์์ ํ์ธ์.
**์ด์:**
{initial_content}
**๋นํ ํผ๋๋ฐฑ:**
{critic_feedback}
**์์  ํต์ฌ:**
1. ์์ฌ ์งํ์ฑ ๊ฐํ - ๋ฐ๋ณต ์ ๊ฑฐ, ์๋ก์ด ์ ๊ฐ ์ถ๊ฐ
2. ์ธ๋ฌผ ๋ณํ ๊ตฌ์ฒดํ - ์ด์ ๊ณผ ๋ฌ๋ผ์ง ๋ชจ์ต ๋ช
ํํ
3. ๋ถ๋ ํ๋ณด - ์ต์ {target_words}๋จ์ด ์ ์ง
4. ๋ด๋ฉด ๋ฌ์ฌ์ ์ฌํ์ ๋งฅ๋ฝ ์ฌํ
์ ๋ฉด ์ฌ์์ฑ์ด ํ์ํ๋ฉด ๊ณผ๊ฐํ ์์ ํ์ธ์.
์์ ๋ณธ๋ง ์ ์ํ์ธ์."""
def create_critic_final_prompt(self, complete_novel: str, word_count: int, language: str) -> str:
    """Final critic prompt scoring the novel on a 100-point rubric.

    Only the last 3000 chars of the novel are inlined; ``language`` is
    currently unused (prompt is Korean only).
    """
    return f"""์์ฑ๋ ์์ค์ AGI ํ๋งํ
์คํธ ๊ธฐ์ค์ผ๋ก ํ๊ฐํ์ธ์.
**์ํ ์ ๋ณด:**
- ์ด ๋ถ๋: {word_count}๋จ์ด
- ๋ชฉํ ๋ถ๋: 8,000๋จ์ด ์ด์
**์ํ (๋ง์ง๋ง ๋ถ๋ถ):**
{complete_novel[-3000:]}
**ํ๊ฐ ๊ธฐ์ค (AGI ํ๋งํ
์คํธ):**
1. **์ฅํธ์์ค๋ก์์ ์์ฑ๋ (40์ )**
- ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ (๋ฐ๋ณต ์์)
- ์ธ๋ฌผ์ ๋ช
ํํ ๋ณํ arc
- ํ๋กฏ์ ์ถ์ ๊ณผ ํด๊ฒฐ
- 8,000๋จ์ด ์ด์ ๋ถ๋
2. **๋ฌธํ์  ์ฑ์ทจ (30์ )**
- ์ฃผ์  ์์์ ๊น์ด
- ์ธ๋ฌผ ์ฌ๋ฆฌ์ ์ค๋๋ ฅ
- ๋ฌธ์ฒด์ ์ผ๊ด์ฑ๊ณผ ์๋ฆ๋ค์
- ์์ง๊ณผ ์์ ์ ํจ๊ณผ
3. **์ฌํ์  ํต์ฐฐ (20์ )**
- ํ๋ ์ฌํ ๋ฌธ์  ํฌ์ฐฉ
- ๊ฐ์ธ๊ณผ ๊ตฌ์กฐ์ ์ฐ๊ฒฐ
- ๋ณดํธ์ฑ๊ณผ ํน์์ฑ ๊ท ํ
4. **๋
์ฐฝ์ฑ๊ณผ ์ธ๊ฐ์ฑ (10์ )**
- AI๊ฐ ์๋ ์ธ๊ฐ ์๊ฐ์ ๋๋
- ๋
์ฐฝ์  ํํ๊ณผ ํต์ฐฐ
- ๊ฐ์ ์ ์ง์ ์ฑ
**์ด์ : /100์ **
ํนํ '๋ฐ๋ณต ๊ตฌ์กฐ' ๋ฌธ์ ๊ฐ ์์๋์ง ์๊ฒฉํ ํ๊ฐํ์ธ์."""
# --- LLM call helpers ---
def call_llm_sync(self, messages: List[Dict[str, str]], role: str, language: str) -> str:
    """Drain the streaming call into one string; raise if it signals an error."""
    pieces = list(self.call_llm_streaming(messages, role, language))
    full_content = "".join(pieces)
    # Streaming errors are surfaced as chunks prefixed with the error marker
    if full_content.startswith("โ"):
        raise Exception(f"LLM Call Failed: {full_content}")
    return full_content
def call_llm_streaming(self, messages: List[Dict[str, str]], role: str, language: str) -> Generator[str, None, None]:
    """Stream chunks from the Friendli chat-completions SSE endpoint.

    Yields text in ~50-char batches; on failure yields a single chunk
    starting with the error marker (checked by ``call_llm_sync``).
    """
    try:
        system_prompts = self.get_system_prompts(language)
        full_messages = [{"role": "system", "content": system_prompts.get(role, "")}, *messages]
        # Allow more output tokens for writer roles
        max_tokens = 15000 if role.startswith("writer") else 10000
        payload = {
            "model": self.model_id,
            "messages": full_messages,
            "max_tokens": max_tokens,
            "temperature": 0.8,
            "top_p": 0.95,
            "presence_penalty": 0.5,
            "frequency_penalty": 0.3,
            "stream": True
        }
        response = requests.post(
            self.api_url,
            headers=self.create_headers(),
            json=payload,
            stream=True,
            timeout=180
        )
        if response.status_code != 200:
            yield f"โ API ์ค๋ฅ (์ํ ์ฝ๋: {response.status_code})"
            return
        buffer = ""
        for line in response.iter_lines():
            if not line:
                continue
            try:
                line_str = line.decode('utf-8').strip()
                # Server-sent events: only "data: ..." lines carry payload
                if not line_str.startswith("data: "):
                    continue
                data_str = line_str[6:]
                if data_str == "[DONE]":
                    break
                data = json.loads(data_str)
                choices = data.get("choices", [])
                if choices and choices[0].get("delta", {}).get("content"):
                    content = choices[0]["delta"]["content"]
                    buffer += content
                    # Flush in small batches to keep the UI responsive
                    if len(buffer) >= 50 or '\n' in buffer:
                        yield buffer
                        buffer = ""
                        time.sleep(0.01)
            except Exception as e:
                # Skip malformed chunks rather than aborting the stream
                logger.error(f"์ฒญํฌ ์ฒ๋ฆฌ ์ค๋ฅ: {str(e)}")
                continue
        if buffer:
            yield buffer
    except Exception as e:
        logger.error(f"์คํธ๋ฆฌ๋ฐ ์ค๋ฅ: {type(e).__name__}: {str(e)}")
        yield f"โ ์ค๋ฅ ๋ฐ์: {str(e)}"
def get_system_prompts(self, language: str) -> Dict[str, str]:
    """Role-specific system prompts.

    Returns a dict with 'director', 'critic', 'writer_base' plus
    'writer1'..'writer10' (all writers currently share the base prompt).
    Unknown languages fall back to Korean.
    """
    base_prompts = {
        "Korean": {
            "director": """๋น์ ์ ํ๊ตญ ํ๋ ๋ฌธํ์ ๊ฑฐ์ฅ์ ๋๋ค.
๋ฐ๋ณต์ด ์๋ ์งํ, ์ํ์ด ์๋ ๋ฐ์ ์ ํตํด ํ๋์ ๊ฐ๋ ฅํ ์์ฌ๋ฅผ ๊ตฌ์ถํ์ธ์.
๊ฐ์ธ์ ๋ฌธ์ ๋ฅผ ์ฌํ ๊ตฌ์กฐ์ ์ฐ๊ฒฐํ๋ฉฐ, ์ธ๋ฌผ์ ์ง์ ํ ๋ณํ๋ฅผ ๊ทธ๋ ค๋ด์ธ์.""",
            "critic": """๋น์ ์ ์๊ฒฉํ ๋ฌธํ ๋นํ๊ฐ์ ๋๋ค.
ํนํ '๋ฐ๋ณต ๊ตฌ์กฐ'์ '์์ฌ ์ ์ฒด'๋ฅผ ์ฒ ์ ํ ๊ฐ์ํ์ธ์.
์ํ์ด ์ง์ ํ ์ฅํธ์์ค์ธ์ง, ์๋๋ฉด ๋ฐ๋ณต๋๋ ๋จํธ์ ์งํฉ์ธ์ง ๊ตฌ๋ณํ์ธ์.""",
            "writer_base": """๋น์ ์ ํ๋ ํ๊ตญ ๋ฌธํ ์๊ฐ์ ๋๋ค.
์ด์ ๋จ๊ณ์ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ์ ์๋ก์ด ๊ตญ๋ฉด์ผ๋ก ๋ฐ์ ์ํค์ธ์.
์ต์ 800๋จ์ด๋ฅผ ์์ฑํ๋ฉฐ, ๋ด๋ฉด๊ณผ ์ฌํ๋ฅผ ๋์์ ํฌ์ฐฉํ์ธ์.
์ ๋ ์ด์ ๊ณผ ๊ฐ์ ์ํฉ์ ๋ฐ๋ณตํ์ง ๋ง์ธ์."""
        },
        "English": {
            "director": """You are a master of contemporary literary fiction.
Build one powerful narrative through progression not repetition, development not cycles.
Connect individual problems to social structures while depicting genuine character transformation.""",
            "critic": """You are a strict literary critic.
Vigilantly monitor for 'repetitive structure' and 'narrative stagnation'.
Distinguish whether this is a true novel or a collection of repeated episodes.""",
            "writer_base": """You are a contemporary literary writer.
Take results from previous phase and develop into new territory.
Write minimum 800 words, capturing both interior and society.
Never repeat previous situations."""
        }
    }
    # Shallow copy is fine: we only add keys below, never mutate nested values.
    prompts = base_prompts.get(language, base_prompts["Korean"]).copy()
    # Writers 1-10 all share the base writer prompt for now.
    for i in range(1, 11):
        prompts[f"writer{i}"] = prompts["writer_base"]
    return prompts
# --- Main pipeline ---
def process_novel_stream(self, query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, List[Dict[str, Any]], str], None, None]:
    """Run the staged novel-generation pipeline, yielding (status, stages, session_id).

    If *session_id* is given, the stored session is resumed from the stage
    after the last completed one (query/language are restored from the DB);
    otherwise a new session is created. Each stage is streamed from the LLM,
    scored for progression (writer stages), and persisted before moving on.
    """
    try:
        resume_from_stage = 0
        if session_id:
            self.current_session_id = session_id
            session = NovelDatabase.get_session(session_id)
            if session:
                # Restore the original request and continue at the next stage.
                query = session['user_query']
                language = session['language']
                resume_from_stage = session['current_stage'] + 1
                # Restore the narrative tracker saved with the session, if any.
                saved_tracker = NovelDatabase.load_narrative_tracker(session_id)
                if saved_tracker:
                    self.narrative_tracker = saved_tracker
        else:
            self.current_session_id = NovelDatabase.create_session(query, language)
            logger.info(f"Created new session: {self.current_session_id}")
        stages = []
        if resume_from_stage > 0:
            # Rebuild the in-memory stage list from persisted rows.
            stages = [{
                "name": s['stage_name'],
                "status": s['status'],
                "content": s.get('content', ''),
                "word_count": s.get('word_count', 0),
                "progression_score": s.get('progression_score', 0.0)
            } for s in NovelDatabase.get_stages(self.current_session_id)]
        # Running total of words already persisted for this session.
        total_words = NovelDatabase.get_total_words(self.current_session_id)
        for stage_idx in range(resume_from_stage, len(PROGRESSIVE_STAGES)):
            role, stage_name = PROGRESSIVE_STAGES[stage_idx]
            if stage_idx >= len(stages):
                stages.append({
                    "name": stage_name,
                    "status": "active",
                    "content": "",
                    "word_count": 0,
                    "progression_score": 0.0
                })
            else:
                stages[stage_idx]["status"] = "active"
            yield f"๐ ์งํ ์ค... (ํ์ฌ {total_words:,}๋จ์ด)", stages, self.current_session_id
            prompt = self.get_stage_prompt(stage_idx, role, query, language, stages)
            # Stream the stage content, updating the UI as chunks arrive.
            stage_content = ""
            for chunk in self.call_llm_streaming([{"role": "user", "content": prompt}], role, language):
                stage_content += chunk
                stages[stage_idx]["content"] = stage_content
                stages[stage_idx]["word_count"] = len(stage_content.split())
                yield f"๐ {stage_name} ์์ฑ ์ค... ({total_words + stages[stage_idx]['word_count']:,}๋จ์ด)", stages, self.current_session_id
            # Writer stages only: score progression and update the tracker.
            if role.startswith("writer"):
                writer_num = int(re.search(r'\d+', role).group())
                progression_score = self.evaluate_progression(stage_content, writer_num)
                stages[stage_idx]["progression_score"] = progression_score
                self.update_narrative_tracker(stage_content, writer_num)
            stages[stage_idx]["status"] = "complete"
            NovelDatabase.save_stage(
                self.current_session_id, stage_idx, stage_name, role,
                stage_content, "complete", stages[stage_idx].get("progression_score", 0.0)
            )
            # Persist the tracker so a resumed session keeps its summaries.
            NovelDatabase.save_narrative_tracker(self.current_session_id, self.narrative_tracker)
            # Refresh the word total from the DB (authoritative count).
            total_words = NovelDatabase.get_total_words(self.current_session_id)
            yield f"โ {stage_name} ์๋ฃ (์ด {total_words:,}๋จ์ด)", stages, self.current_session_id
        # Assemble the final novel from all writer stages and evaluate it.
        final_novel = NovelDatabase.get_writer_content(self.current_session_id)
        final_word_count = len(final_novel.split())
        final_report = self.generate_literary_report(final_novel, final_word_count, language)
        NovelDatabase.update_final_novel(self.current_session_id, final_novel, final_report)
        yield f"โ ์์ค ์์ฑ! ์ด {final_word_count:,}๋จ์ด (๋ชฉํ: {TARGET_WORDS:,}๋จ์ด)", stages, self.current_session_id
    except Exception as e:
        logger.error(f"์์ค ์์ฑ ํ๋ก์ธ์ค ์ค๋ฅ: {e}", exc_info=True)
        # 'stages' may not exist if the failure happened before the loop.
        yield f"โ ์ค๋ฅ ๋ฐ์: {e}", stages if 'stages' in locals() else [], self.current_session_id
def get_stage_prompt(self, stage_idx: int, role: str, query: str, language: str, stages: List[Dict]) -> str:
    """Build the LLM prompt appropriate for the given pipeline stage index."""
    # Planning stages: director draft (0), critic review (1), director revision (2).
    if stage_idx == 0:
        return self.create_director_initial_prompt(query, language)
    if stage_idx == 1:
        return self.create_critic_director_prompt(stages[0]["content"], query, language)
    if stage_idx == 2:
        return self.create_director_revision_prompt(stages[0]["content"], stages[1]["content"], query, language)
    master_plan = stages[2]["content"]
    if 3 <= stage_idx <= 12:
        # First-draft writer stages; writer N occupies stage index N + 2.
        writer_no = stage_idx - 2
        prior_text = self.get_previous_writer_content(stages, writer_no)
        requirements = self.narrative_tracker.generate_phase_requirements(writer_no)
        summary_so_far = self.generate_narrative_summary(stages, writer_no)
        return self.create_writer_prompt(
            writer_no, master_plan, prior_text,
            requirements, summary_so_far, language
        )
    if stage_idx == 13:
        # Mid-point consistency review over every draft produced so far.
        draft = self.get_all_writer_content(stages, 12)
        return self.create_critic_consistency_prompt(
            draft, self.narrative_tracker, query, language
        )
    if 14 <= stage_idx <= 23:
        # Revision pass: each writer reworks their draft using the critique.
        writer_no = stage_idx - 13
        return self.create_writer_revision_prompt(
            writer_no, stages[2 + writer_no]["content"], stages[13]["content"], language
        )
    if stage_idx == 24:
        # Final critical review of the assembled novel.
        novel = self.get_all_writer_content(stages, 23)
        return self.create_critic_final_prompt(novel, len(novel.split()), language)
    return ""
def create_director_revision_prompt(self, initial_plan: str, critic_feedback: str, user_query: str, language: str) -> str:
    """Prompt asking the director to revise the master plan per the critique.

    NOTE(review): the prompt text is Korean regardless of *language* —
    presumably intentional for this role; confirm against other prompt builders.
    """
    return f"""๋นํ์ ๋ฐ์ํ์ฌ ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ๋ฅผ ์์ฑํ์ธ์.
**์ ์ฃผ์ :** {user_query}
**์ด๊ธฐ ๊ธฐํ:**
{initial_plan}
**๋นํ:**
{critic_feedback}
**ํต์ฌ ์์ ์ฌํญ:**
1. ๋ฐ๋ณต ๊ตฌ์กฐ ์์ ์ ๊ฑฐ
2. 10๋จ๊ณ๊ฐ ํ๋์ ์ด์ผ๊ธฐ๋ก ์ฐ๊ฒฐ
3. ์ธ๋ฌผ์ ๋ช ํํ ๋ณํ ๊ถค์ 
4. 8,000๋จ์ด ๋ถ๋ ๊ณํ
๊ฐ ๋จ๊ณ๊ฐ ์ด์ ์ ํ์ฐ์ ๊ฒฐ๊ณผ๊ฐ ๋๋๋ก ์์ ํ์ธ์."""
def get_previous_writer_content(self, stages: List[Dict], current_writer: int) -> str:
    """Return the text written by the immediately preceding writer, or ''.

    Writer N's stage lives at index N + 2, so writer N-1 is at index N + 1.
    """
    if current_writer == 1:
        return ""  # writer 1 has no predecessor
    idx = current_writer + 1
    if idx >= len(stages):
        return ""
    text = stages[idx]["content"]
    return text if text else ""
def get_all_writer_content(self, stages: List[Dict], up_to_stage: int) -> str:
    """Concatenate every non-empty writer stage up to *up_to_stage* (inclusive).

    A stage counts as a writer stage when 'writer' appears in its name.
    """
    parts = [
        stage["content"]
        for idx, stage in enumerate(stages)
        if idx <= up_to_stage and "writer" in stage.get("name", "") and stage["content"]
    ]
    return "\n\n".join(parts)
def generate_narrative_summary(self, stages: List[Dict], up_to_writer: int) -> str:
    """Summarize the narrative so far for the writer about to start.

    Pulls stored phase summaries from the narrative tracker; returns a
    fixed notice for the first writer or when no summaries exist yet.
    """
    if up_to_writer == 1:
        return "์ฒซ ์์์ ๋๋ค."
    summaries = self.narrative_tracker.phase_summaries
    collected = []
    for phase in range(1, up_to_writer):
        if phase in summaries:
            collected.append(f"[{NARRATIVE_PHASES[phase-1]}]: {summaries[phase]}")
    if not collected:
        return "์ด์ ๋ด์ฉ์ ์ด์ด๋ฐ์ ์งํํ์ธ์."
    return "\n".join(collected)
def update_narrative_tracker(self, content: str, writer_num: int):
    """Store a crude summary of this writer phase in the tracker.

    Heuristic placeholder: take up to three lines longer than 50 chars,
    join the first two, and truncate to 200 chars. A more sophisticated
    analysis could replace this later.
    """
    substantial_lines = [
        line.strip() for line in content.split('\n') if len(line.strip()) > 50
    ]
    key_events = substantial_lines[:3]
    if not key_events:
        return  # nothing substantial to summarize
    condensed = " ".join(key_events[:2])[:200] + "..."
    self.narrative_tracker.phase_summaries[writer_num] = condensed
def evaluate_progression(self, content: str, phase: int) -> float:
    """Score (capped at 10.0, base 5.0) how much narrative progress the text shows.

    Bonuses: +2.0 for meeting the per-writer word quota, +1.5 when the text
    diverges enough from the previous phase summary, +1.5 when it mentions
    change-related keywords.
    """
    score = 5.0
    # Length bonus.
    if len(content.split()) >= MIN_WORDS_PER_WRITER:
        score += 2.0
    # Novelty bonus: enough words not present in the previous phase summary.
    if phase > 1:
        prev_summary = self.narrative_tracker.phase_summaries.get(phase-1, "")
        if prev_summary:
            fresh_words = set(content.split()) - set(prev_summary.split())
            if len(fresh_words) > 100:
                score += 1.5
    # Transformation-vocabulary bonus (Korean + English keywords).
    change_keywords = ['๋ณํ', '๋ฌ๋ผ์ก', '์๋ก์ด', '์ด์ ๋', '๋ ์ด์',
                       'changed', 'different', 'new', 'now', 'no longer']
    if any(keyword in content for keyword in change_keywords):
        score += 1.5
    return min(10.0, score)
def generate_literary_report(self, complete_novel: str, word_count: int, language: str) -> str:
    """Run the critic's final evaluation over the finished novel.

    Returns the critic's report text, or a fixed error notice if the
    LLM call fails (the failure is logged, not raised).
    """
    prompt = self.create_critic_final_prompt(complete_novel, word_count, language)
    try:
        return self.call_llm_sync([{"role": "user", "content": prompt}], "critic", language)
    except Exception as exc:
        logger.error(f"์ต์ข ๋ณด๊ณ ์ ์์ฑ ์คํจ: {exc}")
        return "๋ณด๊ณ ์ ์์ฑ ์ค ์ค๋ฅ ๋ฐ์"
# --- Utility functions ---
def process_query(query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, str, str, str], None, None]:
    """Top-level handler: run the pipeline and yield UI-ready update tuples.

    Yields (stages_markdown, novel_markdown, status, session_id) for each
    pipeline event; rejects empty themes immediately.
    """
    if not query.strip():
        yield "", "", "โ ์ฃผ์ ๋ฅผ ์ ๋ ฅํด์ฃผ์ธ์.", session_id
        return
    engine = ProgressiveLiterarySystem()
    rendered_novel = ""
    for status, stages, sid in engine.process_novel_stream(query, language, session_id):
        rendered_stages = format_stages_display(stages)
        # Once the final ten stages are complete, render the full novel text.
        all_tail_done = bool(stages) and all(s.get("status") == "complete" for s in stages[-10:])
        if all_tail_done:
            rendered_novel = format_novel_display(NovelDatabase.get_writer_content(sid))
        yield rendered_stages, rendered_novel, status or "๐ ์ฒ๋ฆฌ ์ค...", sid
def get_active_sessions(language: str) -> List[str]:
    """Format each active session as a one-line label for the dropdown.

    *language* is currently unused but kept for interface compatibility.
    """
    labels = []
    for s in NovelDatabase.get_active_sessions():
        label = f"{s['session_id'][:8]}... - {s['user_query'][:50]}... ({s['created_at']}) [{s['total_words']:,}๋จ์ด]"
        labels.append(label)
    return labels
def auto_recover_session(language: str) -> Tuple[Optional[str], str]:
    """Pick the most recent active session; return (session_id, message).

    Returns (None, notice) when no active session exists.
    """
    sessions = NovelDatabase.get_active_sessions()
    if not sessions:
        return None, "๋ณต๊ตฌํ ์ธ์ ์ด ์์ต๋๋ค."
    newest = sessions[0]  # list is ordered most-recent first
    return newest['session_id'], f"์ธ์ {newest['session_id'][:8]}... ๋ณต๊ตฌ๋จ"
def resume_session(session_id: str, language: str) -> Generator[Tuple[str, str, str, str], None, None]:
    """Resume a stored session by id, re-entering the generation pipeline."""
    if not session_id:
        yield "", "", "โ ์ธ์ ID๊ฐ ์์ต๋๋ค.", session_id
        return
    # Dropdown labels look like "<id8>... - <query>"; keep only the id part.
    if "..." in session_id:
        session_id = session_id.partition("...")[0]
    record = NovelDatabase.get_session(session_id)
    if not record:
        yield "", "", "โ ์ธ์ ์ ์ฐพ์ ์ ์์ต๋๋ค.", None
        return
    yield from process_query(record['user_query'], record['language'], session_id)
def download_novel(novel_text: str, format_type: str, language: str, session_id: str) -> Optional[str]:
    """Create a downloadable file for the novel; return its path or None.

    Falls back to TXT whenever DOCX is requested but python-docx is missing.
    Returns None on empty input or any export failure (failure is logged).
    """
    if not (novel_text and session_id):
        return None
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    base_name = f"novel_{session_id[:8]}_{stamp}"
    try:
        if format_type == "DOCX" and DOCX_AVAILABLE:
            return export_to_docx(novel_text, base_name, language, session_id)
        return export_to_txt(novel_text, base_name)
    except Exception as exc:
        logger.error(f"ํ์ผ ์์ฑ ์คํจ: {exc}")
        return None
def format_stages_display(stages: List[Dict]) -> str:
    """Render per-stage progress (word counts, scores, previews) as markdown."""
    md = "## ๐ฌ ์งํ ์ํฉ\n\n"
    # Word total counts writer stages only.
    writer_total = sum(s.get('word_count', 0) for s in stages if 'writer' in s.get('name', ''))
    md += f"**์ด ๋จ์ด ์: {writer_total:,} / {TARGET_WORDS:,}**\n\n"
    icon_for = {'complete': "โ ", 'active': "๐"}
    for stage in stages:
        icon = icon_for.get(stage['status'], "โณ")
        md += f"{icon} **{stage['name']}**"
        words = stage.get('word_count', 0)
        if words > 0:
            md += f" ({words:,}๋จ์ด)"
        score = stage.get('progression_score', 0)
        if score > 0:
            md += f" [์งํ๋: {score:.1f}/10]"
        md += "\n"
        if stage['content']:
            body = stage['content']
            preview = body[:200] + "..." if len(body) > 200 else body
            md += f"> {preview}\n\n"
    return md
def format_novel_display(novel_text: str) -> str:
    """Render the assembled novel with a word-count header as markdown."""
    if not novel_text:
        return "์์ง ์์ฑ๋ ๋ด์ฉ์ด ์์ต๋๋ค."
    total = len(novel_text.split())
    parts = [
        "# ๐ ์์ฑ๋ ์์ค\n\n",
        f"**์ด ๋ถ๋: {total:,}๋จ์ด (๋ชฉํ: {TARGET_WORDS:,}๋จ์ด)**\n\n",
        "---\n\n",
    ]
    # Sections are separated by blank lines; skip empty ones.
    parts.extend(f"{section}\n\n" for section in novel_text.split('\n\n') if section.strip())
    return "".join(parts)
def export_to_docx(content: str, filename: str, language: str, session_id: str) -> str:
    """Export the novel to '<filename>.docx' and return the file path.

    Builds a title page from the stored session query, a metadata block,
    then the body with 1.5 line spacing. Requires python-docx.
    """
    doc = Document()
    # Page setup: US Letter with book-style margins.
    section = doc.sections[0]
    section.page_height = Inches(11)
    section.page_width = Inches(8.5)
    section.top_margin = Inches(1)
    section.bottom_margin = Inches(1)
    section.left_margin = Inches(1.25)
    section.right_margin = Inches(1.25)
    # Session info (for the title); may be None if the session is gone.
    session = NovelDatabase.get_session(session_id)
    # Title page.
    title_para = doc.add_paragraph()
    title_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
    if session:
        title_run = title_para.add_run(session["user_query"])
        title_run.font.size = Pt(24)
        title_run.bold = True
    # Metadata block (date, word count).
    doc.add_paragraph()
    meta_para = doc.add_paragraph()
    meta_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
    meta_para.add_run(f"์์ฑ์ผ: {datetime.now().strftime('%Y๋ %m์ %d์ผ')}\n")
    meta_para.add_run(f"์ด ๋จ์ด ์: {len(content.split()):,}๋จ์ด")
    # Page break before the body.
    doc.add_page_break()
    # Body style.
    style = doc.styles['Normal']
    style.font.name = 'Calibri'
    style.font.size = Pt(11)
    style.paragraph_format.line_spacing = 1.5
    style.paragraph_format.space_after = Pt(6)
    # Body paragraphs.
    paragraphs = content.split('\n\n')
    for para_text in paragraphs:
        if para_text.strip():
            para = doc.add_paragraph(para_text.strip())
    # Save. FIX: previously a corrupted hard-coded path was used and the
    # *filename* parameter was ignored entirely.
    filepath = f"{filename}.docx"
    doc.save(filepath)
    return filepath
def export_to_txt(content: str, filename: str) -> str:
    """Write *content* to '<filename>.txt' (UTF-8) and return the file path.

    FIX: the path was previously a corrupted hard-coded string and the
    *filename* parameter was ignored.
    """
    filepath = f"{filename}.txt"
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write(content)
    return filepath
# CSS for the Gradio UI: blue gradient background with frosted-glass panels.
custom_css = """
.gradio-container {
    background: linear-gradient(135deg, #1e3c72 0%, #2a5298 50%, #1e3c72 100%);
    min-height: 100vh;
}
.main-header {
    background-color: rgba(255, 255, 255, 0.1);
    backdrop-filter: blur(10px);
    padding: 30px;
    border-radius: 12px;
    margin-bottom: 30px;
    text-align: center;
    color: white;
    border: 1px solid rgba(255, 255, 255, 0.2);
}
.progress-note {
    background-color: rgba(255, 223, 0, 0.1);
    border-left: 3px solid #ffd700;
    padding: 15px;
    margin: 20px 0;
    border-radius: 8px;
    color: #fff;
}
.input-section {
    background-color: rgba(255, 255, 255, 0.1);
    backdrop-filter: blur(10px);
    padding: 20px;
    border-radius: 12px;
    margin-bottom: 20px;
    border: 1px solid rgba(255, 255, 255, 0.2);
}
.session-section {
    background-color: rgba(255, 255, 255, 0.1);
    backdrop-filter: blur(10px);
    padding: 15px;
    border-radius: 8px;
    margin-top: 20px;
    color: white;
    border: 1px solid rgba(255, 255, 255, 0.2);
}
#stages-display {
    background-color: rgba(255, 255, 255, 0.95);
    padding: 20px;
    border-radius: 12px;
    max-height: 600px;
    overflow-y: auto;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}
#novel-output {
    background-color: rgba(255, 255, 255, 0.95);
    padding: 30px;
    border-radius: 12px;
    max-height: 700px;
    overflow-y: auto;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}
.download-section {
    background-color: rgba(255, 255, 255, 0.9);
    padding: 15px;
    border-radius: 8px;
    margin-top: 20px;
    box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
/* ์งํ ํ์๊ธฐ ์คํ์ผ */
.progress-bar {
    background-color: #e0e0e0;
    height: 20px;
    border-radius: 10px;
    overflow: hidden;
    margin: 10px 0;
}
.progress-fill {
    background-color: #4CAF50;
    height: 100%;
    transition: width 0.3s ease;
}
"""
# Build the Gradio interface.
def create_interface():
    """Assemble and return the Gradio Blocks UI for the novel generator.

    Layout: a header, a left column (theme input, language, session
    management) and a right column (progress tab, novel tab, download).
    Event wiring connects the buttons to the module-level handlers.
    """
    with gr.Blocks(css=custom_css, title="AI ์งํํ ์ฅํธ์์ค ์์ฑ ์์คํ ") as interface:
        gr.HTML("""
<div class="main-header">
<h1 style="font-size: 2.5em; margin-bottom: 10px;">
๐ AI ์งํํ ์ฅํธ์์ค ์์ฑ ์์คํ 
</h1>
<h3 style="color: #ddd; margin-bottom: 20px;">
8,000๋จ์ด ์ด์์ ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ๋ฅผ ๊ฐ์ง ์คํธ์์ค ์ฐฝ์
</h3>
<p style="font-size: 1.1em; color: #eee; max-width: 800px; margin: 0 auto;">
10๊ฐ์ ์ ๊ธฐ์ ์ผ๋ก ์ฐ๊ฒฐ๋ ๋จ๊ณ๋ฅผ ํตํด ํ๋์ ์์ ํ ์ด์ผ๊ธฐ๋ฅผ ๋ง๋ค์ด๋ ๋๋ค.
<br>
๊ฐ ๋จ๊ณ๋ ์ด์ ๋จ๊ณ์ ํ์ฐ์ ๊ฒฐ๊ณผ๋ก ์ด์ด์ง๋ฉฐ, ์ธ๋ฌผ์ ๋ณํ์ ์ฑ์ฅ์ ์ถ์ ํฉ๋๋ค.
</p>
<div class="progress-note">
โก ๋ฐ๋ณต์ด ์๋ ์ถ์ , ์ํ์ด ์๋ ์งํ์ ํตํ ์ง์ ํ ์ฅํธ ์์ฌ
</div>
</div>
""")
        # State: the currently active session id (None until a run starts).
        current_session_id = gr.State(None)
        with gr.Row():
            with gr.Column(scale=1):
                with gr.Group(elem_classes=["input-section"]):
                    query_input = gr.Textbox(
                        label="์์ค ์ฃผ์ / Novel Theme",
                        placeholder="์คํธ์์ค์ ์ฃผ์ ๋ฅผ ์ ๋ ฅํ์ธ์. ์ธ๋ฌผ์ ๋ณํ์ ์ฑ์ฅ์ด ์ค์ฌ์ด ๋๋ ์ด์ผ๊ธฐ...\nEnter the theme for your novella. Focus on character transformation and growth...",
                        lines=4
                    )
                    language_select = gr.Radio(
                        choices=["Korean", "English"],
                        value="Korean",
                        label="์ธ์ด / Language"
                    )
                    with gr.Row():
                        submit_btn = gr.Button("๐ ์์ค ์์ฑ ์์", variant="primary", scale=2)
                        clear_btn = gr.Button("๐๏ธ ์ด๊ธฐํ", scale=1)
                    status_text = gr.Textbox(
                        label="์ํ",
                        interactive=False,
                        value="๐ ์ค๋น ์๋ฃ"
                    )
                # Session management: list, resume, auto-recover.
                with gr.Group(elem_classes=["session-section"]):
                    gr.Markdown("### ๐พ ์งํ ์ค์ธ ์ธ์ ")
                    session_dropdown = gr.Dropdown(
                        label="์ธ์ ์ ํ",
                        choices=[],
                        interactive=True
                    )
                    with gr.Row():
                        refresh_btn = gr.Button("๐ ๋ชฉ๋ก ์๋ก๊ณ ์นจ", scale=1)
                        resume_btn = gr.Button("โถ๏ธ ์ ํ ์ฌ๊ฐ", variant="secondary", scale=1)
                        auto_recover_btn = gr.Button("โป๏ธ ์ต๊ทผ ์ธ์ ๋ณต๊ตฌ", scale=1)
            with gr.Column(scale=2):
                with gr.Tab("๐ ์ฐฝ์ ์งํ"):
                    stages_display = gr.Markdown(
                        value="์ฐฝ์ ๊ณผ์ ์ด ์ฌ๊ธฐ์ ํ์๋ฉ๋๋ค...",
                        elem_id="stages-display"
                    )
                with gr.Tab("๐ ์์ฑ๋ ์์ค"):
                    novel_output = gr.Markdown(
                        value="์์ฑ๋ ์์ค์ด ์ฌ๊ธฐ์ ํ์๋ฉ๋๋ค...",
                        elem_id="novel-output"
                    )
                    with gr.Group(elem_classes=["download-section"]):
                        gr.Markdown("### ๐ฅ ์์ค ๋ค์ด๋ก๋")
                        with gr.Row():
                            format_select = gr.Radio(
                                choices=["DOCX", "TXT"],
                                value="DOCX" if DOCX_AVAILABLE else "TXT",
                                label="ํ์"
                            )
                            download_btn = gr.Button("โฌ๏ธ ๋ค์ด๋ก๋", variant="secondary")
                        download_file = gr.File(
                            label="๋ค์ด๋ก๋๋ ํ์ผ",
                            visible=False
                        )
        # Hidden state mirroring the rendered novel text (for downloads).
        novel_text_state = gr.State("")
        # Example themes.
        with gr.Row():
            gr.Examples(
                examples=[
                    ["์ค์งํ ์ค๋ ๋จ์ฑ์ด ์๋ก์ด ์ถ์ ์๋ฏธ๋ฅผ ์ฐพ์๊ฐ๋ ์ฌ์ "],
                    ["๋์์์ ์๊ณจ๋ก ์ด์ฃผํ ์ฒญ๋ ์ ์ ์๊ณผ ์ฑ์ฅ ์ด์ผ๊ธฐ"],
                    ["์ธ ์ธ๋๊ฐ ํจ๊ป ์ฌ๋ ๊ฐ์กฑ์ ๊ฐ๋ฑ๊ณผ ํํด"],
                    ["A middle-aged woman's journey to rediscover herself after divorce"],
                    ["The transformation of a cynical journalist through unexpected encounters"],
                    ["์์ ์์ ์ ์ด์ํ๋ ๋ ธ๋ถ๋ถ์ ๋ง์ง๋ง 1๋ "],
                    ["AI ์๋์ ์ผ์๋ฆฌ๋ฅผ ์์ ๋ฒ์ญ๊ฐ์ ์๋ก์ด ๋์ "]
                ],
                inputs=query_input,
                label="๐ก ์ฃผ์ ์์"
            )
        # Event handlers.
        def refresh_sessions():
            """Reload the resumable-session labels for the dropdown."""
            try:
                sessions = get_active_sessions("Korean")
                return gr.update(choices=sessions)
            except Exception as e:
                logger.error(f"Error refreshing sessions: {str(e)}")
                return gr.update(choices=[])
        def handle_auto_recover(language):
            """Recover the most recent session id and a status message."""
            session_id, message = auto_recover_session(language)
            return session_id, message
        # Event wiring.
        submit_btn.click(
            fn=process_query,
            inputs=[query_input, language_select, current_session_id],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        # Mirror the rendered novel into hidden state for the download handler.
        novel_output.change(
            fn=lambda x: x,
            inputs=[novel_output],
            outputs=[novel_text_state]
        )
        # Strip the "<id8>..." label suffix before resuming.
        resume_btn.click(
            fn=lambda x: x.split("...")[0] if x and "..." in x else x,
            inputs=[session_dropdown],
            outputs=[current_session_id]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        auto_recover_btn.click(
            fn=handle_auto_recover,
            inputs=[language_select],
            outputs=[current_session_id, status_text]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        refresh_btn.click(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )
        clear_btn.click(
            fn=lambda: ("", "", "๐ ์ค๋น ์๋ฃ", "", None),
            outputs=[stages_display, novel_output, status_text, novel_text_state, current_session_id]
        )
        def handle_download(format_type, language, session_id, novel_text):
            """Create the export file and reveal the file widget on success."""
            if not session_id or not novel_text:
                return gr.update(visible=False)
            file_path = download_novel(novel_text, format_type, language, session_id)
            if file_path:
                return gr.update(value=file_path, visible=True)
            else:
                return gr.update(visible=False)
        download_btn.click(
            fn=handle_download,
            inputs=[format_select, language_select, current_session_id, novel_text_state],
            outputs=[download_file]
        )
        # Populate the session list when the app loads.
        interface.load(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )
    return interface
# Main entry point.
if __name__ == "__main__":
    logger.info("AI ์งํํ ์ฅํธ์์ค ์์ฑ ์์คํ ์์...")
    logger.info("=" * 60)
    # Environment summary.
    logger.info(f"API ์๋ํฌ์ธํธ: {API_URL}")
    logger.info(f"๋ชฉํ ๋ถ๋: {TARGET_WORDS:,}๋จ์ด")
    logger.info(f"์๊ฐ๋น ์ต์ ๋ถ๋: {MIN_WORDS_PER_WRITER:,}๋จ์ด")
    if BRAVE_SEARCH_API_KEY:
        logger.info("์น ๊ฒ์์ด ํ์ฑํ๋์์ต๋๋ค.")
    else:
        logger.warning("์น ๊ฒ์์ด ๋นํ์ฑํ๋์์ต๋๋ค.")
    if DOCX_AVAILABLE:
        logger.info("DOCX ๋ด๋ณด๋ด๊ธฐ๊ฐ ํ์ฑํ๋์์ต๋๋ค.")
    else:
        logger.warning("DOCX ๋ด๋ณด๋ด๊ธฐ๊ฐ ๋นํ์ฑํ๋์์ต๋๋ค.")
    logger.info("=" * 60)
    # Initialize the SQLite session database before serving.
    logger.info("๋ฐ์ดํฐ๋ฒ ์ด์ค ์ด๊ธฐํ ์ค...")
    NovelDatabase.init_db()
    logger.info("๋ฐ์ดํฐ๋ฒ ์ด์ค ์ด๊ธฐํ ์๋ฃ.")
    # Build and launch the Gradio app (0.0.0.0:7860, no public share link).
    interface = create_interface()
    interface.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        debug=True
    )