# NOTE: removed Hugging Face Spaces page header artifact ("Spaces: / Running / Running")
# — scrape residue, not part of the Python source.
import gradio as gr | |
import os | |
import json | |
import requests | |
from datetime import datetime | |
import time | |
from typing import List, Dict, Any, Generator, Tuple, Optional, Set | |
import logging | |
import re | |
import tempfile | |
from pathlib import Path | |
import sqlite3 | |
import hashlib | |
import threading | |
from contextlib import contextmanager | |
from dataclasses import dataclass, field, asdict | |
from collections import defaultdict | |
# --- Logging setup ---
# Module-wide logger; all classes below log through this instance.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# --- Document export imports ---
# python-docx is optional: DOCX export features are gated on DOCX_AVAILABLE.
try:
    from docx import Document
    from docx.shared import Inches, Pt, RGBColor
    from docx.enum.text import WD_ALIGN_PARAGRAPH
    from docx.enum.style import WD_STYLE_TYPE
    from docx.oxml.ns import qn
    from docx.oxml import OxmlElement
    DOCX_AVAILABLE = True
except ImportError:
    DOCX_AVAILABLE = False
    logger.warning("python-docx not installed. DOCX export will be disabled.")
# --- Environment variables and constants ---
FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "")
BRAVE_SEARCH_API_KEY = os.getenv("BRAVE_SEARCH_API_KEY", "")
API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
MODEL_ID = "dep89a2fld32mcm"
DB_PATH = "novel_sessions_v5.db"
# Target length settings
TARGET_WORDS = 8000  # total target word count for the finished novella
MIN_WORDS_PER_WRITER = 800  # minimum words each writer phase must produce
# --- Environment validation ---
if not FRIENDLI_TOKEN:
    logger.error("FRIENDLI_TOKEN not set. Application will not work properly.")
    # Fallback so the app can still start for UI testing; API calls will fail.
    FRIENDLI_TOKEN = "dummy_token_for_testing"
if not BRAVE_SEARCH_API_KEY:
    logger.warning("BRAVE_SEARCH_API_KEY not set. Web search features will be disabled.")
# --- Globals ---
# Serializes all SQLite connection acquisition (see NovelDatabase.get_db).
db_lock = threading.Lock()
# Narrative phase definitions: the ten beats of the integrated story arc.
NARRATIVE_PHASES = [
    "λμ…: μΌμκ³Ό κ· μ΄",
    "λ°μ „ 1: λΆˆμ•ˆμ κ³ μ‘°",
    "λ°μ „ 2: μ™ΈλΆ€ 충격",
    "λ°μ „ 3: λ‚΄μ  κ°λ“± μ‹¬ν™”",
    "μ ˆμ • 1: μœ„κΈ°μ μ •μ ",
    "μ ˆμ • 2: μ„ νƒμ μˆœκ°„",
    "ν•˜κ°• 1: κ²°κ³Όμ™€ μ—¬νŒŒ",
    "ν•˜κ°• 2: μƒˆλ‘œμš΄ μΈμ‹",
    "κ²°λ§ 1: λ³€ν™”λœ μΌμƒ",
    "κ²°λ§ 2: μ—΄λ¦° μ§ˆλ¬Έ"
]
# Stage pipeline: director plan -> critique -> revised plan -> 10 writer drafts
# -> mid review -> 10 writer revisions -> final review.
PROGRESSIVE_STAGES = [
    ("director", "π ¬ κ°λ…μž: ν†΅ν•©λœ μ„œμ‚¬ κ΅¬μ‘° κΈ°ν"),
    ("critic", "π “ λΉνκ°€: μ„œμ‚¬ μ§„ν–‰μ„±κ³Ό κΉŠμ΄ κ²€ν† "),
    ("director", "π ¬ κ°λ…μž: μˆ˜μ •λœ λ§ˆμŠ€ν„°ν”Œλžœ"),
] + [
    (f"writer{i}", f"βœοΈ μž‘κ°€ {i}: μ΄μ•ˆ - {NARRATIVE_PHASES[i-1]}")
    for i in range(1, 11)
] + [
    ("critic", "π “ λΉνκ°€: μ€‘κ°„ κ²€ν†  (μ„œμ‚¬ λˆ„μ μ„±κ³Ό λ³€ν™”)"),
] + [
    (f"writer{i}", f"βœοΈ μž‘κ°€ {i}: μˆ˜μ •λ³Έ - {NARRATIVE_PHASES[i-1]}")
    for i in range(1, 11)
] + [
    # fix: original used an f-string here with no placeholders (F541)
    ("critic", "π “ λΉνκ°€: μ΅œμ’… κ²€ν†  λ° λ¬Έν•™μ  ν‰κ°€"),
]
# --- λ°μ΄ν° ν΄λμ€ --- | |
class CharacterArc: | |
"""μΈλ¬Όμ λ³ν κΆ€μ μΆμ """ | |
name: str | |
initial_state: Dict[str, Any] # μ΄κΈ° μν | |
phase_states: Dict[int, Dict[str, Any]] = field(default_factory=dict) # λ¨κ³λ³ μν | |
transformations: List[str] = field(default_factory=list) # μ£Όμ λ³νλ€ | |
relationships_evolution: Dict[str, List[str]] = field(default_factory=dict) # κ΄κ³ λ³ν | |
@dataclass  # fix: decorator was missing (see CharacterArc)
class PlotThread:
    """Tracks a single plot line from introduction to (optional) resolution."""
    thread_id: str
    description: str
    introduction_phase: int
    # fix: defaults added — callers construct PlotThread without these
    # (e.g. tracker.add_plot_thread omits resolution_phase entirely)
    development_phases: List[int] = field(default_factory=list)
    resolution_phase: Optional[int] = None
    status: str = "active"  # active, resolved, suspended
@dataclass  # fix: decorator was missing (see CharacterArc)
class SymbolicEvolution:
    """Tracks how a symbol's meaning shifts across phases."""
    symbol: str
    initial_meaning: str
    phase_meanings: Dict[int, str] = field(default_factory=dict)  # meaning per phase
    transformation_complete: bool = False
# --- ν΅μ¬ λ‘μ§ ν΄λμ€ --- | |
class ProgressiveNarrativeTracker: | |
"""μμ¬ μ§νκ³Ό λμ μ μΆμ νλ μμ€ν """ | |
def __init__(self): | |
self.character_arcs: Dict[str, CharacterArc] = {} | |
self.plot_threads: Dict[str, PlotThread] = {} | |
self.symbolic_evolutions: Dict[str, SymbolicEvolution] = {} | |
self.phase_summaries: Dict[int, str] = {} | |
self.accumulated_events: List[Dict[str, Any]] = [] | |
self.thematic_deepening: List[str] = [] | |
def register_character_arc(self, name: str, initial_state: Dict[str, Any]): | |
"""μΊλ¦ν° μν¬ λ±λ‘""" | |
self.character_arcs[name] = CharacterArc(name=name, initial_state=initial_state) | |
logger.info(f"Character arc registered: {name}") | |
def update_character_state(self, name: str, phase: int, new_state: Dict[str, Any], transformation: str): | |
"""μΊλ¦ν° μν μ λ°μ΄νΈ λ° λ³ν κΈ°λ‘""" | |
if name in self.character_arcs: | |
arc = self.character_arcs[name] | |
arc.phase_states[phase] = new_state | |
arc.transformations.append(f"Phase {phase}: {transformation}") | |
logger.info(f"Character {name} transformed in phase {phase}: {transformation}") | |
def add_plot_thread(self, thread_id: str, description: str, intro_phase: int): | |
"""μλ‘μ΄ νλ‘― λΌμΈ μΆκ°""" | |
self.plot_threads[thread_id] = PlotThread( | |
thread_id=thread_id, | |
description=description, | |
introduction_phase=intro_phase, | |
development_phases=[] | |
) | |
def develop_plot_thread(self, thread_id: str, phase: int): | |
"""νλ‘― λΌμΈ λ°μ """ | |
if thread_id in self.plot_threads: | |
self.plot_threads[thread_id].development_phases.append(phase) | |
def check_narrative_progression(self, current_phase: int) -> Tuple[bool, List[str]]: | |
"""μμ¬κ° μ€μ λ‘ μ§νλκ³ μλμ§ νμΈ""" | |
issues = [] | |
# 1. μΊλ¦ν° λ³ν νμΈ | |
static_characters = [] | |
for name, arc in self.character_arcs.items(): | |
if len(arc.transformations) < current_phase // 3: # μ΅μ 3λ¨κ³λ§λ€ λ³ν νμ | |
static_characters.append(name) | |
if static_characters: | |
issues.append(f"λ€μ μΈλ¬Όλ€μ λ³νκ° λΆμ‘±ν©λλ€: {', '.join(static_characters)}") | |
# 2. νλ‘― μ§ν νμΈ | |
unresolved_threads = [] | |
for thread_id, thread in self.plot_threads.items(): | |
if thread.status == "active" and len(thread.development_phases) < 2: | |
unresolved_threads.append(thread.description) | |
if unresolved_threads: | |
issues.append(f"μ§μ λμ§ μμ νλ‘―: {', '.join(unresolved_threads)}") | |
# 3. μμ§ λ°μ νμΈ | |
static_symbols = [] | |
for symbol, evolution in self.symbolic_evolutions.items(): | |
if len(evolution.phase_meanings) < current_phase // 4: | |
static_symbols.append(symbol) | |
if static_symbols: | |
issues.append(f"μλ―Έκ° λ°μ νμ§ μμ μμ§: {', '.join(static_symbols)}") | |
return len(issues) == 0, issues | |
def generate_phase_requirements(self, phase: int) -> str: | |
"""κ° λ¨κ³λ³ νμ μꡬμ¬ν μμ±""" | |
requirements = [] | |
# μ΄μ λ¨κ³ μμ½ | |
if phase > 1 and (phase-1) in self.phase_summaries: | |
requirements.append(f"μ΄μ λ¨κ³ ν΅μ¬: {self.phase_summaries[phase-1]}") | |
# λ¨κ³λ³ νΉμ μꡬμ¬ν | |
phase_name = NARRATIVE_PHASES[phase-1] if phase <= 10 else "μμ " | |
if "λμ " in phase_name: | |
requirements.append("- μΌμμ κ· μ΄μ 보μ¬μ£Όλ, ν° μ¬κ±΄μ΄ μλ λ―Έλ¬ν λ³νλ‘ μμ") | |
requirements.append("- μ£Όμ μΈλ¬Όλ€μ μ΄κΈ° μνμ κ΄κ³ μ€μ ") | |
requirements.append("- ν΅μ¬ μμ§ λμ (μμ°μ€λ½κ²)") | |
elif "λ°μ " in phase_name: | |
requirements.append("- μ΄μ λ¨κ³μ κ· μ΄/κ°λ±μ΄ ꡬ체νλκ³ μ¬ν") | |
requirements.append("- μλ‘μ΄ μ¬κ±΄μ΄λ μΈμμ΄ μΆκ°λμ΄ λ³΅μ‘μ± μ¦κ°") | |
requirements.append("- μΈλ¬Ό κ° κ΄κ³μ λ―Έλ¬ν λ³ν") | |
elif "μ μ " in phase_name: | |
requirements.append("- μΆμ λ κ°λ±μ΄ μκ³μ μ λλ¬") | |
requirements.append("- μΈλ¬Όμ λ΄μ μ νμ΄λ μΈμμ μ νμ ") | |
requirements.append("- μμ§μ μλ―Έκ° μ 볡λκ±°λ μ¬ν") | |
elif "νκ°" in phase_name: | |
requirements.append("- μ μ μ μ¬νμ κ·Έλ‘ μΈν λ³ν") | |
requirements.append("- μλ‘μ΄ κ· νμ μ μ°Ύμκ°λ κ³Όμ ") | |
requirements.append("- μΈλ¬Όλ€μ λ³νλ κ΄κ³μ μΈμ") | |
elif "κ²°λ§" in phase_name: | |
requirements.append("- λ³νλ μΌμμ λͺ¨μ΅") | |
requirements.append("- ν΄κ²°λμ§ μμ μ§λ¬Έλ€") | |
requirements.append("- μ¬μ΄κ³Ό μ±μ°°μ μ¬μ§") | |
# λ°λ³΅ λ°©μ§ μꡬμ¬ν | |
requirements.append("\nβ οΈ μ λ κΈμ§μ¬ν:") | |
requirements.append("- μ΄μ λ¨κ³μ λμΌν μ¬κ±΄μ΄λ κ°λ± λ°λ³΅") | |
requirements.append("- μΈλ¬Όμ΄ κ°μ μκ°μ΄λ κ°μ μ 머무λ₯΄κΈ°") | |
requirements.append("- νλ‘―μ΄ μ μ리걸μνκΈ°") | |
return "\n".join(requirements) | |
class NovelDatabase:
    """SQLite-backed persistence for sessions, stages, and narrative state.

    All methods are static; connection acquisition is serialized through the
    module-level db_lock and connections are short-lived.
    """

    @staticmethod
    def init_db():
        """Create the schema (idempotent) and enable WAL journaling."""
        with sqlite3.connect(DB_PATH) as conn:
            conn.execute("PRAGMA journal_mode=WAL")
            cursor = conn.cursor()
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS sessions (
                    session_id TEXT PRIMARY KEY,
                    user_query TEXT NOT NULL,
                    language TEXT NOT NULL,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    status TEXT DEFAULT 'active',
                    current_stage INTEGER DEFAULT 0,
                    final_novel TEXT,
                    literary_report TEXT,
                    total_words INTEGER DEFAULT 0,
                    narrative_tracker TEXT
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS stages (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    stage_number INTEGER NOT NULL,
                    stage_name TEXT NOT NULL,
                    role TEXT NOT NULL,
                    content TEXT,
                    word_count INTEGER DEFAULT 0,
                    status TEXT DEFAULT 'pending',
                    progression_score REAL DEFAULT 0.0,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id),
                    UNIQUE(session_id, stage_number)
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS plot_threads (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    thread_id TEXT NOT NULL,
                    description TEXT,
                    introduction_phase INTEGER,
                    status TEXT DEFAULT 'active',
                    created_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id)
                )
            ''')
            conn.commit()

    @staticmethod
    @contextmanager  # fix: required — every caller uses `with NovelDatabase.get_db() as conn:`
    def get_db():
        """Yield a row-factory connection under the global DB lock."""
        with db_lock:
            conn = sqlite3.connect(DB_PATH, timeout=30.0)
            conn.row_factory = sqlite3.Row
            try:
                yield conn
            finally:
                conn.close()

    @staticmethod
    def create_session(user_query: str, language: str) -> str:
        """Insert a new session and return its md5-derived id (non-cryptographic use)."""
        session_id = hashlib.md5(f"{user_query}{datetime.now()}".encode()).hexdigest()
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'INSERT INTO sessions (session_id, user_query, language) VALUES (?, ?, ?)',
                (session_id, user_query, language)
            )
            conn.commit()
        return session_id

    @staticmethod
    def save_stage(session_id: str, stage_number: int, stage_name: str,
                   role: str, content: str, status: str = 'complete',
                   progression_score: float = 0.0):
        """Upsert a stage row and refresh the session's word total / current stage."""
        word_count = len(content.split()) if content else 0
        with NovelDatabase.get_db() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                INSERT INTO stages (session_id, stage_number, stage_name, role, content, word_count, status, progression_score)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(session_id, stage_number)
                DO UPDATE SET content=?, word_count=?, status=?, stage_name=?, progression_score=?, updated_at=datetime('now')
            ''', (session_id, stage_number, stage_name, role, content, word_count, status, progression_score,
                  content, word_count, status, stage_name, progression_score))
            # Recompute total word count from writer stages only
            cursor.execute('''
                UPDATE sessions
                SET total_words = (
                    SELECT SUM(word_count)
                    FROM stages
                    WHERE session_id = ? AND role LIKE 'writer%' AND content IS NOT NULL
                ),
                updated_at = datetime('now'),
                current_stage = ?
                WHERE session_id = ?
            ''', (session_id, stage_number, session_id))
            conn.commit()

    @staticmethod
    def get_writer_content(session_id: str) -> str:
        """Concatenate writer content for all 10 writers (revision preferred over draft)."""
        with NovelDatabase.get_db() as conn:
            all_content = []
            for writer_num in range(1, 11):
                # Prefer the revised version; fall back to the initial draft
                row = conn.cursor().execute('''
                    SELECT content FROM stages
                    WHERE session_id = ? AND role = ?
                    AND stage_name LIKE '%μˆ˜μ •λ³Έ%'
                    ORDER BY stage_number DESC LIMIT 1
                ''', (session_id, f'writer{writer_num}')).fetchone()
                if not row or not row['content']:
                    row = conn.cursor().execute('''
                        SELECT content FROM stages
                        WHERE session_id = ? AND role = ?
                        AND stage_name LIKE '%μ΄μ•ˆ%'
                        ORDER BY stage_number DESC LIMIT 1
                    ''', (session_id, f'writer{writer_num}')).fetchone()
                if row and row['content']:
                    all_content.append(row['content'].strip())
            return '\n\n'.join(all_content)

    @staticmethod
    def get_total_words(session_id: str) -> int:
        """Return the session's stored total word count (0 when missing/NULL)."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                'SELECT total_words FROM sessions WHERE session_id = ?',
                (session_id,)
            ).fetchone()
            return row['total_words'] if row and row['total_words'] else 0

    @staticmethod
    def save_narrative_tracker(session_id: str, tracker: ProgressiveNarrativeTracker):
        """Serialize the narrative tracker into the session row as JSON."""
        with NovelDatabase.get_db() as conn:
            tracker_data = json.dumps({
                'character_arcs': {k: asdict(v) for k, v in tracker.character_arcs.items()},
                'plot_threads': {k: asdict(v) for k, v in tracker.plot_threads.items()},
                'phase_summaries': tracker.phase_summaries,
                'thematic_deepening': tracker.thematic_deepening
            })
            conn.cursor().execute(
                'UPDATE sessions SET narrative_tracker = ? WHERE session_id = ?',
                (tracker_data, session_id)
            )
            conn.commit()

    @staticmethod
    def load_narrative_tracker(session_id: str) -> Optional[ProgressiveNarrativeTracker]:
        """Rebuild a ProgressiveNarrativeTracker from stored JSON, or None."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                'SELECT narrative_tracker FROM sessions WHERE session_id = ?',
                (session_id,)
            ).fetchone()
            if row and row['narrative_tracker']:
                data = json.loads(row['narrative_tracker'])
                tracker = ProgressiveNarrativeTracker()
                # Restore data. JSON turns int dict keys into strings, so
                # convert them back — phase lookups elsewhere use ints.
                for name, arc_data in data.get('character_arcs', {}).items():
                    arc_data['phase_states'] = {
                        int(k): v for k, v in arc_data.get('phase_states', {}).items()
                    }
                    tracker.character_arcs[name] = CharacterArc(**arc_data)
                for thread_id, thread_data in data.get('plot_threads', {}).items():
                    tracker.plot_threads[thread_id] = PlotThread(**thread_data)
                tracker.phase_summaries = {
                    int(k): v for k, v in data.get('phase_summaries', {}).items()
                }
                tracker.thematic_deepening = data.get('thematic_deepening', [])
                return tracker
            return None

    @staticmethod
    def get_session(session_id: str) -> Optional[Dict]:
        """Fetch a session row as a plain dict, or None."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute('SELECT * FROM sessions WHERE session_id = ?', (session_id,)).fetchone()
            return dict(row) if row else None

    @staticmethod
    def get_stages(session_id: str) -> List[Dict]:
        """Fetch all stage rows for a session, ordered by stage number."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute('SELECT * FROM stages WHERE session_id = ? ORDER BY stage_number', (session_id,)).fetchall()
            return [dict(row) for row in rows]

    @staticmethod
    def update_final_novel(session_id: str, final_novel: str, literary_report: str = ""):
        """Store the finished novel and mark the session complete."""
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                "UPDATE sessions SET final_novel = ?, status = 'complete', updated_at = datetime('now'), literary_report = ? WHERE session_id = ?",
                (final_novel, literary_report, session_id)
            )
            conn.commit()

    @staticmethod
    def get_active_sessions() -> List[Dict]:
        """Return up to 10 most recently updated active sessions."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                "SELECT session_id, user_query, language, created_at, current_stage, total_words FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 10"
            ).fetchall()
            return [dict(row) for row in rows]
class WebSearchIntegration:
    """Thin Brave web-search client; inert when no API key is configured."""

    def __init__(self):
        self.brave_api_key = BRAVE_SEARCH_API_KEY
        self.search_url = "https://api.search.brave.com/res/v1/web/search"
        # Search calls short-circuit to [] when this is False.
        self.enabled = bool(self.brave_api_key)

    def search(self, query: str, count: int = 3, language: str = "en") -> List[Dict]:
        """Run a Brave web search and return the raw result dicts ([] on failure)."""
        if not self.enabled:
            return []
        request_headers = {
            "Accept": "application/json",
            "X-Subscription-Token": self.brave_api_key,
        }
        request_params = {
            "q": query,
            "count": count,
            "search_lang": "ko" if language == "Korean" else "en",
            "text_decorations": False,
            "safesearch": "moderate",
        }
        try:
            resp = requests.get(self.search_url, headers=request_headers,
                                params=request_params, timeout=10)
            resp.raise_for_status()
            return resp.json().get("web", {}).get("results", [])
        except requests.exceptions.RequestException as e:
            logger.error(f"μ›Ή κ²€μƒ‰ API μ˜€λ₯˜: {e}")
            return []

    def extract_relevant_info(self, results: List[Dict], max_chars: int = 1500) -> str:
        """Format up to three results as numbered 'title: description' lines,
        stopping before the running total reaches max_chars."""
        if not results:
            return ""
        snippets = []
        used = 0
        for idx, item in enumerate(results[:3], 1):
            title = item.get("title", "")
            description = item.get("description", "")
            entry = f"[{idx}] {title}: {description}"
            if used + len(entry) >= max_chars:
                break
            snippets.append(entry)
            used += len(entry)
        return "\n".join(snippets)
class ProgressiveLiterarySystem: | |
"""μ§νν λ¬Έν μμ€ μμ± μμ€ν """ | |
def __init__(self):
    """Wire up API credentials, narrative tracker, and web search; ensure the
    database schema exists (side effect on construction)."""
    self.token = FRIENDLI_TOKEN
    self.api_url = API_URL
    self.model_id = MODEL_ID
    self.narrative_tracker = ProgressiveNarrativeTracker()
    self.web_search = WebSearchIntegration()
    self.current_session_id = None
    NovelDatabase.init_db()  # creates tables if missing
def create_headers(self): | |
return {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"} | |
# --- ν둬ννΈ μμ± ν¨μλ€ --- | |
def create_director_initial_prompt(self, user_query: str, language: str) -> str: | |
"""κ°λ μ μ΄κΈ° κΈ°ν - ν΅ν©λ μμ¬ κ΅¬μ‘°""" | |
search_results_str = "" | |
if self.web_search.enabled: | |
queries = [f"{user_query} μ¬ν λ¬Έμ ", f"{user_query} νλ νκ΅"] | |
for q in queries[:1]: | |
results = self.web_search.search(q, count=2, language=language) | |
if results: | |
search_results_str += self.web_search.extract_relevant_info(results) + "\n" | |
lang_prompts = { | |
"Korean": f"""λΉμ μ νλ νκ΅ λ¬Ένμ κ±°μ₯μ λλ€. | |
λ¨νΈμ΄ μλ μ€νΈ μμ€(8,000λ¨μ΄ μ΄μ)μ μν ν΅ν©λ μμ¬ κ΅¬μ‘°λ₯Ό κΈ°ννμΈμ. | |
**μ£Όμ :** {user_query} | |
**μ°Έκ³ μλ£:** | |
{search_results_str if search_results_str else "N/A"} | |
**νμ μꡬμ¬ν:** | |
1. **ν΅ν©λ μμ¬ κ΅¬μ‘° (κ°μ₯ μ€μ)** | |
- 10κ° λ¨κ³κ° μ κΈ°μ μΌλ‘ μ°κ²°λ λ¨μΌ μμ¬ | |
- κ° λ¨κ³λ μ΄μ λ¨κ³μ κ²°κ³Όλ‘ μμ°μ€λ½κ² μ΄μ΄μ§ | |
- λ°λ³΅μ΄ μλ μΆμ κ³Ό λ°μ | |
λ¨κ³λ³ μμ¬ μ§ν: | |
1) λμ : μΌμκ³Ό κ· μ΄ - νλ²ν μΌμ μ 첫 κ· μ΄ | |
2) λ°μ 1: λΆμμ κ³ μ‘° - κ· μ΄μ΄ νλλλ©° λΆμ μ¦ν | |
3) λ°μ 2: μΈλΆ 좩격 - μμμΉ λͺ»ν μΈλΆ μ¬κ±΄ | |
4) λ°μ 3: λ΄μ κ°λ± μ¬ν - κ°μΉκ΄μ μΆ©λ | |
5) μ μ 1: μκΈ°μ μ μ - λͺ¨λ κ°λ±μ΄ κ·Ήλν | |
6) μ μ 2: μ νμ μκ° - κ²°μ μ μ ν | |
7) νκ° 1: κ²°κ³Όμ μ¬ν - μ νμ μ§μ μ κ²°κ³Ό | |
8) νκ° 2: μλ‘μ΄ μΈμ - λ³νλ μΈκ³κ΄ | |
9) κ²°λ§ 1: λ³νλ μΌμ - μλ‘μ΄ κ· ν | |
10) κ²°λ§ 2: μ΄λ¦° μ§λ¬Έ - λ μμκ² λμ§λ μ§λ¬Έ | |
2. **μΈλ¬Όμ λ³ν κΆ€μ ** | |
- μ£ΌμΈκ³΅: μ΄κΈ° μν β μ€κ° λ³ν β μ΅μ’ μν (λͺ νν arc) | |
- μ£Όμ μΈλ¬Όλ€λ κ°μμ λ³ν κ²½ν | |
- κ΄κ³μ μλμ λ³ν | |
3. **μ£Όμ νλ‘― λΌμΈ** (2-3κ°) | |
- λ©μΈ νλ‘―: μ 체λ₯Ό κ΄ν΅νλ ν΅μ¬ κ°λ± | |
- μλΈ νλ‘―: λ©μΈκ³Ό μ°κ²°λλ©° μ£Όμ λ₯Ό μ¬ν | |
4. **μμ§μ μ§ν** | |
- ν΅μ¬ μμ§ 1-2κ° μ€μ | |
- λ¨κ³λ³λ‘ μλ―Έκ° λ³ν/μ¬ν/μ 볡 | |
5. **μ¬νμ λ§₯λ½** | |
- κ°μΈμ λ¬Έμ κ° μ¬ν ꡬ쑰μ μ°κ²° | |
- ꡬ체μ μΈ νκ΅ μ¬νμ νμ€ λ°μ | |
**μ λ κΈμ§μ¬ν:** | |
- λμΌν μ¬κ±΄μ΄λ μν©μ λ°λ³΅ | |
- μΈλ¬Όμ΄ κ°μ κ°μ /μκ°μ 머무λ₯΄κΈ° | |
- νλ‘―μ 리μ μ΄λ μν ꡬ쑰 | |
- κ° λ¨κ³κ° λ 립λ μνΌμλλ‘ μ‘΄μ¬ | |
**λΆλ κ³ν:** | |
- μ΄ 8,000λ¨μ΄ μ΄μ | |
- κ° λ¨κ³ νκ· 800λ¨μ΄ | |
- κ· ν μ‘ν μμ¬ μ κ° | |
νλμ κ°λ ₯ν μμ¬κ° μμλΆν° λκΉμ§ κ΄ν΅νλ μνμ κΈ°ννμΈμ.""", | |
"English": f"""You are a master of contemporary literary fiction. | |
Plan an integrated narrative structure for a novella (8,000+ words), not a collection of short stories. | |
**Theme:** {user_query} | |
**Reference:** | |
{search_results_str if search_results_str else "N/A"} | |
**Essential Requirements:** | |
1. **Integrated Narrative Structure (Most Important)** | |
- Single narrative with 10 organically connected phases | |
- Each phase naturally follows from previous results | |
- Accumulation and development, not repetition | |
Phase Progression: | |
1) Introduction: Daily life and first crack | |
2) Development 1: Rising anxiety | |
3) Development 2: External shock | |
4) Development 3: Deepening internal conflict | |
5) Climax 1: Peak crisis | |
6) Climax 2: Moment of choice | |
7) Falling Action 1: Direct consequences | |
8) Falling Action 2: New awareness | |
9) Resolution 1: Changed daily life | |
10) Resolution 2: Open questions | |
2. **Character Transformation Arcs** | |
- Protagonist: Clear progression from initial β middle β final state | |
- Supporting characters also experience change | |
- Dynamic relationship evolution | |
3. **Plot Threads** (2-3) | |
- Main plot: Core conflict throughout | |
- Subplots: Connected and deepening themes | |
4. **Symbolic Evolution** | |
- 1-2 core symbols | |
- Meaning transforms across phases | |
5. **Social Context** | |
- Individual problems connected to social structures | |
- Specific contemporary realities | |
**Absolutely Forbidden:** | |
- Repetition of same events/situations | |
- Characters stuck in same emotions | |
- Plot resets or circular structure | |
- Independent episodes | |
**Length Planning:** | |
- Total 8,000+ words | |
- ~800 words per phase | |
- Balanced progression | |
Create a work with one powerful narrative from beginning to end.""" | |
} | |
return lang_prompts.get(language, lang_prompts["Korean"]) | |
def create_critic_director_prompt(self, director_plan: str, user_query: str, language: str) -> str:
    """Build the critic's review prompt for the director's plan.

    Focuses on narrative integration; falls back to the Korean template.
    """
    lang_prompts = {
        "Korean": f"""λ‹Ήμ‹ μ€ μ„œμ‚¬ κ΅¬μ‘° μ „λ¬Έ λΉνκ°€μž…λ‹ˆλ‹€.
μ΄ κΈ°νμ΄ μ§„μ •ν•œ 'μž₯νŽΈ μ†Œμ„€'μΈμ§€ μ—„κ²©ν•˜κ²Œ κ²€ν† ν•˜μ„Έμš”.
**μ› μ£Όμ œ:** {user_query}
**κ°λ…μž κΈ°ν:**
{director_plan}
**ν•΅μ‹¬ κ²€ν†  μ‚¬ν•­:**
1. **μ„œμ‚¬μ ν†΅ν•©μ„±κ³Ό μ§„ν–‰μ„±**
- 10κ°œ λ‹¨κ³„κ°€ ν•˜λ‚˜μ μ΄μ•ΌκΈ°λ‘ μ—°κ²°λλκ°€?
- κ° λ‹¨κ³„κ°€ μ΄μ „ λ‹¨κ³„μ ν•„μ—°μ  κ²°κ³ΌμΈκ°€?
- λμΌν•œ μƒν™©μ λ°λ³΅μ€ μ—†λκ°€?
2. **μΈλ¬Ό λ³€ν™”μ κΆ€μ **
- μ£ΌμΈκ³΅μ΄ λͺ…ν™•ν•œ λ³€ν™”μ arcλ₯Ό κ°€μ§€λκ°€?
- λ³€ν™”κ°€ κ΅¬μ²΄μ μ΄κ³  μ‹ λΉ™μ„± μžˆλκ°€?
- κ΄€κ³„μ λ°μ „μ΄ κ³„νλμ–΄ μžˆλκ°€?
3. **ν”Œλ‘―μ μΆ•μ μ„±**
- κ°λ“±μ΄ μ μ§„μ μΌλ‘ μ‹¬ν™”λλκ°€?
- μƒˆλ‘œμš΄ μš”μ†Œκ°€ μΆ”κ°€λλ©° λ³΅μž‘μ„±μ΄ μ¦κ°€ν•˜λκ°€?
- ν•΄κ²°μ΄ μžμ—°μŠ€λŸ½κ³  ν•„μ—°μ μΈκ°€?
4. **λΆ„λŸ‰κ³Ό λ°€λ**
- 8,000λ‹¨μ–΄λ₯Ό μ±„μΈ μΆ©λΆ„ν•œ λ‚΄μš©μΈκ°€?
- κ° λ‹¨κ³„κ°€ 800λ‹¨μ–΄μ λ°€λλ₯Ό κ°€μ§ˆ μˆ˜ μžˆλκ°€?
**νμ •:**
- ν†΅κ³Ό: μ§„μ •ν•œ μž₯νŽΈ μ„œμ‚¬ κ΅¬μ‘°
- μž¬μž‘μ„±: λ°λ³΅μ /μˆœν™˜μ  κ΅¬μ‘°
κ΅¬μ²΄μ  κ°œμ„  λ°©ν–₯μ„ μ œμ‹œν•˜μ„Έμš”.""",
        "English": f"""You are a narrative structure critic.
Strictly review whether this plan is a true 'novel' rather than repeated episodes.
**Original Theme:** {user_query}
**Director's Plan:**
{director_plan}
**Key Review Points:**
1. **Narrative Integration and Progression**
- Do 10 phases connect as one story?
- Does each phase necessarily follow from previous?
- No repetition of same situations?
2. **Character Transformation Arcs**
- Clear protagonist transformation arc?
- Concrete and credible changes?
- Planned relationship development?
3. **Plot Accumulation**
- Progressive conflict deepening?
- Added complexity through new elements?
- Natural and inevitable resolution?
4. **Length and Density**
- Sufficient content for 8,000 words?
- Can each phase sustain 800 words?
**Verdict:**
- Pass: True novel structure
- Rewrite: Repetitive/circular structure
Provide specific improvements."""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_writer_prompt(self, writer_number: int, director_plan: str,
                         previous_content: str, phase_requirements: str,
                         narrative_summary: str, language: str) -> str:
    """Build a writer's drafting prompt for one narrative phase.

    writer_number (1-10) selects the phase; only the tail of
    previous_content (last 1500 chars) is included for context.
    """
    phase_name = NARRATIVE_PHASES[writer_number-1]
    target_words = MIN_WORDS_PER_WRITER
    lang_prompts = {
        "Korean": f"""λ‹Ήμ‹ μ€ μž‘κ°€ {writer_number}λ²ˆμž…λ‹ˆλ‹€.
**ν˜„μž¬ λ‹¨κ³„: {phase_name}**
**μ „μ²΄ μ„œμ‚¬ κ΅¬μ‘°:**
{director_plan}
**μ§€κΈˆκΉŒμ§€μ μ΄μ•ΌκΈ° μš”μ•½:**
{narrative_summary}
**μ΄μ „ λ‚΄μš© (μ§μ „ λΆ€λΆ„):**
{previous_content[-1500:] if previous_content else "μ‹œμž‘"}
**μ΄λ²ˆ λ‹¨κ³„ ν•„μˆ˜ μš”κ΅¬μ‚¬ν•­:**
{phase_requirements}
**μž‘μ„± μ§€μΉ¨:**
1. **λΆ„λŸ‰**: {target_words}-900 λ‹¨μ–΄ (ν•„μˆ˜)
- λ‚΄λ©΄ λ¬˜μ‚¬μ™€ κ΅¬μ²΄μ  λ””ν…ŒμΌλ‘ λΆ„λŸ‰ ν™•λ³΄
- μž₯λ©΄μ„ μΆ©λΆ„ν žˆ μ „κ°œν•˜κ³  κΉŠμ΄ μžˆκ²Œ λ¬˜μ‚¬
2. **μ„œμ‚¬ μ§„ν–‰ (κ°€μž₯ μ€‘μš”)**
- μ΄μ „ λ‹¨κ³„μ—μ„œ μΌμ–΄λ‚œ μΌμ μ§μ ‘μ  κ²°κ³Όλ‘ μ‹œμž‘
- μƒˆλ‘œμš΄ μ‚¬κ±΄/μΈμ‹/λ³€ν™”λ₯Ό μΆ”κ°€ν•˜μ—¬ μ΄μ•ΌκΈ° μ „μ§„
- λ‹€μŒ λ‹¨κ³„λ‘ μžμ—°μŠ€λŸ½κ²Œ μ—°κ²°λ  κ³ λ¦¬ λ§ˆλ ¨
3. **μΈλ¬Όμ λ³€ν™”**
- μ΄ λ‹¨κ³„μ—μ„œ μΈλ¬Όμ΄ κ²ͺλ κ΅¬μ²΄μ  λ³€ν™” λ¬˜μ‚¬
- λ‚΄λ©΄μ λ―Έλ¬˜ν•œ λ³€ν™”λ„ ν¬μ°©
- κ΄€κ³„μ μ—­ν•™ λ³€ν™” λ°μ˜
4. **λ¬Έμ²΄μ™€ κΈ°λ²•**
- ν•œκ΅­ ν˜„λŒ€ λ¬Έν•™μ μ„¬μ„Έν•œ μ‹¬λ¦¬ λ¬˜μ‚¬
- μΌμƒ μ† μ‚¬νšŒμ  λ§₯λ½ λ…Ήμ—¬λ‚΄κΈ°
- κ°κ°μ  λ””ν…ŒμΌκ³Ό λ‚΄λ©΄ μμ‹μ κ·  ν˜•
5. **μ—°μ†μ„± μœ μ§€**
- μΈλ¬Όμ λͺ©μ†Œλ¦¬μ™€ λ§ν¬ μΌκ΄€μ„±
- κ³΅κ°„κ³Ό μ‹œκ°„μ μ—°μ†μ„±
- μƒμ§•κ³Ό λͺ¨ν‹°ν”„μ λ°μ „
**μ ˆλŒ€ κΈμ§€:**
- μ΄μ „κ³Ό λμΌν•œ μƒν™© λ°λ³΅
- μ„œμ‚¬μ μ •μ²΄λ‚˜ ν‡΄ν–‰
- λΆ„λŸ‰ λ―Έλ‹¬ (μ΅œμ†Œ {target_words}λ‹¨μ–΄)
μ΄μ „μ νλ¦„μ„ μ΄μ–΄λ°›μ•„ μƒˆλ‘œμš΄ κ΅­λ©΄μΌλ‘ λ°μ „μ‹œν‚€μ„Έμš”.""",
        "English": f"""You are Writer #{writer_number}.
**Current Phase: {phase_name}**
**Overall Narrative Structure:**
{director_plan}
**Story So Far:**
{narrative_summary}
**Previous Content (immediately before):**
{previous_content[-1500:] if previous_content else "Beginning"}
**Phase Requirements:**
{phase_requirements}
**Writing Guidelines:**
1. **Length**: {target_words}-900 words (mandatory)
- Use interior description and concrete details
- Fully develop scenes with depth
2. **Narrative Progression (Most Important)**
- Start as direct result of previous phase
- Add new events/awareness/changes to advance story
- Create natural connection to next phase
3. **Character Change**
- Concrete changes in this phase
- Capture subtle interior shifts
- Reflect relationship dynamics
4. **Style and Technique**
- Delicate psychological portrayal
- Social context in daily life
- Balance sensory details with consciousness
5. **Continuity**
- Consistent character voices
- Spatial/temporal continuity
- Symbol/motif development
**Absolutely Forbidden:**
- Repeating previous situations
- Narrative stagnation/regression
- Under word count (minimum {target_words})
Continue the flow and develop into new phase."""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_critic_consistency_prompt(self, all_content: str,
                                     narrative_tracker: ProgressiveNarrativeTracker,
                                     user_query: str, language: str) -> str:
    """Build the critic's mid-point review prompt (narrative accumulation check).

    Only the last 4000 chars of all_content are embedded.
    """
    # Check how far the narrative has progressed and collect detected issues
    phase_count = len(narrative_tracker.phase_summaries)
    # NOTE(review): progression_ok is computed but never used below — only
    # the issues list feeds the prompt; confirm whether that is intended.
    progression_ok, issues = narrative_tracker.check_narrative_progression(phase_count)
    lang_prompts = {
        "Korean": f"""μ„œμ‚¬ μ§„ν–‰ μ „λ¬Έ λΉνκ°€λ‘μ„œ μž‘ν’ˆμ„ κ²€ν† ν•˜μ„Έμš”.
**μ› μ£Όμ œ:** {user_query}
**ν˜„μž¬κΉŒμ§€ μ§„ν–‰λœ μ„œμ‚¬ λ‹¨κ³„:** {phase_count}/10
**λ°κ²¬λœ μ§„ν–‰ λ¬Έμ œ:**
{chr(10).join(issues) if issues else "μ—†μŒ"}
**μž‘ν’ˆ λ‚΄μš© (μ΅œκ·Ό λΆ€λΆ„):**
{all_content[-4000:]}
**μ§‘μ€‘ κ²€ν†  μ‚¬ν•­:**
1. **μ„œμ‚¬μ μΆ•μ κ³Ό μ§„ν–‰**
- μ΄μ•ΌκΈ°κ°€ μ‹€μ œλ‘ μ „μ§„ν•˜κ³  μžˆλκ°€?
- κ° λ‹¨κ³„κ°€ μ΄μ „μ κ²°κ³Όλ‘ μ—°κ²°λλκ°€?
- λμΌν•œ κ°λ“±μ΄λ‚˜ μƒν™©μ΄ λ°λ³΅λμ§€ μ•Šλκ°€?
2. **μΈλ¬Όμ λ³€ν™” κΆ€μ **
- μ£ΌμΈκ³΅μ΄ μ΄ˆκΈ°μ™€ λΉκ΅ν•΄ μ–΄λ–»κ²Œ λ³€ν–ˆλκ°€?
- λ³€ν™”κ°€ μ„€λλ ₯ μžˆκ³  μ μ§„μ μΈκ°€?
- κ΄€κ³„κ°€ μ—­λμ μΌλ‘ λ°μ „ν•˜λκ°€?
3. **μ£Όμ œμ μ‹¬ν™”**
- μ΄ˆκΈ° μ£Όμ œκ°€ μ–΄λ–»κ²Œ λ°μ „ν–ˆλκ°€?
- μƒˆλ‘œμš΄ μΈ΅μœ„κ°€ μΆ”κ°€λμ—ˆλκ°€?
- λ³΅μž‘μ„±μ΄ μ¦κ°€ν•˜λκ°€?
4. **λΆ„λŸ‰κ³Ό λ°€λ**
- ν˜„μž¬κΉŒμ§€ μ΄ λ‹¨μ–΄ μˆ˜ ν™•μΈ
- λͺ©ν‘œ(8,000λ‹¨μ–΄)μ— λλ‹¬ κ°€λŠ₯ν•œκ°€?
**μˆ˜μ • μ§€μ‹œ:**
κ° μž‘κ°€μ—κ²Œ κ΅¬μ²΄μ μΈ μ§„ν–‰ λ°©ν–₯ μ œμ‹œ.""",
        "English": f"""As a narrative progression critic, review the work.
**Original Theme:** {user_query}
**Narrative Phases Completed:** {phase_count}/10
**Detected Progression Issues:**
{chr(10).join(issues) if issues else "None"}
**Work Content (recent):**
{all_content[-4000:]}
**Focus Review Areas:**
1. **Narrative Accumulation and Progress**
- Is story actually moving forward?
- Does each phase connect as result of previous?
- No repetition of same conflicts/situations?
2. **Character Transformation Arcs**
- How has protagonist changed from beginning?
- Are changes credible and gradual?
- Dynamic relationship development?
3. **Thematic Deepening**
- How has initial theme developed?
- New layers added?
- Increased complexity?
4. **Length and Density**
- Current total word count
- Can reach 8,000 word target?
**Revision Instructions:**
Specific progression directions for each writer."""
    }
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_writer_revision_prompt(self, writer_number: int, initial_content: str,
                                  critic_feedback: str, language: str) -> str:
    """Build a writer's revision prompt from the critic's feedback.

    NOTE(review): only a Korean template exists here, so the `language`
    parameter is currently ignored — confirm this is intentional.
    """
    target_words = MIN_WORDS_PER_WRITER
    return f"""μž‘κ°€ {writer_number}λ²ˆ, λΉν‰μ„ λ°μ˜ν•˜μ—¬ μˆ˜μ •ν•˜μ„Έμš”.
**μ΄μ•ˆ:**
{initial_content}
**λΉν‰ ν”Όλ“œλ°±:**
{critic_feedback}
**μˆ˜μ • ν•΅μ‹¬:**
1. μ„œμ‚¬ μ§„ν–‰μ„± κ°•ν™” - λ°λ³΅ μ œκ±°, μƒˆλ‘œμš΄ μ „κ°œ μΆ”κ°€
2. μΈλ¬Ό λ³€ν™” κ΅¬μ²΄ν™” - μ΄μ „κ³Ό λ‹¬λΌμ§„ λͺ¨μŠ΅ λͺ…ν™•νžˆ
3. λΆ„λŸ‰ ν™•λ³΄ - μ΅œμ†Œ {target_words}λ‹¨μ–΄ μœ μ§€
4. λ‚΄λ©΄ λ¬˜μ‚¬μ™€ μ‚¬νšŒμ  λ§₯λ½ μ‹¬ν™”
μ „λ©΄ μž¬μž‘μ„±μ΄ ν•„μš”ν•˜λ©΄ κ³Όκ°ν žˆ μˆ˜μ •ν•˜μ„Έμš”.
μˆ˜μ •λ³Έλ§Œ μ œμ‹œν•˜μ„Έμš”."""
def create_critic_final_prompt(self, complete_novel: str, word_count: int, language: str) -> str:
    """Build the critic's final evaluation prompt (100-point rubric).

    Embeds only the last 3000 chars of the novel.
    NOTE(review): the `language` parameter is ignored — only a Korean
    template exists; confirm intended.
    """
    return f"""μ™„μ„±λœ μ†Œμ„€μ„ AGI ν“¨λ§ν…ŒμŠ€νŠΈ κΈ°μ€€μΌλ‘ ν‰κ°€ν•˜μ„Έμš”.
**μž‘ν’ˆ μ •λ³΄:**
- μ΄ λΆ„λŸ‰: {word_count}λ‹¨μ–΄
- λͺ©ν‘œ λΆ„λŸ‰: 8,000λ‹¨μ–΄ μ΄μƒ
**μž‘ν’ˆ (λ§ˆμ§€λ§‰ λΆ€λΆ„):**
{complete_novel[-3000:]}
**ν‰κ°€ κΈ°μ€€ (AGI ν“¨λ§ν…ŒμŠ€νŠΈ):**
1. **μž₯νŽΈμ†Œμ„€λ‘μ„œμ μ™„μ„±λ (40μ )**
- ν†΅ν•©λœ μ„œμ‚¬ κ΅¬μ‘° (λ°λ³΅ μ—†μŒ)
- μΈλ¬Όμ λͺ…ν™•ν•œ λ³€ν™” arc
- ν”Œλ‘―μ μΆ•μ κ³Ό ν•΄κ²°
- 8,000λ‹¨μ–΄ μ΄μƒ λΆ„λŸ‰
2. **λ¬Έν•™μ  μ„±μ·¨ (30μ )**
- μ£Όμ œ μμ‹μ κΉŠμ΄
- μΈλ¬Ό μ‹¬λ¦¬μ μ„€λλ ₯
- λ¬Έμ²΄μ μΌκ΄€μ„±κ³Ό μ•„λ¦„λ‹€μ›€
- μƒμ§•κ³Ό μ€μœ μ ν¨κ³Ό
3. **μ‚¬νšŒμ  ν†΅μ°° (20μ )**
- ν˜„λŒ€ μ‚¬νšŒ λ¬Έμ œ ν¬μ°©
- κ°œμΈκ³Ό κ΅¬μ‘°μ μ—°κ²°
- λ³΄νŽΈμ„±κ³Ό νŠΉμˆ˜μ„± κ·  ν˜•
4. **λ…μ°½μ„±κ³Ό μΈκ°„μ„± (10μ )**
- AIκ°€ μ•„λ‹Œ μΈκ°„ μž‘κ°€μ λŠλ‚Œ
- λ…μ°½μ  ν‘œν˜„κ³Ό ν†΅μ°°
- κ°μ •μ μ§„μ μ„±
**μ΄μ : /100μ **
νŠΉνžˆ 'λ°λ³΅ κ΅¬μ‘°' λ¬Έμ œκ°€ μžˆμ—ˆλμ§€ μ—„κ²©νžˆ ν‰κ°€ν•˜μ„Έμš”."""
# --- LLM νΈμΆ ν¨μλ€ --- | |
def call_llm_sync(self, messages: List[Dict[str, str]], role: str, language: str) -> str: | |
full_content = "" | |
for chunk in self.call_llm_streaming(messages, role, language): | |
full_content += chunk | |
if full_content.startswith("β"): | |
raise Exception(f"LLM Call Failed: {full_content}") | |
return full_content | |
def call_llm_streaming(self, messages: List[Dict[str, str]], role: str, language: str) -> Generator[str, None, None]:
    """Stream chat completions from the Friendli API, yielding text chunks.

    Prepends the role's system prompt, parses SSE 'data:' lines, and
    buffers output (flushed every ~50 chars or on newline). On any error
    a chunk starting with the error marker is yielded instead of raising.
    """
    try:
        system_prompts = self.get_system_prompts(language)
        full_messages = [{"role": "system", "content": system_prompts.get(role, "")}, *messages]
        # Writers get a larger token budget than director/critic roles
        max_tokens = 15000 if role.startswith("writer") else 10000
        payload = {
            "model": self.model_id,
            "messages": full_messages,
            "max_tokens": max_tokens,
            "temperature": 0.8,
            "top_p": 0.95,
            "presence_penalty": 0.5,
            "frequency_penalty": 0.3,
            "stream": True
        }
        response = requests.post(
            self.api_url,
            headers=self.create_headers(),
            json=payload,
            stream=True,
            timeout=180
        )
        if response.status_code != 200:
            yield f"β API μ˜€λ₯˜ (μƒνƒœ μ½”λ“œ: {response.status_code})"
            return
        buffer = ""
        for line in response.iter_lines():
            if not line:
                continue
            try:
                line_str = line.decode('utf-8').strip()
                if not line_str.startswith("data: "):
                    continue
                data_str = line_str[6:]
                if data_str == "[DONE]":
                    break
                data = json.loads(data_str)
                choices = data.get("choices", [])
                if choices and choices[0].get("delta", {}).get("content"):
                    content = choices[0]["delta"]["content"]
                    buffer += content
                    # Flush in visible chunks; small sleep smooths UI streaming
                    if len(buffer) >= 50 or '\n' in buffer:
                        yield buffer
                        buffer = ""
                        time.sleep(0.01)
            except Exception as e:
                # Skip malformed SSE chunks rather than aborting the stream
                logger.error(f"μ²­ν¬ μ²˜λ¦¬ μ˜€λ₯˜: {str(e)}")
                continue
        if buffer:
            yield buffer
    except Exception as e:
        logger.error(f"μŠ€νŠΈλ¦¬λ° μ˜€λ₯˜: {type(e).__name__}: {str(e)}")
        yield f"β μ˜€λ₯˜ λ°œμƒ: {str(e)}"
def get_system_prompts(self, language: str) -> Dict[str, str]:
    """Return the system prompt for every agent role in the given language.

    Unknown languages fall back to Korean. Writer roles 1-10 all share
    the common "writer_base" prompt.
    """
    base_prompts = {
        "Korean": {
            "director": """λΉμ μ νκ΅ νλ λ¬Ένμ κ±°μ₯μ λλ€.
λ°λ³΅μ΄ μλ μ§ν, μνμ΄ μλ λ°μ μ ν΅ν΄ νλμ κ°λ ₯ν μμ¬λ₯Ό ꡬμΆνμΈμ.
κ°μΈμ λ¬Έμ λ₯Ό μ¬ν ꡬ쑰μ μ°κ²°νλ©°, μΈλ¬Όμ μ§μ ν λ³νλ₯Ό κ·Έλ €λ΄μΈμ.""",
            "critic": """λΉμ μ μ격ν λ¬Έν λΉνκ°μ λλ€.
νΉν 'λ°λ³΅ ꡬ쑰'μ 'μμ¬ μ 체'λ₯Ό μ² μ ν κ°μνμΈμ.
μνμ΄ μ§μ ν μ₯νΈμμ€μΈμ§, μλλ©΄ λ°λ³΅λλ λ¨νΈμ μ§ν©μΈμ§ ꡬλ³νμΈμ.""",
            "writer_base": """λΉμ μ νλ νκ΅ λ¬Έν μκ°μ λλ€.
μ΄μ λ¨κ³μ κ²°κ³Όλ₯Ό λ°μ μλ‘μ΄ κ΅λ©΄μΌλ‘ λ°μ μν€μΈμ.
μ΅μ 800λ¨μ΄λ₯Ό μμ±νλ©°, λ΄λ©΄κ³Ό μ¬νλ₯Ό λμμ ν¬μ°©νμΈμ.
μ λ μ΄μ κ³Ό κ°μ μν©μ λ°λ³΅νμ§ λ§μΈμ."""
        },
        "English": {
            "director": """You are a master of contemporary literary fiction.
Build one powerful narrative through progression not repetition, development not cycles.
Connect individual problems to social structures while depicting genuine character transformation.""",
            "critic": """You are a strict literary critic.
Vigilantly monitor for 'repetitive structure' and 'narrative stagnation'.
Distinguish whether this is a true novel or a collection of repeated episodes.""",
            "writer_base": """You are a contemporary literary writer.
Take results from previous phase and develop into new territory.
Write minimum 800 words, capturing both interior and society.
Never repeat previous situations."""
        }
    }
    role_prompts = dict(base_prompts.get(language, base_prompts["Korean"]))
    # Every numbered writer (writer1..writer10) uses the shared base prompt.
    role_prompts.update({f"writer{n}": role_prompts["writer_base"] for n in range(1, 11)})
    return role_prompts
# --- Main process ---
def process_novel_stream(self, query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, List[Dict[str, Any]], str], None, None]:
    """Run (or resume) the full staged novel-writing pipeline.

    Yields (status message, stage list, session id) after every update so
    the UI can re-render progress incrementally. All stage output is
    persisted through NovelDatabase, which is what makes resuming possible.
    """
    try:
        resume_from_stage = 0
        if session_id:
            # Resuming: restore query/language from the stored session and
            # continue right after the last finished stage.
            self.current_session_id = session_id
            session = NovelDatabase.get_session(session_id)
            if session:
                query = session['user_query']
                language = session['language']
                resume_from_stage = session['current_stage'] + 1
                # Restore the narrative tracker snapshot, if one was saved.
                saved_tracker = NovelDatabase.load_narrative_tracker(session_id)
                if saved_tracker:
                    self.narrative_tracker = saved_tracker
        else:
            self.current_session_id = NovelDatabase.create_session(query, language)
            logger.info(f"Created new session: {self.current_session_id}")
        stages = []
        if resume_from_stage > 0:
            # Rebuild the in-memory stage list from persisted rows.
            stages = [{
                "name": s['stage_name'],
                "status": s['status'],
                "content": s.get('content', ''),
                "word_count": s.get('word_count', 0),
                "progression_score": s.get('progression_score', 0.0)
            } for s in NovelDatabase.get_stages(self.current_session_id)]
        # Running total of words across all writer stages.
        total_words = NovelDatabase.get_total_words(self.current_session_id)
        for stage_idx in range(resume_from_stage, len(PROGRESSIVE_STAGES)):
            role, stage_name = PROGRESSIVE_STAGES[stage_idx]
            if stage_idx >= len(stages):
                stages.append({
                    "name": stage_name,
                    "status": "active",
                    "content": "",
                    "word_count": 0,
                    "progression_score": 0.0
                })
            else:
                stages[stage_idx]["status"] = "active"
            yield f"π μ§ν μ€... (νμ¬ {total_words:,}λ¨μ΄)", stages, self.current_session_id
            prompt = self.get_stage_prompt(stage_idx, role, query, language, stages)
            stage_content = ""
            # Stream the LLM output, updating the stage row per chunk.
            for chunk in self.call_llm_streaming([{"role": "user", "content": prompt}], role, language):
                stage_content += chunk
                stages[stage_idx]["content"] = stage_content
                stages[stage_idx]["word_count"] = len(stage_content.split())
                yield f"π {stage_name} μμ± μ€... ({total_words + stages[stage_idx]['word_count']:,}λ¨μ΄)", stages, self.current_session_id
            # Score narrative progression for writer stages only.
            if role.startswith("writer"):
                writer_num = int(re.search(r'\d+', role).group())
                progression_score = self.evaluate_progression(stage_content, writer_num)
                stages[stage_idx]["progression_score"] = progression_score
                # Update the narrative tracker with this phase's summary.
                self.update_narrative_tracker(stage_content, writer_num)
            stages[stage_idx]["status"] = "complete"
            NovelDatabase.save_stage(
                self.current_session_id, stage_idx, stage_name, role,
                stage_content, "complete", stages[stage_idx].get("progression_score", 0.0)
            )
            # Persist the narrative tracker so a resume can restore it.
            NovelDatabase.save_narrative_tracker(self.current_session_id, self.narrative_tracker)
            # Refresh the total word count from the database.
            total_words = NovelDatabase.get_total_words(self.current_session_id)
            yield f"β {stage_name} μλ£ (μ΄ {total_words:,}λ¨μ΄)", stages, self.current_session_id
        # Assemble the final novel and run the critic's closing report.
        final_novel = NovelDatabase.get_writer_content(self.current_session_id)
        final_word_count = len(final_novel.split())
        final_report = self.generate_literary_report(final_novel, final_word_count, language)
        NovelDatabase.update_final_novel(self.current_session_id, final_novel, final_report)
        yield f"β μμ€ μμ±! μ΄ {final_word_count:,}λ¨μ΄ (λͺ©ν: {TARGET_WORDS:,}λ¨μ΄)", stages, self.current_session_id
    except Exception as e:
        logger.error(f"μμ€ μμ± νλ‘μΈμ€ μ€λ₯: {e}", exc_info=True)
        yield f"β μ€λ₯ λ°μ: {e}", stages if 'stages' in locals() else [], self.current_session_id
def get_stage_prompt(self, stage_idx: int, role: str, query: str, language: str, stages: List[Dict]) -> str:
    """Build the LLM prompt for the pipeline stage at stage_idx.

    Stage layout: 0-2 planning (director draft / critic review / director
    revision), 3-12 first drafts by writers 1-10, 13 critic consistency
    review, 14-23 writer revisions, 24 final critic review.
    """
    if stage_idx == 0:
        return self.create_director_initial_prompt(query, language)
    if stage_idx == 1:
        return self.create_critic_director_prompt(stages[0]["content"], query, language)
    if stage_idx == 2:
        return self.create_director_revision_prompt(stages[0]["content"], stages[1]["content"], query, language)
    master_plan = stages[2]["content"]
    if 3 <= stage_idx <= 12:
        # First-draft writer stages.
        writer_num = stage_idx - 2
        previous_content = self.get_previous_writer_content(stages, writer_num)
        phase_requirements = self.narrative_tracker.generate_phase_requirements(writer_num)
        narrative_summary = self.generate_narrative_summary(stages, writer_num)
        return self.create_writer_prompt(
            writer_num, master_plan, previous_content,
            phase_requirements, narrative_summary, language
        )
    if stage_idx == 13:
        # Critic mid-point consistency review over all drafts so far.
        return self.create_critic_consistency_prompt(
            self.get_all_writer_content(stages, 12), self.narrative_tracker, query, language
        )
    if 14 <= stage_idx <= 23:
        # Writer revisions based on the critic's consistency feedback.
        writer_num = stage_idx - 13
        return self.create_writer_revision_prompt(
            writer_num, stages[2 + writer_num]["content"], stages[13]["content"], language
        )
    if stage_idx == 24:
        # Final critic review of the assembled novel.
        complete_novel = self.get_all_writer_content(stages, 23)
        return self.create_critic_final_prompt(complete_novel, len(complete_novel.split()), language)
    return ""
def create_director_revision_prompt(self, initial_plan: str, critic_feedback: str, user_query: str, language: str) -> str:
    """Prompt the director to revise the master plan using the critic's feedback.

    NOTE(review): the prompt text is Korean-only and the `language`
    argument is unused here — confirm whether an English variant is intended.
    """
    return f"""λΉνμ λ°μνμ¬ ν΅ν©λ μμ¬ κ΅¬μ‘°λ₯Ό μμ±νμΈμ.
**μ μ£Όμ :** {user_query}
**μ΄κΈ° κΈ°ν:**
{initial_plan}
**λΉν:**
{critic_feedback}
**ν΅μ¬ μμ μ¬ν:**
1. λ°λ³΅ ꡬ쑰 μμ  μ κ±°
2. 10λ¨κ³κ° νλμ μ΄μΌκΈ°λ‘ μ°κ²°
3. μΈλ¬Όμ λͺ νν λ³ν κΆ€μ 
4. 8,000λ¨μ΄ λΆλ κ³ν
κ° λ¨κ³κ° μ΄μ μ νμ°μ  κ²°κ³Όκ° λλλ‘ μμ νμΈμ."""
def get_previous_writer_content(self, stages: List[Dict], current_writer: int) -> str:
    """Return the draft written by the immediately preceding writer, or ""."""
    # Writer N's draft lives at stage index N + 2, so the previous writer
    # (N - 1) sits at index N + 1. Writer 1 has no predecessor.
    if current_writer == 1:
        return ""
    idx = current_writer + 1
    if idx >= len(stages):
        return ""
    return stages[idx]["content"] or ""
def get_all_writer_content(self, stages: List[Dict], up_to_stage: int) -> str:
    """Join every non-empty writer-stage draft up to and including up_to_stage."""
    # A stage counts as writer output when its display name contains "writer".
    return "\n\n".join(
        stage["content"]
        for idx, stage in enumerate(stages)
        if idx <= up_to_stage and "writer" in stage.get("name", "") and stage["content"]
    )
def generate_narrative_summary(self, stages: List[Dict], up_to_writer: int) -> str:
    """Summarize the narrative phases completed before the given writer."""
    if up_to_writer == 1:
        return "첫 μμμ λλ€."
    summaries = self.narrative_tracker.phase_summaries
    parts = [
        f"[{NARRATIVE_PHASES[i-1]}]: {summaries[i]}"
        for i in range(1, up_to_writer)
        if i in summaries
    ]
    # Fall back to a generic continuation instruction when nothing is tracked yet.
    return "\n".join(parts) if parts else "μ΄μ  λ΄μ©μ μ΄μ΄λ°μ μ§ννμΈμ."
def update_narrative_tracker(self, content: str, writer_num: int):
    """Store a rough summary of this writer's output in the phase tracker."""
    # Treat any stripped line longer than 50 chars as a key event; keep at most 3.
    # (Crude heuristic — a more careful analysis could replace this later.)
    key_events = []
    for raw_line in content.split('\n'):
        stripped = raw_line.strip()
        if len(stripped) > 50:
            key_events.append(stripped)
            if len(key_events) == 3:
                break
    if key_events:
        # Summary = first two events joined, truncated to 200 chars, plus ellipsis.
        self.narrative_tracker.phase_summaries[writer_num] = " ".join(key_events[:2])[:200] + "..."
def evaluate_progression(self, content: str, phase: int) -> float:
    """Heuristically score (max 10.0) how much this phase advances the narrative.

    Starts at 5.0 and adds bonuses for: meeting the word quota (+2.0),
    substantial new vocabulary vs. the previous phase summary (+1.5), and
    explicit change/transformation language (+1.5).
    """
    score = 5.0
    # Bonus: met the per-writer word quota.
    if len(content.split()) >= MIN_WORDS_PER_WRITER:
        score += 2.0
    # Bonus: over 100 words not seen in the previous phase's summary.
    if phase > 1:
        prev_summary = self.narrative_tracker.phase_summaries.get(phase - 1, "")
        if prev_summary:
            novel_words = set(content.split()) - set(prev_summary.split())
            if len(novel_words) > 100:
                score += 1.5
    # Bonus: explicit mention of change / transformation.
    change_keywords = ['λ³ν', 'λ¬λΌμ‘', 'μλ‘μ΄', 'μ΄μ λ', 'λ μ΄μ',
                       'changed', 'different', 'new', 'now', 'no longer']
    if any(keyword in content for keyword in change_keywords):
        score += 1.5
    return min(10.0, score)
def generate_literary_report(self, complete_novel: str, word_count: int, language: str) -> str:
    """Run the critic's final evaluation over the finished novel.

    Returns the critic's report text, or a fixed error message if the
    LLM call fails (the pipeline should still finish in that case).
    """
    prompt = self.create_critic_final_prompt(complete_novel, word_count, language)
    try:
        return self.call_llm_sync([{"role": "user", "content": prompt}], "critic", language)
    except Exception as e:
        logger.error(f"μ΅μ’ λ³΄κ³ μ μμ± μ€ν¨: {e}")
        return "λ³΄κ³ μ μμ± μ€ μ€λ₯ λ°μ"
# --- Utility functions ---
def process_query(query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, str, str, str], None, None]:
    """Drive the full novel pipeline, yielding UI updates per step.

    Yields (stages markdown, novel markdown, status text, session id).
    An empty/blank query short-circuits with an error status.
    """
    if not query.strip():
        yield "", "", "β μ£Όμ λ₯Ό μ λ ₯ν΄μ£ΌμΈμ.", session_id
        return
    system = ProgressiveLiterarySystem()
    stages_markdown = ""
    novel_content = ""
    for status, stages, current_session_id in system.process_novel_stream(query, language, session_id):
        stages_markdown = format_stages_display(stages)
        # Once the final ten stages are complete, render the assembled novel.
        last_ten_done = bool(stages) and all(s.get("status") == "complete" for s in stages[-10:])
        if last_ten_done:
            novel_content = format_novel_display(NovelDatabase.get_writer_content(current_session_id))
        yield stages_markdown, novel_content, status or "π μ²λ¦¬ μ€...", current_session_id
def get_active_sessions(language: str) -> List[str]:
    """Format each active session as a one-line label for the dropdown."""
    labels = []
    for s in NovelDatabase.get_active_sessions():
        labels.append(
            f"{s['session_id'][:8]}... - {s['user_query'][:50]}... ({s['created_at']}) [{s['total_words']:,}λ¨μ΄]"
        )
    return labels
def auto_recover_session(language: str) -> Tuple[Optional[str], str]:
    """Pick the most recent active session for automatic recovery.

    Returns (session_id, status message); session_id is None when there
    is nothing to recover.
    """
    sessions = NovelDatabase.get_active_sessions()
    if not sessions:
        return None, "볡ꡬν  μΈμ μ΄ μμ΅λλ€."
    latest_session = sessions[0]
    return latest_session['session_id'], f"μΈμ  {latest_session['session_id'][:8]}... 볡ꡬλ¨"
def resume_session(session_id: str, language: str) -> Generator[Tuple[str, str, str, str], None, None]:
    """Resume a stored session by re-entering the generation pipeline."""
    if not session_id:
        yield "", "", "β μΈμ  IDκ° μμ΅λλ€.", session_id
        return
    # Dropdown labels look like "abcd1234... - theme..."; keep only the id prefix.
    if "..." in session_id:
        session_id = session_id.split("...")[0]
    session = NovelDatabase.get_session(session_id)
    if not session:
        yield "", "", "β μΈμ μ μ°Ύμ μ μμ΅λλ€.", None
        return
    yield from process_query(session['user_query'], session['language'], session_id)
def download_novel(novel_text: str, format_type: str, language: str, session_id: str) -> Optional[str]:
    """Write the novel to disk in the requested format; return the path or None."""
    if not novel_text or not session_id:
        return None
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    base_name = f"novel_{session_id[:8]}_{stamp}"
    try:
        # Fall back to plain text when python-docx is unavailable.
        if format_type == "DOCX" and DOCX_AVAILABLE:
            return export_to_docx(novel_text, base_name, language, session_id)
        return export_to_txt(novel_text, base_name)
    except Exception as e:
        logger.error(f"νμΌ μμ± μ€ν¨: {e}")
        return None
def format_stages_display(stages: List[Dict]) -> str:
    """Render the creation pipeline as a markdown progress panel."""
    # Total words only count writer stages (by display name).
    words_so_far = sum(s.get('word_count', 0) for s in stages if 'writer' in s.get('name', ''))
    parts = [
        "## π¬ μ§ν μν©\n\n",
        f"**μ΄ λ¨μ΄ μ: {words_so_far:,} / {TARGET_WORDS:,}**\n\n",
    ]
    for stage in stages:
        if stage['status'] == 'complete':
            icon = "β "
        elif stage['status'] == 'active':
            icon = "π"
        else:
            icon = "β³"
        parts.append(f"{icon} **{stage['name']}**")
        if stage.get('word_count', 0) > 0:
            parts.append(f" ({stage['word_count']:,}λ¨μ΄)")
        if stage.get('progression_score', 0) > 0:
            parts.append(f" [μ§νλ: {stage['progression_score']:.1f}/10]")
        parts.append("\n")
        if stage['content']:
            # Show at most the first 200 characters as a blockquote preview.
            body = stage['content']
            preview = body[:200] + "..." if len(body) > 200 else body
            parts.append(f"> {preview}\n\n")
    return "".join(parts)
def format_novel_display(novel_text: str) -> str:
    """Render the finished novel as markdown with a word-count header."""
    if not novel_text:
        return "μμ§ μμ±λ λ΄μ©μ΄ μμ΅λλ€."
    word_total = len(novel_text.split())
    header = (
        "# π μμ±λ μμ€\n\n"
        f"**μ΄ λΆλ: {word_total:,}λ¨μ΄ (λͺ©ν: {TARGET_WORDS:,}λ¨μ΄)**\n\n"
        "---\n\n"
    )
    # Re-emit each non-blank paragraph, normalizing separation to blank lines.
    body = "".join(f"{part}\n\n" for part in novel_text.split('\n\n') if part.strip())
    return header + body
def export_to_docx(content: str, filename: str, language: str, session_id: str) -> str:
    """Export the novel to a Word document and return its path.

    Builds a title page (session query + generation date + word count),
    then the body with 1.5 line spacing. The path is "<filename>.docx".
    """
    doc = Document()
    # US-Letter page with book-style margins.
    section = doc.sections[0]
    section.page_height = Inches(11)
    section.page_width = Inches(8.5)
    section.top_margin = Inches(1)
    section.bottom_margin = Inches(1)
    section.left_margin = Inches(1.25)
    section.right_margin = Inches(1.25)
    # Session metadata (title comes from the original user query).
    session = NovelDatabase.get_session(session_id)
    # Title page
    title_para = doc.add_paragraph()
    title_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
    if session:
        title_run = title_para.add_run(session["user_query"])
        title_run.font.size = Pt(24)
        title_run.bold = True
    # Meta info (date, word count)
    doc.add_paragraph()
    meta_para = doc.add_paragraph()
    meta_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
    meta_para.add_run(f"μμ±μΌ: {datetime.now().strftime('%Yλ %mμ %dμΌ')}\n")
    meta_para.add_run(f"μ΄ λ¨μ΄ μ: {len(content.split()):,}λ¨μ΄")
    # Start the body on a fresh page.
    doc.add_page_break()
    # Body style: 11pt, 1.5 line spacing.
    style = doc.styles['Normal']
    style.font.name = 'Calibri'
    style.font.size = Pt(11)
    style.paragraph_format.line_spacing = 1.5
    style.paragraph_format.space_after = Pt(6)
    # One docx paragraph per blank-line-separated paragraph of the novel.
    paragraphs = content.split('\n\n')
    for para_text in paragraphs:
        if para_text.strip():
            para = doc.add_paragraph(para_text.strip())
    # BUG FIX: the path previously ignored the `filename` argument entirely
    # (a literal with no placeholder), so every export clobbered one file.
    filepath = f"{filename}.docx"
    doc.save(filepath)
    return filepath
def export_to_txt(content: str, filename: str) -> str:
    """Export the novel as a UTF-8 text file and return its path.

    BUG FIX: the path previously ignored the `filename` argument (a literal
    f-string with no placeholder), so every export wrote to the same file.
    """
    filepath = f"{filename}.txt"
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write(content)
    return filepath
# CSS styles
# Custom CSS injected into the Gradio Blocks app: frosted-glass panels over a
# blue gradient background, scrollable output panes, and a progress bar style.
custom_css = """
.gradio-container {
background: linear-gradient(135deg, #1e3c72 0%, #2a5298 50%, #1e3c72 100%);
min-height: 100vh;
}
.main-header {
background-color: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
padding: 30px;
border-radius: 12px;
margin-bottom: 30px;
text-align: center;
color: white;
border: 1px solid rgba(255, 255, 255, 0.2);
}
.progress-note {
background-color: rgba(255, 223, 0, 0.1);
border-left: 3px solid #ffd700;
padding: 15px;
margin: 20px 0;
border-radius: 8px;
color: #fff;
}
.input-section {
background-color: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
padding: 20px;
border-radius: 12px;
margin-bottom: 20px;
border: 1px solid rgba(255, 255, 255, 0.2);
}
.session-section {
background-color: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
padding: 15px;
border-radius: 8px;
margin-top: 20px;
color: white;
border: 1px solid rgba(255, 255, 255, 0.2);
}
#stages-display {
background-color: rgba(255, 255, 255, 0.95);
padding: 20px;
border-radius: 12px;
max-height: 600px;
overflow-y: auto;
box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}
#novel-output {
background-color: rgba(255, 255, 255, 0.95);
padding: 30px;
border-radius: 12px;
max-height: 700px;
overflow-y: auto;
box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}
.download-section {
background-color: rgba(255, 255, 255, 0.9);
padding: 15px;
border-radius: 8px;
margin-top: 20px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
/* μ§ν νμκΈ° μ€νμΌ */
.progress-bar {
background-color: #e0e0e0;
height: 20px;
border-radius: 10px;
overflow: hidden;
margin: 10px 0;
}
.progress-fill {
background-color: #4CAF50;
height: 100%;
transition: width 0.3s ease;
}
"""
# Create the Gradio interface
def create_interface():
    """Build the Gradio Blocks UI and wire all event handlers.

    Returns the (unlaunched) Blocks app. Two gr.State holders carry the
    active session id and the latest novel text across events.
    """
    with gr.Blocks(css=custom_css, title="AI μ§νν μ₯νΈμμ€ μμ± μμ€ν ") as interface:
        gr.HTML("""
<div class="main-header">
<h1 style="font-size: 2.5em; margin-bottom: 10px;">
π AI μ§νν μ₯νΈμμ€ μμ± μμ€ν 
</h1>
<h3 style="color: #ddd; margin-bottom: 20px;">
8,000λ¨μ΄ μ΄μμ ν΅ν©λ μμ¬ κ΅¬μ‘°λ₯Ό κ°μ§ μ€νΈμμ€ μ°½μ
</h3>
<p style="font-size: 1.1em; color: #eee; max-width: 800px; margin: 0 auto;">
10κ°μ μ κΈ°μ μΌλ‘ μ°κ²°λ λ¨κ³λ₯Ό ν΅ν΄ νλμ μμ ν μ΄μΌκΈ°λ₯Ό λ§λ€μ΄λ λλ€.
<br>
κ° λ¨κ³λ μ΄μ  λ¨κ³μ νμ°μ  κ²°κ³Όλ‘ μ΄μ΄μ§λ©°, μΈλ¬Όμ λ³νμ μ±μ₯μ μΆμ ν©λλ€.
</p>
<div class="progress-note">
β‘ λ°λ³΅μ΄ μλ μΆμ , μνμ΄ μλ μ§νμ ν΅ν μ§μ ν μ₯νΈ μμ¬
</div>
</div>
""")
        # Cross-event state: the active session id.
        current_session_id = gr.State(None)
        with gr.Row():
            with gr.Column(scale=1):
                # Left column: query input and controls.
                with gr.Group(elem_classes=["input-section"]):
                    query_input = gr.Textbox(
                        label="μμ€ μ£Όμ  / Novel Theme",
                        placeholder="μ€νΈμμ€μ μ£Όμ λ₯Ό μ λ ₯νμΈμ. μΈλ¬Όμ λ³νμ μ±μ₯μ΄ μ€μ¬μ΄ λλ μ΄μΌκΈ°...\nEnter the theme for your novella. Focus on character transformation and growth...",
                        lines=4
                    )
                    language_select = gr.Radio(
                        choices=["Korean", "English"],
                        value="Korean",
                        label="μΈμ΄ / Language"
                    )
                    with gr.Row():
                        submit_btn = gr.Button("π μμ€ μμ± μμ", variant="primary", scale=2)
                        clear_btn = gr.Button("ποΈ μ΄κΈ°ν", scale=1)
                    status_text = gr.Textbox(
                        label="μν",
                        interactive=False,
                        value="π μ€λΉ μλ£"
                    )
                # Session management (list / resume / auto-recover).
                with gr.Group(elem_classes=["session-section"]):
                    gr.Markdown("### πΎ μ§ν μ€μΈ μΈμ ")
                    session_dropdown = gr.Dropdown(
                        label="μΈμ  μ ν",
                        choices=[],
                        interactive=True
                    )
                    with gr.Row():
                        refresh_btn = gr.Button("π λͺ©λ‘ μλ‘κ³ μΉ¨", scale=1)
                        resume_btn = gr.Button("βΆοΈ μ ν μ¬κ°", variant="secondary", scale=1)
                        auto_recover_btn = gr.Button("β»οΈ μ΅κ·Ό μΈμ  볡ꡬ", scale=1)
            with gr.Column(scale=2):
                # Right column: live progress and the finished novel.
                with gr.Tab("π μ°½μ μ§ν"):
                    stages_display = gr.Markdown(
                        value="μ°½μ κ³Όμ μ΄ μ¬κΈ°μ νμλ©λλ€...",
                        elem_id="stages-display"
                    )
                with gr.Tab("π μμ±λ μμ€"):
                    novel_output = gr.Markdown(
                        value="μμ±λ μμ€μ΄ μ¬κΈ°μ νμλ©λλ€...",
                        elem_id="novel-output"
                    )
                    with gr.Group(elem_classes=["download-section"]):
                        gr.Markdown("### π₯ μμ€ λ€μ΄λ‘λ")
                        with gr.Row():
                            format_select = gr.Radio(
                                choices=["DOCX", "TXT"],
                                value="DOCX" if DOCX_AVAILABLE else "TXT",
                                label="νμ"
                            )
                            download_btn = gr.Button("β¬οΈ λ€μ΄λ‘λ", variant="secondary")
                        download_file = gr.File(
                            label="λ€μ΄λ‘λλ νμΌ",
                            visible=False
                        )
        # Hidden state: latest rendered novel text (source for downloads).
        novel_text_state = gr.State("")
        # Example themes.
        with gr.Row():
            gr.Examples(
                examples=[
                    ["μ€μ§ν μ€λ  λ¨μ±μ΄ μλ‘μ΄ μΆμ μλ―Έλ₯Ό μ°Ύμκ°λ μ¬μ "],
                    ["λμμμ μκ³¨λ‘ μ΄μ£Όν μ²λ
μ μ μκ³Ό μ±μ₯ μ΄μΌκΈ°"],
                    ["μΈ μΈλκ° ν¨κ» μ¬λ κ°μ‘±μ κ°λ±κ³Ό νν΄"],
                    ["A middle-aged woman's journey to rediscover herself after divorce"],
                    ["The transformation of a cynical journalist through unexpected encounters"],
                    ["μμ  μμ μ μ΄μνλ λ
ΈλΆλΆμ λ§μ§λ§ 1λ
"],
                    ["AI μλμ μΌμ리λ₯Ό μμ λ²μ­κ°μ μλ‘μ΄ λμ "]
                ],
                inputs=query_input,
                label="π‘ μ£Όμ  μμ"
            )
        # Event handlers.
        def refresh_sessions():
            # Reload the dropdown choices from the database.
            try:
                sessions = get_active_sessions("Korean")
                return gr.update(choices=sessions)
            except Exception as e:
                logger.error(f"Error refreshing sessions: {str(e)}")
                return gr.update(choices=[])
        def handle_auto_recover(language):
            # Pick the most recent active session, if any.
            session_id, message = auto_recover_session(language)
            return session_id, message
        # Wire events.
        submit_btn.click(
            fn=process_query,
            inputs=[query_input, language_select, current_session_id],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        # Mirror the rendered novel into hidden state for the download handler.
        novel_output.change(
            fn=lambda x: x,
            inputs=[novel_output],
            outputs=[novel_text_state]
        )
        # Strip the "..." label suffix to recover the raw session id, then resume.
        resume_btn.click(
            fn=lambda x: x.split("...")[0] if x and "..." in x else x,
            inputs=[session_dropdown],
            outputs=[current_session_id]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        auto_recover_btn.click(
            fn=handle_auto_recover,
            inputs=[language_select],
            outputs=[current_session_id, status_text]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        refresh_btn.click(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )
        clear_btn.click(
            fn=lambda: ("", "", "π μ€λΉ μλ£", "", None),
            outputs=[stages_display, novel_output, status_text, novel_text_state, current_session_id]
        )
        def handle_download(format_type, language, session_id, novel_text):
            # Only offer a file when there is a session and rendered text.
            if not session_id or not novel_text:
                return gr.update(visible=False)
            file_path = download_novel(novel_text, format_type, language, session_id)
            if file_path:
                return gr.update(value=file_path, visible=True)
            else:
                return gr.update(visible=False)
        download_btn.click(
            fn=handle_download,
            inputs=[format_select, language_select, current_session_id, novel_text_state],
            outputs=[download_file]
        )
        # Populate the session dropdown on startup.
        interface.load(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )
    return interface
# Main entry point
if __name__ == "__main__":
    # Log the effective configuration before starting.
    logger.info("AI μ§νν μ₯νΈμμ€ μμ± μμ€ν  μμ...")
    logger.info("=" * 60)
    logger.info(f"API μλν¬μΈνΈ: {API_URL}")
    logger.info(f"λͺ©ν λΆλ: {TARGET_WORDS:,}λ¨μ΄")
    logger.info(f"μκ°λΉ μ΅μ λΆλ: {MIN_WORDS_PER_WRITER:,}λ¨μ΄")
    if BRAVE_SEARCH_API_KEY:
        logger.info("μΉ κ²μμ΄ νμ±νλμμ΅λλ€.")
    else:
        logger.warning("μΉ κ²μμ΄ λΉνμ±νλμμ΅λλ€.")
    if DOCX_AVAILABLE:
        logger.info("DOCX λ΄λ³΄λ΄κΈ°κ° νμ±νλμμ΅λλ€.")
    else:
        logger.warning("DOCX λ΄λ³΄λ΄κΈ°κ° λΉνμ±νλμμ΅λλ€.")
    logger.info("=" * 60)
    # Create the SQLite schema if it does not exist yet.
    logger.info("λ°μ΄ν°λ² μ΄μ€ μ΄κΈ°ν μ€...")
    NovelDatabase.init_db()
    logger.info("λ°μ΄ν°λ² μ΄μ€ μ΄κΈ°ν μλ£.")
    # Build and launch the Gradio app (bind on all interfaces, port 7860).
    interface = create_interface()
    interface.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        debug=True
    )