import gradio as gr
import os
import json
import requests
from datetime import datetime
import time
from typing import List, Dict, Any, Generator, Tuple, Optional, Set
import logging
import re
import tempfile
from pathlib import Path
import sqlite3
import hashlib
import threading
from contextlib import contextmanager
from dataclasses import dataclass, field, asdict
from collections import defaultdict
# --- ๋กœ๊น… ์„ค์ • ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# --- Document export imports ---
try:
from docx import Document
from docx.shared import Inches, Pt, RGBColor
from docx.enum.text import WD_ALIGN_PARAGRAPH
from docx.enum.style import WD_STYLE_TYPE
from docx.oxml.ns import qn
from docx.oxml import OxmlElement
DOCX_AVAILABLE = True
except ImportError:
DOCX_AVAILABLE = False
logger.warning("python-docx not installed. DOCX export will be disabled.")
# --- ํ™˜๊ฒฝ ๋ณ€์ˆ˜ ๋ฐ ์ƒ์ˆ˜ ---
FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "")
BRAVE_SEARCH_API_KEY = os.getenv("BRAVE_SEARCH_API_KEY", "")
API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
MODEL_ID = "dep89a2fld32mcm"
DB_PATH = "novel_sessions_v5.db"
# ๋ชฉํ‘œ ๋ถ„๋Ÿ‰ ์„ค์ •
TARGET_WORDS = 8000 # ์•ˆ์ „ ๋งˆ์ง„์„ ์œ„ํ•ด 8000๋‹จ์–ด
MIN_WORDS_PER_WRITER = 800 # ๊ฐ ์ž‘๊ฐ€ ์ตœ์†Œ ๋ถ„๋Ÿ‰
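# 10 writing phases * MIN_WORDS_PER_WRITER (800 words) = TARGET_WORDS (8,000 words).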
# --- ํ™˜๊ฒฝ ๋ณ€์ˆ˜ ๊ฒ€์ฆ ---
if not FRIENDLI_TOKEN:
logger.error("FRIENDLI_TOKEN not set. Application will not work properly.")
FRIENDLI_TOKEN = "dummy_token_for_testing"
if not BRAVE_SEARCH_API_KEY:
logger.warning("BRAVE_SEARCH_API_KEY not set. Web search features will be disabled.")
# --- ์ „์—ญ ๋ณ€์ˆ˜ ---
db_lock = threading.Lock()
# ์„œ์‚ฌ ์ง„ํ–‰ ๋‹จ๊ณ„ ์ •์˜
NARRATIVE_PHASES = [
"๋„์ž…: ์ผ์ƒ๊ณผ ๊ท ์—ด",
"๋ฐœ์ „ 1: ๋ถˆ์•ˆ์˜ ๊ณ ์กฐ",
"๋ฐœ์ „ 2: ์™ธ๋ถ€ ์ถฉ๊ฒฉ",
"๋ฐœ์ „ 3: ๋‚ด์  ๊ฐˆ๋“ฑ ์‹ฌํ™”",
"์ ˆ์ • 1: ์œ„๊ธฐ์˜ ์ •์ ",
"์ ˆ์ • 2: ์„ ํƒ์˜ ์ˆœ๊ฐ„",
"ํ•˜๊ฐ• 1: ๊ฒฐ๊ณผ์™€ ์—ฌํŒŒ",
"ํ•˜๊ฐ• 2: ์ƒˆ๋กœ์šด ์ธ์‹",
"๊ฒฐ๋ง 1: ๋ณ€ํ™”๋œ ์ผ์ƒ",
"๊ฒฐ๋ง 2: ์—ด๋ฆฐ ์งˆ๋ฌธ"
]
# ๋‹จ๊ณ„๋ณ„ ๊ตฌ์„ฑ
PROGRESSIVE_STAGES = [
("director", "๐ŸŽฌ ๊ฐ๋…์ž: ํ†ตํ•ฉ๋œ ์„œ์‚ฌ ๊ตฌ์กฐ ๊ธฐํš"),
("critic", "๐Ÿ“ ๋น„ํ‰๊ฐ€: ์„œ์‚ฌ ์ง„ํ–‰์„ฑ๊ณผ ๊นŠ์ด ๊ฒ€ํ† "),
("director", "๐ŸŽฌ ๊ฐ๋…์ž: ์ˆ˜์ •๋œ ๋งˆ์Šคํ„ฐํ”Œ๋žœ"),
] + [
(f"writer{i}", f"โœ๏ธ ์ž‘๊ฐ€ {i}: ์ดˆ์•ˆ - {NARRATIVE_PHASES[i-1]}")
for i in range(1, 11)
] + [
("critic", "๐Ÿ“ ๋น„ํ‰๊ฐ€: ์ค‘๊ฐ„ ๊ฒ€ํ†  (์„œ์‚ฌ ๋ˆ„์ ์„ฑ๊ณผ ๋ณ€ํ™”)"),
] + [
(f"writer{i}", f"โœ๏ธ ์ž‘๊ฐ€ {i}: ์ˆ˜์ •๋ณธ - {NARRATIVE_PHASES[i-1]}")
for i in range(1, 11)
] + [
("critic", f"๐Ÿ“ ๋น„ํ‰๊ฐ€: ์ตœ์ข… ๊ฒ€ํ†  ๋ฐ ๋ฌธํ•™์  ํ‰๊ฐ€"),
]
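# 25 stages in total: 3 planning stages (director plan -> critic review -> revised master plan),
# 10 writer drafts, 1 mid-point critic review, 10 writer revisions, and 1 final critic review.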
# --- ๋ฐ์ดํ„ฐ ํด๋ž˜์Šค ---
@dataclass
class CharacterArc:
"""์ธ๋ฌผ์˜ ๋ณ€ํ™” ๊ถค์  ์ถ”์ """
name: str
initial_state: Dict[str, Any] # ์ดˆ๊ธฐ ์ƒํƒœ
phase_states: Dict[int, Dict[str, Any]] = field(default_factory=dict) # ๋‹จ๊ณ„๋ณ„ ์ƒํƒœ
transformations: List[str] = field(default_factory=list) # ์ฃผ์š” ๋ณ€ํ™”๋“ค
relationships_evolution: Dict[str, List[str]] = field(default_factory=dict) # ๊ด€๊ณ„ ๋ณ€ํ™”
@dataclass
class PlotThread:
"""ํ”Œ๋กฏ ๋ผ์ธ ์ถ”์ """
thread_id: str
description: str
introduction_phase: int
development_phases: List[int]
resolution_phase: Optional[int]
status: str = "active" # active, resolved, suspended
@dataclass
class SymbolicEvolution:
"""์ƒ์ง•์˜ ์˜๋ฏธ ๋ณ€ํ™” ์ถ”์ """
symbol: str
initial_meaning: str
phase_meanings: Dict[int, str] = field(default_factory=dict)
transformation_complete: bool = False
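# CharacterArc and PlotThread are serialized to JSON by NovelDatabase.save_narrative_tracker;
# SymbolicEvolution is currently tracked in memory only.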
# --- ํ•ต์‹ฌ ๋กœ์ง ํด๋ž˜์Šค ---
class ProgressiveNarrativeTracker:
"""์„œ์‚ฌ ์ง„ํ–‰๊ณผ ๋ˆ„์ ์„ ์ถ”์ ํ•˜๋Š” ์‹œ์Šคํ…œ"""
def __init__(self):
self.character_arcs: Dict[str, CharacterArc] = {}
self.plot_threads: Dict[str, PlotThread] = {}
self.symbolic_evolutions: Dict[str, SymbolicEvolution] = {}
self.phase_summaries: Dict[int, str] = {}
self.accumulated_events: List[Dict[str, Any]] = []
self.thematic_deepening: List[str] = []
def register_character_arc(self, name: str, initial_state: Dict[str, Any]):
"""์บ๋ฆญํ„ฐ ์•„ํฌ ๋“ฑ๋ก"""
self.character_arcs[name] = CharacterArc(name=name, initial_state=initial_state)
logger.info(f"Character arc registered: {name}")
def update_character_state(self, name: str, phase: int, new_state: Dict[str, Any], transformation: str):
"""์บ๋ฆญํ„ฐ ์ƒํƒœ ์—…๋ฐ์ดํŠธ ๋ฐ ๋ณ€ํ™” ๊ธฐ๋ก"""
if name in self.character_arcs:
arc = self.character_arcs[name]
arc.phase_states[phase] = new_state
arc.transformations.append(f"Phase {phase}: {transformation}")
logger.info(f"Character {name} transformed in phase {phase}: {transformation}")
def add_plot_thread(self, thread_id: str, description: str, intro_phase: int):
"""์ƒˆ๋กœ์šด ํ”Œ๋กฏ ๋ผ์ธ ์ถ”๊ฐ€"""
self.plot_threads[thread_id] = PlotThread(
thread_id=thread_id,
description=description,
introduction_phase=intro_phase,
development_phases=[]
)
def develop_plot_thread(self, thread_id: str, phase: int):
"""ํ”Œ๋กฏ ๋ผ์ธ ๋ฐœ์ „"""
if thread_id in self.plot_threads:
self.plot_threads[thread_id].development_phases.append(phase)
def check_narrative_progression(self, current_phase: int) -> Tuple[bool, List[str]]:
"""์„œ์‚ฌ๊ฐ€ ์‹ค์ œ๋กœ ์ง„ํ–‰๋˜๊ณ  ์žˆ๋Š”์ง€ ํ™•์ธ"""
issues = []
# 1. ์บ๋ฆญํ„ฐ ๋ณ€ํ™” ํ™•์ธ
static_characters = []
for name, arc in self.character_arcs.items():
if len(arc.transformations) < current_phase // 3: # ์ตœ์†Œ 3๋‹จ๊ณ„๋งˆ๋‹ค ๋ณ€ํ™” ํ•„์š”
static_characters.append(name)
if static_characters:
issues.append(f"๋‹ค์Œ ์ธ๋ฌผ๋“ค์˜ ๋ณ€ํ™”๊ฐ€ ๋ถ€์กฑํ•ฉ๋‹ˆ๋‹ค: {', '.join(static_characters)}")
# 2. ํ”Œ๋กฏ ์ง„ํ–‰ ํ™•์ธ
unresolved_threads = []
for thread_id, thread in self.plot_threads.items():
if thread.status == "active" and len(thread.development_phases) < 2:
unresolved_threads.append(thread.description)
if unresolved_threads:
issues.append(f"์ง„์ „๋˜์ง€ ์•Š์€ ํ”Œ๋กฏ: {', '.join(unresolved_threads)}")
# 3. ์ƒ์ง• ๋ฐœ์ „ ํ™•์ธ
static_symbols = []
for symbol, evolution in self.symbolic_evolutions.items():
if len(evolution.phase_meanings) < current_phase // 4:
static_symbols.append(symbol)
if static_symbols:
issues.append(f"์˜๋ฏธ๊ฐ€ ๋ฐœ์ „ํ•˜์ง€ ์•Š์€ ์ƒ์ง•: {', '.join(static_symbols)}")
return len(issues) == 0, issues
def generate_phase_requirements(self, phase: int) -> str:
"""๊ฐ ๋‹จ๊ณ„๋ณ„ ํ•„์ˆ˜ ์š”๊ตฌ์‚ฌํ•ญ ์ƒ์„ฑ"""
requirements = []
# ์ด์ „ ๋‹จ๊ณ„ ์š”์•ฝ
if phase > 1 and (phase-1) in self.phase_summaries:
requirements.append(f"์ด์ „ ๋‹จ๊ณ„ ํ•ต์‹ฌ: {self.phase_summaries[phase-1]}")
# ๋‹จ๊ณ„๋ณ„ ํŠน์ˆ˜ ์š”๊ตฌ์‚ฌํ•ญ
phase_name = NARRATIVE_PHASES[phase-1] if phase <= 10 else "์ˆ˜์ •"
if "๋„์ž…" in phase_name:
requirements.append("- ์ผ์ƒ์˜ ๊ท ์—ด์„ ๋ณด์—ฌ์ฃผ๋˜, ํฐ ์‚ฌ๊ฑด์ด ์•„๋‹Œ ๋ฏธ๋ฌ˜ํ•œ ๋ณ€ํ™”๋กœ ์‹œ์ž‘")
requirements.append("- ์ฃผ์š” ์ธ๋ฌผ๋“ค์˜ ์ดˆ๊ธฐ ์ƒํƒœ์™€ ๊ด€๊ณ„ ์„ค์ •")
requirements.append("- ํ•ต์‹ฌ ์ƒ์ง• ๋„์ž… (์ž์—ฐ์Šค๋Ÿฝ๊ฒŒ)")
elif "๋ฐœ์ „" in phase_name:
requirements.append("- ์ด์ „ ๋‹จ๊ณ„์˜ ๊ท ์—ด/๊ฐˆ๋“ฑ์ด ๊ตฌ์ฒดํ™”๋˜๊ณ  ์‹ฌํ™”")
requirements.append("- ์ƒˆ๋กœ์šด ์‚ฌ๊ฑด์ด๋‚˜ ์ธ์‹์ด ์ถ”๊ฐ€๋˜์–ด ๋ณต์žก์„ฑ ์ฆ๊ฐ€")
requirements.append("- ์ธ๋ฌผ ๊ฐ„ ๊ด€๊ณ„์˜ ๋ฏธ๋ฌ˜ํ•œ ๋ณ€ํ™”")
elif "์ ˆ์ •" in phase_name:
requirements.append("- ์ถ•์ ๋œ ๊ฐˆ๋“ฑ์ด ์ž„๊ณ„์ ์— ๋„๋‹ฌ")
requirements.append("- ์ธ๋ฌผ์˜ ๋‚ด์  ์„ ํƒ์ด๋‚˜ ์ธ์‹์˜ ์ „ํ™˜์ ")
requirements.append("- ์ƒ์ง•์˜ ์˜๋ฏธ๊ฐ€ ์ „๋ณต๋˜๊ฑฐ๋‚˜ ์‹ฌํ™”")
elif "ํ•˜๊ฐ•" in phase_name:
requirements.append("- ์ ˆ์ •์˜ ์—ฌํŒŒ์™€ ๊ทธ๋กœ ์ธํ•œ ๋ณ€ํ™”")
requirements.append("- ์ƒˆ๋กœ์šด ๊ท ํ˜•์ ์„ ์ฐพ์•„๊ฐ€๋Š” ๊ณผ์ •")
requirements.append("- ์ธ๋ฌผ๋“ค์˜ ๋ณ€ํ™”๋œ ๊ด€๊ณ„์™€ ์ธ์‹")
elif "๊ฒฐ๋ง" in phase_name:
requirements.append("- ๋ณ€ํ™”๋œ ์ผ์ƒ์˜ ๋ชจ์Šต")
requirements.append("- ํ•ด๊ฒฐ๋˜์ง€ ์•Š์€ ์งˆ๋ฌธ๋“ค")
requirements.append("- ์—ฌ์šด๊ณผ ์„ฑ์ฐฐ์˜ ์—ฌ์ง€")
# ๋ฐ˜๋ณต ๋ฐฉ์ง€ ์š”๊ตฌ์‚ฌํ•ญ
requirements.append("\nโš ๏ธ ์ ˆ๋Œ€ ๊ธˆ์ง€์‚ฌํ•ญ:")
requirements.append("- ์ด์ „ ๋‹จ๊ณ„์™€ ๋™์ผํ•œ ์‚ฌ๊ฑด์ด๋‚˜ ๊ฐˆ๋“ฑ ๋ฐ˜๋ณต")
requirements.append("- ์ธ๋ฌผ์ด ๊ฐ™์€ ์ƒ๊ฐ์ด๋‚˜ ๊ฐ์ •์— ๋จธ๋ฌด๋ฅด๊ธฐ")
requirements.append("- ํ”Œ๋กฏ์ด ์ œ์ž๋ฆฌ๊ฑธ์Œํ•˜๊ธฐ")
return "\n".join(requirements)
class NovelDatabase:
"""๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ๊ด€๋ฆฌ"""
@staticmethod
def init_db():
with sqlite3.connect(DB_PATH) as conn:
conn.execute("PRAGMA journal_mode=WAL")
cursor = conn.cursor()
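            # Schema: sessions (one row per generation run), stages (per-stage output,
            # unique per (session_id, stage_number)), plot_threads (plot-line records).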
cursor.execute('''
CREATE TABLE IF NOT EXISTS sessions (
session_id TEXT PRIMARY KEY,
user_query TEXT NOT NULL,
language TEXT NOT NULL,
created_at TEXT DEFAULT (datetime('now')),
updated_at TEXT DEFAULT (datetime('now')),
status TEXT DEFAULT 'active',
current_stage INTEGER DEFAULT 0,
final_novel TEXT,
literary_report TEXT,
total_words INTEGER DEFAULT 0,
narrative_tracker TEXT
)
''')
cursor.execute('''
CREATE TABLE IF NOT EXISTS stages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id TEXT NOT NULL,
stage_number INTEGER NOT NULL,
stage_name TEXT NOT NULL,
role TEXT NOT NULL,
content TEXT,
word_count INTEGER DEFAULT 0,
status TEXT DEFAULT 'pending',
progression_score REAL DEFAULT 0.0,
created_at TEXT DEFAULT (datetime('now')),
updated_at TEXT DEFAULT (datetime('now')),
FOREIGN KEY (session_id) REFERENCES sessions(session_id),
UNIQUE(session_id, stage_number)
)
''')
cursor.execute('''
CREATE TABLE IF NOT EXISTS plot_threads (
id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id TEXT NOT NULL,
thread_id TEXT NOT NULL,
description TEXT,
introduction_phase INTEGER,
status TEXT DEFAULT 'active',
created_at TEXT DEFAULT (datetime('now')),
FOREIGN KEY (session_id) REFERENCES sessions(session_id)
)
''')
conn.commit()
# ๊ธฐ์กด ๋ฉ”์„œ๋“œ๋“ค ์œ ์ง€
@staticmethod
@contextmanager
def get_db():
with db_lock:
conn = sqlite3.connect(DB_PATH, timeout=30.0)
conn.row_factory = sqlite3.Row
try:
yield conn
finally:
conn.close()
@staticmethod
def create_session(user_query: str, language: str) -> str:
session_id = hashlib.md5(f"{user_query}{datetime.now()}".encode()).hexdigest()
with NovelDatabase.get_db() as conn:
conn.cursor().execute(
'INSERT INTO sessions (session_id, user_query, language) VALUES (?, ?, ?)',
(session_id, user_query, language)
)
conn.commit()
return session_id
@staticmethod
def save_stage(session_id: str, stage_number: int, stage_name: str,
role: str, content: str, status: str = 'complete',
progression_score: float = 0.0):
word_count = len(content.split()) if content else 0
with NovelDatabase.get_db() as conn:
cursor = conn.cursor()
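            # Upsert keyed on the UNIQUE(session_id, stage_number) constraint defined in init_db().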
cursor.execute('''
INSERT INTO stages (session_id, stage_number, stage_name, role, content, word_count, status, progression_score)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(session_id, stage_number)
DO UPDATE SET content=?, word_count=?, status=?, stage_name=?, progression_score=?, updated_at=datetime('now')
''', (session_id, stage_number, stage_name, role, content, word_count, status, progression_score,
content, word_count, status, stage_name, progression_score))
# ์ด ๋‹จ์–ด ์ˆ˜ ์—…๋ฐ์ดํŠธ
cursor.execute('''
UPDATE sessions
SET total_words = (
                    SELECT COALESCE(SUM(word_count), 0)
FROM stages
WHERE session_id = ? AND role LIKE 'writer%' AND content IS NOT NULL
),
updated_at = datetime('now'),
current_stage = ?
WHERE session_id = ?
''', (session_id, stage_number, session_id))
conn.commit()
@staticmethod
def get_writer_content(session_id: str) -> str:
"""์ž‘๊ฐ€ ์ฝ˜ํ…์ธ  ๊ฐ€์ ธ์˜ค๊ธฐ (์ˆ˜์ •๋ณธ ์šฐ์„ )"""
with NovelDatabase.get_db() as conn:
all_content = []
for writer_num in range(1, 11):
# ์ˆ˜์ •๋ณธ์ด ์žˆ์œผ๋ฉด ์ˆ˜์ •๋ณธ์„, ์—†์œผ๋ฉด ์ดˆ์•ˆ์„
row = conn.cursor().execute('''
SELECT content FROM stages
WHERE session_id = ? AND role = ?
AND stage_name LIKE '%์ˆ˜์ •๋ณธ%'
ORDER BY stage_number DESC LIMIT 1
''', (session_id, f'writer{writer_num}')).fetchone()
if not row or not row['content']:
# ์ˆ˜์ •๋ณธ์ด ์—†์œผ๋ฉด ์ดˆ์•ˆ ์‚ฌ์šฉ
row = conn.cursor().execute('''
SELECT content FROM stages
WHERE session_id = ? AND role = ?
AND stage_name LIKE '%์ดˆ์•ˆ%'
ORDER BY stage_number DESC LIMIT 1
''', (session_id, f'writer{writer_num}')).fetchone()
if row and row['content']:
all_content.append(row['content'].strip())
return '\n\n'.join(all_content)
@staticmethod
def get_total_words(session_id: str) -> int:
"""์ด ๋‹จ์–ด ์ˆ˜ ๊ฐ€์ ธ์˜ค๊ธฐ"""
with NovelDatabase.get_db() as conn:
row = conn.cursor().execute(
'SELECT total_words FROM sessions WHERE session_id = ?',
(session_id,)
).fetchone()
return row['total_words'] if row and row['total_words'] else 0
@staticmethod
def save_narrative_tracker(session_id: str, tracker: ProgressiveNarrativeTracker):
"""์„œ์‚ฌ ์ถ”์ ๊ธฐ ์ €์žฅ"""
with NovelDatabase.get_db() as conn:
tracker_data = json.dumps({
'character_arcs': {k: asdict(v) for k, v in tracker.character_arcs.items()},
'plot_threads': {k: asdict(v) for k, v in tracker.plot_threads.items()},
'phase_summaries': tracker.phase_summaries,
'thematic_deepening': tracker.thematic_deepening
})
conn.cursor().execute(
'UPDATE sessions SET narrative_tracker = ? WHERE session_id = ?',
(tracker_data, session_id)
)
conn.commit()
@staticmethod
def load_narrative_tracker(session_id: str) -> Optional[ProgressiveNarrativeTracker]:
"""์„œ์‚ฌ ์ถ”์ ๊ธฐ ๋กœ๋“œ"""
with NovelDatabase.get_db() as conn:
row = conn.cursor().execute(
'SELECT narrative_tracker FROM sessions WHERE session_id = ?',
(session_id,)
).fetchone()
if row and row['narrative_tracker']:
data = json.loads(row['narrative_tracker'])
tracker = ProgressiveNarrativeTracker()
# ๋ฐ์ดํ„ฐ ๋ณต์›
for name, arc_data in data.get('character_arcs', {}).items():
tracker.character_arcs[name] = CharacterArc(**arc_data)
for thread_id, thread_data in data.get('plot_threads', {}).items():
tracker.plot_threads[thread_id] = PlotThread(**thread_data)
                # JSON round-trips dict keys as strings, so restore integer phase keys.
                tracker.phase_summaries = {int(k): v for k, v in data.get('phase_summaries', {}).items()}
tracker.thematic_deepening = data.get('thematic_deepening', [])
return tracker
return None
@staticmethod
def get_session(session_id: str) -> Optional[Dict]:
with NovelDatabase.get_db() as conn:
row = conn.cursor().execute('SELECT * FROM sessions WHERE session_id = ?', (session_id,)).fetchone()
return dict(row) if row else None
@staticmethod
def get_stages(session_id: str) -> List[Dict]:
with NovelDatabase.get_db() as conn:
rows = conn.cursor().execute('SELECT * FROM stages WHERE session_id = ? ORDER BY stage_number', (session_id,)).fetchall()
return [dict(row) for row in rows]
@staticmethod
def update_final_novel(session_id: str, final_novel: str, literary_report: str = ""):
with NovelDatabase.get_db() as conn:
conn.cursor().execute(
"UPDATE sessions SET final_novel = ?, status = 'complete', updated_at = datetime('now'), literary_report = ? WHERE session_id = ?",
(final_novel, literary_report, session_id)
)
conn.commit()
@staticmethod
def get_active_sessions() -> List[Dict]:
with NovelDatabase.get_db() as conn:
rows = conn.cursor().execute(
"SELECT session_id, user_query, language, created_at, current_stage, total_words FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 10"
).fetchall()
return [dict(row) for row in rows]
class WebSearchIntegration:
"""์›น ๊ฒ€์ƒ‰ ๊ธฐ๋Šฅ"""
def __init__(self):
self.brave_api_key = BRAVE_SEARCH_API_KEY
self.search_url = "https://api.search.brave.com/res/v1/web/search"
self.enabled = bool(self.brave_api_key)
def search(self, query: str, count: int = 3, language: str = "en") -> List[Dict]:
if not self.enabled:
return []
headers = {
"Accept": "application/json",
"X-Subscription-Token": self.brave_api_key
}
params = {
"q": query,
"count": count,
"search_lang": "ko" if language == "Korean" else "en",
"text_decorations": False,
"safesearch": "moderate"
}
try:
response = requests.get(self.search_url, headers=headers, params=params, timeout=10)
response.raise_for_status()
results = response.json().get("web", {}).get("results", [])
return results
except requests.exceptions.RequestException as e:
logger.error(f"์›น ๊ฒ€์ƒ‰ API ์˜ค๋ฅ˜: {e}")
return []
def extract_relevant_info(self, results: List[Dict], max_chars: int = 1500) -> str:
if not results:
return ""
extracted = []
total_chars = 0
for i, result in enumerate(results[:3], 1):
title = result.get("title", "")
description = result.get("description", "")
info = f"[{i}] {title}: {description}"
if total_chars + len(info) < max_chars:
extracted.append(info)
total_chars += len(info)
else:
break
return "\n".join(extracted)
class ProgressiveLiterarySystem:
"""์ง„ํ–‰ํ˜• ๋ฌธํ•™ ์†Œ์„ค ์ƒ์„ฑ ์‹œ์Šคํ…œ"""
def __init__(self):
self.token = FRIENDLI_TOKEN
self.api_url = API_URL
self.model_id = MODEL_ID
self.narrative_tracker = ProgressiveNarrativeTracker()
self.web_search = WebSearchIntegration()
self.current_session_id = None
NovelDatabase.init_db()
def create_headers(self):
return {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"}
# --- ํ”„๋กฌํ”„ํŠธ ์ƒ์„ฑ ํ•จ์ˆ˜๋“ค ---
def create_director_initial_prompt(self, user_query: str, language: str) -> str:
"""๊ฐ๋…์ž ์ดˆ๊ธฐ ๊ธฐํš - ํ†ตํ•ฉ๋œ ์„œ์‚ฌ ๊ตฌ์กฐ"""
search_results_str = ""
if self.web_search.enabled:
queries = [f"{user_query} ์‚ฌํšŒ ๋ฌธ์ œ", f"{user_query} ํ˜„๋Œ€ ํ•œ๊ตญ"]
for q in queries[:1]:
results = self.web_search.search(q, count=2, language=language)
if results:
search_results_str += self.web_search.extract_relevant_info(results) + "\n"
lang_prompts = {
"Korean": f"""๋‹น์‹ ์€ ํ˜„๋Œ€ ํ•œ๊ตญ ๋ฌธํ•™์˜ ๊ฑฐ์žฅ์ž…๋‹ˆ๋‹ค.
๋‹จํŽธ์ด ์•„๋‹Œ ์ค‘ํŽธ ์†Œ์„ค(8,000๋‹จ์–ด ์ด์ƒ)์„ ์œ„ํ•œ ํ†ตํ•ฉ๋œ ์„œ์‚ฌ ๊ตฌ์กฐ๋ฅผ ๊ธฐํšํ•˜์„ธ์š”.
**์ฃผ์ œ:** {user_query}
**์ฐธ๊ณ  ์ž๋ฃŒ:**
{search_results_str if search_results_str else "N/A"}
**ํ•„์ˆ˜ ์š”๊ตฌ์‚ฌํ•ญ:**
1. **ํ†ตํ•ฉ๋œ ์„œ์‚ฌ ๊ตฌ์กฐ (๊ฐ€์žฅ ์ค‘์š”)**
- 10๊ฐœ ๋‹จ๊ณ„๊ฐ€ ์œ ๊ธฐ์ ์œผ๋กœ ์—ฐ๊ฒฐ๋œ ๋‹จ์ผ ์„œ์‚ฌ
- ๊ฐ ๋‹จ๊ณ„๋Š” ์ด์ „ ๋‹จ๊ณ„์˜ ๊ฒฐ๊ณผ๋กœ ์ž์—ฐ์Šค๋Ÿฝ๊ฒŒ ์ด์–ด์ง
- ๋ฐ˜๋ณต์ด ์•„๋‹Œ ์ถ•์ ๊ณผ ๋ฐœ์ „
๋‹จ๊ณ„๋ณ„ ์„œ์‚ฌ ์ง„ํ–‰:
1) ๋„์ž…: ์ผ์ƒ๊ณผ ๊ท ์—ด - ํ‰๋ฒ”ํ•œ ์ผ์ƒ ์† ์ฒซ ๊ท ์—ด
2) ๋ฐœ์ „ 1: ๋ถˆ์•ˆ์˜ ๊ณ ์กฐ - ๊ท ์—ด์ด ํ™•๋Œ€๋˜๋ฉฐ ๋ถˆ์•ˆ ์ฆํญ
3) ๋ฐœ์ „ 2: ์™ธ๋ถ€ ์ถฉ๊ฒฉ - ์˜ˆ์ƒ์น˜ ๋ชปํ•œ ์™ธ๋ถ€ ์‚ฌ๊ฑด
4) ๋ฐœ์ „ 3: ๋‚ด์  ๊ฐˆ๋“ฑ ์‹ฌํ™” - ๊ฐ€์น˜๊ด€์˜ ์ถฉ๋Œ
5) ์ ˆ์ • 1: ์œ„๊ธฐ์˜ ์ •์  - ๋ชจ๋“  ๊ฐˆ๋“ฑ์ด ๊ทน๋Œ€ํ™”
6) ์ ˆ์ • 2: ์„ ํƒ์˜ ์ˆœ๊ฐ„ - ๊ฒฐ์ •์  ์„ ํƒ
7) ํ•˜๊ฐ• 1: ๊ฒฐ๊ณผ์™€ ์—ฌํŒŒ - ์„ ํƒ์˜ ์ง์ ‘์  ๊ฒฐ๊ณผ
8) ํ•˜๊ฐ• 2: ์ƒˆ๋กœ์šด ์ธ์‹ - ๋ณ€ํ™”๋œ ์„ธ๊ณ„๊ด€
9) ๊ฒฐ๋ง 1: ๋ณ€ํ™”๋œ ์ผ์ƒ - ์ƒˆ๋กœ์šด ๊ท ํ˜•
10) ๊ฒฐ๋ง 2: ์—ด๋ฆฐ ์งˆ๋ฌธ - ๋…์ž์—๊ฒŒ ๋˜์ง€๋Š” ์งˆ๋ฌธ
2. **์ธ๋ฌผ์˜ ๋ณ€ํ™” ๊ถค์ **
- ์ฃผ์ธ๊ณต: ์ดˆ๊ธฐ ์ƒํƒœ โ†’ ์ค‘๊ฐ„ ๋ณ€ํ™” โ†’ ์ตœ์ข… ์ƒํƒœ (๋ช…ํ™•ํ•œ arc)
- ์ฃผ์š” ์ธ๋ฌผ๋“ค๋„ ๊ฐ์ž์˜ ๋ณ€ํ™” ๊ฒฝํ—˜
- ๊ด€๊ณ„์˜ ์—ญ๋™์  ๋ณ€ํ™”
3. **์ฃผ์š” ํ”Œ๋กฏ ๋ผ์ธ** (2-3๊ฐœ)
- ๋ฉ”์ธ ํ”Œ๋กฏ: ์ „์ฒด๋ฅผ ๊ด€ํ†ตํ•˜๋Š” ํ•ต์‹ฌ ๊ฐˆ๋“ฑ
- ์„œ๋ธŒ ํ”Œ๋กฏ: ๋ฉ”์ธ๊ณผ ์—ฐ๊ฒฐ๋˜๋ฉฐ ์ฃผ์ œ๋ฅผ ์‹ฌํ™”
4. **์ƒ์ง•์˜ ์ง„ํ™”**
- ํ•ต์‹ฌ ์ƒ์ง• 1-2๊ฐœ ์„ค์ •
- ๋‹จ๊ณ„๋ณ„๋กœ ์˜๋ฏธ๊ฐ€ ๋ณ€ํ™”/์‹ฌํ™”/์ „๋ณต
5. **์‚ฌํšŒ์  ๋งฅ๋ฝ**
- ๊ฐœ์ธ์˜ ๋ฌธ์ œ๊ฐ€ ์‚ฌํšŒ ๊ตฌ์กฐ์™€ ์—ฐ๊ฒฐ
- ๊ตฌ์ฒด์ ์ธ ํ•œ๊ตญ ์‚ฌํšŒ์˜ ํ˜„์‹ค ๋ฐ˜์˜
**์ ˆ๋Œ€ ๊ธˆ์ง€์‚ฌํ•ญ:**
- ๋™์ผํ•œ ์‚ฌ๊ฑด์ด๋‚˜ ์ƒํ™ฉ์˜ ๋ฐ˜๋ณต
- ์ธ๋ฌผ์ด ๊ฐ™์€ ๊ฐ์ •/์ƒ๊ฐ์— ๋จธ๋ฌด๋ฅด๊ธฐ
- ํ”Œ๋กฏ์˜ ๋ฆฌ์…‹์ด๋‚˜ ์ˆœํ™˜ ๊ตฌ์กฐ
- ๊ฐ ๋‹จ๊ณ„๊ฐ€ ๋…๋ฆฝ๋œ ์—ํ”ผ์†Œ๋“œ๋กœ ์กด์žฌ
**๋ถ„๋Ÿ‰ ๊ณ„ํš:**
- ์ด 8,000๋‹จ์–ด ์ด์ƒ
- ๊ฐ ๋‹จ๊ณ„ ํ‰๊ท  800๋‹จ์–ด
- ๊ท ํ˜• ์žกํžŒ ์„œ์‚ฌ ์ „๊ฐœ
ํ•˜๋‚˜์˜ ๊ฐ•๋ ฅํ•œ ์„œ์‚ฌ๊ฐ€ ์‹œ์ž‘๋ถ€ํ„ฐ ๋๊นŒ์ง€ ๊ด€ํ†ตํ•˜๋Š” ์ž‘ํ’ˆ์„ ๊ธฐํšํ•˜์„ธ์š”.""",
"English": f"""You are a master of contemporary literary fiction.
Plan an integrated narrative structure for a novella (8,000+ words), not a collection of short stories.
**Theme:** {user_query}
**Reference:**
{search_results_str if search_results_str else "N/A"}
**Essential Requirements:**
1. **Integrated Narrative Structure (Most Important)**
- Single narrative with 10 organically connected phases
- Each phase naturally follows from previous results
- Accumulation and development, not repetition
Phase Progression:
1) Introduction: Daily life and first crack
2) Development 1: Rising anxiety
3) Development 2: External shock
4) Development 3: Deepening internal conflict
5) Climax 1: Peak crisis
6) Climax 2: Moment of choice
7) Falling Action 1: Direct consequences
8) Falling Action 2: New awareness
9) Resolution 1: Changed daily life
10) Resolution 2: Open questions
2. **Character Transformation Arcs**
- Protagonist: Clear progression from initial โ†’ middle โ†’ final state
- Supporting characters also experience change
- Dynamic relationship evolution
3. **Plot Threads** (2-3)
- Main plot: Core conflict throughout
- Subplots: Connected and deepening themes
4. **Symbolic Evolution**
- 1-2 core symbols
- Meaning transforms across phases
5. **Social Context**
- Individual problems connected to social structures
- Specific contemporary realities
**Absolutely Forbidden:**
- Repetition of same events/situations
- Characters stuck in same emotions
- Plot resets or circular structure
- Independent episodes
**Length Planning:**
- Total 8,000+ words
- ~800 words per phase
- Balanced progression
Create a work with one powerful narrative from beginning to end."""
}
return lang_prompts.get(language, lang_prompts["Korean"])
def create_critic_director_prompt(self, director_plan: str, user_query: str, language: str) -> str:
"""๋น„ํ‰๊ฐ€์˜ ๊ฐ๋…์ž ๊ธฐํš ๊ฒ€ํ†  - ์„œ์‚ฌ ํ†ตํ•ฉ์„ฑ ์ค‘์‹ฌ"""
lang_prompts = {
"Korean": f"""๋‹น์‹ ์€ ์„œ์‚ฌ ๊ตฌ์กฐ ์ „๋ฌธ ๋น„ํ‰๊ฐ€์ž…๋‹ˆ๋‹ค.
์ด ๊ธฐํš์ด ์ง„์ •ํ•œ '์žฅํŽธ ์†Œ์„ค'์ธ์ง€ ์—„๊ฒฉํžˆ ๊ฒ€ํ† ํ•˜์„ธ์š”.
**์› ์ฃผ์ œ:** {user_query}
**๊ฐ๋…์ž ๊ธฐํš:**
{director_plan}
**ํ•ต์‹ฌ ๊ฒ€ํ†  ์‚ฌํ•ญ:**
1. **์„œ์‚ฌ์˜ ํ†ตํ•ฉ์„ฑ๊ณผ ์ง„ํ–‰์„ฑ**
- 10๊ฐœ ๋‹จ๊ณ„๊ฐ€ ํ•˜๋‚˜์˜ ์ด์•ผ๊ธฐ๋กœ ์—ฐ๊ฒฐ๋˜๋Š”๊ฐ€?
- ๊ฐ ๋‹จ๊ณ„๊ฐ€ ์ด์ „ ๋‹จ๊ณ„์˜ ํ•„์—ฐ์  ๊ฒฐ๊ณผ์ธ๊ฐ€?
- ๋™์ผํ•œ ์ƒํ™ฉ์˜ ๋ฐ˜๋ณต์€ ์—†๋Š”๊ฐ€?
2. **์ธ๋ฌผ ๋ณ€ํ™”์˜ ๊ถค์ **
- ์ฃผ์ธ๊ณต์ด ๋ช…ํ™•ํ•œ ๋ณ€ํ™”์˜ arc๋ฅผ ๊ฐ€์ง€๋Š”๊ฐ€?
- ๋ณ€ํ™”๊ฐ€ ๊ตฌ์ฒด์ ์ด๊ณ  ์‹ ๋น™์„ฑ ์žˆ๋Š”๊ฐ€?
- ๊ด€๊ณ„์˜ ๋ฐœ์ „์ด ๊ณ„ํš๋˜์–ด ์žˆ๋Š”๊ฐ€?
3. **ํ”Œ๋กฏ์˜ ์ถ•์ ์„ฑ**
- ๊ฐˆ๋“ฑ์ด ์ ์ง„์ ์œผ๋กœ ์‹ฌํ™”๋˜๋Š”๊ฐ€?
- ์ƒˆ๋กœ์šด ์š”์†Œ๊ฐ€ ์ถ”๊ฐ€๋˜๋ฉฐ ๋ณต์žก์„ฑ์ด ์ฆ๊ฐ€ํ•˜๋Š”๊ฐ€?
- ํ•ด๊ฒฐ์ด ์ž์—ฐ์Šค๋Ÿฝ๊ณ  ํ•„์—ฐ์ ์ธ๊ฐ€?
4. **๋ถ„๋Ÿ‰๊ณผ ๋ฐ€๋„**
- 8,000๋‹จ์–ด๋ฅผ ์ฑ„์šธ ์ถฉ๋ถ„ํ•œ ๋‚ด์šฉ์ธ๊ฐ€?
- ๊ฐ ๋‹จ๊ณ„๊ฐ€ 800๋‹จ์–ด์˜ ๋ฐ€๋„๋ฅผ ๊ฐ€์งˆ ์ˆ˜ ์žˆ๋Š”๊ฐ€?
**ํŒ์ •:**
- ํ†ต๊ณผ: ์ง„์ •ํ•œ ์žฅํŽธ ์„œ์‚ฌ ๊ตฌ์กฐ
- ์žฌ์ž‘์„ฑ: ๋ฐ˜๋ณต์ /์ˆœํ™˜์  ๊ตฌ์กฐ
๊ตฌ์ฒด์  ๊ฐœ์„  ๋ฐฉํ–ฅ์„ ์ œ์‹œํ•˜์„ธ์š”.""",
"English": f"""You are a narrative structure critic.
Strictly review whether this plan is a true 'novel' rather than repeated episodes.
**Original Theme:** {user_query}
**Director's Plan:**
{director_plan}
**Key Review Points:**
1. **Narrative Integration and Progression**
- Do 10 phases connect as one story?
- Does each phase necessarily follow from previous?
- No repetition of same situations?
2. **Character Transformation Arcs**
- Clear protagonist transformation arc?
- Concrete and credible changes?
- Planned relationship development?
3. **Plot Accumulation**
- Progressive conflict deepening?
- Added complexity through new elements?
- Natural and inevitable resolution?
4. **Length and Density**
- Sufficient content for 8,000 words?
- Can each phase sustain 800 words?
**Verdict:**
- Pass: True novel structure
- Rewrite: Repetitive/circular structure
Provide specific improvements."""
}
return lang_prompts.get(language, lang_prompts["Korean"])
def create_writer_prompt(self, writer_number: int, director_plan: str,
previous_content: str, phase_requirements: str,
narrative_summary: str, language: str) -> str:
"""์ž‘๊ฐ€ ํ”„๋กฌํ”„ํŠธ - ์„œ์‚ฌ ์ง„ํ–‰ ๊ฐ•์ œ"""
phase_name = NARRATIVE_PHASES[writer_number-1]
target_words = MIN_WORDS_PER_WRITER
lang_prompts = {
"Korean": f"""๋‹น์‹ ์€ ์ž‘๊ฐ€ {writer_number}๋ฒˆ์ž…๋‹ˆ๋‹ค.
**ํ˜„์žฌ ๋‹จ๊ณ„: {phase_name}**
**์ „์ฒด ์„œ์‚ฌ ๊ตฌ์กฐ:**
{director_plan}
**์ง€๊ธˆ๊นŒ์ง€์˜ ์ด์•ผ๊ธฐ ์š”์•ฝ:**
{narrative_summary}
**์ด์ „ ๋‚ด์šฉ (์ง์ „ ๋ถ€๋ถ„):**
{previous_content[-1500:] if previous_content else "์‹œ์ž‘"}
**์ด๋ฒˆ ๋‹จ๊ณ„ ํ•„์ˆ˜ ์š”๊ตฌ์‚ฌํ•ญ:**
{phase_requirements}
**์ž‘์„ฑ ์ง€์นจ:**
1. **๋ถ„๋Ÿ‰**: {target_words}-900 ๋‹จ์–ด (ํ•„์ˆ˜)
- ๋‚ด๋ฉด ๋ฌ˜์‚ฌ์™€ ๊ตฌ์ฒด์  ๋””ํ…Œ์ผ๋กœ ๋ถ„๋Ÿ‰ ํ™•๋ณด
- ์žฅ๋ฉด์„ ์ถฉ๋ถ„ํžˆ ์ „๊ฐœํ•˜๊ณ  ๊นŠ์ด ์žˆ๊ฒŒ ๋ฌ˜์‚ฌ
2. **์„œ์‚ฌ ์ง„ํ–‰ (๊ฐ€์žฅ ์ค‘์š”)**
- ์ด์ „ ๋‹จ๊ณ„์—์„œ ์ผ์–ด๋‚œ ์ผ์˜ ์ง์ ‘์  ๊ฒฐ๊ณผ๋กœ ์‹œ์ž‘
- ์ƒˆ๋กœ์šด ์‚ฌ๊ฑด/์ธ์‹/๋ณ€ํ™”๋ฅผ ์ถ”๊ฐ€ํ•˜์—ฌ ์ด์•ผ๊ธฐ ์ „์ง„
- ๋‹ค์Œ ๋‹จ๊ณ„๋กœ ์ž์—ฐ์Šค๋Ÿฝ๊ฒŒ ์—ฐ๊ฒฐ๋  ๊ณ ๋ฆฌ ๋งˆ๋ จ
3. **์ธ๋ฌผ์˜ ๋ณ€ํ™”**
- ์ด ๋‹จ๊ณ„์—์„œ ์ธ๋ฌผ์ด ๊ฒช๋Š” ๊ตฌ์ฒด์  ๋ณ€ํ™” ๋ฌ˜์‚ฌ
- ๋‚ด๋ฉด์˜ ๋ฏธ๋ฌ˜ํ•œ ๋ณ€ํ™”๋„ ํฌ์ฐฉ
- ๊ด€๊ณ„์˜ ์—ญํ•™ ๋ณ€ํ™” ๋ฐ˜์˜
4. **๋ฌธ์ฒด์™€ ๊ธฐ๋ฒ•**
- ํ•œ๊ตญ ํ˜„๋Œ€ ๋ฌธํ•™์˜ ์„ฌ์„ธํ•œ ์‹ฌ๋ฆฌ ๋ฌ˜์‚ฌ
- ์ผ์ƒ ์† ์‚ฌํšŒ์  ๋งฅ๋ฝ ๋…น์—ฌ๋‚ด๊ธฐ
- ๊ฐ๊ฐ์  ๋””ํ…Œ์ผ๊ณผ ๋‚ด๋ฉด ์˜์‹์˜ ๊ท ํ˜•
5. **์—ฐ์†์„ฑ ์œ ์ง€**
- ์ธ๋ฌผ์˜ ๋ชฉ์†Œ๋ฆฌ์™€ ๋งํˆฌ ์ผ๊ด€์„ฑ
- ๊ณต๊ฐ„๊ณผ ์‹œ๊ฐ„์˜ ์—ฐ์†์„ฑ
- ์ƒ์ง•๊ณผ ๋ชจํ‹ฐํ”„์˜ ๋ฐœ์ „
**์ ˆ๋Œ€ ๊ธˆ์ง€:**
- ์ด์ „๊ณผ ๋™์ผํ•œ ์ƒํ™ฉ ๋ฐ˜๋ณต
- ์„œ์‚ฌ์˜ ์ •์ฒด๋‚˜ ํ›„ํ‡ด
- ๋ถ„๋Ÿ‰ ๋ฏธ๋‹ฌ (์ตœ์†Œ {target_words}๋‹จ์–ด)
์ด์ „์˜ ํ๋ฆ„์„ ์ด์–ด๋ฐ›์•„ ์ƒˆ๋กœ์šด ๊ตญ๋ฉด์œผ๋กœ ๋ฐœ์ „์‹œํ‚ค์„ธ์š”.""",
"English": f"""You are Writer #{writer_number}.
**Current Phase: {phase_name}**
**Overall Narrative Structure:**
{director_plan}
**Story So Far:**
{narrative_summary}
**Previous Content (immediately before):**
{previous_content[-1500:] if previous_content else "Beginning"}
**Phase Requirements:**
{phase_requirements}
**Writing Guidelines:**
1. **Length**: {target_words}-900 words (mandatory)
- Use interior description and concrete details
- Fully develop scenes with depth
2. **Narrative Progression (Most Important)**
- Start as direct result of previous phase
- Add new events/awareness/changes to advance story
- Create natural connection to next phase
3. **Character Change**
- Concrete changes in this phase
- Capture subtle interior shifts
- Reflect relationship dynamics
4. **Style and Technique**
- Delicate psychological portrayal
- Social context in daily life
- Balance sensory details with consciousness
5. **Continuity**
- Consistent character voices
- Spatial/temporal continuity
- Symbol/motif development
**Absolutely Forbidden:**
- Repeating previous situations
- Narrative stagnation/regression
- Under word count (minimum {target_words})
Continue the flow and develop into new phase."""
}
return lang_prompts.get(language, lang_prompts["Korean"])
def create_critic_consistency_prompt(self, all_content: str,
narrative_tracker: ProgressiveNarrativeTracker,
user_query: str, language: str) -> str:
"""๋น„ํ‰๊ฐ€ ์ค‘๊ฐ„ ๊ฒ€ํ†  - ์„œ์‚ฌ ๋ˆ„์ ์„ฑ ํ™•์ธ"""
# ์„œ์‚ฌ ์ง„ํ–‰ ์ฒดํฌ
phase_count = len(narrative_tracker.phase_summaries)
progression_ok, issues = narrative_tracker.check_narrative_progression(phase_count)
lang_prompts = {
"Korean": f"""์„œ์‚ฌ ์ง„ํ–‰ ์ „๋ฌธ ๋น„ํ‰๊ฐ€๋กœ์„œ ์ž‘ํ’ˆ์„ ๊ฒ€ํ† ํ•˜์„ธ์š”.
**์› ์ฃผ์ œ:** {user_query}
**ํ˜„์žฌ๊นŒ์ง€ ์ง„ํ–‰๋œ ์„œ์‚ฌ ๋‹จ๊ณ„:** {phase_count}/10
**๋ฐœ๊ฒฌ๋œ ์ง„ํ–‰ ๋ฌธ์ œ:**
{chr(10).join(issues) if issues else "์—†์Œ"}
**์ž‘ํ’ˆ ๋‚ด์šฉ (์ตœ๊ทผ ๋ถ€๋ถ„):**
{all_content[-4000:]}
**์ง‘์ค‘ ๊ฒ€ํ†  ์‚ฌํ•ญ:**
1. **์„œ์‚ฌ์˜ ์ถ•์ ๊ณผ ์ง„ํ–‰**
- ์ด์•ผ๊ธฐ๊ฐ€ ์‹ค์ œ๋กœ ์ „์ง„ํ•˜๊ณ  ์žˆ๋Š”๊ฐ€?
- ๊ฐ ๋‹จ๊ณ„๊ฐ€ ์ด์ „์˜ ๊ฒฐ๊ณผ๋กœ ์—ฐ๊ฒฐ๋˜๋Š”๊ฐ€?
- ๋™์ผํ•œ ๊ฐˆ๋“ฑ์ด๋‚˜ ์ƒํ™ฉ์ด ๋ฐ˜๋ณต๋˜์ง€ ์•Š๋Š”๊ฐ€?
2. **์ธ๋ฌผ์˜ ๋ณ€ํ™” ๊ถค์ **
- ์ฃผ์ธ๊ณต์ด ์ดˆ๊ธฐ์™€ ๋น„๊ตํ•ด ์–ด๋–ป๊ฒŒ ๋ณ€ํ–ˆ๋Š”๊ฐ€?
- ๋ณ€ํ™”๊ฐ€ ์„ค๋“๋ ฅ ์žˆ๊ณ  ์ ์ง„์ ์ธ๊ฐ€?
- ๊ด€๊ณ„๊ฐ€ ์—ญ๋™์ ์œผ๋กœ ๋ฐœ์ „ํ•˜๋Š”๊ฐ€?
3. **์ฃผ์ œ์˜ ์‹ฌํ™”**
- ์ดˆ๊ธฐ ์ฃผ์ œ๊ฐ€ ์–ด๋–ป๊ฒŒ ๋ฐœ์ „ํ–ˆ๋Š”๊ฐ€?
- ์ƒˆ๋กœ์šด ์ธต์œ„๊ฐ€ ์ถ”๊ฐ€๋˜์—ˆ๋Š”๊ฐ€?
- ๋ณต์žก์„ฑ์ด ์ฆ๊ฐ€ํ–ˆ๋Š”๊ฐ€?
4. **๋ถ„๋Ÿ‰๊ณผ ๋ฐ€๋„**
- ํ˜„์žฌ๊นŒ์ง€ ์ด ๋‹จ์–ด ์ˆ˜ ํ™•์ธ
- ๋ชฉํ‘œ(8,000๋‹จ์–ด)์— ๋„๋‹ฌ ๊ฐ€๋Šฅํ•œ๊ฐ€?
**์ˆ˜์ • ์ง€์‹œ:**
๊ฐ ์ž‘๊ฐ€์—๊ฒŒ ๊ตฌ์ฒด์ ์ธ ์ง„ํ–‰ ๋ฐฉํ–ฅ ์ œ์‹œ.""",
"English": f"""As a narrative progression critic, review the work.
**Original Theme:** {user_query}
**Narrative Phases Completed:** {phase_count}/10
**Detected Progression Issues:**
{chr(10).join(issues) if issues else "None"}
**Work Content (recent):**
{all_content[-4000:]}
**Focus Review Areas:**
1. **Narrative Accumulation and Progress**
- Is story actually moving forward?
- Does each phase connect as result of previous?
- No repetition of same conflicts/situations?
2. **Character Transformation Arcs**
- How has protagonist changed from beginning?
- Are changes credible and gradual?
- Dynamic relationship development?
3. **Thematic Deepening**
- How has initial theme developed?
- New layers added?
- Increased complexity?
4. **Length and Density**
- Current total word count
- Can reach 8,000 word target?
**Revision Instructions:**
Specific progression directions for each writer."""
}
return lang_prompts.get(language, lang_prompts["Korean"])
def create_writer_revision_prompt(self, writer_number: int, initial_content: str,
critic_feedback: str, language: str) -> str:
"""์ž‘๊ฐ€ ์ˆ˜์ • ํ”„๋กฌํ”„ํŠธ"""
target_words = MIN_WORDS_PER_WRITER
return f"""์ž‘๊ฐ€ {writer_number}๋ฒˆ, ๋น„ํ‰์„ ๋ฐ˜์˜ํ•˜์—ฌ ์ˆ˜์ •ํ•˜์„ธ์š”.
**์ดˆ์•ˆ:**
{initial_content}
**๋น„ํ‰ ํ”ผ๋“œ๋ฐฑ:**
{critic_feedback}
**์ˆ˜์ • ํ•ต์‹ฌ:**
1. ์„œ์‚ฌ ์ง„ํ–‰์„ฑ ๊ฐ•ํ™” - ๋ฐ˜๋ณต ์ œ๊ฑฐ, ์ƒˆ๋กœ์šด ์ „๊ฐœ ์ถ”๊ฐ€
2. ์ธ๋ฌผ ๋ณ€ํ™” ๊ตฌ์ฒดํ™” - ์ด์ „๊ณผ ๋‹ฌ๋ผ์ง„ ๋ชจ์Šต ๋ช…ํ™•ํžˆ
3. ๋ถ„๋Ÿ‰ ํ™•๋ณด - ์ตœ์†Œ {target_words}๋‹จ์–ด ์œ ์ง€
4. ๋‚ด๋ฉด ๋ฌ˜์‚ฌ์™€ ์‚ฌํšŒ์  ๋งฅ๋ฝ ์‹ฌํ™”
์ „๋ฉด ์žฌ์ž‘์„ฑ์ด ํ•„์š”ํ•˜๋ฉด ๊ณผ๊ฐํžˆ ์ˆ˜์ •ํ•˜์„ธ์š”.
์ˆ˜์ •๋ณธ๋งŒ ์ œ์‹œํ•˜์„ธ์š”."""
def create_critic_final_prompt(self, complete_novel: str, word_count: int, language: str) -> str:
"""์ตœ์ข… ๋น„ํ‰ - AGI ํ‰๊ฐ€ ๊ธฐ์ค€"""
return f"""์™„์„ฑ๋œ ์†Œ์„ค์„ AGI ํŠœ๋งํ…Œ์ŠคํŠธ ๊ธฐ์ค€์œผ๋กœ ํ‰๊ฐ€ํ•˜์„ธ์š”.
**์ž‘ํ’ˆ ์ •๋ณด:**
- ์ด ๋ถ„๋Ÿ‰: {word_count}๋‹จ์–ด
- ๋ชฉํ‘œ ๋ถ„๋Ÿ‰: 8,000๋‹จ์–ด ์ด์ƒ
**์ž‘ํ’ˆ (๋งˆ์ง€๋ง‰ ๋ถ€๋ถ„):**
{complete_novel[-3000:]}
**ํ‰๊ฐ€ ๊ธฐ์ค€ (AGI ํŠœ๋งํ…Œ์ŠคํŠธ):**
1. **์žฅํŽธ์†Œ์„ค๋กœ์„œ์˜ ์™„์„ฑ๋„ (40์ )**
- ํ†ตํ•ฉ๋œ ์„œ์‚ฌ ๊ตฌ์กฐ (๋ฐ˜๋ณต ์—†์Œ)
- ์ธ๋ฌผ์˜ ๋ช…ํ™•ํ•œ ๋ณ€ํ™” arc
- ํ”Œ๋กฏ์˜ ์ถ•์ ๊ณผ ํ•ด๊ฒฐ
- 8,000๋‹จ์–ด ์ด์ƒ ๋ถ„๋Ÿ‰
2. **๋ฌธํ•™์  ์„ฑ์ทจ (30์ )**
- ์ฃผ์ œ ์˜์‹์˜ ๊นŠ์ด
- ์ธ๋ฌผ ์‹ฌ๋ฆฌ์˜ ์„ค๋“๋ ฅ
- ๋ฌธ์ฒด์˜ ์ผ๊ด€์„ฑ๊ณผ ์•„๋ฆ„๋‹ค์›€
- ์ƒ์ง•๊ณผ ์€์œ ์˜ ํšจ๊ณผ
3. **์‚ฌํšŒ์  ํ†ต์ฐฐ (20์ )**
- ํ˜„๋Œ€ ์‚ฌํšŒ ๋ฌธ์ œ ํฌ์ฐฉ
- ๊ฐœ์ธ๊ณผ ๊ตฌ์กฐ์˜ ์—ฐ๊ฒฐ
- ๋ณดํŽธ์„ฑ๊ณผ ํŠน์ˆ˜์„ฑ ๊ท ํ˜•
4. **๋…์ฐฝ์„ฑ๊ณผ ์ธ๊ฐ„์„ฑ (10์ )**
- AI๊ฐ€ ์•„๋‹Œ ์ธ๊ฐ„ ์ž‘๊ฐ€์˜ ๋А๋‚Œ
- ๋…์ฐฝ์  ํ‘œํ˜„๊ณผ ํ†ต์ฐฐ
- ๊ฐ์ •์  ์ง„์ •์„ฑ
**์ด์ : /100์ **
ํŠนํžˆ '๋ฐ˜๋ณต ๊ตฌ์กฐ' ๋ฌธ์ œ๊ฐ€ ์žˆ์—ˆ๋Š”์ง€ ์—„๊ฒฉํžˆ ํ‰๊ฐ€ํ•˜์„ธ์š”."""
# --- LLM ํ˜ธ์ถœ ํ•จ์ˆ˜๋“ค ---
def call_llm_sync(self, messages: List[Dict[str, str]], role: str, language: str) -> str:
full_content = ""
for chunk in self.call_llm_streaming(messages, role, language):
full_content += chunk
if full_content.startswith("โŒ"):
raise Exception(f"LLM Call Failed: {full_content}")
return full_content
def call_llm_streaming(self, messages: List[Dict[str, str]], role: str, language: str) -> Generator[str, None, None]:
try:
system_prompts = self.get_system_prompts(language)
full_messages = [{"role": "system", "content": system_prompts.get(role, "")}, *messages]
# ์ž‘๊ฐ€ ์—ญํ• ์ผ ๋•Œ๋Š” ๋” ๋งŽ์€ ํ† ํฐ ํ—ˆ์šฉ
max_tokens = 15000 if role.startswith("writer") else 10000
payload = {
"model": self.model_id,
"messages": full_messages,
"max_tokens": max_tokens,
"temperature": 0.8,
"top_p": 0.95,
"presence_penalty": 0.5,
"frequency_penalty": 0.3,
"stream": True
}
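            # Responses stream as SSE lines ("data: {...}", terminated by "[DONE]") and are
            # parsed chunk by chunk below, buffering ~50 characters before each yield.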
response = requests.post(
self.api_url,
headers=self.create_headers(),
json=payload,
stream=True,
timeout=180
)
if response.status_code != 200:
yield f"โŒ API ์˜ค๋ฅ˜ (์ƒํƒœ ์ฝ”๋“œ: {response.status_code})"
return
buffer = ""
for line in response.iter_lines():
if not line:
continue
try:
line_str = line.decode('utf-8').strip()
if not line_str.startswith("data: "):
continue
data_str = line_str[6:]
if data_str == "[DONE]":
break
data = json.loads(data_str)
choices = data.get("choices", [])
if choices and choices[0].get("delta", {}).get("content"):
content = choices[0]["delta"]["content"]
buffer += content
if len(buffer) >= 50 or '\n' in buffer:
yield buffer
buffer = ""
time.sleep(0.01)
except Exception as e:
logger.error(f"์ฒญํฌ ์ฒ˜๋ฆฌ ์˜ค๋ฅ˜: {str(e)}")
continue
if buffer:
yield buffer
except Exception as e:
logger.error(f"์ŠคํŠธ๋ฆฌ๋ฐ ์˜ค๋ฅ˜: {type(e).__name__}: {str(e)}")
yield f"โŒ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
def get_system_prompts(self, language: str) -> Dict[str, str]:
"""์—ญํ• ๋ณ„ ์‹œ์Šคํ…œ ํ”„๋กฌํ”„ํŠธ"""
base_prompts = {
"Korean": {
"director": """๋‹น์‹ ์€ ํ•œ๊ตญ ํ˜„๋Œ€ ๋ฌธํ•™์˜ ๊ฑฐ์žฅ์ž…๋‹ˆ๋‹ค.
๋ฐ˜๋ณต์ด ์•„๋‹Œ ์ง„ํ–‰, ์ˆœํ™˜์ด ์•„๋‹Œ ๋ฐœ์ „์„ ํ†ตํ•ด ํ•˜๋‚˜์˜ ๊ฐ•๋ ฅํ•œ ์„œ์‚ฌ๋ฅผ ๊ตฌ์ถ•ํ•˜์„ธ์š”.
๊ฐœ์ธ์˜ ๋ฌธ์ œ๋ฅผ ์‚ฌํšŒ ๊ตฌ์กฐ์™€ ์—ฐ๊ฒฐํ•˜๋ฉฐ, ์ธ๋ฌผ์˜ ์ง„์ •ํ•œ ๋ณ€ํ™”๋ฅผ ๊ทธ๋ ค๋‚ด์„ธ์š”.""",
"critic": """๋‹น์‹ ์€ ์—„๊ฒฉํ•œ ๋ฌธํ•™ ๋น„ํ‰๊ฐ€์ž…๋‹ˆ๋‹ค.
ํŠนํžˆ '๋ฐ˜๋ณต ๊ตฌ์กฐ'์™€ '์„œ์‚ฌ ์ •์ฒด'๋ฅผ ์ฒ ์ €ํžˆ ๊ฐ์‹œํ•˜์„ธ์š”.
์ž‘ํ’ˆ์ด ์ง„์ •ํ•œ ์žฅํŽธ์†Œ์„ค์ธ์ง€, ์•„๋‹ˆ๋ฉด ๋ฐ˜๋ณต๋˜๋Š” ๋‹จํŽธ์˜ ์ง‘ํ•ฉ์ธ์ง€ ๊ตฌ๋ณ„ํ•˜์„ธ์š”.""",
"writer_base": """๋‹น์‹ ์€ ํ˜„๋Œ€ ํ•œ๊ตญ ๋ฌธํ•™ ์ž‘๊ฐ€์ž…๋‹ˆ๋‹ค.
์ด์ „ ๋‹จ๊ณ„์˜ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ›์•„ ์ƒˆ๋กœ์šด ๊ตญ๋ฉด์œผ๋กœ ๋ฐœ์ „์‹œํ‚ค์„ธ์š”.
์ตœ์†Œ 800๋‹จ์–ด๋ฅผ ์ž‘์„ฑํ•˜๋ฉฐ, ๋‚ด๋ฉด๊ณผ ์‚ฌํšŒ๋ฅผ ๋™์‹œ์— ํฌ์ฐฉํ•˜์„ธ์š”.
์ ˆ๋Œ€ ์ด์ „๊ณผ ๊ฐ™์€ ์ƒํ™ฉ์„ ๋ฐ˜๋ณตํ•˜์ง€ ๋งˆ์„ธ์š”."""
},
"English": {
"director": """You are a master of contemporary literary fiction.
Build one powerful narrative through progression not repetition, development not cycles.
Connect individual problems to social structures while depicting genuine character transformation.""",
"critic": """You are a strict literary critic.
Vigilantly monitor for 'repetitive structure' and 'narrative stagnation'.
Distinguish whether this is a true novel or a collection of repeated episodes.""",
"writer_base": """You are a contemporary literary writer.
Take results from previous phase and develop into new territory.
Write minimum 800 words, capturing both interior and society.
Never repeat previous situations."""
}
}
prompts = base_prompts.get(language, base_prompts["Korean"]).copy()
# ํŠน์ˆ˜ ์ž‘๊ฐ€ ํ”„๋กฌํ”„ํŠธ
for i in range(1, 11):
prompts[f"writer{i}"] = prompts["writer_base"]
return prompts
# --- ๋ฉ”์ธ ํ”„๋กœ์„ธ์Šค ---
def process_novel_stream(self, query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, List[Dict[str, Any]], str], None, None]:
"""์†Œ์„ค ์ƒ์„ฑ ํ”„๋กœ์„ธ์Šค"""
try:
resume_from_stage = 0
if session_id:
self.current_session_id = session_id
session = NovelDatabase.get_session(session_id)
if session:
query = session['user_query']
language = session['language']
resume_from_stage = session['current_stage'] + 1
# ์„œ์‚ฌ ์ถ”์ ๊ธฐ ๋ณต์›
saved_tracker = NovelDatabase.load_narrative_tracker(session_id)
if saved_tracker:
self.narrative_tracker = saved_tracker
else:
self.current_session_id = NovelDatabase.create_session(query, language)
logger.info(f"Created new session: {self.current_session_id}")
stages = []
if resume_from_stage > 0:
stages = [{
"name": s['stage_name'],
"status": s['status'],
"content": s.get('content', ''),
"word_count": s.get('word_count', 0),
"progression_score": s.get('progression_score', 0.0)
} for s in NovelDatabase.get_stages(self.current_session_id)]
# ์ด ๋‹จ์–ด ์ˆ˜ ์ถ”์ 
total_words = NovelDatabase.get_total_words(self.current_session_id)
for stage_idx in range(resume_from_stage, len(PROGRESSIVE_STAGES)):
role, stage_name = PROGRESSIVE_STAGES[stage_idx]
if stage_idx >= len(stages):
stages.append({
"name": stage_name,
"status": "active",
"content": "",
"word_count": 0,
"progression_score": 0.0
})
else:
stages[stage_idx]["status"] = "active"
yield f"๐Ÿ”„ ์ง„ํ–‰ ์ค‘... (ํ˜„์žฌ {total_words:,}๋‹จ์–ด)", stages, self.current_session_id
prompt = self.get_stage_prompt(stage_idx, role, query, language, stages)
stage_content = ""
for chunk in self.call_llm_streaming([{"role": "user", "content": prompt}], role, language):
stage_content += chunk
stages[stage_idx]["content"] = stage_content
stages[stage_idx]["word_count"] = len(stage_content.split())
yield f"๐Ÿ”„ {stage_name} ์ž‘์„ฑ ์ค‘... ({total_words + stages[stage_idx]['word_count']:,}๋‹จ์–ด)", stages, self.current_session_id
# ์ง„ํ–‰๋„ ํ‰๊ฐ€
if role.startswith("writer"):
writer_num = int(re.search(r'\d+', role).group())
progression_score = self.evaluate_progression(stage_content, writer_num)
stages[stage_idx]["progression_score"] = progression_score
# ์„œ์‚ฌ ์ถ”์ ๊ธฐ ์—…๋ฐ์ดํŠธ
self.update_narrative_tracker(stage_content, writer_num)
stages[stage_idx]["status"] = "complete"
NovelDatabase.save_stage(
self.current_session_id, stage_idx, stage_name, role,
stage_content, "complete", stages[stage_idx].get("progression_score", 0.0)
)
# ์„œ์‚ฌ ์ถ”์ ๊ธฐ ์ €์žฅ
NovelDatabase.save_narrative_tracker(self.current_session_id, self.narrative_tracker)
# ์ด ๋‹จ์–ด ์ˆ˜ ์—…๋ฐ์ดํŠธ
total_words = NovelDatabase.get_total_words(self.current_session_id)
yield f"โœ… {stage_name} ์™„๋ฃŒ (์ด {total_words:,}๋‹จ์–ด)", stages, self.current_session_id
# ์ตœ์ข… ์†Œ์„ค ์ •๋ฆฌ
final_novel = NovelDatabase.get_writer_content(self.current_session_id)
final_word_count = len(final_novel.split())
final_report = self.generate_literary_report(final_novel, final_word_count, language)
NovelDatabase.update_final_novel(self.current_session_id, final_novel, final_report)
yield f"โœ… ์†Œ์„ค ์™„์„ฑ! ์ด {final_word_count:,}๋‹จ์–ด (๋ชฉํ‘œ: {TARGET_WORDS:,}๋‹จ์–ด)", stages, self.current_session_id
except Exception as e:
logger.error(f"์†Œ์„ค ์ƒ์„ฑ ํ”„๋กœ์„ธ์Šค ์˜ค๋ฅ˜: {e}", exc_info=True)
yield f"โŒ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {e}", stages if 'stages' in locals() else [], self.current_session_id
def get_stage_prompt(self, stage_idx: int, role: str, query: str, language: str, stages: List[Dict]) -> str:
"""๋‹จ๊ณ„๋ณ„ ํ”„๋กฌํ”„ํŠธ ์ƒ์„ฑ"""
if stage_idx == 0:
return self.create_director_initial_prompt(query, language)
if stage_idx == 1:
return self.create_critic_director_prompt(stages[0]["content"], query, language)
if stage_idx == 2:
return self.create_director_revision_prompt(stages[0]["content"], stages[1]["content"], query, language)
master_plan = stages[2]["content"]
if 3 <= stage_idx <= 12: # ์ž‘๊ฐ€ ์ดˆ์•ˆ
writer_num = stage_idx - 2
previous_content = self.get_previous_writer_content(stages, writer_num)
phase_requirements = self.narrative_tracker.generate_phase_requirements(writer_num)
narrative_summary = self.generate_narrative_summary(stages, writer_num)
return self.create_writer_prompt(
writer_num, master_plan, previous_content,
phase_requirements, narrative_summary, language
)
if stage_idx == 13: # ๋น„ํ‰๊ฐ€ ์ค‘๊ฐ„ ๊ฒ€ํ† 
all_content = self.get_all_writer_content(stages, 12)
return self.create_critic_consistency_prompt(
all_content, self.narrative_tracker, query, language
)
if 14 <= stage_idx <= 23: # ์ž‘๊ฐ€ ์ˆ˜์ •
writer_num = stage_idx - 13
initial_content = stages[2 + writer_num]["content"]
feedback = stages[13]["content"]
return self.create_writer_revision_prompt(writer_num, initial_content, feedback, language)
if stage_idx == 24: # ์ตœ์ข… ๊ฒ€ํ† 
complete_novel = self.get_all_writer_content(stages, 23)
word_count = len(complete_novel.split())
return self.create_critic_final_prompt(complete_novel, word_count, language)
return ""
def create_director_revision_prompt(self, initial_plan: str, critic_feedback: str, user_query: str, language: str) -> str:
"""๊ฐ๋…์ž ์ˆ˜์ • ํ”„๋กฌํ”„ํŠธ"""
return f"""๋น„ํ‰์„ ๋ฐ˜์˜ํ•˜์—ฌ ํ†ตํ•ฉ๋œ ์„œ์‚ฌ ๊ตฌ์กฐ๋ฅผ ์™„์„ฑํ•˜์„ธ์š”.
**์› ์ฃผ์ œ:** {user_query}
**์ดˆ๊ธฐ ๊ธฐํš:**
{initial_plan}
**๋น„ํ‰:**
{critic_feedback}
**ํ•ต์‹ฌ ์ˆ˜์ • ์‚ฌํ•ญ:**
1. ๋ฐ˜๋ณต ๊ตฌ์กฐ ์™„์ „ ์ œ๊ฑฐ
2. 10๋‹จ๊ณ„๊ฐ€ ํ•˜๋‚˜์˜ ์ด์•ผ๊ธฐ๋กœ ์—ฐ๊ฒฐ
3. ์ธ๋ฌผ์˜ ๋ช…ํ™•ํ•œ ๋ณ€ํ™” ๊ถค์ 
4. 8,000๋‹จ์–ด ๋ถ„๋Ÿ‰ ๊ณ„ํš
๊ฐ ๋‹จ๊ณ„๊ฐ€ ์ด์ „์˜ ํ•„์—ฐ์  ๊ฒฐ๊ณผ๊ฐ€ ๋˜๋„๋ก ์ˆ˜์ •ํ•˜์„ธ์š”."""
def get_previous_writer_content(self, stages: List[Dict], current_writer: int) -> str:
"""์ด์ „ ์ž‘๊ฐ€์˜ ๋‚ด์šฉ ๊ฐ€์ ธ์˜ค๊ธฐ"""
if current_writer == 1:
return ""
# ๋ฐ”๋กœ ์ด์ „ ์ž‘๊ฐ€์˜ ๋‚ด์šฉ
prev_idx = current_writer + 1 # stages ์ธ๋ฑ์Šค๋Š” writer_num + 2
if prev_idx < len(stages) and stages[prev_idx]["content"]:
return stages[prev_idx]["content"]
return ""
def get_all_writer_content(self, stages: List[Dict], up_to_stage: int) -> str:
"""ํŠน์ • ๋‹จ๊ณ„๊นŒ์ง€์˜ ๋ชจ๋“  ์ž‘๊ฐ€ ๋‚ด์šฉ"""
contents = []
        for i, s in enumerate(stages):
            # Stage names are Korean ("โœ๏ธ ์ž‘๊ฐ€ N: ..."), so match the Korean label as well as "writer".
            if i <= up_to_stage and ("writer" in s.get("name", "") or "์ž‘๊ฐ€" in s.get("name", "")) and s["content"]:
                contents.append(s["content"])
return "\n\n".join(contents)
def generate_narrative_summary(self, stages: List[Dict], up_to_writer: int) -> str:
"""ํ˜„์žฌ๊นŒ์ง€์˜ ์„œ์‚ฌ ์š”์•ฝ"""
if up_to_writer == 1:
return "์ฒซ ์‹œ์ž‘์ž…๋‹ˆ๋‹ค."
summary_parts = []
for i in range(1, up_to_writer):
if i in self.narrative_tracker.phase_summaries:
summary_parts.append(f"[{NARRATIVE_PHASES[i-1]}]: {self.narrative_tracker.phase_summaries[i]}")
return "\n".join(summary_parts) if summary_parts else "์ด์ „ ๋‚ด์šฉ์„ ์ด์–ด๋ฐ›์•„ ์ง„ํ–‰ํ•˜์„ธ์š”."
def update_narrative_tracker(self, content: str, writer_num: int):
"""์„œ์‚ฌ ์ถ”์ ๊ธฐ ์—…๋ฐ์ดํŠธ"""
# ๊ฐ„๋‹จํ•œ ์š”์•ฝ ์ƒ์„ฑ (์‹ค์ œ๋กœ๋Š” ๋” ์ •๊ตํ•œ ๋ถ„์„ ํ•„์š”)
lines = content.split('\n')
key_events = [line.strip() for line in lines if len(line.strip()) > 50][:3]
if key_events:
summary = " ".join(key_events[:2])[:200] + "..."
self.narrative_tracker.phase_summaries[writer_num] = summary
def evaluate_progression(self, content: str, phase: int) -> float:
"""์„œ์‚ฌ ์ง„ํ–‰๋„ ํ‰๊ฐ€"""
score = 5.0
# ๋ถ„๋Ÿ‰ ์ฒดํฌ
word_count = len(content.split())
if word_count >= MIN_WORDS_PER_WRITER:
score += 2.0
# ์ƒˆ๋กœ์šด ์š”์†Œ ์ฒดํฌ
if phase > 1:
prev_summary = self.narrative_tracker.phase_summaries.get(phase-1, "")
if prev_summary and len(set(content.split()) - set(prev_summary.split())) > 100:
score += 1.5
# ๋ณ€ํ™” ์–ธ๊ธ‰ ์ฒดํฌ
change_keywords = ['๋ณ€ํ–ˆ', '๋‹ฌ๋ผ์กŒ', '์ƒˆ๋กœ์šด', '์ด์ œ๋Š”', '๋” ์ด์ƒ',
'changed', 'different', 'new', 'now', 'no longer']
if any(keyword in content for keyword in change_keywords):
score += 1.5
return min(10.0, score)
def generate_literary_report(self, complete_novel: str, word_count: int, language: str) -> str:
"""์ตœ์ข… ๋ฌธํ•™์  ํ‰๊ฐ€"""
prompt = self.create_critic_final_prompt(complete_novel, word_count, language)
try:
report = self.call_llm_sync([{"role": "user", "content": prompt}], "critic", language)
return report
except Exception as e:
logger.error(f"์ตœ์ข… ๋ณด๊ณ ์„œ ์ƒ์„ฑ ์‹คํŒจ: {e}")
return "๋ณด๊ณ ์„œ ์ƒ์„ฑ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ"
# --- ์œ ํ‹ธ๋ฆฌํ‹ฐ ํ•จ์ˆ˜๋“ค ---
def process_query(query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, str, str, str], None, None]:
"""๋ฉ”์ธ ์ฟผ๋ฆฌ ์ฒ˜๋ฆฌ ํ•จ์ˆ˜"""
if not query.strip():
yield "", "", "โŒ ์ฃผ์ œ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”.", session_id
return
system = ProgressiveLiterarySystem()
stages_markdown = ""
novel_content = ""
for status, stages, current_session_id in system.process_novel_stream(query, language, session_id):
stages_markdown = format_stages_display(stages)
# ์ตœ์ข… ์†Œ์„ค ๋‚ด์šฉ ๊ฐ€์ ธ์˜ค๊ธฐ
if stages and all(s.get("status") == "complete" for s in stages[-10:]):
novel_content = NovelDatabase.get_writer_content(current_session_id)
novel_content = format_novel_display(novel_content)
yield stages_markdown, novel_content, status or "๐Ÿ”„ ์ฒ˜๋ฆฌ ์ค‘...", current_session_id
def get_active_sessions(language: str) -> List[str]:
"""ํ™œ์„ฑ ์„ธ์…˜ ๋ชฉ๋ก"""
sessions = NovelDatabase.get_active_sessions()
return [f"{s['session_id'][:8]}... - {s['user_query'][:50]}... ({s['created_at']}) [{s['total_words']:,}๋‹จ์–ด]"
for s in sessions]
def auto_recover_session(language: str) -> Tuple[Optional[str], str]:
"""์ตœ๊ทผ ์„ธ์…˜ ์ž๋™ ๋ณต๊ตฌ"""
sessions = NovelDatabase.get_active_sessions()
if sessions:
latest_session = sessions[0]
return latest_session['session_id'], f"์„ธ์…˜ {latest_session['session_id'][:8]}... ๋ณต๊ตฌ๋จ"
return None, "๋ณต๊ตฌํ•  ์„ธ์…˜์ด ์—†์Šต๋‹ˆ๋‹ค."
def resume_session(session_id: str, language: str) -> Generator[Tuple[str, str, str, str], None, None]:
"""์„ธ์…˜ ์žฌ๊ฐœ"""
if not session_id:
yield "", "", "โŒ ์„ธ์…˜ ID๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", session_id
return
if "..." in session_id:
session_id = session_id.split("...")[0]
session = NovelDatabase.get_session(session_id)
if not session:
yield "", "", "โŒ ์„ธ์…˜์„ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.", None
return
yield from process_query(session['user_query'], session['language'], session_id)
def download_novel(novel_text: str, format_type: str, language: str, session_id: str) -> Optional[str]:
"""์†Œ์„ค ๋‹ค์šด๋กœ๋“œ ํŒŒ์ผ ์ƒ์„ฑ"""
if not novel_text or not session_id:
return None
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
filename = f"novel_{session_id[:8]}_{timestamp}"
try:
if format_type == "DOCX" and DOCX_AVAILABLE:
return export_to_docx(novel_text, filename, language, session_id)
else:
return export_to_txt(novel_text, filename)
except Exception as e:
logger.error(f"ํŒŒ์ผ ์ƒ์„ฑ ์‹คํŒจ: {e}")
return None
def format_stages_display(stages: List[Dict]) -> str:
"""๋‹จ๊ณ„๋ณ„ ์ง„ํ–‰ ์ƒํ™ฉ ํ‘œ์‹œ"""
markdown = "## ๐ŸŽฌ ์ง„ํ–‰ ์ƒํ™ฉ\n\n"
# ์ด ๋‹จ์–ด ์ˆ˜ ๊ณ„์‚ฐ
    # Stage names are Korean ("์ž‘๊ฐ€"), so match the Korean label as well as "writer" when summing.
    total_words = sum(s.get('word_count', 0) for s in stages
                      if 'writer' in s.get('name', '') or '์ž‘๊ฐ€' in s.get('name', ''))
markdown += f"**์ด ๋‹จ์–ด ์ˆ˜: {total_words:,} / {TARGET_WORDS:,}**\n\n"
for i, stage in enumerate(stages):
status_icon = "โœ…" if stage['status'] == 'complete' else "๐Ÿ”„" if stage['status'] == 'active' else "โณ"
markdown += f"{status_icon} **{stage['name']}**"
if stage.get('word_count', 0) > 0:
markdown += f" ({stage['word_count']:,}๋‹จ์–ด)"
if stage.get('progression_score', 0) > 0:
markdown += f" [์ง„ํ–‰๋„: {stage['progression_score']:.1f}/10]"
markdown += "\n"
if stage['content']:
preview = stage['content'][:200] + "..." if len(stage['content']) > 200 else stage['content']
markdown += f"> {preview}\n\n"
return markdown
def format_novel_display(novel_text: str) -> str:
"""์†Œ์„ค ๋‚ด์šฉ ํ‘œ์‹œ"""
if not novel_text:
return "์•„์ง ์™„์„ฑ๋œ ๋‚ด์šฉ์ด ์—†์Šต๋‹ˆ๋‹ค."
formatted = "# ๐Ÿ“– ์™„์„ฑ๋œ ์†Œ์„ค\n\n"
# ๋‹จ์–ด ์ˆ˜ ํ‘œ์‹œ
word_count = len(novel_text.split())
formatted += f"**์ด ๋ถ„๋Ÿ‰: {word_count:,}๋‹จ์–ด (๋ชฉํ‘œ: {TARGET_WORDS:,}๋‹จ์–ด)**\n\n"
formatted += "---\n\n"
# ๊ฐ ๋‹จ๊ณ„๋ฅผ ๊ตฌ๋ถ„ํ•˜์—ฌ ํ‘œ์‹œ
sections = novel_text.split('\n\n')
for i, section in enumerate(sections):
if section.strip():
formatted += f"{section}\n\n"
return formatted
def export_to_docx(content: str, filename: str, language: str, session_id: str) -> str:
"""DOCX ํŒŒ์ผ๋กœ ๋‚ด๋ณด๋‚ด๊ธฐ"""
doc = Document()
# ํŽ˜์ด์ง€ ์„ค์ •
section = doc.sections[0]
section.page_height = Inches(11)
section.page_width = Inches(8.5)
section.top_margin = Inches(1)
section.bottom_margin = Inches(1)
section.left_margin = Inches(1.25)
section.right_margin = Inches(1.25)
# ์„ธ์…˜ ์ •๋ณด
session = NovelDatabase.get_session(session_id)
# ์ œ๋ชฉ ํŽ˜์ด์ง€
title_para = doc.add_paragraph()
title_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
if session:
title_run = title_para.add_run(session["user_query"])
title_run.font.size = Pt(24)
title_run.bold = True
# ๋ฉ”ํƒ€ ์ •๋ณด
doc.add_paragraph()
meta_para = doc.add_paragraph()
meta_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
meta_para.add_run(f"์ƒ์„ฑ์ผ: {datetime.now().strftime('%Y๋…„ %m์›” %d์ผ')}\n")
meta_para.add_run(f"์ด ๋‹จ์–ด ์ˆ˜: {len(content.split()):,}๋‹จ์–ด")
# ํŽ˜์ด์ง€ ๋‚˜๋ˆ„๊ธฐ
doc.add_page_break()
# ๋ณธ๋ฌธ ์Šคํƒ€์ผ ์„ค์ •
style = doc.styles['Normal']
style.font.name = 'Calibri'
style.font.size = Pt(11)
style.paragraph_format.line_spacing = 1.5
style.paragraph_format.space_after = Pt(6)
# ๋ณธ๋ฌธ ์ถ”๊ฐ€
paragraphs = content.split('\n\n')
for para_text in paragraphs:
if para_text.strip():
para = doc.add_paragraph(para_text.strip())
# ํŒŒ์ผ ์ €์žฅ
filepath = f"{filename}.docx"
doc.save(filepath)
return filepath
def export_to_txt(content: str, filename: str) -> str:
"""TXT ํŒŒ์ผ๋กœ ๋‚ด๋ณด๋‚ด๊ธฐ"""
filepath = f"{filename}.txt"
with open(filepath, 'w', encoding='utf-8') as f:
f.write(content)
return filepath
# CSS ์Šคํƒ€์ผ
custom_css = """
.gradio-container {
background: linear-gradient(135deg, #1e3c72 0%, #2a5298 50%, #1e3c72 100%);
min-height: 100vh;
}
.main-header {
background-color: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
padding: 30px;
border-radius: 12px;
margin-bottom: 30px;
text-align: center;
color: white;
border: 1px solid rgba(255, 255, 255, 0.2);
}
.progress-note {
background-color: rgba(255, 223, 0, 0.1);
border-left: 3px solid #ffd700;
padding: 15px;
margin: 20px 0;
border-radius: 8px;
color: #fff;
}
.input-section {
background-color: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
padding: 20px;
border-radius: 12px;
margin-bottom: 20px;
border: 1px solid rgba(255, 255, 255, 0.2);
}
.session-section {
background-color: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
padding: 15px;
border-radius: 8px;
margin-top: 20px;
color: white;
border: 1px solid rgba(255, 255, 255, 0.2);
}
#stages-display {
background-color: rgba(255, 255, 255, 0.95);
padding: 20px;
border-radius: 12px;
max-height: 600px;
overflow-y: auto;
box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}
#novel-output {
background-color: rgba(255, 255, 255, 0.95);
padding: 30px;
border-radius: 12px;
max-height: 700px;
overflow-y: auto;
box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}
.download-section {
background-color: rgba(255, 255, 255, 0.9);
padding: 15px;
border-radius: 8px;
margin-top: 20px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
/* ์ง„ํ–‰ ํ‘œ์‹œ๊ธฐ ์Šคํƒ€์ผ */
.progress-bar {
background-color: #e0e0e0;
height: 20px;
border-radius: 10px;
overflow: hidden;
margin: 10px 0;
}
.progress-fill {
background-color: #4CAF50;
height: 100%;
transition: width 0.3s ease;
}
"""
# Gradio ์ธํ„ฐํŽ˜์ด์Šค ์ƒ์„ฑ
def create_interface():
with gr.Blocks(css=custom_css, title="AI ์ง„ํ–‰ํ˜• ์žฅํŽธ์†Œ์„ค ์ƒ์„ฑ ์‹œ์Šคํ…œ") as interface:
gr.HTML("""
<div class="main-header">
<h1 style="font-size: 2.5em; margin-bottom: 10px;">
๐Ÿ“š AI ์ง„ํ–‰ํ˜• ์žฅํŽธ์†Œ์„ค ์ƒ์„ฑ ์‹œ์Šคํ…œ
</h1>
<h3 style="color: #ddd; margin-bottom: 20px;">
8,000๋‹จ์–ด ์ด์ƒ์˜ ํ†ตํ•ฉ๋œ ์„œ์‚ฌ ๊ตฌ์กฐ๋ฅผ ๊ฐ€์ง„ ์ค‘ํŽธ์†Œ์„ค ์ฐฝ์ž‘
</h3>
<p style="font-size: 1.1em; color: #eee; max-width: 800px; margin: 0 auto;">
10๊ฐœ์˜ ์œ ๊ธฐ์ ์œผ๋กœ ์—ฐ๊ฒฐ๋œ ๋‹จ๊ณ„๋ฅผ ํ†ตํ•ด ํ•˜๋‚˜์˜ ์™„์ „ํ•œ ์ด์•ผ๊ธฐ๋ฅผ ๋งŒ๋“ค์–ด๋ƒ…๋‹ˆ๋‹ค.
<br>
๊ฐ ๋‹จ๊ณ„๋Š” ์ด์ „ ๋‹จ๊ณ„์˜ ํ•„์—ฐ์  ๊ฒฐ๊ณผ๋กœ ์ด์–ด์ง€๋ฉฐ, ์ธ๋ฌผ์˜ ๋ณ€ํ™”์™€ ์„ฑ์žฅ์„ ์ถ”์ ํ•ฉ๋‹ˆ๋‹ค.
</p>
<div class="progress-note">
โšก ๋ฐ˜๋ณต์ด ์•„๋‹Œ ์ถ•์ , ์ˆœํ™˜์ด ์•„๋‹Œ ์ง„ํ–‰์„ ํ†ตํ•œ ์ง„์ •ํ•œ ์žฅํŽธ ์„œ์‚ฌ
</div>
</div>
""")
# ์ƒํƒœ ๊ด€๋ฆฌ
current_session_id = gr.State(None)
with gr.Row():
with gr.Column(scale=1):
with gr.Group(elem_classes=["input-section"]):
query_input = gr.Textbox(
label="์†Œ์„ค ์ฃผ์ œ / Novel Theme",
placeholder="์ค‘ํŽธ์†Œ์„ค์˜ ์ฃผ์ œ๋ฅผ ์ž…๋ ฅํ•˜์„ธ์š”. ์ธ๋ฌผ์˜ ๋ณ€ํ™”์™€ ์„ฑ์žฅ์ด ์ค‘์‹ฌ์ด ๋˜๋Š” ์ด์•ผ๊ธฐ...\nEnter the theme for your novella. Focus on character transformation and growth...",
lines=4
)
language_select = gr.Radio(
choices=["Korean", "English"],
value="Korean",
label="์–ธ์–ด / Language"
)
with gr.Row():
submit_btn = gr.Button("๐Ÿš€ ์†Œ์„ค ์ƒ์„ฑ ์‹œ์ž‘", variant="primary", scale=2)
clear_btn = gr.Button("๐Ÿ—‘๏ธ ์ดˆ๊ธฐํ™”", scale=1)
status_text = gr.Textbox(
label="์ƒํƒœ",
interactive=False,
value="๐Ÿ”„ ์ค€๋น„ ์™„๋ฃŒ"
)
# ์„ธ์…˜ ๊ด€๋ฆฌ
with gr.Group(elem_classes=["session-section"]):
gr.Markdown("### ๐Ÿ’พ ์ง„ํ–‰ ์ค‘์ธ ์„ธ์…˜")
session_dropdown = gr.Dropdown(
label="์„ธ์…˜ ์„ ํƒ",
choices=[],
interactive=True
)
with gr.Row():
refresh_btn = gr.Button("๐Ÿ”„ ๋ชฉ๋ก ์ƒˆ๋กœ๊ณ ์นจ", scale=1)
resume_btn = gr.Button("โ–ถ๏ธ ์„ ํƒ ์žฌ๊ฐœ", variant="secondary", scale=1)
auto_recover_btn = gr.Button("โ™ป๏ธ ์ตœ๊ทผ ์„ธ์…˜ ๋ณต๊ตฌ", scale=1)
with gr.Column(scale=2):
with gr.Tab("๐Ÿ“ ์ฐฝ์ž‘ ์ง„ํ–‰"):
stages_display = gr.Markdown(
value="์ฐฝ์ž‘ ๊ณผ์ •์ด ์—ฌ๊ธฐ์— ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค...",
elem_id="stages-display"
)
with gr.Tab("๐Ÿ“– ์™„์„ฑ๋œ ์†Œ์„ค"):
novel_output = gr.Markdown(
value="์™„์„ฑ๋œ ์†Œ์„ค์ด ์—ฌ๊ธฐ์— ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค...",
elem_id="novel-output"
)
with gr.Group(elem_classes=["download-section"]):
gr.Markdown("### ๐Ÿ“ฅ ์†Œ์„ค ๋‹ค์šด๋กœ๋“œ")
with gr.Row():
format_select = gr.Radio(
choices=["DOCX", "TXT"],
value="DOCX" if DOCX_AVAILABLE else "TXT",
label="ํ˜•์‹"
)
download_btn = gr.Button("โฌ‡๏ธ ๋‹ค์šด๋กœ๋“œ", variant="secondary")
download_file = gr.File(
label="๋‹ค์šด๋กœ๋“œ๋œ ํŒŒ์ผ",
visible=False
)
# ์ˆจ๊ฒจ์ง„ ์ƒํƒœ
novel_text_state = gr.State("")
# ์˜ˆ์ œ
with gr.Row():
gr.Examples(
examples=[
["์‹ค์งํ•œ ์ค‘๋…„ ๋‚จ์„ฑ์ด ์ƒˆ๋กœ์šด ์‚ถ์˜ ์˜๋ฏธ๋ฅผ ์ฐพ์•„๊ฐ€๋Š” ์—ฌ์ •"],
["๋„์‹œ์—์„œ ์‹œ๊ณจ๋กœ ์ด์ฃผํ•œ ์ฒญ๋…„์˜ ์ ์‘๊ณผ ์„ฑ์žฅ ์ด์•ผ๊ธฐ"],
["์„ธ ์„ธ๋Œ€๊ฐ€ ํ•จ๊ป˜ ์‚ฌ๋Š” ๊ฐ€์กฑ์˜ ๊ฐˆ๋“ฑ๊ณผ ํ™”ํ•ด"],
["A middle-aged woman's journey to rediscover herself after divorce"],
["The transformation of a cynical journalist through unexpected encounters"],
["์ž‘์€ ์„œ์ ์„ ์šด์˜ํ•˜๋Š” ๋…ธ๋ถ€๋ถ€์˜ ๋งˆ์ง€๋ง‰ 1๋…„"],
["AI ์‹œ๋Œ€์— ์ผ์ž๋ฆฌ๋ฅผ ์žƒ์€ ๋ฒˆ์—ญ๊ฐ€์˜ ์ƒˆ๋กœ์šด ๋„์ „"]
],
inputs=query_input,
label="๐Ÿ’ก ์ฃผ์ œ ์˜ˆ์‹œ"
)
# ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ
def refresh_sessions():
try:
sessions = get_active_sessions("Korean")
return gr.update(choices=sessions)
except Exception as e:
logger.error(f"Error refreshing sessions: {str(e)}")
return gr.update(choices=[])
def handle_auto_recover(language):
session_id, message = auto_recover_session(language)
return session_id, message
# ์ด๋ฒคํŠธ ์—ฐ๊ฒฐ
submit_btn.click(
fn=process_query,
inputs=[query_input, language_select, current_session_id],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
novel_output.change(
fn=lambda x: x,
inputs=[novel_output],
outputs=[novel_text_state]
)
resume_btn.click(
fn=lambda x: x.split("...")[0] if x and "..." in x else x,
inputs=[session_dropdown],
outputs=[current_session_id]
).then(
fn=resume_session,
inputs=[current_session_id, language_select],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
auto_recover_btn.click(
fn=handle_auto_recover,
inputs=[language_select],
outputs=[current_session_id, status_text]
).then(
fn=resume_session,
inputs=[current_session_id, language_select],
outputs=[stages_display, novel_output, status_text, current_session_id]
)
refresh_btn.click(
fn=refresh_sessions,
outputs=[session_dropdown]
)
clear_btn.click(
fn=lambda: ("", "", "๐Ÿ”„ ์ค€๋น„ ์™„๋ฃŒ", "", None),
outputs=[stages_display, novel_output, status_text, novel_text_state, current_session_id]
)
def handle_download(format_type, language, session_id, novel_text):
if not session_id or not novel_text:
return gr.update(visible=False)
file_path = download_novel(novel_text, format_type, language, session_id)
if file_path:
return gr.update(value=file_path, visible=True)
else:
return gr.update(visible=False)
download_btn.click(
fn=handle_download,
inputs=[format_select, language_select, current_session_id, novel_text_state],
outputs=[download_file]
)
# ์‹œ์ž‘ ์‹œ ์„ธ์…˜ ๋กœ๋“œ
interface.load(
fn=refresh_sessions,
outputs=[session_dropdown]
)
return interface
# ๋ฉ”์ธ ์‹คํ–‰
if __name__ == "__main__":
logger.info("AI ์ง„ํ–‰ํ˜• ์žฅํŽธ์†Œ์„ค ์ƒ์„ฑ ์‹œ์Šคํ…œ ์‹œ์ž‘...")
logger.info("=" * 60)
# ํ™˜๊ฒฝ ํ™•์ธ
logger.info(f"API ์—”๋“œํฌ์ธํŠธ: {API_URL}")
logger.info(f"๋ชฉํ‘œ ๋ถ„๋Ÿ‰: {TARGET_WORDS:,}๋‹จ์–ด")
logger.info(f"์ž‘๊ฐ€๋‹น ์ตœ์†Œ ๋ถ„๋Ÿ‰: {MIN_WORDS_PER_WRITER:,}๋‹จ์–ด")
if BRAVE_SEARCH_API_KEY:
logger.info("์›น ๊ฒ€์ƒ‰์ด ํ™œ์„ฑํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
else:
logger.warning("์›น ๊ฒ€์ƒ‰์ด ๋น„ํ™œ์„ฑํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
if DOCX_AVAILABLE:
logger.info("DOCX ๋‚ด๋ณด๋‚ด๊ธฐ๊ฐ€ ํ™œ์„ฑํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
else:
logger.warning("DOCX ๋‚ด๋ณด๋‚ด๊ธฐ๊ฐ€ ๋น„ํ™œ์„ฑํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
logger.info("=" * 60)
# ๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์ดˆ๊ธฐํ™”
logger.info("๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์ดˆ๊ธฐํ™” ์ค‘...")
NovelDatabase.init_db()
logger.info("๋ฐ์ดํ„ฐ๋ฒ ์ด์Šค ์ดˆ๊ธฐํ™” ์™„๋ฃŒ.")
# ์ธํ„ฐํŽ˜์ด์Šค ์ƒ์„ฑ ๋ฐ ์‹คํ–‰
interface = create_interface()
interface.launch(
server_name="0.0.0.0",
server_port=7860,
share=False,
debug=True
)