import gradio as gr
import os
import json
import requests
from datetime import datetime
import time
from typing import List, Dict, Any, Generator, Tuple, Optional
import logging
import re
import tempfile
from pathlib import Path
import sqlite3
import hashlib
import threading
from contextlib import contextmanager
from dataclasses import dataclass, field
from collections import defaultdict

# --- Logging setup ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# --- Document export imports ---
try:
    from docx import Document
    from docx.shared import Inches, Pt, RGBColor
    from docx.enum.text import WD_ALIGN_PARAGRAPH
    from docx.enum.style import WD_STYLE_TYPE
    DOCX_AVAILABLE = True
except ImportError:
    DOCX_AVAILABLE = False
    logger.warning("python-docx not installed. DOCX export will be disabled.")
# --- Environment variables and constants ---
FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "")
BRAVE_SEARCH_API_KEY = os.getenv("BRAVE_SEARCH_API_KEY", "")
API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
MODEL_ID = "dep89a2fld32mcm"
DB_PATH = "novel_sessions_v2.db"

# --- Environment variable validation ---
if not FRIENDLI_TOKEN:
    logger.error("FRIENDLI_TOKEN not set. Application will not work properly.")
    # In a real deployment the program should exit here; a dummy token is used so the demo can start.
    FRIENDLI_TOKEN = "dummy_token_for_testing"
if not BRAVE_SEARCH_API_KEY:
    logger.warning("BRAVE_SEARCH_API_KEY not set. Web search features will be disabled.")

# --- Globals ---
db_lock = threading.Lock()
# Optimized stage pipeline (condensed and hardened to 25 stages)
OPTIMIZED_STAGES = [
    ("director", "🎬 Director: initial plan (with web search)"),
    ("critic", "📝 Critic: plan review (theme and consistency)"),
    ("director", "🎬 Director: revised masterplan"),
] + [
    (f"writer{i}", f"✍️ Writer {i}: draft (pages {(i-1)*3+1}-{i*3})")
    for i in range(1, 11)
] + [
    ("critic", "📝 Critic: mid-point review (consistency and theme)"),
] + [
    (f"writer{i}", f"✍️ Writer {i}: revision (pages {(i-1)*3+1}-{i*3})")
    for i in range(1, 11)
] + [
    ("critic", "📝 Critic: final review and summary report"),
]
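# For reference, the expanded pipeline has 25 stages indexed 0-24; the stage
# arithmetic in get_stage_prompt() below depends on this exact layout:
#   0       director    - initial plan
#   1       critic      - plan review
#   2       director    - revised masterplan
#   3-12    writer1-10  - drafts
#   13      critic      - mid-point review
#   14-23   writer1-10  - revisions
#   24      critic      - final review and report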
# --- Data classes ---
@dataclass
class CharacterState:
    """Current state of a character."""
    name: str
    alive: bool = True
    location: str = ""
    injuries: List[str] = field(default_factory=list)
    emotional_state: str = ""
    relationships: Dict[str, str] = field(default_factory=dict)
    last_seen_chapter: int = 0
    description: str = ""
    role: str = ""

@dataclass
class PlotPoint:
    """A single plot point."""
    chapter: int
    event_type: str
    description: str
    characters_involved: List[str]
    impact_level: int
    timestamp: str = ""

@dataclass
class TimelineEvent:
    """An event on the story timeline."""
    chapter: int
    time_reference: str
    event_description: str
    duration: str = ""
    relative_time: str = ""
# --- Core logic classes ---
class ConsistencyTracker:
    """Tracks narrative consistency across chapters."""

    def __init__(self):
        self.character_states: Dict[str, CharacterState] = {}
        self.plot_points: List[PlotPoint] = []
        self.timeline_events: List[TimelineEvent] = []
        self.locations: Dict[str, str] = {}
        self.established_facts: List[str] = []
        self.content_hashes: Dict[str, int] = {}  # sentence hash -> chapter where it appeared

    def register_character(self, character: CharacterState):
        """Register a new character."""
        self.character_states[character.name] = character
        logger.info(f"Character registered: {character.name}")

    def update_character_state(self, name: str, chapter: int, updates: Dict[str, Any]):
        """Update a character's state."""
        if name not in self.character_states:
            self.register_character(CharacterState(name=name, last_seen_chapter=chapter))
        char = self.character_states[name]
        for key, value in updates.items():
            if hasattr(char, key):
                setattr(char, key, value)
        char.last_seen_chapter = chapter

    def add_plot_point(self, plot_point: PlotPoint):
        """Add a plot point."""
        plot_point.timestamp = datetime.now().isoformat()
        self.plot_points.append(plot_point)

    def check_repetition(self, content: str, current_chapter: int) -> Tuple[bool, str]:
        """Improved repeated-content check."""
        sentences = re.split(r'[.!?]+', content)
        for sentence in sentences:
            sentence_strip = sentence.strip()
            if len(sentence_strip) > 20:  # ignore very short sentences
                sentence_hash = hashlib.md5(sentence_strip.encode('utf-8')).hexdigest()
                if sentence_hash in self.content_hashes:
                    previous_chapter = self.content_hashes[sentence_hash]
                    # Repetition in the immediately preceding chapter may be intentional,
                    # so only treat a gap of two or more chapters as an error.
                    if current_chapter > previous_chapter + 1:
                        return True, f"Repeated sentence (similar to chapter {previous_chapter}): {sentence_strip[:50]}..."
        # Store hashes for the new content
        for sentence in sentences:
            sentence_strip = sentence.strip()
            if len(sentence_strip) > 20:
                sentence_hash = hashlib.md5(sentence_strip.encode('utf-8')).hexdigest()
                self.content_hashes[sentence_hash] = current_chapter
        return False, ""
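    # Illustrative sketch of the dedup rule above (the sample sentence is made up):
    #   tracker = ConsistencyTracker()
    #   s = "The harbor lights flickered out one by one that night."
    #   tracker.check_repetition(s, current_chapter=1)  # (False, "") - hash stored for ch. 1
    #   tracker.check_repetition(s, current_chapter=2)  # (False, "") - adjacent chapter tolerated
    #   tracker.check_repetition(s, current_chapter=4)  # (True, "Repeated sentence ...")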
    def validate_consistency(self, chapter: int, content: str) -> List[str]:
        """Validate the consistency of new content."""
        errors = []
        # Check whether a dead character appears
        for char_name, char_state in self.character_states.items():
            if char_name.lower() in content.lower() and not char_state.alive:
                errors.append(f"⚠️ Dead character '{char_name}' appears in this chapter.")
        # Check for repeated content
        is_repetition, repeat_msg = self.check_repetition(content, chapter)
        if is_repetition:
            errors.append(f"🔁 {repeat_msg}")
        return errors

    def get_character_summary(self, chapter: int) -> str:
        """Character summary as of the current chapter."""
        summary = "\n=== Character status (last 2 chapters) ===\n"
        active_chars = [char for char in self.character_states.values() if char.last_seen_chapter >= chapter - 2]
        if not active_chars:
            return "\n(No major character information yet.)\n"
        for char in active_chars:
            status = "alive" if char.alive else "dead"
            summary += f"• {char.name}: {status}"
            if char.alive and char.location:
                summary += f" (location: {char.location})"
            if char.injuries:
                summary += f" (injuries: {', '.join(char.injuries[-1:])})"
            summary += "\n"
        return summary

    def get_plot_summary(self, chapter: int) -> str:
        """Plot summary."""
        summary = "\n=== Recent major events ===\n"
        recent_events = [p for p in self.plot_points if p.chapter >= chapter - 2]
        if not recent_events:
            return "\n(No major events yet.)\n"
        for event in recent_events[-3:]:  # show only the latest 3
            summary += f"• [Chapter {event.chapter}] {event.description}\n"
        return summary
class WebSearchIntegration:
    """Web search (used only in the director stage)."""

    def __init__(self):
        self.brave_api_key = BRAVE_SEARCH_API_KEY
        self.search_url = "https://api.search.brave.com/res/v1/web/search"
        self.enabled = bool(self.brave_api_key)

    def search(self, query: str, count: int = 3, language: str = "en") -> List[Dict]:
        """Run a web search."""
        if not self.enabled:
            return []
        headers = {
            "Accept": "application/json",
            "X-Subscription-Token": self.brave_api_key
        }
        params = {
            "q": query,
            "count": count,
            "search_lang": "ko" if language == "Korean" else "en",
            "text_decorations": False,
            "safesearch": "moderate"
        }
        try:
            response = requests.get(self.search_url, headers=headers, params=params, timeout=10)
            response.raise_for_status()
            results = response.json().get("web", {}).get("results", [])
            logger.info(f"Web search succeeded: {len(results)} results for '{query}'")
            return results
        except requests.exceptions.RequestException as e:
            logger.error(f"Web search API error: {e}")
            return []

    def extract_relevant_info(self, results: List[Dict], max_chars: int = 1500) -> str:
        """Extract relevant information from search results."""
        if not results:
            return ""
        extracted = []
        total_chars = 0
        for i, result in enumerate(results[:3], 1):
            title = result.get("title", "")
            description = result.get("description", "")
            url = result.get("url", "")
            info = f"[{i}] {title}\n{description}\nSource: {url}\n"
            if total_chars + len(info) < max_chars:
                extracted.append(info)
                total_chars += len(info)
            else:
                break
        return "\n---\n".join(extracted)
class NovelDatabase:
    """Session-management database for the novel pipeline."""

    @staticmethod
    def init_db():
        with sqlite3.connect(DB_PATH) as conn:
            conn.execute("PRAGMA journal_mode=WAL")
            cursor = conn.cursor()
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS sessions (
                    session_id TEXT PRIMARY KEY,
                    user_query TEXT NOT NULL,
                    language TEXT NOT NULL,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    status TEXT DEFAULT 'active',
                    current_stage INTEGER DEFAULT 0,
                    final_novel TEXT,
                    consistency_report TEXT
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS stages (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    stage_number INTEGER NOT NULL,
                    stage_name TEXT NOT NULL,
                    role TEXT NOT NULL,
                    content TEXT,
                    word_count INTEGER DEFAULT 0,
                    status TEXT DEFAULT 'pending',
                    consistency_score REAL DEFAULT 0.0,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id),
                    UNIQUE(session_id, stage_number)
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS character_states (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    character_name TEXT NOT NULL,
                    chapter INTEGER NOT NULL,
                    is_alive BOOLEAN DEFAULT TRUE,
                    location TEXT,
                    injuries TEXT,
                    emotional_state TEXT,
                    description TEXT,
                    created_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id)
                )
            ''')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_session_id ON stages(session_id)')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_stage_number ON stages(stage_number)')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_char_session ON character_states(session_id)')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_session_status ON sessions(status)')
            conn.commit()

    @staticmethod
    @contextmanager
    def get_db():
        with db_lock:
            conn = sqlite3.connect(DB_PATH, timeout=30.0)
            conn.row_factory = sqlite3.Row
            try:
                yield conn
            finally:
                conn.close()
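    # Note: get_db() is a @contextmanager generator, so callers use it as
    # `with NovelDatabase.get_db() as conn: ...`. Access is serialized behind
    # db_lock and the connection is always closed, even if the body raises.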
    @staticmethod
    def create_session(user_query: str, language: str) -> str:
        session_id = hashlib.md5(f"{user_query}{datetime.now()}".encode()).hexdigest()
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'INSERT INTO sessions (session_id, user_query, language) VALUES (?, ?, ?)',
                (session_id, user_query, language)
            )
            conn.commit()
        return session_id

    @staticmethod
    def save_stage(session_id: str, stage_number: int, stage_name: str,
                   role: str, content: str, status: str = 'complete',
                   consistency_score: float = 0.0):
        word_count = len(content.split()) if content else 0
        with NovelDatabase.get_db() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                INSERT INTO stages (session_id, stage_number, stage_name, role, content, word_count, status, consistency_score)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(session_id, stage_number)
                DO UPDATE SET content=?, word_count=?, status=?, stage_name=?, consistency_score=?, updated_at=datetime('now')
            ''', (session_id, stage_number, stage_name, role, content, word_count, status, consistency_score,
                  content, word_count, status, stage_name, consistency_score))
            cursor.execute(
                "UPDATE sessions SET updated_at = datetime('now'), current_stage = ? WHERE session_id = ?",
                (stage_number, session_id)
            )
            conn.commit()
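    # Note: save_stage() is an UPSERT. The UNIQUE(session_id, stage_number)
    # constraint routes re-runs of a stage into the ON CONFLICT ... DO UPDATE
    # branch, which is what makes resuming an interrupted session idempotent.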
    @staticmethod
    def get_session(session_id: str) -> Optional[Dict]:
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute('SELECT * FROM sessions WHERE session_id = ?', (session_id,)).fetchone()
            return dict(row) if row else None

    @staticmethod
    def get_latest_active_session() -> Optional[Dict]:
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute("SELECT * FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 1").fetchone()
            return dict(row) if row else None

    @staticmethod
    def get_stages(session_id: str) -> List[Dict]:
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute('SELECT * FROM stages WHERE session_id = ? ORDER BY stage_number', (session_id,)).fetchall()
            return [dict(row) for row in rows]

    @staticmethod
    def get_writer_content(session_id: str) -> str:
        with NovelDatabase.get_db() as conn:
            all_content = []
            for writer_num in range(1, 11):
                # Pick each writer's latest revision stage (stage names are set in OPTIMIZED_STAGES)
                row = conn.cursor().execute(
                    "SELECT content FROM stages WHERE session_id = ? AND role = ? AND stage_name LIKE '%revision%' ORDER BY stage_number DESC LIMIT 1",
                    (session_id, f'writer{writer_num}')
                ).fetchone()
                if row and row['content']:
                    all_content.append(row['content'].strip())
            return '\n\n'.join(all_content)

    @staticmethod
    def update_final_novel(session_id: str, final_novel: str, consistency_report: str = ""):
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                "UPDATE sessions SET final_novel = ?, status = 'complete', updated_at = datetime('now'), consistency_report = ? WHERE session_id = ?",
                (final_novel, consistency_report, session_id)
            )
            conn.commit()

    @staticmethod
    def get_active_sessions() -> List[Dict]:
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                "SELECT session_id, user_query, language, created_at, current_stage FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 10"
            ).fetchall()
            return [dict(row) for row in rows]
class NovelWritingSystem:
    """Optimized multi-agent novel writing system."""

    def __init__(self):
        self.token = FRIENDLI_TOKEN
        self.api_url = API_URL
        self.model_id = MODEL_ID
        self.consistency_tracker = ConsistencyTracker()
        self.web_search = WebSearchIntegration()
        self.current_session_id = None
        NovelDatabase.init_db()

    def create_headers(self):
        """Build API headers."""
        return {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"}

    # --- Prompt builders (Thematic Guardian concept integrated) ---
    def create_director_initial_prompt(self, user_query: str, language: str) -> str:
        """Director's initial planning prompt (web search plus hard thematic constraints)."""
        search_results_str = ""
        if self.web_search.enabled:
            queries = [f"{user_query} novel setting", f"{user_query} background information"]
            search_results = self.web_search.search(queries[0], count=2, language=language)
            if search_results:
                search_results_str = self.web_search.extract_relevant_info(search_results)
        lang_prompts = {
            "Korean": {
                "title": "당신은 30페이지 분량의 중편 소설을 기획하는 문학 감독자입니다.",
                "user_theme": "사용자 주제",
                "plan_instruction": "다음 요소들을 포함한 상세한 소설 기획을 작성하세요:",
                "theme_section": "1. **주제와 장르 설정**\n   - 핵심 주제와 메시지 (사용자 의도 깊이 반영)\n   - 장르 및 분위기\n   - 독자층 고려사항",
                "char_section": "2. **주요 등장인물** (3-5명)\n   | 이름 | 역할 | 성격 | 배경 | 목표 | 갈등 |",
                "setting_section": "3. **배경 설정**\n   - 시공간적 배경\n   - 사회적/문화적 환경\n   - 주요 장소들",
                "plot_section": "4. **플롯 구조** (10개 파트, 각 3페이지 분량)\n   | 파트 | 페이지 | 주요 사건 | 긴장도 | 캐릭터 발전 |",
                "guideline_section": "5. **작가별 지침**\n   - 일관성 유지를 위한 핵심 설정\n   - 문체와 톤 가이드라인",
                "constraint_title": "⚠️ 매우 중요한 지시사항: 핵심 제약 조건 ⚠️",
                "constraint_body": "이 소설은 **AI로 인해 모든 것이 쉽게 해결되는 긍정적이고 단순한 이야기가 아닙니다.**\n반드시 사용자 주제인 '{query}'에 담긴 **핵심 감정(예: 불안, 소외감, 상실감, 세대 갈등 등)을 중심으로 서사를 전개해야 합니다.**\nAI나 특정 기술은 편리한 도구가 아니라, 주인공에게 **갈등과 상실감을 안겨주는 핵심 요인**으로 작용해야 합니다.\n이 제약 조건을 절대 벗어나지 마십시오.",
                "final_instruction": "창의적이고 깊이 있는 소설이 될 수 있도록 상세하게 기획하세요."
            },
            "English": {
                "title": "You are a literary director planning a 30-page novella.",
                "user_theme": "User Theme",
                "plan_instruction": "Create a detailed novel plan including:",
                "theme_section": "1. **Theme and Genre**\n   - Core theme and message (Deeply reflect user's intent)\n   - Genre and atmosphere",
                "char_section": "2. **Main Characters** (3-5)\n   | Name | Role | Personality | Background | Goal | Conflict |",
                "setting_section": "3. **Setting**\n   - Time and place\n   - Social/cultural environment",
                "plot_section": "4. **Plot Structure** (10 parts, ~3 pages each)\n   | Part | Pages | Main Events | Tension | Character Development |",
                "guideline_section": "5. **Writer Guidelines**\n   - Key settings for consistency\n   - Style and tone guidelines",
                "constraint_title": "⚠️ CRITICAL INSTRUCTION: CORE CONSTRAINTS ⚠️",
                "constraint_body": "This is **NOT a simple, positive story where AI solves everything.**\nYou must develop the narrative around the core emotions of the user's theme: '{query}' (e.g., anxiety, alienation, loss, generational conflict).\nAI or specific technology should be the **root cause of the protagonist's conflict and loss**, not a convenient tool.\nDo not deviate from this constraint.",
                "final_instruction": "Plan in detail for a creative and profound novel."
            }
        }
        p = lang_prompts[language]
        return f"{p['title']}\n\n{p['user_theme']}: {user_query}\n\n{search_results_str}\n\n{p['plan_instruction']}\n\n{p['theme_section']}\n\n{p['char_section']}\n\n{p['setting_section']}\n\n{p['plot_section']}\n\n{p['guideline_section']}\n\n---\n{p['constraint_title']}\n{p['constraint_body'].format(query=user_query)}\n---\n\n{p['final_instruction']}"
    def create_critic_director_prompt(self, director_plan: str, user_query: str, language: str) -> str:
        """Critic's review prompt for the director's plan (theme consistency emphasized)."""
        lang_prompts = {
            "Korean": {
                "title": "당신은 문학 비평가입니다. 감독자의 소설 기획을 '주제 일관성'과 '기술적 일관성' 관점에서 검토하세요.",
                "theme_check": f"**1. 주제 일관성 (가장 중요)**\n   - **원래 주제:** '{user_query}'\n   - 기획안이 주제의 핵심 감정(불안, 상실감 등)에서 벗어나 긍정적이거나 단순한 방향으로 흐르지 않았습니까?\n   - AI나 기술이 갈등의 원인이 아닌 단순 해결사로 묘사되지 않았습니까?",
                "consistency_check": "**2. 기술적 일관성**\n   - 캐릭터 설정의 모순, 플롯의 논리적 허점, 시간/공간 설정의 문제점을 검토하세요.",
                "instruction": "위 항목들을 중심으로 구체적인 문제점과 개선안을 제시하세요."
            },
            "English": {
                "title": "You are a literary critic. Review the director's plan from the perspectives of 'Thematic Consistency' and 'Technical Consistency'.",
                "theme_check": f"**1. Thematic Consistency (Most Important)**\n   - **Original Theme:** '{user_query}'\n   - Does the plan drift from the core emotions (e.g., anxiety, loss) towards an overly positive or simplistic narrative?\n   - Is AI depicted as a simple problem-solver instead of the root of the conflict?",
                "consistency_check": "**2. Technical Consistency**\n   - Review for character contradictions, plot holes, and timeline/setting issues.",
                "instruction": "Provide specific problems and suggestions for improvement based on the above."
            }
        }
        p = lang_prompts[language]
        return f"{p['title']}\n\n**Director's Plan:**\n{director_plan}\n\n---\n**Review Items:**\n{p['theme_check']}\n\n{p['consistency_check']}\n\n{p['instruction']}"
    def create_director_revision_prompt(self, initial_plan: str, critic_feedback: str, user_query: str, language: str) -> str:
        """Director revision prompt (re-emphasizes the core thematic constraints)."""
        return f"""As the director, revise the novel plan to reflect the critic's feedback.

**Original theme:** {user_query}

**Initial plan:**
{initial_plan}

**Critic feedback:**
{critic_feedback}

**Revision guidelines:**
- Resolve every consistency issue and thematic drift the critic pointed out.
- Restate the **core constraints** and flesh out the plot so the whole novel keeps its tone of anxiety and loss.
- Produce a clear, detailed final masterplan that ten writers can follow without confusion.
"""
    def create_writer_prompt(self, writer_number: int, director_plan: str, previous_content_summary: str, user_query: str, language: str) -> str:
        """Writer prompt (includes a theme reminder)."""
        pages_start = (writer_number - 1) * 3 + 1
        pages_end = writer_number * 3
        lang_prompts = {
            "Korean": {
                "title": f"당신은 작가 {writer_number}번입니다. 소설의 {pages_start}-{pages_end} 페이지를 작성하세요.",
                "plan": "감독자 마스터플랜",
                "prev_summary": "이전 내용 요약",
                "guidelines": "**작성 지침:**\n1. **분량**: 1,400-1,500 단어 내외\n2. **연결성**: 요약된 이전 내용과 자연스럽게 연결\n3. **일관성**: 캐릭터 설정과 상태, 플롯 구조를 반드시 따를 것",
                "reminder_title": "⭐ 잊지 마세요 (테마 리마인더)",
                "reminder_body": f"이 소설의 핵심은 '{user_query}'에 담긴 **불안, 소외, 상실감**입니다. 긍정적인 해결을 서두르지 말고, 주인공의 내면 갈등을 심도 있게 묘사하는 데 집중하세요.",
                "final_instruction": "창의적이면서도 주제와 일관성을 절대 잊지 마십시오."
            },
            "English": {
                "title": f"You are Writer #{writer_number}. Write pages {pages_start}-{pages_end} of the novella.",
                "plan": "Director's Masterplan",
                "prev_summary": "Previous Content Summary",
                "guidelines": "**Writing Guidelines:**\n1. **Length**: Approx. 1,400-1,500 words\n2. **Connectivity**: Connect naturally with the summarized previous content.\n3. **Consistency**: Strictly follow character settings, states, and plot structure.",
                "reminder_title": "⭐ REMINDER (THEME)",
                "reminder_body": f"The core of this novel is the **anxiety, alienation, and loss** from the theme '{user_query}'. Do not rush to a positive resolution; focus on deeply describing the protagonist's internal conflict.",
                "final_instruction": "Be creative, but never lose consistency and the core theme."
            }
        }
        p = lang_prompts[language]
        consistency_info = self.consistency_tracker.get_character_summary(writer_number) + self.consistency_tracker.get_plot_summary(writer_number)
        return f"{p['title']}\n\n**{p['plan']}:**\n{director_plan}\n\n{consistency_info}\n\n**{p['prev_summary']}:**\n{previous_content_summary}\n\n---\n{p['guidelines']}\n\n**{p['reminder_title']}**\n{p['reminder_body']}\n---\n\n{p['final_instruction']}"
    def create_critic_consistency_prompt(self, all_content: str, user_query: str, language: str) -> str:
        """Critic mid-point review prompt (with reinforced theme checks)."""
        return f"""You are a critic specializing in consistency review. Review what has been written so far.

**Original theme:** {user_query}

**Content so far (last 3,000 characters):**
{all_content[-3000:]}

**Review items:**
1. **Thematic consistency (most important):** Check whether the content has drifted from the theme's dark emotional register; if so, suggest corrections.
2. **Technical consistency:** Find breaks in character, plot, and setting continuity, and logical errors.
3. **Repetition:** Check for semantically duplicated scenes or expressions.

**Output:** Present the problems found and concrete revision suggestions as a list.
"""
    def create_writer_revision_prompt(self, writer_number: int, initial_content: str, consistency_feedback: str, language: str) -> str:
        """Writer revision prompt."""
        return f"""As Writer #{writer_number}, revise your draft to reflect the critic's feedback.

**Initial draft:**
{initial_content}

**Critic feedback:**
{consistency_feedback}

**Revision guidelines:**
- Resolve every thematic-drift and consistency issue that was pointed out.
- Keep the length (1,400-1,500 words) while raising the quality.
- Submit the revised final version.
"""
    def create_critic_final_prompt(self, complete_novel: str, language: str) -> str:
        """Final critic review and report prompt."""
        return f"""Write a comprehensive report on the finished novel's consistency and completeness.

**Finished novel (last 2,000 characters):**
{complete_novel[-2000:]}

**Report sections:**
1. **Overall consistency:** Scores (1-10) for characters, plot, setting, and theme retention.
2. **Remaining issues:** Minor problems that are still present.
3. **Strengths:** Especially well-kept consistency and standout thematic passages.
4. **Final assessment:** Overall completeness and the likely impact on readers.
"""
    # --- LLM call functions ---
    def call_llm_sync(self, messages: List[Dict[str, str]], role: str, language: str) -> str:
        """Synchronous LLM call (for internal use such as summaries)."""
        full_content = ""
        for chunk in self.call_llm_streaming(messages, role, language):
            full_content += chunk
        if full_content.startswith("❌"):
            raise Exception(f"LLM Sync Call Failed: {full_content}")
        return full_content

    def call_llm_streaming(self, messages: List[Dict[str, str]], role: str, language: str) -> Generator[str, None, None]:
        """Streaming LLM call with defensive error handling and debugging."""
        try:
            system_prompts = self.get_system_prompts(language)
            full_messages = [{"role": "system", "content": system_prompts.get(role, "You are a helpful assistant.")}, *messages]
            payload = {
                "model": self.model_id,
                "messages": full_messages,
                "max_tokens": 10000,
                "temperature": 0.75,
                "top_p": 0.9,
                "presence_penalty": 0.3,
                "frequency_penalty": 0.2,
                "stream": True,
                "stream_options": {"include_usage": True}
            }
            logger.info(f"[{role}] starting API stream")
            # API call
            response = requests.post(
                self.api_url,
                headers=self.create_headers(),
                json=payload,
                stream=True,
                timeout=180
            )
            # Check the status code
            if response.status_code != 200:
                logger.error(f"API response error: {response.status_code}")
                logger.error(f"Response body: {response.text[:500]}")
                yield f"❌ API error (status code: {response.status_code})"
                return
            response.raise_for_status()
            # Process the stream
            buffer = ""
            total_content = ""
            chunk_count = 0
            error_count = 0
            for line in response.iter_lines():
                if not line:
                    continue
                try:
                    line_str = line.decode('utf-8').strip()
                    # Check SSE format
                    if not line_str.startswith("data: "):
                        continue
                    data_str = line_str[6:]  # strip "data: "
                    # Check for end of stream
                    if data_str == "[DONE]":
                        logger.info(f"[{role}] streaming complete - {len(total_content)} characters total")
                        break
                    # Parse JSON
                    try:
                        data = json.loads(data_str)
                    except json.JSONDecodeError:
                        logger.warning(f"JSON parse failure: {data_str[:100]}")
                        continue
                    # Safely inspect the choices array
                    choices = data.get("choices", None)
                    if not choices or not isinstance(choices, list) or len(choices) == 0:
                        # Check for an error response
                        if "error" in data:
                            error_msg = data.get("error", {}).get("message", "Unknown error")
                            logger.error(f"API error: {error_msg}")
                            yield f"❌ API error: {error_msg}"
                            return
                        continue
                    # Extract content from the delta
                    delta = choices[0].get("delta", {})
                    content = delta.get("content", "")
                    if content:
                        buffer += content
                        total_content += content
                        chunk_count += 1
                        # Yield every ~100 characters or on a newline
                        if len(buffer) >= 100 or '\n' in buffer:
                            yield buffer
                            buffer = ""
                            time.sleep(0.01)  # brief pause so the UI can update
                except Exception as e:
                    error_count += 1
                    logger.error(f"Chunk processing error #{error_count}: {str(e)}")
                    if error_count > 10:  # abort after too many errors
                        yield "❌ Too many errors while streaming"
                        return
                    continue
            # Flush the remaining buffer
            if buffer:
                yield buffer
            # Verify the result
            if chunk_count == 0:
                logger.error(f"[{role}] received no content at all")
                yield "❌ No response received from the API."
            else:
                logger.info(f"[{role}] received {chunk_count} chunks, {len(total_content)} characters total")
        except requests.exceptions.Timeout:
            logger.error("API request timed out")
            yield "❌ API request timed out."
        except requests.exceptions.ConnectionError:
            logger.error("API connection failed")
            yield "❌ Could not connect to the API server."
        except Exception as e:
            logger.error(f"Unexpected error: {type(e).__name__}: {str(e)}", exc_info=True)
            yield f"❌ Error: {str(e)}"
    def get_system_prompts(self, language: str) -> Dict[str, str]:
        """Per-role system prompts."""
        base_prompts = {
            "Korean": {
                "director": "당신은 창의적이고 체계적인 소설 기획 전문가입니다. 흥미롭고 일관성 있는 스토리를 설계하세요.",
                "critic": "당신은 일관성 검토 전문 비평가입니다. 캐릭터, 플롯, 설정의 일관성을 철저히 점검하고 개선 방안을 제시하세요.",
                "writer_base": "당신은 전문 소설 작가입니다. 주어진 지침에 따라 몰입감 있고 일관성 있는 내용을 작성하세요."
            },
            "English": {
                "director": "You are a creative and systematic novel planning expert. Design engaging and consistent stories.",
                "critic": "You are a consistency review specialist critic. Thoroughly check character, plot, and setting consistency and suggest improvements.",
                "writer_base": "You are a professional novel writer. Write immersive and consistent content according to the given guidelines."
            }
        }
        prompts = base_prompts[language].copy()
        # Writer-specific prompts
        if language == "Korean":
            prompts["writer1"] = "당신은 소설의 매력적인 시작을 담당하는 작가입니다. 독자를 사로잡는 도입부를 만드세요."
            prompts["writer10"] = "당신은 완벽한 결말을 만드는 작가입니다. 독자에게 깊은 여운을 남기는 마무리를 하세요."
        else:
            prompts["writer1"] = "You are a writer responsible for the captivating beginning. Create an opening that hooks readers."
            prompts["writer10"] = "You are a writer who creates the perfect ending. Create a conclusion that leaves readers with deep resonance."
        # writer2-9 use the base prompt
        for i in range(2, 10):
            prompts[f"writer{i}"] = prompts["writer_base"]
        return prompts
    # --- Main process ---
    def process_novel_stream(self, query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, List[Dict[str, Any]], str], None, None]:
        """Streaming novel-generation process (hardened logic)."""
        try:
            resume_from_stage = 0
            if session_id:
                self.current_session_id = session_id
                session = NovelDatabase.get_session(session_id)
                if session:
                    query = session['user_query']
                    language = session['language']
                    resume_from_stage = session['current_stage'] + 1
                    logger.info(f"Resuming session {session_id} from stage {resume_from_stage}")
            else:
                self.current_session_id = NovelDatabase.create_session(query, language)
                logger.info(f"Created new session: {self.current_session_id}")
            stages = []
            if resume_from_stage > 0:
                stages = [{
                    "name": s['stage_name'], "status": s['status'], "content": s.get('content', ''),
                    "consistency_score": s.get('consistency_score', 0.0)
                } for s in NovelDatabase.get_stages(self.current_session_id)]
            for stage_idx in range(resume_from_stage, len(OPTIMIZED_STAGES)):
                role, stage_name = OPTIMIZED_STAGES[stage_idx]
                if stage_idx >= len(stages):
                    stages.append({"name": stage_name, "status": "active", "content": "", "consistency_score": 0.0})
                else:
                    stages[stage_idx]["status"] = "active"
                yield "", stages, self.current_session_id
                prompt = self.get_stage_prompt(stage_idx, role, query, language, stages)
                stage_content = ""
                for chunk in self.call_llm_streaming([{"role": "user", "content": prompt}], role, language):
                    stage_content += chunk
                    stages[stage_idx]["content"] = stage_content
                    yield "", stages, self.current_session_id
                consistency_score = 0.0
                if role.startswith("writer"):
                    writer_num = int(re.search(r'\d+', role).group())
                    all_previous = self.get_all_content(stages, stage_idx)
                    errors = self.consistency_tracker.validate_consistency(writer_num, stage_content)
                    consistency_score = max(0, 10 - len(errors) * 2)
                    stages[stage_idx]["consistency_score"] = consistency_score
                stages[stage_idx]["status"] = "complete"
                NovelDatabase.save_stage(
                    self.current_session_id, stage_idx, stage_name, role,
                    stage_content, "complete", consistency_score
                )
                yield "", stages, self.current_session_id
            final_novel = NovelDatabase.get_writer_content(self.current_session_id)
            final_report = self.generate_consistency_report(final_novel, language)
            NovelDatabase.update_final_novel(self.current_session_id, final_novel, final_report)
            yield f"✅ Novel complete! {len(final_novel.split())} words total", stages, self.current_session_id
        except Exception as e:
            logger.error(f"Novel generation process error: {e}", exc_info=True)
            yield f"❌ Error: {e}", stages if 'stages' in locals() else [], self.current_session_id
    def get_stage_prompt(self, stage_idx: int, role: str, query: str, language: str, stages: List[Dict]) -> str:
        """Build the prompt for each stage (with summarization and theme carry-over)."""
        if stage_idx == 0:
            return self.create_director_initial_prompt(query, language)
        if stage_idx == 1:
            return self.create_critic_director_prompt(stages[0]["content"], query, language)
        if stage_idx == 2:
            return self.create_director_revision_prompt(stages[0]["content"], stages[1]["content"], query, language)
        master_plan = stages[2]["content"]
        if 3 <= stage_idx <= 12:  # writer drafts
            writer_num = stage_idx - 2
            previous_content = self.get_all_content(stages, stage_idx)
            summary = self.create_summary(previous_content, language)
            return self.create_writer_prompt(writer_num, master_plan, summary, query, language)
        if stage_idx == 13:  # critic mid-point review
            all_content = self.get_all_content(stages, stage_idx)
            return self.create_critic_consistency_prompt(all_content, query, language)
        if 14 <= stage_idx <= 23:  # writer revisions
            writer_num = stage_idx - 13
            initial_content = stages[2 + writer_num]["content"]
            feedback = stages[13]["content"]
            return self.create_writer_revision_prompt(writer_num, initial_content, feedback, language)
        if stage_idx == 24:  # final review
            complete_novel = self.get_all_writer_content(stages)
            return self.create_critic_final_prompt(complete_novel, language)
        return ""
    def create_summary(self, content: str, language: str) -> str:
        """Use the LLM to summarize the previous content."""
        if not content.strip():
            return "이전 내용이 없습니다." if language == "Korean" else "No previous content."
        prompt_text = "다음 소설 내용을 3~5개의 핵심적인 문장으로 요약해줘. 다음 작가가 이야기를 이어가는 데 필요한 핵심 정보(등장인물의 현재 상황, 감정, 마지막 사건)를 포함해야 해."
        if language != "Korean":
            prompt_text = "Summarize the following novel content in 3-5 key sentences. Include crucial information for the next writer to continue the story (characters' current situation, emotions, and the last major event)."
        summary_prompt = f"{prompt_text}\n\n---\n{content[-2000:]}"
        try:
            summary = self.call_llm_sync([{"role": "user", "content": summary_prompt}], "critic", language)
            return summary
        except Exception as e:
            logger.error(f"Summary generation failed: {e}")
            return content[-1000:]
    def get_all_content(self, stages: List[Dict], current_stage: int) -> str:
        """Concatenate all content produced so far."""
        return "\n\n".join(s["content"] for i, s in enumerate(stages) if i < current_stage and s["content"])

    def get_all_writer_content(self, stages: List[Dict]) -> str:
        """Concatenate all writers' final revisions."""
        return "\n\n".join(s["content"] for i, s in enumerate(stages) if 14 <= i <= 23 and s["content"])

    def generate_consistency_report(self, complete_novel: str, language: str) -> str:
        """Generate the final report (LLM call)."""
        prompt = self.create_critic_final_prompt(complete_novel, language)
        try:
            report = self.call_llm_sync([{"role": "user", "content": prompt}], "critic", language)
            return report
        except Exception as e:
            logger.error(f"Final report generation failed: {e}")
            return "An error occurred while generating the report."
# --- Utility functions ---
def process_query(query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, str, str, str], None, None]:
    """Main query handler."""
    if not query.strip():
        yield "", "", "❌ Please enter a theme.", session_id
        return
    system = NovelWritingSystem()
    stages_markdown = ""
    novel_content = ""
    for status, stages, current_session_id in system.process_novel_stream(query, language, session_id):
        stages_markdown = format_stages_display(stages)
        # Fetch the final novel once the last writing stages are complete
        if stages and all(s.get("status") == "complete" for s in stages[-10:]):
            novel_content = NovelDatabase.get_writer_content(current_session_id)
            novel_content = format_novel_display(novel_content)
        yield stages_markdown, novel_content, status or "🔄 Processing...", current_session_id

def get_active_sessions(language: str) -> List[str]:
    """List active sessions."""
    sessions = NovelDatabase.get_active_sessions()
    return [f"{s['session_id'][:8]}... - {s['user_query'][:50]}... ({s['created_at']})"
            for s in sessions]

def auto_recover_session(language: str) -> Tuple[Optional[str], str]:
    """Automatically recover the most recent active session."""
    latest_session = NovelDatabase.get_latest_active_session()
    if latest_session:
        return latest_session['session_id'], f"Session {latest_session['session_id'][:8]}... recovered"
    return None, "No session to recover."
def resume_session(session_id: str, language: str) -> Generator[Tuple[str, str, str, str], None, None]:
    """Resume a session."""
    if not session_id:
        yield "", "", "❌ No session ID.", session_id
        return
    # Extract the raw session ID from a dropdown entry
    if "..." in session_id:
        session_id = session_id.split("...")[0]
    session = NovelDatabase.get_session(session_id)
    if not session:
        yield "", "", "❌ Session not found.", None
        return
    # Resume through process_query
    yield from process_query(session['user_query'], session['language'], session_id)

def download_novel(novel_text: str, format_type: str, language: str, session_id: str) -> Optional[str]:
    """Create the download file for the novel."""
    if not novel_text or not session_id:
        return None
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = f"novel_{session_id[:8]}_{timestamp}"
    try:
        if format_type == "DOCX" and DOCX_AVAILABLE:
            return export_to_docx(novel_text, filename, language)
        else:
            return export_to_txt(novel_text, filename)
    except Exception as e:
        logger.error(f"File creation failed: {e}")
        return None
def format_stages_display(stages: List[Dict]) -> str:
    """Render per-stage progress as markdown."""
    markdown = "## 🎬 Progress\n\n"
    for i, stage in enumerate(stages):
        status_icon = "✅" if stage['status'] == 'complete' else "🔄" if stage['status'] == 'active' else "⏳"
        markdown += f"{status_icon} **{stage['name']}**"
        if stage.get('consistency_score', 0) > 0:
            markdown += f" (consistency: {stage['consistency_score']:.1f}/10)"
        markdown += "\n"
        if stage['content']:
            preview = stage['content'][:200] + "..." if len(stage['content']) > 200 else stage['content']
            markdown += f"> {preview}\n\n"
    return markdown

def format_novel_display(novel_text: str) -> str:
    """Render the novel as markdown."""
    if not novel_text:
        return "Nothing has been written yet."
    # Add page separators
    pages = novel_text.split('\n\n')
    formatted = "# 📖 The Finished Novel\n\n"
    for i, page in enumerate(pages):
        if page.strip():
            formatted += f"### Page {i+1}\n\n{page}\n\n---\n\n"
    return formatted
def export_to_docx(content: str, filename: str, language: str) -> str:
    """Export to a DOCX file."""
    doc = Document()
    # Title
    title = doc.add_heading('AI Collaborative Novel', 0)
    title.alignment = WD_ALIGN_PARAGRAPH.CENTER
    # Metadata
    doc.add_paragraph(f"Created: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    doc.add_paragraph(f"Language: {language}")
    doc.add_page_break()
    # Body
    paragraphs = content.split('\n\n')
    for para in paragraphs:
        if para.strip():
            doc.add_paragraph(para.strip())
    # Save the file
    filepath = f"{filename}.docx"
    doc.save(filepath)
    return filepath

def export_to_txt(content: str, filename: str) -> str:
    """Export to a TXT file."""
    filepath = f"{filename}.txt"
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write(content)
    return filepath
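# Both exporters return the path they wrote, so a caller can do, e.g.
# (hypothetical session id; the timestamp comes from download_novel):
#   path = download_novel(novel_text, "TXT", "English", "a1b2c3d4e5f6")
#   # -> "novel_a1b2c3d4_<timestamp>.txt", or None on failure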
# CSS styles
custom_css = """
.gradio-container {
    background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
    min-height: 100vh;
}
.main-header {
    background-color: rgba(255, 255, 255, 0.1);
    backdrop-filter: blur(10px);
    padding: 30px;
    border-radius: 12px;
    margin-bottom: 30px;
    text-align: center;
    color: white;
}
.input-section {
    background-color: rgba(255, 255, 255, 0.1);
    backdrop-filter: blur(10px);
    padding: 20px;
    border-radius: 12px;
    margin-bottom: 20px;
}
.session-section {
    background-color: rgba(255, 255, 255, 0.1);
    backdrop-filter: blur(10px);
    padding: 15px;
    border-radius: 8px;
    margin-top: 20px;
    color: white;
}
#stages-display {
    background-color: rgba(255, 255, 255, 0.95);
    padding: 20px;
    border-radius: 12px;
    max-height: 600px;
    overflow-y: auto;
}
#novel-output {
    background-color: rgba(255, 255, 255, 0.95);
    padding: 30px;
    border-radius: 12px;
    max-height: 400px;
    overflow-y: auto;
}
.download-section {
    background-color: rgba(255, 255, 255, 0.9);
    padding: 15px;
    border-radius: 8px;
    margin-top: 20px;
}
"""
# Build the Gradio interface
def create_interface():
    with gr.Blocks(css=custom_css, title="AI Collaborative Novel Writing System") as interface:
        gr.HTML("""
        <div class="main-header">
            <h1 style="font-size: 2.5em; margin-bottom: 10px;">
                📖 AI Collaborative Novel Writing System
            </h1>
            <h3 style="color: #ccc; margin-bottom: 20px;">
                Consistency-first creative novel generation
            </h3>
            <p style="font-size: 1.1em; color: #ddd; max-width: 800px; margin: 0 auto;">
                Enter a theme and the AI agents will collaborate on a complete 30-page novella.
                <br>
                One director, one critic, and ten writers work together while maintaining consistency.
            </p>
        </div>
        """)
        # State
        current_session_id = gr.State(None)
        with gr.Row():
            with gr.Column(scale=1):
                with gr.Group(elem_classes=["input-section"]):
                    query_input = gr.Textbox(
                        label="Novel Theme",
                        placeholder="Enter your novel theme or initial idea...",
                        lines=4
                    )
                    language_select = gr.Radio(
                        choices=["English", "Korean"],
                        value="English",
                        label="Language"
                    )
                    with gr.Row():
                        submit_btn = gr.Button("🚀 Start Writing", variant="primary", scale=2)
                        clear_btn = gr.Button("🗑️ Clear", scale=1)
                    status_text = gr.Textbox(
                        label="Status",
                        interactive=False,
                        value="🔄 Ready"
                    )
                # Session management
                with gr.Group(elem_classes=["session-section"]):
                    gr.Markdown("### 💾 Resume a Previous Session")
                    session_dropdown = gr.Dropdown(
                        label="Select a session",
                        choices=[],
                        interactive=True
                    )
                    with gr.Row():
                        refresh_btn = gr.Button("🔄 Refresh List", scale=1)
                        resume_btn = gr.Button("▶️ Resume Selected", variant="secondary", scale=1)
                        auto_recover_btn = gr.Button("♻️ Auto-Recover", scale=1)
            with gr.Column(scale=2):
                with gr.Tab("📝 Writing Process"):
                    stages_display = gr.Markdown(
                        value="The writing process will be shown here...",
                        elem_id="stages-display"
                    )
                with gr.Tab("📖 Finished Novel"):
                    novel_output = gr.Markdown(
                        value="The finished novel will be shown here...",
                        elem_id="novel-output"
                    )
                    with gr.Group(elem_classes=["download-section"]):
                        gr.Markdown("### 📥 Download the Novel")
                        with gr.Row():
                            format_select = gr.Radio(
                                choices=["DOCX", "TXT"],
                                value="DOCX" if DOCX_AVAILABLE else "TXT",
                                label="Format"
                            )
                            download_btn = gr.Button("⬇️ Download", variant="secondary")
                        download_file = gr.File(
                            label="Downloaded File",
                            visible=False
                        )
        # Hidden state
        novel_text_state = gr.State("")
        # Examples
        with gr.Row():
            gr.Examples(
                examples=[
                    ["The story of a memory merchant in a future city"],
                    ["A mystery in a village where time flows backwards"],
                    ["A scientist discovers a portal to parallel universes"],
                    ["In a world where dreams can be traded, a dream thief's story"],
                    ["Two AI entities fall in love while preventing a cyber war"],
                    ["The adventures of a librarian who can step into books"]
                ],
                inputs=query_input,
                label="💡 Example Themes"
            )
        # Event handlers
        def refresh_sessions():
            try:
                sessions = get_active_sessions("English")
                return gr.update(choices=sessions)
            except Exception as e:
                logger.error(f"Error refreshing sessions: {str(e)}")
                return gr.update(choices=[])

        def handle_auto_recover(language):
            session_id, message = auto_recover_session(language)
            return session_id
        # Wire up events
        submit_btn.click(
            fn=process_query,
            inputs=[query_input, language_select, current_session_id],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        novel_output.change(
            fn=lambda x: x,
            inputs=[novel_output],
            outputs=[novel_text_state]
        )
        resume_btn.click(
            fn=lambda x: x,
            inputs=[session_dropdown],
            outputs=[current_session_id]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        auto_recover_btn.click(
            fn=handle_auto_recover,
            inputs=[language_select],
            outputs=[current_session_id]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        refresh_btn.click(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )
        clear_btn.click(
            fn=lambda: ("", "", "🔄 Ready", "", None),
            outputs=[stages_display, novel_output, status_text, novel_text_state, current_session_id]
        )
        def handle_download(format_type, language, session_id, novel_text):
            if not session_id:
                return gr.update(visible=False)
            file_path = download_novel(novel_text, format_type, language, session_id)
            if file_path:
                return gr.update(value=file_path, visible=True)
            else:
                return gr.update(visible=False)

        download_btn.click(
            fn=handle_download,
            inputs=[format_select, language_select, current_session_id, novel_text_state],
            outputs=[download_file]
        )
        # Load sessions on startup
        interface.load(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )
    return interface
# Main entry point
if __name__ == "__main__":
    logger.info("Starting the AI collaborative novel writing system...")
    logger.info("=" * 60)
    # Environment checks
    logger.info(f"API endpoint: {API_URL}")
    if BRAVE_SEARCH_API_KEY:
        logger.info("Web search is enabled.")
    else:
        logger.warning("Web search is disabled.")
    if DOCX_AVAILABLE:
        logger.info("DOCX export is enabled.")
    else:
        logger.warning("DOCX export is disabled.")
    logger.info("=" * 60)
    # Initialize the database
    logger.info("Initializing database...")
    NovelDatabase.init_db()
    logger.info("Database initialized.")
    # Create and launch the interface
    interface = create_interface()
    interface.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        debug=True
    )