import gradio as gr
import os
import json
import requests
from datetime import datetime
import time
from typing import List, Dict, Any, Generator, Tuple, Optional, Set
import logging
import re
import tempfile
from pathlib import Path
import sqlite3
import hashlib
import threading
from contextlib import contextmanager
from dataclasses import dataclass, field, asdict
from collections import defaultdict

# --- Logging setup ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# --- Document export imports ---
try:
    from docx import Document
    from docx.shared import Inches, Pt, RGBColor
    from docx.enum.text import WD_ALIGN_PARAGRAPH
    from docx.enum.style import WD_STYLE_TYPE
    from docx.oxml.ns import qn
    from docx.oxml import OxmlElement
    DOCX_AVAILABLE = True
except ImportError:
    DOCX_AVAILABLE = False
    logger.warning("python-docx not installed. DOCX export will be disabled.")
# --- Environment variables and constants ---
FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "")
BRAVE_SEARCH_API_KEY = os.getenv("BRAVE_SEARCH_API_KEY", "")
API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
MODEL_ID = "dep89a2fld32mcm"
DB_PATH = "novel_sessions_v4.db"

# --- Environment variable validation ---
if not FRIENDLI_TOKEN:
    logger.error("FRIENDLI_TOKEN not set. Application will not work properly.")
    FRIENDLI_TOKEN = "dummy_token_for_testing"
if not BRAVE_SEARCH_API_KEY:
    logger.warning("BRAVE_SEARCH_API_KEY not set. Web search features will be disabled.")

# --- Globals ---
db_lock = threading.Lock()

# Literary stage configuration (centered on interior narration and social insight)
LITERARY_STAGES = [
    ("director", "🎬 Director: Social context and character psychology plan"),
    ("critic", "📖 Critic: Review of social-critical depth and symbolism"),
    ("director", "🎬 Director: Revised masterplan"),
] + [
    (f"writer{i}", f"✍️ Writer {i}: Draft")
    for i in range(1, 11)
] + [
    ("critic", "📖 Critic: Mid-point review (internal consistency and thematic deepening)"),
] + [
    (f"writer{i}", f"✍️ Writer {i}: Revision")
    for i in range(1, 11)
] + [
    ("critic", "📖 Critic: Final review and literary evaluation"),
]
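
# Stage index layout used by get_stage_prompt further below:
#   0-2   planning (director draft, critic review, revised masterplan)
#   3-12  writer drafts 1-10
#   13    critic mid-point review
#   14-23 writer revisions 1-10
#   24    final critic review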
# --- Data classes ---
@dataclass
class CharacterPsychology:
    """A character's psychological state and inner life"""
    name: str
    age: int
    social_class: str  # class position
    occupation: str
    inner_conflict: str  # inner conflict
    worldview: str  # worldview
    desires: List[str]  # desires
    fears: List[str]  # fears
    coping_mechanisms: List[str]  # defense/coping mechanisms
    relationships: Dict[str, str]  # relationships with other characters

@dataclass
class SymbolicElement:
    """A symbolic element"""
    symbol: str
    meaning: str
    appearances: List[int]  # chapters in which it appears
    evolution: str  # how the symbol's meaning evolves

@dataclass
class SocialContext:
    """The social context of the story"""
    economic_system: str
    class_structure: str
    power_dynamics: str
    social_issues: List[str]
    cultural_atmosphere: str

# --- Core logic classes ---
class LiteraryConsistencyTracker:
    """Tracks literary consistency across the novel"""
    def __init__(self):
        self.characters: Dict[str, CharacterPsychology] = {}
        self.symbols: Dict[str, SymbolicElement] = {}
        self.social_context: Optional[SocialContext] = None
        self.themes: List[str] = []
        self.narrative_voice: str = ""  # narrative point of view and style
        self.tone: str = ""  # overall tone

    def register_character(self, character: CharacterPsychology):
        """Register a character"""
        self.characters[character.name] = character
        logger.info(f"Character registered: {character.name}, Class: {character.social_class}")

    def register_symbol(self, symbol: SymbolicElement):
        """Register a symbol"""
        self.symbols[symbol.symbol] = symbol
        logger.info(f"Symbol registered: {symbol.symbol} = {symbol.meaning}")

    def check_thematic_consistency(self, content: str, chapter: int) -> List[str]:
        """Check thematic consistency"""
        issues = []
        # Are the social-critical elements still present?
        if self.social_context and not any(issue.lower() in content.lower()
                                           for issue in self.social_context.social_issues):
            issues.append("The social context has weakened. Keep themes such as class and inequality present.")
        # Is there enough interior narration?
        introspective_keywords = ['생각했다', '느꼈다', '기억', '의식', '마음',
                                  'thought', 'felt', 'remembered', 'consciousness']
        if not any(keyword in content for keyword in introspective_keywords):
            issues.append("Interior narration is lacking. Explore the characters' psychology more deeply.")
        return issues

class NovelDatabase:
    """Database management"""

    @staticmethod
    def init_db():
        with sqlite3.connect(DB_PATH) as conn:
            conn.execute("PRAGMA journal_mode=WAL")
            cursor = conn.cursor()
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS sessions (
                    session_id TEXT PRIMARY KEY,
                    user_query TEXT NOT NULL,
                    language TEXT NOT NULL,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    status TEXT DEFAULT 'active',
                    current_stage INTEGER DEFAULT 0,
                    final_novel TEXT,
                    literary_report TEXT,
                    social_context TEXT,
                    narrative_style TEXT
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS stages (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    stage_number INTEGER NOT NULL,
                    stage_name TEXT NOT NULL,
                    role TEXT NOT NULL,
                    content TEXT,
                    word_count INTEGER DEFAULT 0,
                    status TEXT DEFAULT 'pending',
                    literary_score REAL DEFAULT 0.0,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id),
                    UNIQUE(session_id, stage_number)
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS characters (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    name TEXT NOT NULL,
                    age INTEGER,
                    social_class TEXT,
                    occupation TEXT,
                    inner_conflict TEXT,
                    worldview TEXT,
                    created_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id),
                    UNIQUE(session_id, name)
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS symbols (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    symbol TEXT NOT NULL,
                    meaning TEXT,
                    created_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id)
                )
            ''')
            conn.commit()

    @staticmethod
    @contextmanager
    def get_db():
        with db_lock:
            conn = sqlite3.connect(DB_PATH, timeout=30.0)
            conn.row_factory = sqlite3.Row
            try:
                yield conn
            finally:
                conn.close()

    @staticmethod
    def create_session(user_query: str, language: str) -> str:
        session_id = hashlib.md5(f"{user_query}{datetime.now()}".encode()).hexdigest()
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'INSERT INTO sessions (session_id, user_query, language) VALUES (?, ?, ?)',
                (session_id, user_query, language)
            )
            conn.commit()
        return session_id
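
    # save_stage relies on the UNIQUE(session_id, stage_number) constraint defined
    # in init_db: re-running a stage performs an UPSERT (ON CONFLICT ... DO UPDATE)
    # instead of inserting a duplicate row.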
    @staticmethod
    def save_stage(session_id: str, stage_number: int, stage_name: str,
                   role: str, content: str, status: str = 'complete',
                   literary_score: float = 0.0):
        word_count = len(content.split()) if content else 0
        with NovelDatabase.get_db() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                INSERT INTO stages (session_id, stage_number, stage_name, role, content, word_count, status, literary_score)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(session_id, stage_number)
                DO UPDATE SET content=?, word_count=?, status=?, stage_name=?, literary_score=?, updated_at=datetime('now')
            ''', (session_id, stage_number, stage_name, role, content, word_count, status, literary_score,
                  content, word_count, status, stage_name, literary_score))
            cursor.execute(
                "UPDATE sessions SET updated_at = datetime('now'), current_stage = ? WHERE session_id = ?",
                (stage_number, session_id)
            )
            conn.commit()

    @staticmethod
    def get_writer_content(session_id: str) -> str:
        """Collect each writer's latest revised content"""
        with NovelDatabase.get_db() as conn:
            all_content = []
            for writer_num in range(1, 11):
                row = conn.cursor().execute(
                    "SELECT content FROM stages WHERE session_id = ? AND role = ? AND stage_name LIKE '%Revision%' ORDER BY stage_number DESC LIMIT 1",
                    (session_id, f'writer{writer_num}')
                ).fetchone()
                if row and row['content']:
                    content = row['content'].strip()
                    all_content.append(content)
            return '\n\n'.join(all_content)

    @staticmethod
    def get_session(session_id: str) -> Optional[Dict]:
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute('SELECT * FROM sessions WHERE session_id = ?', (session_id,)).fetchone()
            return dict(row) if row else None

    @staticmethod
    def get_stages(session_id: str) -> List[Dict]:
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute('SELECT * FROM stages WHERE session_id = ? ORDER BY stage_number', (session_id,)).fetchall()
            return [dict(row) for row in rows]

    @staticmethod
    def update_final_novel(session_id: str, final_novel: str, literary_report: str = ""):
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                "UPDATE sessions SET final_novel = ?, status = 'complete', updated_at = datetime('now'), literary_report = ? WHERE session_id = ?",
                (final_novel, literary_report, session_id)
            )
            conn.commit()

    @staticmethod
    def get_active_sessions() -> List[Dict]:
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                "SELECT session_id, user_query, language, created_at, current_stage FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 10"
            ).fetchall()
            return [dict(row) for row in rows]
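
    # Assumed helper: auto_recover_session() further below calls
    # NovelDatabase.get_latest_active_session(), but no such method appears in the
    # original listing. A minimal sketch consistent with get_active_sessions():
    @staticmethod
    def get_latest_active_session() -> Optional[Dict]:
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                "SELECT * FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 1"
            ).fetchone()
            return dict(row) if row else None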

class WebSearchIntegration:
    """Web search via the Brave Search API"""
    def __init__(self):
        self.brave_api_key = BRAVE_SEARCH_API_KEY
        self.search_url = "https://api.search.brave.com/res/v1/web/search"
        self.enabled = bool(self.brave_api_key)

    def search(self, query: str, count: int = 3, language: str = "en") -> List[Dict]:
        if not self.enabled:
            return []
        headers = {
            "Accept": "application/json",
            "X-Subscription-Token": self.brave_api_key
        }
        params = {
            "q": query,
            "count": count,
            "search_lang": "ko" if language == "Korean" else "en",
            "text_decorations": False,
            "safesearch": "moderate"
        }
        try:
            response = requests.get(self.search_url, headers=headers, params=params, timeout=10)
            response.raise_for_status()
            results = response.json().get("web", {}).get("results", [])
            return results
        except requests.exceptions.RequestException as e:
            logger.error(f"Web search API error: {e}")
            return []

    def extract_relevant_info(self, results: List[Dict], max_chars: int = 1500) -> str:
        if not results:
            return ""
        extracted = []
        total_chars = 0
        for i, result in enumerate(results[:3], 1):
            title = result.get("title", "")
            description = result.get("description", "")
            info = f"[{i}] {title}: {description}"
            if total_chars + len(info) < max_chars:
                extracted.append(info)
                total_chars += len(info)
            else:
                break
        return "\n".join(extracted)

class LiteraryNovelSystem:
    """Literary novel generation system"""
    def __init__(self):
        self.token = FRIENDLI_TOKEN
        self.api_url = API_URL
        self.model_id = MODEL_ID
        self.consistency_tracker = LiteraryConsistencyTracker()
        self.web_search = WebSearchIntegration()
        self.current_session_id = None
        NovelDatabase.init_db()

    def create_headers(self):
        return {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"}

    # --- Prompt builders (centered on literary depth) ---
    def create_director_initial_prompt(self, user_query: str, language: str) -> str:
        """Director's initial planning prompt (social-critical realism)"""
        search_results_str = ""
        if self.web_search.enabled:
            # Search for social context
            queries = [f"{user_query} social issues", f"{user_query} class conflict", f"{user_query} social inequality"]
            for q in queries[:1]:
                results = self.web_search.search(q, count=2, language=language)
                if results:
                    search_results_str += self.web_search.extract_relevant_info(results) + "\n"
        lang_prompts = {
            "Korean": f"""You are a novel planner working in the tradition of contemporary Korean literature.
Drawing on the achievements of contemporary Korean authors such as Han Kang, plan a 30-page novella that combines social-critical realism with the exploration of inner psychology.
**User Theme:** {user_query}
**Reference Material:**
{search_results_str if search_results_str else "N/A"}
**Planning Requirements:**
1. **Theme and Social Context**
- Core theme: structural problems of contemporary Korean society (class, poverty, alienation, and other structural issues)
- Critical perspective: connect individual problems to social structures
- Realism: reflect the concrete realities of 2020s Korea
2. **Narrative Mode and Style**
- POV: first person or limited third person (with access to interiority)
- Style: understated yet sharp observation, a balance of everyday and literary language
- Interior narration: active use of stream of consciousness, recollection, and self-reflection
3. **Character Design** (2-4 main characters)
| Name | Age | Class Position | Occupation | Inner Conflict | Desires | Fears |
- Each character represents a specific class and social position
- Express the gap between outward appearance and inner life
- Individual choices under structural constraints
4. **Symbols and Metaphors**
- Key symbols with social meaning (e.g., a pond as a class boundary)
- Recurring images or motifs
- Social meaning embedded in everyday objects
5. **Plot Structure**
- Focus on subtle everyday changes rather than dramatic events
- Shifts in the characters' perception drive the narrative
- Open ending: questions and reflection rather than resolution
**Absolutely Avoid:**
- Flat characters divided into clear good and evil
- Sudden events or dramatic twists
- Didactic or preachy messages
- Easy hope or reconciliation
Write a nuanced plan that captures interior life and society at once.""",
            "English": f"""You are a literary director planning a 30-page novella in the tradition of contemporary social realism.
Drawing from authors like George Saunders, Zadie Smith, and Sally Rooney, create a work that combines psychological depth with social critique.
**User Theme:** {user_query}
**Reference Material:**
{search_results_str if search_results_str else "N/A"}
**Planning Requirements:**
1. **Theme and Social Context**
- Core theme: Structural problems in contemporary society (class, inequality, alienation)
- Critical perspective: Connect individual struggles to social systems
- Realism: Reflect specific contemporary realities
2. **Narrative Style**
- POV: First person or limited third person (with access to interiority)
- Style: Understated yet sharp observation, balance of vernacular and literary
- Interior narration: Stream of consciousness, memory, self-reflection
3. **Character Design** (2-4 main characters)
| Name | Age | Class Position | Occupation | Inner Conflict | Desires | Fears |
- Each character represents specific social position
- Gap between appearance and interior life
- Individual choices within structural constraints
4. **Symbols and Metaphors**
- Key symbols with social meaning
- Recurring images or motifs
- Everyday objects as social commentary
5. **Plot Structure**
- Focus on subtle changes over dramatic events
- Character perception shifts drive narrative
- Open ending: Questions over resolutions
**Absolutely Avoid:**
- Clear-cut heroes and villains
- Sudden dramatic events or twists
- Didactic or preachy messages
- Easy hope or reconciliation
Create a nuanced plan that captures both interior life and social reality."""
        }
        return lang_prompts.get(language, lang_prompts["Korean"])
    def create_critic_director_prompt(self, director_plan: str, user_query: str, language: str) -> str:
        """Critic's review of the director's plan"""
        lang_prompts = {
            "Korean": f"""You are a literary critic. Review the plan from the following perspectives:
**Original Theme:** {user_query}
**Director's Plan:**
{director_plan}
**Review Criteria:**
1. **Depth of Social Critique**
- Is the connection between individual and structure convincing?
- Does it avoid oversimplifying the complexity of reality?
- Does it go beyond clichéd social criticism?
2. **Literary Completeness**
- Balance of interior narration and external reality
- Aptness of symbols and metaphors
- Character complexity and credibility
3. **Contemporaneity and Universality**
- Reflects the specific conditions of 2020s Korea
- While exploring universal human conditions
Provide concrete directions for improvement.""",
            "English": f"""You are a literary critic. Review the plan from these perspectives:
**Original Theme:** {user_query}
**Director's Plan:**
{director_plan}
**Review Criteria:**
1. **Social Critical Depth**
- Is the connection between individual and structure convincing?
- Does it avoid oversimplifying complex realities?
- Does it go beyond clichéd social criticism?
2. **Literary Merit**
- Balance of interiority and external reality
- Effectiveness of symbols and metaphors
- Character complexity and credibility
3. **Contemporary Relevance**
- Reflects specific contemporary context
- While exploring universal human conditions
Provide specific improvements."""
        }
        return lang_prompts.get(language, lang_prompts["Korean"])
    def create_writer_prompt(self, writer_number: int, director_plan: str,
                             previous_content: str, user_query: str,
                             language: str) -> str:
        """Writer prompt (centered on interior narration)"""
        lang_prompts = {
            "Korean": f"""You are Writer #{writer_number}. Write in the tradition of contemporary Korean literature.
**Masterplan:**
{director_plan}
**Previous Content:**
{previous_content[-2000:] if previous_content else "Beginning"}
**Writing Guidelines:**
1. **Length**: 1,300-1,500 words
2. **Narrative Approach**
- Explore the characters' inner consciousness in depth
- Interweave observation and reflection
- Let present and past overlap naturally
3. **Style**
- Understated yet sharp narration
- Poetic expression within everyday language
- A rhythm of short and long sentences
4. **Development**
- Meaning in small moments rather than big events
- Minimal, implicit dialogue
- Shifts in the characters' perception are the plot
5. **Social Context**
- Structural oppression woven into private daily life
- Indirect revelation rather than direct critique
- Narration that lets readers reach the realization themselves
**Must Include:**
- Interior monologue or stream of consciousness
- Concrete sensory details
- Everyday moments that hint at the social context
- Natural use of symbols and metaphors
Show deep interior exploration and subtle social observation.""",
            "English": f"""You are Writer #{writer_number}. Write in the contemporary literary tradition.
**Masterplan:**
{director_plan}
**Previous Content:**
{previous_content[-2000:] if previous_content else "Beginning"}
**Writing Guidelines:**
1. **Length**: 1,300-1,500 words
2. **Narrative Approach**
- Deep exploration of character consciousness
- Intersection of observation and reflection
- Natural overlap of present and past
3. **Style**
- Understated yet sharp perspective
- Poetic expression within everyday language
- Rhythm of short and long sentences
4. **Development**
- Meaning in small moments over big events
- Minimal, implicit dialogue
- Character perception shifts as plot
5. **Social Context**
- Structural oppression in daily life
- Indirect rather than direct critique
- Let readers discover meaning
**Must Include:**
- Interior monologue or stream of consciousness
- Concrete sensory details
- Everyday moments revealing social context
- Natural use of symbols and metaphors
Show deep interior exploration and subtle social observation."""
        }
        return lang_prompts.get(language, lang_prompts["Korean"])
    def create_critic_consistency_prompt(self, all_content: str, user_query: str,
                                         language: str) -> str:
        """Critic's mid-point review"""
        return f"""As a literary critic, review the work so far.
**Original Theme:** {user_query}
**Work So Far (recent portion):**
{all_content[-3000:]}
**Review Items:**
1. **Internal Consistency**
- Agreement between the characters' consciousness and their actions
- Consistency of narrative point of view
- Unity of style and tone
2. **Thematic Deepening**
- Sustained exploration of the social problems established at the start
- Development of symbols and metaphors
- Arrival at deeper reflection
3. **Literary Quality**
- Avoidance of cliché and stereotype
- Original expression and observation
- Resonance and implication
Give each writer concrete directions for improvement."""

    def create_writer_revision_prompt(self, writer_number: int, initial_content: str,
                                      critic_feedback: str, language: str) -> str:
        """Writer revision prompt"""
        return f"""Writer #{writer_number}, revise your draft to reflect the critique.
**Draft:**
{initial_content}
**Critique:**
{critic_feedback}
**Revision Directions:**
1. Strengthen interior narration
2. Deepen the social context
3. Improve literary expression
4. Remove clichés
Present only the revised version."""
    def create_critic_final_prompt(self, complete_novel: str, language: str) -> str:
        """Final critique"""
        return f"""Evaluate the literary value of the completed novel.
**Work (excerpt):**
{complete_novel[-3000:]}
**Evaluation Criteria:**
1. **Thematic Consciousness (30 points)**
- Sharpness of the social critique
- Insight into the human condition
- Harmony of contemporaneity and universality
2. **Characters and Psychology (25 points)**
- Depth of interior description
- Credibility of the characters
- Capturing complexity and contradiction
3. **Style and Technique (25 points)**
- Quality of the sentences
- Effectiveness of symbols and metaphors
- Originality and freshness
4. **Structural Completeness (20 points)**
- Balance of the overall composition
- Resonance and openness
- Questions left for the reader
**Total Score: /100**
Discuss this work's place in the context of contemporary Korean literature."""
    # --- LLM call functions ---
    def call_llm_sync(self, messages: List[Dict[str, str]], role: str, language: str) -> str:
        full_content = ""
        for chunk in self.call_llm_streaming(messages, role, language):
            full_content += chunk
        if full_content.startswith("❌"):
            raise Exception(f"LLM Call Failed: {full_content}")
        return full_content
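
    # call_llm_streaming below consumes the OpenAI-compatible SSE stream from the
    # Friendli endpoint: each "data: " line carries a JSON chunk whose
    # choices[0].delta.content holds the next text fragment; output is buffered to
    # roughly 50 characters before being yielded, and "[DONE]" ends the stream.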
    def call_llm_streaming(self, messages: List[Dict[str, str]], role: str, language: str) -> Generator[str, None, None]:
        try:
            system_prompts = self.get_system_prompts(language)
            full_messages = [{"role": "system", "content": system_prompts.get(role, "")}, *messages]
            payload = {
                "model": self.model_id,
                "messages": full_messages,
                "max_tokens": 10000,
                "temperature": 0.8,  # more creative output
                "top_p": 0.95,
                "presence_penalty": 0.5,  # discourage repetition
                "frequency_penalty": 0.3,
                "stream": True
            }
            response = requests.post(
                self.api_url,
                headers=self.create_headers(),
                json=payload,
                stream=True,
                timeout=180
            )
            if response.status_code != 200:
                yield f"❌ API error (status code: {response.status_code})"
                return
            buffer = ""
            for line in response.iter_lines():
                if not line:
                    continue
                try:
                    line_str = line.decode('utf-8').strip()
                    if not line_str.startswith("data: "):
                        continue
                    data_str = line_str[6:]
                    if data_str == "[DONE]":
                        break
                    data = json.loads(data_str)
                    choices = data.get("choices", [])
                    if choices and choices[0].get("delta", {}).get("content"):
                        content = choices[0]["delta"]["content"]
                        buffer += content
                        if len(buffer) >= 50 or '\n' in buffer:
                            yield buffer
                            buffer = ""
                            time.sleep(0.01)
                except Exception as e:
                    logger.error(f"Chunk processing error: {str(e)}")
                    continue
            if buffer:
                yield buffer
        except Exception as e:
            logger.error(f"Streaming error: {type(e).__name__}: {str(e)}")
            yield f"❌ Error: {str(e)}"
    def get_system_prompts(self, language: str) -> Dict[str, str]:
        """Role-specific system prompts"""
        base_prompts = {
            "Korean": {
                "director": """You are a literary planner who knows the work of contemporary Korean authors such as Han Kang in depth.
Plan works that combine social-critical realism with psychological depth.
The key is to capture, with subtlety, the interplay between an individual's inner life and social structures.""",
                "critic": """You are a critic with a deep understanding of contemporary Korean literature.
Evaluate rigorously whether a work achieves genuine literary value,
rather than stopping at clichéd social criticism or superficial psychological description.""",
                "writer_base": """You are a writer in the tradition of contemporary Korean literature.
Combine the flow of inner consciousness with sharp social observation
to write sentences that leave a deep resonance with the reader.
Favor implying over showing, and perception over events."""
            },
            "English": {
                "director": """You are a literary planner deeply versed in contemporary social realist fiction.
Plan works that combine social critique with psychological depth.
The key is capturing the subtle interplay between individual consciousness and social structures.""",
                "critic": """You are a critic well-versed in contemporary literary fiction.
Evaluate whether works go beyond superficial social commentary or psychology
to achieve genuine literary value.""",
                "writer_base": """You are a writer in the contemporary literary tradition.
Combine stream of consciousness with sharp social observation
to create resonant prose. Prioritize consciousness over showing,
perception over events."""
            }
        }
        prompts = base_prompts.get(language, base_prompts["Korean"]).copy()
        # Specialized writer prompts (applied for every language so each writer role gets a system prompt)
        prompts["writer1"] = prompts["writer_base"] + "\nIn particular, draw the reader gradually into the work's atmosphere in the opening."
        prompts["writer5"] = prompts["writer_base"] + "\nHeighten the psychological density of the middle section and internalize the conflict."
        prompts["writer10"] = prompts["writer_base"] + "\nClose with an open ending that leaves the reader with questions."
        for i in range(2, 10):
            if f"writer{i}" not in prompts:
                prompts[f"writer{i}"] = prompts["writer_base"]
        return prompts
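
    # Note: process_novel_stream below can resume an interrupted session; when a
    # session_id is passed in, it reloads the stages saved in SQLite and continues
    # from current_stage + 1.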
    # --- Main process ---
    def process_novel_stream(self, query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, List[Dict[str, Any]], str], None, None]:
        """Novel generation process"""
        try:
            resume_from_stage = 0
            if session_id:
                self.current_session_id = session_id
                session = NovelDatabase.get_session(session_id)
                if session:
                    query = session['user_query']
                    language = session['language']
                    resume_from_stage = session['current_stage'] + 1
            else:
                self.current_session_id = NovelDatabase.create_session(query, language)
                logger.info(f"Created new session: {self.current_session_id}")
            stages = []
            if resume_from_stage > 0:
                stages = [{
                    "name": s['stage_name'],
                    "status": s['status'],
                    "content": s.get('content', ''),
                    "literary_score": s.get('literary_score', 0.0)
                } for s in NovelDatabase.get_stages(self.current_session_id)]
            for stage_idx in range(resume_from_stage, len(LITERARY_STAGES)):
                role, stage_name = LITERARY_STAGES[stage_idx]
                if stage_idx >= len(stages):
                    stages.append({
                        "name": stage_name,
                        "status": "active",
                        "content": "",
                        "literary_score": 0.0
                    })
                else:
                    stages[stage_idx]["status"] = "active"
                yield "", stages, self.current_session_id
                prompt = self.get_stage_prompt(stage_idx, role, query, language, stages)
                stage_content = ""
                for chunk in self.call_llm_streaming([{"role": "user", "content": prompt}], role, language):
                    stage_content += chunk
                    stages[stage_idx]["content"] = stage_content
                    yield "", stages, self.current_session_id
                # Score the stage's literary quality
                literary_score = self.evaluate_literary_quality(stage_content, role)
                stages[stage_idx]["literary_score"] = literary_score
                stages[stage_idx]["status"] = "complete"
                NovelDatabase.save_stage(
                    self.current_session_id, stage_idx, stage_name, role,
                    stage_content, "complete", literary_score
                )
                yield "", stages, self.current_session_id
            # Assemble the final novel
            final_novel = NovelDatabase.get_writer_content(self.current_session_id)
            final_report = self.generate_literary_report(final_novel, language)
            NovelDatabase.update_final_novel(self.current_session_id, final_novel, final_report)
            yield f"✅ Novel complete! {len(final_novel.split())} words total", stages, self.current_session_id
        except Exception as e:
            logger.error(f"Novel generation process error: {e}", exc_info=True)
            yield f"❌ Error: {e}", stages if 'stages' in locals() else [], self.current_session_id
    def get_stage_prompt(self, stage_idx: int, role: str, query: str, language: str, stages: List[Dict]) -> str:
        """Build the prompt for a given stage"""
        if stage_idx == 0:
            return self.create_director_initial_prompt(query, language)
        if stage_idx == 1:
            return self.create_critic_director_prompt(stages[0]["content"], query, language)
        if stage_idx == 2:
            return self.create_director_revision_prompt(stages[0]["content"], stages[1]["content"], query, language)
        master_plan = stages[2]["content"]
        if 3 <= stage_idx <= 12:  # writer drafts
            writer_num = stage_idx - 2
            previous_content = self.get_all_content(stages, stage_idx)
            return self.create_writer_prompt(writer_num, master_plan, previous_content, query, language)
        if stage_idx == 13:  # critic mid-point review
            all_content = self.get_all_content(stages, stage_idx)
            return self.create_critic_consistency_prompt(all_content, query, language)
        if 14 <= stage_idx <= 23:  # writer revisions
            writer_num = stage_idx - 13
            initial_content = stages[2 + writer_num]["content"]
            feedback = stages[13]["content"]
            return self.create_writer_revision_prompt(writer_num, initial_content, feedback, language)
        if stage_idx == 24:  # final review
            complete_novel = self.get_all_writer_content(stages)
            return self.create_critic_final_prompt(complete_novel, language)
        return ""

    def create_director_revision_prompt(self, initial_plan: str, critic_feedback: str, user_query: str, language: str) -> str:
        """Director revision prompt"""
        return f"""Revise the plan to reflect the critique.
**Original Theme:** {user_query}
**Initial Plan:**
{initial_plan}
**Critique:**
{critic_feedback}
**Revision Directions:**
1. Strengthen the depth of social critique
2. Increase the complexity of the characters' inner lives
3. Refine the symbols and metaphors
4. Remove clichés
Present the revised masterplan."""

    def get_all_content(self, stages: List[Dict], current_stage: int) -> str:
        """All writer content produced so far"""
        contents = []
        for i, s in enumerate(stages):
            # Writer stage names contain "Writer"; match case-insensitively.
            if i < current_stage and s["content"] and "writer" in s.get("name", "").lower():
                contents.append(s["content"])
        return "\n\n".join(contents)

    def get_all_writer_content(self, stages: List[Dict]) -> str:
        """All writers' final (revised) versions"""
        contents = []
        for i, s in enumerate(stages):
            if 14 <= i <= 23 and s["content"]:
                contents.append(s["content"])
        return "\n\n".join(contents)
    def evaluate_literary_quality(self, content: str, role: str) -> float:
        """Estimate literary quality"""
        if not content or not role.startswith("writer"):
            return 0.0
        score = 5.0  # base score
        # Interior narration
        introspective_patterns = ['생각했다', '느꼈다', '기억', '의식', '떠올렸다',
                                  '마음', '머릿속', '가슴', 'thought', 'felt', 'remembered']
        introspection_count = sum(1 for pattern in introspective_patterns if pattern in content)
        score += min(2.0, introspection_count * 0.2)
        # Sensory detail
        sensory_patterns = ['냄새', '소리', '빛', '그림자', '촉감', '맛', '온도', '색',
                            'smell', 'sound', 'light', 'shadow', 'touch', 'taste']
        sensory_count = sum(1 for pattern in sensory_patterns if pattern in content)
        score += min(1.5, sensory_count * 0.15)
        # Social context
        social_patterns = ['월세', '계급', '빈곤', '격차', '차별', '소외', '불평등',
                           'rent', 'class', 'poverty', 'gap', 'discrimination']
        social_count = sum(1 for pattern in social_patterns if pattern in content)
        score += min(1.5, social_count * 0.3)
        return min(10.0, score)

    def generate_literary_report(self, complete_novel: str, language: str) -> str:
        """Final literary evaluation report"""
        prompt = self.create_critic_final_prompt(complete_novel, language)
        try:
            report = self.call_llm_sync([{"role": "user", "content": prompt}], "critic", language)
            return report
        except Exception as e:
            logger.error(f"Failed to generate the final report: {e}")
            return "An error occurred while generating the report."

# --- Utility functions ---
def process_query(query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, str, str, str], None, None]:
    """Main query handler"""
    if not query.strip():
        yield "", "", "❌ Please enter a theme.", session_id
        return
    system = LiteraryNovelSystem()
    stages_markdown = ""
    novel_content = ""
    for status, stages, current_session_id in system.process_novel_stream(query, language, session_id):
        stages_markdown = format_stages_display(stages)
        # Fetch the final novel once all revision stages are complete
        if stages and all(s.get("status") == "complete" for s in stages[-10:]):
            novel_content = NovelDatabase.get_writer_content(current_session_id)
            novel_content = format_novel_display(novel_content)
        yield stages_markdown, novel_content, status or "🔄 Processing...", current_session_id

def get_active_sessions(language: str) -> List[str]:
    """List active sessions"""
    sessions = NovelDatabase.get_active_sessions()
    return [f"{s['session_id'][:8]}... - {s['user_query'][:50]}... ({s['created_at']})"
            for s in sessions]

def auto_recover_session(language: str) -> Tuple[Optional[str], str]:
    """Automatically recover the most recent active session"""
    latest_session = NovelDatabase.get_latest_active_session()
    if latest_session:
        return latest_session['session_id'], f"Session {latest_session['session_id'][:8]}... recovered"
    return None, "No session to recover."

def resume_session(session_id: str, language: str) -> Generator[Tuple[str, str, str, str], None, None]:
    """Resume a session"""
    if not session_id:
        yield "", "", "❌ No session ID.", session_id
        return
    if "..." in session_id:
        session_id = session_id.split("...")[0]
    session = NovelDatabase.get_session(session_id)
    if not session:
        yield "", "", "❌ Session not found.", None
        return
    yield from process_query(session['user_query'], session['language'], session_id)

def download_novel(novel_text: str, format_type: str, language: str, session_id: str) -> Optional[str]:
    """Create a downloadable file for the novel"""
    if not novel_text or not session_id:
        return None
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = f"novel_{session_id[:8]}_{timestamp}"
    try:
        if format_type == "DOCX" and DOCX_AVAILABLE:
            return export_to_docx(novel_text, filename, language, session_id)
        else:
            return export_to_txt(novel_text, filename)
    except Exception as e:
        logger.error(f"File creation failed: {e}")
        return None

def format_stages_display(stages: List[Dict]) -> str:
    """Render stage-by-stage progress"""
    markdown = "## 🎬 Progress\n\n"
    for i, stage in enumerate(stages):
        status_icon = "✅" if stage['status'] == 'complete' else "🔄" if stage['status'] == 'active' else "⏳"
        markdown += f"{status_icon} **{stage['name']}**"
        if stage.get('literary_score', 0) > 0:
            markdown += f" (literary score: {stage['literary_score']:.1f}/10)"
        markdown += "\n"
        if stage['content']:
            preview = stage['content'][:200] + "..." if len(stage['content']) > 200 else stage['content']
            markdown += f"> {preview}\n\n"
    return markdown
def format_novel_display(novel_text: str) -> str:
    """Render the novel content"""
    if not novel_text:
        return "No content has been written yet."
    formatted = "# 📖 Completed Novel\n\n"
    formatted += novel_text
    return formatted

def export_to_docx(content: str, filename: str, language: str, session_id: str) -> str:
    """Export to a DOCX file (literary novel layout)"""
    doc = Document()
    # Korean trade paperback trim size (152mm x 225mm)
    section = doc.sections[0]
    section.page_height = Inches(8.86)  # 225mm
    section.page_width = Inches(5.98)  # 152mm
    # Margins
    section.top_margin = Inches(0.79)  # 20mm
    section.bottom_margin = Inches(0.79)
    section.left_margin = Inches(0.79)
    section.right_margin = Inches(0.79)
    # Session info
    session = NovelDatabase.get_session(session_id)
    # Title page
    title_para = doc.add_paragraph()
    title_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
    # Add blank lines
    for _ in range(8):
        doc.add_paragraph()
    if session:
        title_run = title_para.add_run(session["user_query"])
        title_run.font.size = Pt(16)
        title_run.font.name = 'Batang'
    # Page break
    doc.add_page_break()
    # Body style
    style = doc.styles['Normal']
    style.font.name = 'Batang'
    style.font.size = Pt(10.5)
    style.paragraph_format.line_spacing = 1.8
    style.paragraph_format.alignment = WD_ALIGN_PARAGRAPH.JUSTIFY
    style.paragraph_format.first_line_indent = Inches(0.35)
    style.paragraph_format.space_after = Pt(3)
    # Body text
    paragraphs = content.split('\n\n')
    for para_text in paragraphs:
        if para_text.strip():
            para = doc.add_paragraph(para_text.strip())
    # Save the file
    filepath = f"{filename}.docx"
    doc.save(filepath)
    return filepath

def export_to_txt(content: str, filename: str) -> str:
    """Export to a TXT file"""
    filepath = f"{filename}.txt"
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write(content)
    return filepath

# CSS styles (literary atmosphere)
custom_css = """
.gradio-container {
    background: linear-gradient(135deg, #1a1a1a 0%, #2d2d2d 50%, #1a1a1a 100%);
    min-height: 100vh;
}
.main-header {
    background-color: rgba(255, 255, 255, 0.03);
    backdrop-filter: blur(10px);
    padding: 30px;
    border-radius: 12px;
    margin-bottom: 30px;
    text-align: center;
    color: white;
    border: 1px solid rgba(255, 255, 255, 0.1);
}
.literary-note {
    background-color: rgba(255, 255, 255, 0.05);
    border-left: 3px solid #888;
    padding: 15px;
    margin: 20px 0;
    border-radius: 8px;
    color: #ccc;
    font-style: italic;
    font-family: 'Georgia', serif;
}
.input-section {
    background-color: rgba(255, 255, 255, 0.05);
    backdrop-filter: blur(10px);
    padding: 20px;
    border-radius: 12px;
    margin-bottom: 20px;
    border: 1px solid rgba(255, 255, 255, 0.1);
}
.session-section {
    background-color: rgba(255, 255, 255, 0.05);
    backdrop-filter: blur(10px);
    padding: 15px;
    border-radius: 8px;
    margin-top: 20px;
    color: white;
    border: 1px solid rgba(255, 255, 255, 0.1);
}
#stages-display {
    background-color: rgba(255, 255, 255, 0.95);
    padding: 20px;
    border-radius: 12px;
    max-height: 600px;
    overflow-y: auto;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}
#novel-output {
    background-color: rgba(255, 255, 255, 0.98);
    padding: 40px;
    border-radius: 12px;
    max-height: 700px;
    overflow-y: auto;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
    font-family: '바탕', 'Batang', 'Georgia', serif;
    line-height: 2;
    color: #333;
}
.download-section {
    background-color: rgba(255, 255, 255, 0.9);
    padding: 15px;
    border-radius: 8px;
    margin-top: 20px;
    box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
/* Literary typography */
#novel-output p {
    text-indent: 2em;
    margin-bottom: 1em;
    text-align: justify;
}
#novel-output h1 {
    color: #1a1a1a;
    font-weight: normal;
    text-align: center;
    margin: 2em 0;
    font-size: 1.8em;
}
/* Style for quotations and interior monologue */
#novel-output blockquote {
    margin: 1.5em 2em;
    font-style: italic;
    color: #555;
    border-left: 3px solid #ccc;
    padding-left: 1em;
}
"""

# Build the Gradio interface
def create_interface():
    with gr.Blocks(css=custom_css, title="AI Literary Novel Writing System") as interface:
        gr.HTML("""
        <div class="main-header">
            <h1 style="font-size: 2.5em; margin-bottom: 10px; font-weight: normal;">
                📚 AI Literary Novel Writing System
            </h1>
            <h3 style="color: #ccc; margin-bottom: 20px; font-weight: normal;">
                Novel writing in the tradition of contemporary Korean literature
            </h3>
            <p style="font-size: 1.1em; color: #ddd; max-width: 800px; margin: 0 auto;">
                Generates a 30-page novella that combines social-critical realism with psychological depth.
                <br>
                It captures, with subtlety, the interplay between individual interiority and social structures.
            </p>
            <div class="literary-note">
                "The most personal is the most universal"
            </div>
        </div>
        """)
        # State
        current_session_id = gr.State(None)
        with gr.Row():
            with gr.Column(scale=1):
                with gr.Group(elem_classes=["input-section"]):
                    query_input = gr.Textbox(
                        label="Novel Theme",
                        placeholder="Enter themes about contemporary social issues or the human condition...",
                        lines=4
                    )
                    language_select = gr.Radio(
                        choices=["Korean", "English"],
                        value="Korean",
                        label="Language"
                    )
                    with gr.Row():
                        submit_btn = gr.Button("🖋️ Start Writing", variant="primary", scale=2)
                        clear_btn = gr.Button("🗑️ Reset", scale=1)
                    status_text = gr.Textbox(
                        label="Status",
                        interactive=False,
                        value="🟢 Ready"
                    )
                # Session management
                with gr.Group(elem_classes=["session-section"]):
                    gr.Markdown("### 💾 Resume a Previous Session")
                    session_dropdown = gr.Dropdown(
                        label="Select Session",
                        choices=[],
                        interactive=True
                    )
                    with gr.Row():
                        refresh_btn = gr.Button("🔄 Refresh List", scale=1)
                        resume_btn = gr.Button("▶️ Resume Selected", variant="secondary", scale=1)
                        auto_recover_btn = gr.Button("♻️ Auto Recover", scale=1)
            with gr.Column(scale=2):
                with gr.Tab("📝 Writing Process"):
                    stages_display = gr.Markdown(
                        value="The writing process will be shown here...",
                        elem_id="stages-display"
                    )
                with gr.Tab("📖 Completed Novel"):
                    novel_output = gr.Markdown(
                        value="The completed novel will be shown here...",
                        elem_id="novel-output"
                    )
                    with gr.Group(elem_classes=["download-section"]):
                        gr.Markdown("### 📥 Download the Novel")
                        with gr.Row():
                            format_select = gr.Radio(
                                choices=["DOCX", "TXT"],
                                value="DOCX" if DOCX_AVAILABLE else "TXT",
                                label="Format"
                            )
                            download_btn = gr.Button("⬇️ Download", variant="secondary")
                        download_file = gr.File(
                            label="Downloaded File",
                            visible=False
                        )
        # Hidden state
        novel_text_state = gr.State("")
        # Examples (literary themes)
        with gr.Row():
            gr.Examples(
                examples=[
                    ["Housing rights of the urban poor and class conflict"],
                    ["The anxiety and alienation of young workers in precarious jobs"],
                    ["The invisible lives of care workers"],
                    ["Gentrification and urban displacement"],
                    ["The invisible labor of care workers"],
                    ["Digital divide and social isolation"],
                    ["Generational disconnection and poverty in old age"],
                    ["Inequality in the age of environmental disaster"]
                ],
                inputs=query_input,
                label="💡 Example Literary Themes"
            )

        # Event handlers
        def refresh_sessions():
            try:
                sessions = get_active_sessions("Korean")
                return gr.update(choices=sessions)
            except Exception as e:
                logger.error(f"Error refreshing sessions: {str(e)}")
                return gr.update(choices=[])

        def handle_auto_recover(language):
            session_id, message = auto_recover_session(language)
            return session_id

        # Wire up events
        submit_btn.click(
            fn=process_query,
            inputs=[query_input, language_select, current_session_id],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        novel_output.change(
            fn=lambda x: x,
            inputs=[novel_output],
            outputs=[novel_text_state]
        )
        resume_btn.click(
            fn=lambda x: x,
            inputs=[session_dropdown],
            outputs=[current_session_id]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        auto_recover_btn.click(
            fn=handle_auto_recover,
            inputs=[language_select],
            outputs=[current_session_id]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        refresh_btn.click(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )
        clear_btn.click(
            fn=lambda: ("", "", "🟢 Ready", "", None),
            outputs=[stages_display, novel_output, status_text, novel_text_state, current_session_id]
        )

        def handle_download(format_type, language, session_id, novel_text):
            if not session_id:
                return gr.update(visible=False)
            file_path = download_novel(novel_text, format_type, language, session_id)
            if file_path:
                return gr.update(value=file_path, visible=True)
            else:
                return gr.update(visible=False)

        download_btn.click(
            fn=handle_download,
            inputs=[format_select, language_select, current_session_id, novel_text_state],
            outputs=[download_file]
        )
        # Load sessions on startup
        interface.load(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )
    return interface

# Main entry point
if __name__ == "__main__":
    logger.info("Starting the AI literary novel writing system...")
    logger.info("=" * 60)
    # Environment check
    logger.info(f"API endpoint: {API_URL}")
    if BRAVE_SEARCH_API_KEY:
        logger.info("Web search is enabled.")
    else:
        logger.warning("Web search is disabled.")
    if DOCX_AVAILABLE:
        logger.info("DOCX export is enabled.")
    else:
        logger.warning("DOCX export is disabled.")
    logger.info("=" * 60)
    # Initialize the database
    logger.info("Initializing the database...")
    NovelDatabase.init_db()
    logger.info("Database initialization complete.")
    # Build and launch the interface
    interface = create_interface()
    interface.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        debug=True
    )