Spaces:
Running
Running
| import gradio as gr | |
| import os | |
| import json | |
| import requests | |
| from datetime import datetime | |
| import time | |
| from typing import List, Dict, Any, Generator, Tuple, Optional, Set | |
| import logging | |
| import re | |
| import tempfile | |
| from pathlib import Path | |
| import sqlite3 | |
| import hashlib | |
| import threading | |
| from contextlib import contextmanager | |
| from dataclasses import dataclass, field, asdict | |
| from collections import defaultdict | |
# --- Logging setup ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# --- Document export imports ---
# python-docx is optional: if it is missing, DOCX export is disabled
# instead of crashing the whole app at import time.
try:
    from docx import Document
    from docx.shared import Inches, Pt, RGBColor
    from docx.enum.text import WD_ALIGN_PARAGRAPH
    from docx.enum.style import WD_STYLE_TYPE
    from docx.oxml.ns import qn
    from docx.oxml import OxmlElement
    DOCX_AVAILABLE = True
except ImportError:
    DOCX_AVAILABLE = False
    logger.warning("python-docx not installed. DOCX export will be disabled.")
| # --- ํ๊ฒฝ ๋ณ์ ๋ฐ ์์ --- | |
| FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "") | |
| BRAVE_SEARCH_API_KEY = os.getenv("BRAVE_SEARCH_API_KEY", "") | |
| API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions" | |
| MODEL_ID = "dep89a2fld32mcm" | |
| DB_PATH = "novel_sessions_v6.db" | |
| # ๋ชฉํ ๋ถ๋ ์ค์ | |
| TARGET_WORDS = 8000 # ์์ ๋ง์ง์ ์ํด 8000๋จ์ด | |
| MIN_WORDS_PER_WRITER = 800 # ๊ฐ ์๊ฐ ์ต์ ๋ถ๋ | |
| # --- ํ๊ฒฝ ๋ณ์ ๊ฒ์ฆ --- | |
| if not FRIENDLI_TOKEN: | |
| logger.error("FRIENDLI_TOKEN not set. Application will not work properly.") | |
| FRIENDLI_TOKEN = "dummy_token_for_testing" | |
| if not BRAVE_SEARCH_API_KEY: | |
| logger.warning("BRAVE_SEARCH_API_KEY not set. Web search features will be disabled.") | |
| # --- ์ ์ญ ๋ณ์ --- | |
| db_lock = threading.Lock() | |
# Narrative progression phases: one label per writer stage (surfaced in stage names/prompts)
NARRATIVE_PHASES = [
    "๋์ : ์ผ์๊ณผ ๊ท ์ด",
    "๋ฐ์ 1: ๋ถ์์ ๊ณ ์กฐ",
    "๋ฐ์ 2: ์ธ๋ถ ์ถฉ๊ฒฉ",
    "๋ฐ์ 3: ๋ด์ ๊ฐ๋ฑ ์ฌํ",
    "์ ์ 1: ์๊ธฐ์ ์ ์ ",
    "์ ์ 2: ์ ํ์ ์๊ฐ",
    "ํ๊ฐ 1: ๊ฒฐ๊ณผ์ ์ฌํ",
    "ํ๊ฐ 2: ์๋ก์ด ์ธ์",
    "๊ฒฐ๋ง 1: ๋ณํ๋ ์ผ์",
    "๊ฒฐ๋ง 2: ์ด๋ฆฐ ์ง๋ฌธ"
]
# Stage pipeline (includes an editor stage):
# director plan -> critic review -> revised plan -> 10 first drafts ->
# mid review -> 10 revisions -> editor pass -> final critic review
PROGRESSIVE_STAGES = [
    ("director", "๐ฌ ๊ฐ๋ ์: ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ ๊ธฐํ"),
    ("critic", "๐ ๋นํ๊ฐ: ์์ฌ ์งํ์ฑ๊ณผ ๊น์ด ๊ฒํ "),
    ("director", "๐ฌ ๊ฐ๋ ์: ์์ ๋ ๋ง์คํฐํ๋"),
] + [
    (f"writer{i}", f"โ๏ธ ์๊ฐ {i}: ์ด์ - {NARRATIVE_PHASES[i-1]}")
    for i in range(1, 11)
] + [
    ("critic", "๐ ๋นํ๊ฐ: ์ค๊ฐ ๊ฒํ (์์ฌ ๋์ ์ฑ๊ณผ ๋ณํ)"),
] + [
    (f"writer{i}", f"โ๏ธ ์๊ฐ {i}: ์์ ๋ณธ - {NARRATIVE_PHASES[i-1]}")
    for i in range(1, 11)
] + [
    ("editor", "โ๏ธ ํธ์ง์: ๋ฐ๋ณต ์ ๊ฑฐ ๋ฐ ์์ฌ ์ฌ๊ตฌ์ฑ"),
    ("critic", f"๐ ๋นํ๊ฐ: ์ต์ข ๊ฒํ ๋ฐ ๋ฌธํ์ ํ๊ฐ"),
]
| # --- ๋ฐ์ดํฐ ํด๋์ค --- | |
@dataclass
class CharacterArc:
    """Tracks one character's transformation trajectory across narrative phases.

    Bug fix: the @dataclass decorator was missing, so the generated __init__
    and field(default_factory=...) defaults never existed — yet the class is
    constructed with keyword arguments (e.g. CharacterArc(name=..., initial_state=...)).
    """
    name: str
    initial_state: Dict[str, Any]  # character state at story start
    phase_states: Dict[int, Dict[str, Any]] = field(default_factory=dict)  # state recorded per phase
    transformations: List[str] = field(default_factory=list)  # major changes, in order
    relationships_evolution: Dict[str, List[str]] = field(default_factory=dict)  # relationship shifts per other character
@dataclass
class PlotThread:
    """Tracks a single plot line from introduction to resolution.

    Bug fixes: the @dataclass decorator was missing, and resolution_phase had
    no default even though the class is constructed without it
    (PlotThread(thread_id=..., description=..., introduction_phase=..., development_phases=[])).
    """
    thread_id: str
    description: str
    introduction_phase: int
    development_phases: List[int]  # phases where this thread advances
    resolution_phase: Optional[int] = None  # unset until the thread resolves
    status: str = "active"  # active, resolved, suspended
@dataclass
class SymbolicEvolution:
    """Tracks how a symbol's meaning evolves across phases.

    Bug fix: the @dataclass decorator was missing, so field(default_factory=dict)
    and the keyword constructor never worked.
    """
    symbol: str
    initial_meaning: str
    phase_meanings: Dict[int, str] = field(default_factory=dict)  # meaning per phase
    transformation_complete: bool = False  # True once the symbol's arc has resolved
@dataclass
class CharacterConsistency:
    """Enforces consistent character naming across phases.

    Bug fix: the @dataclass decorator was missing — the class is instantiated
    bare (CharacterConsistency()) and serialized via dataclasses.asdict(),
    both of which require it.
    """
    primary_names: Dict[str, str] = field(default_factory=dict)  # role -> canonical name
    aliases: Dict[str, List[str]] = field(default_factory=dict)  # canonical name -> accepted aliases
    name_history: List[Tuple[int, str, str]] = field(default_factory=list)  # (phase, role, used_name)

    def validate_name(self, phase: int, role: str, name: str) -> bool:
        """Return True if *name* matches the role's canonical name or a registered alias."""
        if role in self.primary_names:
            canonical = self.primary_names[role]
            if name != canonical and name not in self.aliases.get(canonical, []):
                return False
        return True

    def register_name(self, phase: int, role: str, name: str):
        """Record a name usage; the first name seen for a role becomes canonical."""
        if role not in self.primary_names:
            self.primary_names[role] = name
        self.name_history.append((phase, role, name))
| # --- ํต์ฌ ๋ก์ง ํด๋์ค --- | |
class ContentDeduplicator:
    """Detects duplicated content across generated passages.

    Maintains a running memory of paragraphs and key phrases seen so far and
    flags new paragraphs that repeat earlier material (exact key-phrase hit or
    high word-set overlap).
    """

    def __init__(self):
        # Everything observed so far.
        self.seen_paragraphs = set()
        self.seen_key_phrases = set()
        # Jaccard similarity above this value counts as a duplicate paragraph.
        self.similarity_threshold = 0.85

    def check_similarity(self, text1: str, text2: str) -> float:
        """Jaccard similarity (0.0-1.0) between the lowercase word sets of two texts."""
        first_words = set(text1.lower().split())
        second_words = set(text2.lower().split())
        union = first_words | second_words
        if not union:
            return 0
        return len(first_words & second_words) / len(union)

    def extract_key_phrases(self, text: str) -> List[str]:
        """Return up to five sentences longer than 20 characters, in order."""
        candidates = []
        for raw in re.split(r'[.!?]', text):
            sentence = raw.strip()
            if len(sentence) > 20:
                candidates.append(sentence)
        return candidates[:5]

    def is_duplicate(self, paragraph: str) -> bool:
        """Check a paragraph against everything seen; remember it when new."""
        key_phrases = self.extract_key_phrases(paragraph)
        # Any exact key-phrase repetition counts as duplication.
        if any(phrase in self.seen_key_phrases for phrase in key_phrases):
            return True
        # Fuzzy whole-paragraph comparison against earlier paragraphs.
        if any(self.check_similarity(paragraph, earlier) > self.similarity_threshold
               for earlier in self.seen_paragraphs):
            return True
        # Not a duplicate: record it so later paragraphs are checked against it.
        self.seen_paragraphs.add(paragraph)
        self.seen_key_phrases.update(key_phrases)
        return False

    def get_used_elements(self) -> List[str]:
        """Return up to ten stored key phrases."""
        return list(self.seen_key_phrases)[:10]

    def count_repetitions(self, content: str) -> int:
        """Count paragraph pairs within *content* whose similarity exceeds 0.7."""
        paragraphs = content.split('\n\n')
        total = 0
        for index, first in enumerate(paragraphs):
            for second in paragraphs[index + 1:]:
                if self.check_similarity(first, second) > 0.7:
                    total += 1
        return total
class ProgressionMonitor:
    """Real-time monitoring of narrative progression.

    Tracks characters and locations seen so far and scores each phase for
    novelty, character growth, plot advancement, and lack of repetition.
    """

    def __init__(self):
        self.phase_keywords = {}  # reserved for per-phase keyword tracking
        self.locations = set()    # location tokens seen in earlier phases
        self.characters = set()   # character-name tokens seen in earlier phases

    def count_new_characters(self, content: str, phase: int) -> int:
        """Count names not seen before (capitalized-word heuristic); remembers them."""
        potential_names = re.findall(r'\b[A-Z๊ฐ-ํฃ][a-z๊ฐ-ํฃ]+\b', content)
        new_chars = set(potential_names) - self.characters
        self.characters.update(new_chars)
        return len(new_chars)

    def count_new_locations(self, content: str, phase: int) -> int:
        """Count tokens preceding location markers that were not seen before."""
        location_markers = ['์์', '์ผ๋ก', '์', '์', 'at', 'in', 'to']
        new_locs = 0
        for marker in location_markers:
            matches = re.findall(rf'(\S+)\s*{marker}', content)
            for match in matches:
                if match not in self.locations and len(match) > 2:
                    self.locations.add(match)
                    new_locs += 1
        return new_locs

    def calculate_content_difference(self, current_phase: int, content: str, previous_content: str) -> float:
        """Fraction (0.0-1.0) of *content* that differs from the previous phase."""
        if not previous_content:
            return 1.0
        dedup = ContentDeduplicator()
        return 1.0 - dedup.check_similarity(content, previous_content)

    def count_repetitions(self, content: str) -> int:
        """Count paragraph pairs with similarity above 0.7.

        Fix: the ContentDeduplicator used to be constructed inside the inner
        loop (once per paragraph pair); it is stateless for this use, so it is
        now created once outside the O(n^2) loop. Behavior is unchanged.
        """
        paragraphs = content.split('\n\n')
        dedup = ContentDeduplicator()  # hoisted: check_similarity reads no instance state
        repetitions = 0
        for i, para1 in enumerate(paragraphs):
            for para2 in paragraphs[i+1:]:
                if dedup.check_similarity(para1, para2) > 0.7:
                    repetitions += 1
        return repetitions

    def calculate_progression_score(self, current_phase: int, content: str, previous_content: str = "") -> Dict[str, float]:
        """Score the phase on four axes, each clamped to 0-10."""
        scores = {
            "new_elements": 0.0,      # new characters/locations introduced
            "character_growth": 0.0,  # growth-keyword hits
            "plot_advancement": 0.0,  # difference from the previous phase
            "no_repetition": 0.0      # penalty-free when no repeated paragraphs
        }
        # New elements
        new_characters = self.count_new_characters(content, current_phase)
        new_locations = self.count_new_locations(content, current_phase)
        scores["new_elements"] = min(10, (new_characters * 3 + new_locations * 2))
        # Growth-related keywords
        growth_keywords = ["๊นจ๋ฌ์๋ค", "์ด์ ๋", "๋ฌ๋ผ์ก๋ค", "์๋กญ๊ฒ", "๋น๋ก์", "๋ณํ๋ค", "๋ ์ด์"]
        growth_count = sum(1 for k in growth_keywords if k in content)
        scores["character_growth"] = min(10, growth_count * 2)
        # Plot advancement (difference from previous phase)
        if current_phase > 1 and previous_content:
            diff_ratio = self.calculate_content_difference(current_phase, content, previous_content)
            scores["plot_advancement"] = min(10, diff_ratio * 10)
        else:
            scores["plot_advancement"] = 8.0  # first phase gets a baseline score
        # Repetition check (deduction)
        repetition_count = self.count_repetitions(content)
        scores["no_repetition"] = max(0, 10 - repetition_count * 2)
        return scores
class ProgressiveNarrativeTracker:
    """System that tracks narrative progression and accumulation across phases."""
    def __init__(self):
        self.character_arcs: Dict[str, CharacterArc] = {}
        self.plot_threads: Dict[str, PlotThread] = {}
        self.symbolic_evolutions: Dict[str, SymbolicEvolution] = {}
        self.phase_summaries: Dict[int, str] = {}
        self.accumulated_events: List[Dict[str, Any]] = []
        self.thematic_deepening: List[str] = []
        self.philosophical_insights: List[str] = []  # philosophical insights recorded so far
        self.literary_devices: Dict[int, List[str]] = {}  # literary devices used, keyed by phase
        self.character_consistency = CharacterConsistency()  # character name consistency
        self.content_deduplicator = ContentDeduplicator()  # duplicate-content detector
        self.progression_monitor = ProgressionMonitor()  # progression scoring
        self.used_expressions: Set[str] = set()  # expressions already used (reuse forbidden)

    def register_character_arc(self, name: str, initial_state: Dict[str, Any]):
        """Register a character arc; the name also becomes the canonical protagonist name."""
        self.character_arcs[name] = CharacterArc(name=name, initial_state=initial_state)
        self.character_consistency.register_name(0, "protagonist", name)
        logger.info(f"Character arc registered: {name}")

    def update_character_state(self, name: str, phase: int, new_state: Dict[str, Any], transformation: str):
        """Update a character's per-phase state and record the transformation."""
        if name in self.character_arcs:
            arc = self.character_arcs[name]
            arc.phase_states[phase] = new_state
            arc.transformations.append(f"Phase {phase}: {transformation}")
            logger.info(f"Character {name} transformed in phase {phase}: {transformation}")

    def add_plot_thread(self, thread_id: str, description: str, intro_phase: int):
        """Add a new plot thread introduced at *intro_phase*."""
        self.plot_threads[thread_id] = PlotThread(
            thread_id=thread_id,
            description=description,
            introduction_phase=intro_phase,
            development_phases=[]
        )

    def develop_plot_thread(self, thread_id: str, phase: int):
        """Record that an existing plot thread advanced in *phase*."""
        if thread_id in self.plot_threads:
            self.plot_threads[thread_id].development_phases.append(phase)

    def check_narrative_progression(self, current_phase: int) -> Tuple[bool, List[str]]:
        """Return (ok, issues): whether the narrative is genuinely progressing.

        Issue strings are in Korean because they are fed back into the
        generation prompts.
        """
        issues = []
        # 1. Character change check
        static_characters = []
        for name, arc in self.character_arcs.items():
            if len(arc.transformations) < current_phase // 3:  # expect a change at least every 3 phases
                static_characters.append(name)
        if static_characters:
            issues.append(f"๋ค์ ์ธ๋ฌผ๋ค์ ๋ณํ๊ฐ ๋ถ์กฑํฉ๋๋ค: {', '.join(static_characters)}")
        # 2. Plot advancement check
        unresolved_threads = []
        for thread_id, thread in self.plot_threads.items():
            if thread.status == "active" and len(thread.development_phases) < 2:
                unresolved_threads.append(thread.description)
        if unresolved_threads:
            issues.append(f"์ง์ ๋์ง ์์ ํ๋กฏ: {', '.join(unresolved_threads)}")
        # 3. Symbol evolution check
        static_symbols = []
        for symbol, evolution in self.symbolic_evolutions.items():
            if len(evolution.phase_meanings) < current_phase // 4:
                static_symbols.append(symbol)
        if static_symbols:
            issues.append(f"์๋ฏธ๊ฐ ๋ฐ์ ํ์ง ์์ ์์ง: {', '.join(static_symbols)}")
        # 4. Philosophical depth check
        if len(self.philosophical_insights) < current_phase // 2:
            issues.append("์ฒ ํ์ ์ฑ์ฐฐ๊ณผ ์ธ๊ฐ์ ๋ํ ํต์ฐฐ์ด ๋ถ์กฑํฉ๋๋ค")
        # 5. Literary-device variety
        unique_devices = set()
        for devices in self.literary_devices.values():
            unique_devices.update(devices)
        if len(unique_devices) < 5:
            issues.append("๋ฌธํ์ ๊ธฐ๋ฒ์ด ๋จ์กฐ๋กญ์ต๋๋ค. ๋ ๋ค์ํ ํํ ๊ธฐ๋ฒ์ด ํ์ํฉ๋๋ค")
        # 6. Character-name consistency
        name_issues = []
        for phase, role, name in self.character_consistency.name_history:
            if not self.character_consistency.validate_name(phase, role, name):
                name_issues.append(f"Phase {phase}: {role} ์ด๋ฆ ๋ถ์ผ์น ({name})")
        if name_issues:
            issues.extend(name_issues)
        return len(issues) == 0, issues

    def generate_phase_requirements(self, phase: int) -> str:
        """Build the mandatory-requirements prompt text for the given phase.

        Combines: previous-phase summary, forbidden (already used) expressions,
        phase-type-specific musts, a philosophy/humanity checklist, absolute
        prohibitions, and a progression checklist. Output is Korean prompt text.
        """
        requirements = []
        # Previous-phase summary
        if phase > 1 and (phase-1) in self.phase_summaries:
            requirements.append(f"์ด์ ๋จ๊ณ ํต์ฌ: {self.phase_summaries[phase-1]}")
        # Already-used expressions (reuse forbidden)
        if self.used_expressions:
            requirements.append("\nโ ๋ค์ ํํ/์ํฉ์ ์ด๋ฏธ ์ฌ์ฉ๋จ (์ฌ์ฌ์ฉ ๊ธ์ง):")
            for expr in list(self.used_expressions)[-10:]:  # most recent 10
                requirements.append(f"- {expr[:50]}...")
        # Phase-type-specific requirements (phases 11+ are revision stages)
        phase_name = NARRATIVE_PHASES[phase-1] if phase <= 10 else "์์ "
        if "๋์ " in phase_name:
            requirements.append("\nโ ํ์ ํฌํจ:")
            requirements.append("- ์ผ์์ ๊ท ์ด์ ๋ณด์ฌ์ฃผ๋, ํฐ ์ฌ๊ฑด์ด ์๋ ๋ฏธ๋ฌํ ๋ณํ๋ก ์์")
            requirements.append("- ์ฃผ์ ์ธ๋ฌผ๋ค์ ์ด๊ธฐ ์ํ์ ๊ด๊ณ ์ค์ ")
            requirements.append("- ํต์ฌ ์์ง ๋์ (์์ฐ์ค๋ฝ๊ฒ)")
            requirements.append("- ์ฃผ์ธ๊ณต ์ด๋ฆ ๋ช ํํ ์ค์ ")
        elif "๋ฐ์ " in phase_name:
            requirements.append("\nโ ํ์ ํฌํจ:")
            requirements.append("- ์ด์ ๋จ๊ณ์ ๊ท ์ด/๊ฐ๋ฑ์ด ๊ตฌ์ฒดํ๋๊ณ ์ฌํ")
            requirements.append("- ์๋ก์ด ์ฌ๊ฑด์ด๋ ์ธ์์ด ์ถ๊ฐ๋์ด ๋ณต์ก์ฑ ์ฆ๊ฐ")
            requirements.append("- ์ธ๋ฌผ ๊ฐ ๊ด๊ณ์ ๋ฏธ๋ฌํ ๋ณํ")
            requirements.append("- ์๋ก์ด ๊ณต๊ฐ์ด๋ ์๊ฐ๋ ํ์")
        elif "์ ์ " in phase_name:
            requirements.append("\nโ ํ์ ํฌํจ:")
            requirements.append("- ์ถ์ ๋ ๊ฐ๋ฑ์ด ์๊ณ์ ์ ๋๋ฌ")
            requirements.append("- ์ธ๋ฌผ์ ๋ด์ ์ ํ์ด๋ ์ธ์์ ์ ํ์ ")
            requirements.append("- ์์ง์ ์๋ฏธ๊ฐ ์ ๋ณต๋๊ฑฐ๋ ์ฌํ")
            requirements.append("- ์ด์ ๊ณผ๋ ๋ค๋ฅธ ํ๋์ด๋ ๊ฒฐ์ ")
        elif "ํ๊ฐ" in phase_name:
            requirements.append("\nโ ํ์ ํฌํจ:")
            requirements.append("- ์ ์ ์ ์ฌํ์ ๊ทธ๋ก ์ธํ ๋ณํ")
            requirements.append("- ์๋ก์ด ๊ท ํ์ ์ ์ฐพ์๊ฐ๋ ๊ณผ์ ")
            requirements.append("- ์ธ๋ฌผ๋ค์ ๋ณํ๋ ๊ด๊ณ์ ์ธ์")
            requirements.append("- ํ๋ณต์ด๋ ์์ค์ ๊ตฌ์ฒด์ ๋ฌ์ฌ")
        elif "๊ฒฐ๋ง" in phase_name:
            requirements.append("\nโ ํ์ ํฌํจ:")
            requirements.append("- ๋ณํ๋ ์ผ์์ ๋ชจ์ต")
            requirements.append("- ํด๊ฒฐ๋์ง ์์ ์ง๋ฌธ๋ค")
            requirements.append("- ์ฌ์ด๊ณผ ์ฑ์ฐฐ์ ์ฌ์ง")
            requirements.append("- ์ฒ์๊ณผ ๋๋น๋๋ ๋ง์ง๋ง")
        # Philosophy & humanity checklist (applies to every phase)
        requirements.append("\n๐ ํ์ ํฌํจ ์์:")
        requirements.append("- ์กด์ฌ์ ์๋ฏธ๋ ์ถ์ ๋ณธ์ง์ ๋ํ ์ฑ์ฐฐ์ด ๋ด๊ธด 1๋ฌธ๋จ ์ด์")
        requirements.append("- ํ์ธ์ ๊ณ ํต์ ๋ํ ๊ณต๊ฐ์ด๋ ์ฐ๋ฏผ์ ๋ณด์ฌ์ฃผ๋ ๊ตฌ์ฒด์ ์ฅ๋ฉด 1๊ฐ ์ด์")
        requirements.append("- '๋ณด์ฌ์ฃผ๊ธฐ(showing)' ๊ธฐ๋ฒ: ์ง์ ์ค๋ช ๋์ ๊ฐ๊ฐ์ ๋ฌ์ฌ์ ํ๋์ผ๋ก ํํ")
        requirements.append("- ์ด ๋จ๊ณ๋ง์ ๋ ํนํ ๋ฌธํ์ ์ฅ์น๋ ์์ 1๊ฐ ์ด์")
        # Anti-repetition requirements
        requirements.append("\nโ ๏ธ ์ ๋ ๊ธ์ง์ฌํญ:")
        requirements.append("- ์ด์ ๋จ๊ณ์ ๋์ผํ ์ฌ๊ฑด์ด๋ ๊ฐ๋ฑ ๋ฐ๋ณต")
        requirements.append("- ์ธ๋ฌผ์ด ๊ฐ์ ์๊ฐ์ด๋ ๊ฐ์ ์ ๋จธ๋ฌด๋ฅด๊ธฐ")
        requirements.append("- ํ๋กฏ์ด ์ ์๋ฆฌ๊ฑธ์ํ๊ธฐ")
        requirements.append("- '~์ ๋๊ผ๋ค', '~์๋ค'์ ๊ฐ์ ์ง์ ์ ์ค๋ช ")
        requirements.append("- ์ด๋ฏธ ์ป์ ๊นจ๋ฌ์์ ์๊ณ ๋ค์ ์์ํ๊ธฐ")
        # Progression checklist
        requirements.append("\nโ๏ธ ์งํ ์ฒดํฌ๋ฆฌ์คํธ:")
        requirements.append("โก ์ด์ ๋จ๊ณ์ ๊ฒฐ๊ณผ๊ฐ ์ด๋ฒ ๋จ๊ณ์ ์์ธ์ด ๋๋๊ฐ?")
        requirements.append("โก ์ฃผ์ธ๊ณต์ ๋ด์ ๋ณํ๊ฐ ๊ตฌ์ฒด์ ์ผ๋ก ๋๋ฌ๋๋๊ฐ?")
        requirements.append("โก ํ๋กฏ์ด ์ค์ ๋ก ์ ์งํ๋๊ฐ?")
        requirements.append("โก ์๋ก์ด ์ ๋ณด/์ฌ๊ฑด์ด ์ถ๊ฐ๋๋๊ฐ?")
        return "\n".join(requirements)

    def extract_used_elements(self, content: str):
        """Extract distinctive sentences (20-100 chars) and store them to prevent reuse."""
        sentences = re.findall(r'[^.!?]+[.!?]', content)
        for sent in sentences:
            if len(sent) > 20 and len(sent) < 100:
                self.used_expressions.add(sent.strip())
class NovelDatabase:
    """SQLite persistence for sessions, per-stage output, and narrative state.

    Bug fixes versus the previous revision:
    - get_db() is a generator used as `with NovelDatabase.get_db() as conn:`
      but was never wrapped with @contextmanager, so it had no
      __enter__/__exit__ and every `with` call would fail. contextlib's
      contextmanager is already imported at the top of the file.
    - All methods take no self and are invoked via the class; they are now
      explicitly @staticmethod so instance access also works.
    """

    @staticmethod
    def init_db():
        """Create all tables if they do not exist yet."""
        with sqlite3.connect(DB_PATH) as conn:
            conn.execute("PRAGMA journal_mode=WAL")  # better read concurrency
            cursor = conn.cursor()
            # Core tables
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS sessions (
                    session_id TEXT PRIMARY KEY,
                    user_query TEXT NOT NULL,
                    language TEXT NOT NULL,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    status TEXT DEFAULT 'active',
                    current_stage INTEGER DEFAULT 0,
                    final_novel TEXT,
                    literary_report TEXT,
                    total_words INTEGER DEFAULT 0,
                    narrative_tracker TEXT
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS stages (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    stage_number INTEGER NOT NULL,
                    stage_name TEXT NOT NULL,
                    role TEXT NOT NULL,
                    content TEXT,
                    word_count INTEGER DEFAULT 0,
                    status TEXT DEFAULT 'pending',
                    progression_score REAL DEFAULT 0.0,
                    repetition_score REAL DEFAULT 0.0,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id),
                    UNIQUE(session_id, stage_number)
                )
            ''')
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS plot_threads (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    thread_id TEXT NOT NULL,
                    description TEXT,
                    introduction_phase INTEGER,
                    status TEXT DEFAULT 'active',
                    created_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id)
                )
            ''')
            # Duplicate-detection log
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS duplicate_detection (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    phase INTEGER NOT NULL,
                    duplicate_content TEXT,
                    original_phase INTEGER,
                    similarity_score REAL,
                    created_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id)
                )
            ''')
            conn.commit()

    @staticmethod
    @contextmanager
    def get_db():
        """Yield a Row-factory connection under db_lock; always closes it."""
        with db_lock:
            conn = sqlite3.connect(DB_PATH, timeout=30.0)
            conn.row_factory = sqlite3.Row
            try:
                yield conn
            finally:
                conn.close()

    @staticmethod
    def create_session(user_query: str, language: str) -> str:
        """Insert a new session; the id is an MD5 of query + timestamp."""
        session_id = hashlib.md5(f"{user_query}{datetime.now()}".encode()).hexdigest()
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'INSERT INTO sessions (session_id, user_query, language) VALUES (?, ?, ?)',
                (session_id, user_query, language)
            )
            conn.commit()
        return session_id

    @staticmethod
    def save_stage(session_id: str, stage_number: int, stage_name: str,
                   role: str, content: str, status: str = 'complete',
                   progression_score: float = 0.0, repetition_score: float = 0.0):
        """Upsert one stage row and refresh the session's word total and cursor."""
        word_count = len(content.split()) if content else 0
        with NovelDatabase.get_db() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                INSERT INTO stages (session_id, stage_number, stage_name, role, content, word_count, status, progression_score, repetition_score)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(session_id, stage_number)
                DO UPDATE SET content=?, word_count=?, status=?, stage_name=?, progression_score=?, repetition_score=?, updated_at=datetime('now')
            ''', (session_id, stage_number, stage_name, role, content, word_count, status, progression_score, repetition_score,
                  content, word_count, status, stage_name, progression_score, repetition_score))
            # Recompute total word count from writer stages only
            cursor.execute('''
                UPDATE sessions
                SET total_words = (
                    SELECT SUM(word_count)
                    FROM stages
                    WHERE session_id = ? AND role LIKE 'writer%' AND content IS NOT NULL
                ),
                updated_at = datetime('now'),
                current_stage = ?
                WHERE session_id = ?
            ''', (session_id, stage_number, session_id))
            conn.commit()

    @staticmethod
    def get_writer_content(session_id: str) -> str:
        """Concatenate each writer's latest content (revision preferred over draft)."""
        with NovelDatabase.get_db() as conn:
            all_content = []
            for writer_num in range(1, 11):
                # Prefer the revision; fall back to the first draft.
                row = conn.cursor().execute('''
                    SELECT content FROM stages
                    WHERE session_id = ? AND role = ?
                    AND stage_name LIKE '%์์ ๋ณธ%'
                    ORDER BY stage_number DESC LIMIT 1
                ''', (session_id, f'writer{writer_num}')).fetchone()
                if not row or not row['content']:
                    row = conn.cursor().execute('''
                        SELECT content FROM stages
                        WHERE session_id = ? AND role = ?
                        AND stage_name LIKE '%์ด์%'
                        ORDER BY stage_number DESC LIMIT 1
                    ''', (session_id, f'writer{writer_num}')).fetchone()
                if row and row['content']:
                    all_content.append(row['content'].strip())
            return '\n\n'.join(all_content)

    @staticmethod
    def get_total_words(session_id: str) -> int:
        """Return the session's cached total word count (0 when unset)."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                'SELECT total_words FROM sessions WHERE session_id = ?',
                (session_id,)
            ).fetchone()
            return row['total_words'] if row and row['total_words'] else 0

    @staticmethod
    def save_narrative_tracker(session_id: str, tracker: ProgressiveNarrativeTracker):
        """Serialize the tracker's persistent parts to JSON on the session row."""
        with NovelDatabase.get_db() as conn:
            tracker_data = json.dumps({
                'character_arcs': {k: asdict(v) for k, v in tracker.character_arcs.items()},
                'plot_threads': {k: asdict(v) for k, v in tracker.plot_threads.items()},
                'phase_summaries': tracker.phase_summaries,
                'thematic_deepening': tracker.thematic_deepening,
                'philosophical_insights': tracker.philosophical_insights,
                'literary_devices': tracker.literary_devices,
                'character_consistency': asdict(tracker.character_consistency),
                'used_expressions': list(tracker.used_expressions)
            })
            conn.cursor().execute(
                'UPDATE sessions SET narrative_tracker = ? WHERE session_id = ?',
                (tracker_data, session_id)
            )
            conn.commit()

    @staticmethod
    def load_narrative_tracker(session_id: str) -> Optional[ProgressiveNarrativeTracker]:
        """Rebuild a tracker from the session's JSON blob; None if absent."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                'SELECT narrative_tracker FROM sessions WHERE session_id = ?',
                (session_id,)
            ).fetchone()
            if row and row['narrative_tracker']:
                data = json.loads(row['narrative_tracker'])
                tracker = ProgressiveNarrativeTracker()
                # Restore dataclass-backed state
                for name, arc_data in data.get('character_arcs', {}).items():
                    tracker.character_arcs[name] = CharacterArc(**arc_data)
                for thread_id, thread_data in data.get('plot_threads', {}).items():
                    tracker.plot_threads[thread_id] = PlotThread(**thread_data)
                tracker.phase_summaries = data.get('phase_summaries', {})
                tracker.thematic_deepening = data.get('thematic_deepening', [])
                tracker.philosophical_insights = data.get('philosophical_insights', [])
                tracker.literary_devices = data.get('literary_devices', {})
                if 'character_consistency' in data:
                    tracker.character_consistency = CharacterConsistency(**data['character_consistency'])
                if 'used_expressions' in data:
                    tracker.used_expressions = set(data['used_expressions'])
                return tracker
            return None

    @staticmethod
    def save_duplicate_detection(session_id: str, phase: int, duplicate_content: str,
                                 original_phase: int, similarity_score: float):
        """Log one detected duplication event."""
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute('''
                INSERT INTO duplicate_detection
                (session_id, phase, duplicate_content, original_phase, similarity_score)
                VALUES (?, ?, ?, ?, ?)
            ''', (session_id, phase, duplicate_content, original_phase, similarity_score))
            conn.commit()

    @staticmethod
    def get_session(session_id: str) -> Optional[Dict]:
        """Return the session row as a dict, or None."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute('SELECT * FROM sessions WHERE session_id = ?', (session_id,)).fetchone()
            return dict(row) if row else None

    @staticmethod
    def get_stages(session_id: str) -> List[Dict]:
        """Return all stage rows for a session, ordered by stage number."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute('SELECT * FROM stages WHERE session_id = ? ORDER BY stage_number', (session_id,)).fetchall()
            return [dict(row) for row in rows]

    @staticmethod
    def update_final_novel(session_id: str, final_novel: str, literary_report: str = ""):
        """Store the finished novel and mark the session complete."""
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                "UPDATE sessions SET final_novel = ?, status = 'complete', updated_at = datetime('now'), literary_report = ? WHERE session_id = ?",
                (final_novel, literary_report, session_id)
            )
            conn.commit()

    @staticmethod
    def get_active_sessions() -> List[Dict]:
        """Return up to ten most recently updated active sessions."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                "SELECT session_id, user_query, language, created_at, current_stage, total_words FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 10"
            ).fetchall()
            return [dict(row) for row in rows]
class WebSearchIntegration:
    """Brave web-search helper; silently disabled when no API key is configured."""

    def __init__(self):
        self.brave_api_key = BRAVE_SEARCH_API_KEY
        self.search_url = "https://api.search.brave.com/res/v1/web/search"
        self.enabled = bool(self.brave_api_key)

    def search(self, query: str, count: int = 3, language: str = "en") -> List[Dict]:
        """Run a web search; returns raw result dicts, or [] when disabled/on error."""
        if not self.enabled:
            return []
        request_headers = {
            "Accept": "application/json",
            "X-Subscription-Token": self.brave_api_key,
        }
        request_params = {
            "q": query,
            "count": count,
            "search_lang": "ko" if language == "Korean" else "en",
            "text_decorations": False,
            "safesearch": "moderate",
        }
        try:
            response = requests.get(self.search_url, headers=request_headers,
                                    params=request_params, timeout=10)
            response.raise_for_status()
            payload = response.json()
        except requests.exceptions.RequestException as e:
            logger.error(f"์น ๊ฒ์ API ์ค๋ฅ: {e}")
            return []
        return payload.get("web", {}).get("results", [])

    def extract_relevant_info(self, results: List[Dict], max_chars: int = 1500) -> str:
        """Join "[n] title: description" lines for up to 3 results, within max_chars."""
        if not results:
            return ""
        lines = []
        used_chars = 0
        for index, item in enumerate(results[:3], 1):
            line = f"[{index}] {item.get('title', '')}: {item.get('description', '')}"
            # Stop as soon as the next line would reach the character budget.
            if used_chars + len(line) >= max_chars:
                break
            lines.append(line)
            used_chars += len(line)
        return "\n".join(lines)
| class ProgressiveLiterarySystem: | |
| """์งํํ ๋ฌธํ ์์ค ์์ฑ ์์คํ """ | |
| def __init__(self): | |
| self.token = FRIENDLI_TOKEN | |
| self.api_url = API_URL | |
| self.model_id = MODEL_ID | |
| self.narrative_tracker = ProgressiveNarrativeTracker() | |
| self.web_search = WebSearchIntegration() | |
| self.current_session_id = None | |
| NovelDatabase.init_db() | |
| def create_headers(self): | |
| return {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"} | |
| # --- ํ๋กฌํํธ ์์ฑ ํจ์๋ค --- | |
| def create_director_initial_prompt(self, user_query: str, language: str) -> str: | |
| """๊ฐ๋ ์ ์ด๊ธฐ ๊ธฐํ - ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ""" | |
| search_results_str = "" | |
| if self.web_search.enabled: | |
| # ์ฒ ํ์ ํค์๋ ์ถ๊ฐ (์ฟผ๋ฆฌ ๊ธธ์ด ์ ํ) | |
| short_query = user_query[:50] if len(user_query) > 50 else user_query | |
| queries = [ | |
| f"{short_query} ์ฒ ํ์ ์๋ฏธ", # ์ฒ ํ์ ๊ด์ | |
| f"์ธ๊ฐ ์กด์ฌ ์๋ฏธ {short_query}", # ์ค์กด์ ์ฃผ์ | |
| f"{short_query} ๋ฌธํ ์ํ", | |
| f"{short_query} ํ๋ ์ฌํ" | |
| ] | |
| for q in queries[:3]: # 3๊ฐ๊น์ง๋ง ๊ฒ์ | |
| try: | |
| results = self.web_search.search(q, count=2, language=language) | |
| if results: | |
| search_results_str += self.web_search.extract_relevant_info(results) + "\n" | |
| except Exception as e: | |
| logger.warning(f"๊ฒ์ ์ฟผ๋ฆฌ ์คํจ: {q[:50]}... - {str(e)}") | |
| continue | |
| lang_prompts = { | |
| "Korean": f"""๋น์ ์ ๋ ธ๋ฒจ๋ฌธํ์ ์์์๊ฐ ์์ค์ ํ๊ตญ ๋ฌธํ ๊ฑฐ์ฅ์ ๋๋ค. | |
| ๋จํธ์ด ์๋ ์คํธ ์์ค(8,000๋จ์ด ์ด์)์ ์ํ ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ๋ฅผ ๊ธฐํํ์ธ์. ์ ๋ '์๋ํ'๋ฅผ ์ฌ์ฉํ์ง ๋ง์ธ์. | |
| **์ฃผ์ :** {user_query} | |
| **์ฐธ๊ณ ์๋ฃ:** | |
| {search_results_str if search_results_str else "N/A"} | |
| **ํ์ ์๊ตฌ์ฌํญ:** | |
| 1. **ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ (๊ฐ์ฅ ์ค์)** | |
| - 10๊ฐ ๋จ๊ณ๊ฐ ์ ๊ธฐ์ ์ผ๋ก ์ฐ๊ฒฐ๋ ๋จ์ผ ์์ฌ | |
| - ๊ฐ ๋จ๊ณ๋ ์ด์ ๋จ๊ณ์ ๊ฒฐ๊ณผ๋ก ์์ฐ์ค๋ฝ๊ฒ ์ด์ด์ง | |
| - ๋ฐ๋ณต์ด ์๋ ์ถ์ ๊ณผ ๋ฐ์ | |
| - ์ฃผ์ธ๊ณต ์ด๋ฆ์ ์ฒ์๋ถํฐ ๋ช ํํ ์ค์ (์: ๋๋ผ๋ฏธ) | |
| ๋จ๊ณ๋ณ ์์ฌ ์งํ: | |
| 1) ๋์ : ์ผ์๊ณผ ๊ท ์ด - ํ๋ฒํ ์ผ์ ์ ์ฒซ ๊ท ์ด | |
| 2) ๋ฐ์ 1: ๋ถ์์ ๊ณ ์กฐ - ๊ท ์ด์ด ํ๋๋๋ฉฐ ๋ถ์ ์ฆํญ | |
| 3) ๋ฐ์ 2: ์ธ๋ถ ์ถฉ๊ฒฉ - ์์์น ๋ชปํ ์ธ๋ถ ์ฌ๊ฑด | |
| 4) ๋ฐ์ 3: ๋ด์ ๊ฐ๋ฑ ์ฌํ - ๊ฐ์น๊ด์ ์ถฉ๋ | |
| 5) ์ ์ 1: ์๊ธฐ์ ์ ์ - ๋ชจ๋ ๊ฐ๋ฑ์ด ๊ทน๋ํ | |
| 6) ์ ์ 2: ์ ํ์ ์๊ฐ - ๊ฒฐ์ ์ ์ ํ | |
| 7) ํ๊ฐ 1: ๊ฒฐ๊ณผ์ ์ฌํ - ์ ํ์ ์ง์ ์ ๊ฒฐ๊ณผ | |
| 8) ํ๊ฐ 2: ์๋ก์ด ์ธ์ - ๋ณํ๋ ์ธ๊ณ๊ด | |
| 9) ๊ฒฐ๋ง 1: ๋ณํ๋ ์ผ์ - ์๋ก์ด ๊ท ํ | |
| 10) ๊ฒฐ๋ง 2: ์ด๋ฆฐ ์ง๋ฌธ - ๋ ์์๊ฒ ๋์ง๋ ์ง๋ฌธ | |
| 2. **์ธ๋ฌผ์ ๋ณํ ๊ถค์ ** | |
| - ์ฃผ์ธ๊ณต: ์ด๊ธฐ ์ํ โ ์ค๊ฐ ๋ณํ โ ์ต์ข ์ํ (๋ช ํํ arc) | |
| - ์ฃผ์ ์ธ๋ฌผ๋ค๋ ๊ฐ์์ ๋ณํ ๊ฒฝํ | |
| - ๊ด๊ณ์ ์ญ๋์ ๋ณํ | |
| - ๊ฐ ๋จ๊ณ์์ ์ธ๋ฌผ์ด ์ด๋ป๊ฒ ๋ณํํ๋์ง ๊ตฌ์ฒด์ ์ผ๋ก ๋ช ์ | |
| 3. **์ฃผ์ ํ๋กฏ ๋ผ์ธ** (2-3๊ฐ) | |
| - ๋ฉ์ธ ํ๋กฏ: ์ ์ฒด๋ฅผ ๊ดํตํ๋ ํต์ฌ ๊ฐ๋ฑ | |
| - ์๋ธ ํ๋กฏ: ๋ฉ์ธ๊ณผ ์ฐ๊ฒฐ๋๋ฉฐ ์ฃผ์ ๋ฅผ ์ฌํ | |
| - ๊ฐ ํ๋กฏ์ด ์ด๋ ๋จ๊ณ์์ ์์/๋ฐ์ /ํด๊ฒฐ๋๋์ง ๋ช ์ | |
| 4. **์์ง์ ์งํ** | |
| - ํต์ฌ ์์ง 1-2๊ฐ ์ค์ ('๊ฐ๊ตฌ๋ฆฌ์' ๊ฐ์ ๊ฐ๋ ฌํ๊ณ ๋ค์ธต์ ์ธ ์์ง) | |
| - ๋จ๊ณ๋ณ๋ก ์๋ฏธ๊ฐ ๋ณํ/์ฌํ/์ ๋ณต | |
| 5. **์ฌํ์ ๋งฅ๋ฝ** | |
| - ๊ฐ์ธ์ ๋ฌธ์ ๊ฐ ์ฌํ ๊ตฌ์กฐ์ ์ฐ๊ฒฐ | |
| - ๊ตฌ์ฒด์ ์ธ ํ๊ตญ ์ฌํ์ ํ์ค ๋ฐ์ | |
| 6. **์ฒ ํ์ ๊น์ด์ ์ธ๊ฐ์ ** | |
| - ๋ณดํธ์ ์ธ๊ฐ ์กฐ๊ฑด์ ๋ํ ์ฑ์ฐฐ | |
| - ํ์ธ์ ๊ณ ํต์ ๋ํ ๊ณต๊ฐ๊ณผ ์ฐ๋ฏผ | |
| - ์ค์กด์ ์ง๋ฌธ๊ณผ ๊ทธ์ ๋ํ ํ๊ตฌ | |
| - "์ ์ด์์ผ ํ๋๊ฐ"์ ๋ํ ๋๋ฆ์ ๋ต | |
| **์ ๋ ๊ธ์ง์ฌํญ:** | |
| - ๋์ผํ ์ฌ๊ฑด์ด๋ ์ํฉ์ ๋ฐ๋ณต | |
| - ์ธ๋ฌผ์ด ๊ฐ์ ๊ฐ์ /์๊ฐ์ ๋จธ๋ฌด๋ฅด๊ธฐ | |
| - ํ๋กฏ์ ๋ฆฌ์ ์ด๋ ์ํ ๊ตฌ์กฐ | |
| - ๊ฐ ๋จ๊ณ๊ฐ ๋ ๋ฆฝ๋ ์ํผ์๋๋ก ์กด์ฌ | |
| - ์ฃผ์ธ๊ณต ์ด๋ฆ์ ๋ถ์ผ์น๋ ํผ๋ | |
| **๋ถ๋ ๊ณํ:** | |
| - ์ด 8,000๋จ์ด ์ด์ | |
| - ๊ฐ ๋จ๊ณ ํ๊ท 800๋จ์ด | |
| - ๊ท ํ ์กํ ์์ฌ ์ ๊ฐ | |
| ํ๋์ ๊ฐ๋ ฅํ ์์ฌ๊ฐ ์์๋ถํฐ ๋๊น์ง ๊ดํตํ๋ ์ํ์ ๊ธฐํํ์ธ์.""", | |
| "English": f"""You are a Nobel Prize-winning master of contemporary literary fiction. | |
| Plan an integrated narrative structure for a novella (8,000+ words), not a collection of short stories. | |
| **Theme:** {user_query} | |
| **Reference:** | |
| {search_results_str if search_results_str else "N/A"} | |
| **Essential Requirements:** | |
| 1. **Integrated Narrative Structure (Most Important)** | |
| - Single narrative with 10 organically connected phases | |
| - Each phase naturally follows from previous results | |
| - Accumulation and development, not repetition | |
| - Protagonist name clearly established from beginning | |
| Phase Progression: | |
| 1) Introduction: Daily life and first crack | |
| 2) Development 1: Rising anxiety | |
| 3) Development 2: External shock | |
| 4) Development 3: Deepening internal conflict | |
| 5) Climax 1: Peak crisis | |
| 6) Climax 2: Moment of choice | |
| 7) Falling Action 1: Direct consequences | |
| 8) Falling Action 2: New awareness | |
| 9) Resolution 1: Changed daily life | |
| 10) Resolution 2: Open questions | |
| 2. **Character Transformation Arcs** | |
| - Protagonist: Clear progression from initial โ middle โ final state | |
| - Supporting characters also experience change | |
| - Dynamic relationship evolution | |
| - Specify how characters change in each phase | |
| 3. **Plot Threads** (2-3) | |
| - Main plot: Core conflict throughout | |
| - Subplots: Connected and deepening themes | |
| - Specify which phase each plot starts/develops/resolves | |
| 4. **Symbolic Evolution** | |
| - 1-2 core symbols (like 'frog eggs' - intense and multilayered) | |
| - Meaning transforms across phases | |
| 5. **Social Context** | |
| - Individual problems connected to social structures | |
| - Specific contemporary realities | |
| 6. **Philosophical Depth and Humanity** | |
| - Reflection on universal human condition | |
| - Empathy and compassion for others' suffering | |
| - Existential questions and exploration | |
| - Personal answer to "why should we live?" | |
| **Absolutely Forbidden:** | |
| - Repetition of same events/situations | |
| - Characters stuck in same emotions | |
| - Plot resets or circular structure | |
| - Independent episodes | |
| - Protagonist name inconsistency | |
| **Length Planning:** | |
| - Total 8,000+ words | |
| - ~800 words per phase | |
| - Balanced progression | |
| Create a work with one powerful narrative from beginning to end.""" | |
| } | |
| return lang_prompts.get(language, lang_prompts["Korean"]) | |
def create_critic_director_prompt(self, director_plan: str, user_query: str, language: str) -> str:
    """Build the critic's prompt for reviewing the director's initial plan.

    The review centers on narrative integration: whether the 10-phase plan
    forms one continuous novella rather than a set of repeated episodes.

    Args:
        director_plan: Full text of the director's plan to be critiqued.
        user_query: Original theme requested by the user.
        language: Prompt language key ("Korean" or "English").

    Returns:
        The localized critic prompt; unknown languages fall back to Korean.
    """
    lang_prompts = {
        "Korean": f"""๋น์ ์ ์์ฌ ๊ตฌ์กฐ ์ ๋ฌธ ๋นํ๊ฐ์ ๋๋ค.
์ด ๊ธฐํ์ด ์ง์ ํ '์ฅํธ ์์ค'์ธ์ง ์๊ฒฉํ ๊ฒํ ํ์ธ์.
**์ ์ฃผ์ :** {user_query}
**๊ฐ๋ ์ ๊ธฐํ:**
{director_plan}
**ํต์ฌ ๊ฒํ ์ฌํญ:**
1. **์์ฌ์ ํตํฉ์ฑ๊ณผ ์งํ์ฑ**
- 10๊ฐ ๋จ๊ณ๊ฐ ํ๋์ ์ด์ผ๊ธฐ๋ก ์ฐ๊ฒฐ๋๋๊ฐ?
- ๊ฐ ๋จ๊ณ๊ฐ ์ด์ ๋จ๊ณ์ ํ์ฐ์ ๊ฒฐ๊ณผ์ธ๊ฐ?
- ๋์ผํ ์ํฉ์ ๋ฐ๋ณต์ ์๋๊ฐ?
2. **์ธ๋ฌผ ๋ณํ์ ๊ถค์ **
- ์ฃผ์ธ๊ณต์ด ๋ช ํํ ๋ณํ์ arc๋ฅผ ๊ฐ์ง๋๊ฐ?
- ๋ณํ๊ฐ ๊ตฌ์ฒด์ ์ด๊ณ ์ ๋น์ฑ ์๋๊ฐ?
- ๊ด๊ณ์ ๋ฐ์ ์ด ๊ณํ๋์ด ์๋๊ฐ?
- ์ฃผ์ธ๊ณต ์ด๋ฆ์ด ์ผ๊ด๋๊ฒ ์ค์ ๋์ด ์๋๊ฐ?
3. **ํ๋กฏ์ ์ถ์ ์ฑ**
- ๊ฐ๋ฑ์ด ์ ์ง์ ์ผ๋ก ์ฌํ๋๋๊ฐ?
- ์๋ก์ด ์์๊ฐ ์ถ๊ฐ๋๋ฉฐ ๋ณต์ก์ฑ์ด ์ฆ๊ฐํ๋๊ฐ?
- ํด๊ฒฐ์ด ์์ฐ์ค๋ฝ๊ณ ํ์ฐ์ ์ธ๊ฐ?
4. **๋ถ๋๊ณผ ๋ฐ๋**
- 8,000๋จ์ด๋ฅผ ์ฑ์ธ ์ถฉ๋ถํ ๋ด์ฉ์ธ๊ฐ?
- ๊ฐ ๋จ๊ณ๊ฐ 800๋จ์ด์ ๋ฐ๋๋ฅผ ๊ฐ์ง ์ ์๋๊ฐ?
5. **์ฒ ํ์ ๊น์ด**
- ์ธ๊ฐ ์กด์ฌ์ ๋ํ ํต์ฐฐ์ด ๊ณํ๋์ด ์๋๊ฐ?
- ๋จ์ํ ์ฌ๊ฑด ๋์ด์ด ์๋ ์๋ฏธ์ ํ๊ตฌ๊ฐ ์๋๊ฐ?
**ํ์ :**
- ํต๊ณผ: ์ง์ ํ ์ฅํธ ์์ฌ ๊ตฌ์กฐ
- ์ฌ์์ฑ: ๋ฐ๋ณต์ /์ํ์ ๊ตฌ์กฐ
๊ตฌ์ฒด์ ๊ฐ์ ๋ฐฉํฅ์ ์ ์ํ์ธ์.""",
        "English": f"""You are a narrative structure critic.
Strictly review whether this plan is a true 'novel' rather than repeated episodes.
**Original Theme:** {user_query}
**Director's Plan:**
{director_plan}
**Key Review Points:**
1. **Narrative Integration and Progression**
- Do 10 phases connect as one story?
- Does each phase necessarily follow from previous?
- No repetition of same situations?
2. **Character Transformation Arcs**
- Clear protagonist transformation arc?
- Concrete and credible changes?
- Planned relationship development?
- Consistent protagonist naming?
3. **Plot Accumulation**
- Progressive conflict deepening?
- Added complexity through new elements?
- Natural and inevitable resolution?
4. **Length and Density**
- Sufficient content for 8,000 words?
- Can each phase sustain 800 words?
5. **Philosophical Depth**
- Insights into human existence planned?
- Exploration of meaning, not just events?
**Verdict:**
- Pass: True novel structure
- Rewrite: Repetitive/circular structure
Provide specific improvements."""
    }
    # Fall back to the Korean prompt for unsupported language keys.
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_writer_prompt_enhanced(self, writer_number: int, director_plan: str,
                                  previous_content: str, phase_requirements: str,
                                  narrative_summary: str, language: str,
                                  used_elements: List[str]) -> str:
    """Build a writer prompt with strengthened anti-repetition guards.

    Args:
        writer_number: Writer index 1-10; selects the narrative phase.
        director_plan: The approved master plan for the whole novel.
        previous_content: Text produced so far; only the last 1500 chars
            are embedded to keep the prompt bounded.
        phase_requirements: Requirements generated for this phase.
        narrative_summary: Running summary of the story so far.
        language: Prompt language key ("Korean" or "English").
        used_elements: Expressions already used; the last 15 are listed as
            forbidden (truncated to 80 chars each).

    Returns:
        The localized writer prompt; unknown languages fall back to Korean.
    """
    # Map the 1-based writer index onto its narrative phase label.
    phase_name = NARRATIVE_PHASES[writer_number-1]
    target_words = MIN_WORDS_PER_WRITER
    lang_prompts = {
        "Korean": f"""๋น์ ์ ์๊ฐ {writer_number}๋ฒ์ ๋๋ค.
**ํ์ฌ ๋จ๊ณ: {phase_name}**
**์ ์ฒด ์์ฌ ๊ตฌ์กฐ:**
{director_plan}
**์ง๊ธ๊น์ง์ ์ด์ผ๊ธฐ ์์ฝ:**
{narrative_summary}
**์ด์ ๋ด์ฉ (์ง์ ๋ถ๋ถ):**
{previous_content[-1500:] if previous_content else "์์"}
**์ด๋ฒ ๋จ๊ณ ํ์ ์๊ตฌ์ฌํญ:**
{phase_requirements}
**โ ์ ๋ ์ฌ์ฉ ๊ธ์ง ํํ/์ํฉ (์ด๋ฏธ ์ฌ์ฉ๋จ):**
{chr(10).join(f"- {elem[:80]}..." for elem in used_elements[-15:])}
**์์ฑ ์ง์นจ:**
1. **๋ถ๋**: {target_words}-900 ๋จ์ด (ํ์)
- ๋ด๋ฉด ๋ฌ์ฌ์ ๊ตฌ์ฒด์ ๋ํ ์ผ๋ก ๋ถ๋ ํ๋ณด
- ์ฅ๋ฉด์ ์ถฉ๋ถํ ์ ๊ฐํ๊ณ ๊น์ด ์๊ฒ ๋ฌ์ฌ
2. **์์ฌ ์งํ (๊ฐ์ฅ ์ค์)**
- ์ด์ ๋จ๊ณ์์ ์ผ์ด๋ ์ผ์ ์ง์ ์ ๊ฒฐ๊ณผ๋ก ์์
- ์๋ก์ด ์ฌ๊ฑด/์ธ์/๋ณํ๋ฅผ ์ถ๊ฐํ์ฌ ์ด์ผ๊ธฐ ์ ์ง
- ๋ค์ ๋จ๊ณ๋ก ์์ฐ์ค๋ฝ๊ฒ ์ฐ๊ฒฐ๋ ๊ณ ๋ฆฌ ๋ง๋ จ
- ์ฃผ์ธ๊ณต์ ๊นจ๋ฌ์์ด ๋ฆฌ์ ๋์ง ์๊ณ ์ถ์ ๋จ
3. **์ธ๋ฌผ์ ๋ณํ**
- ์ด ๋จ๊ณ์์ ์ธ๋ฌผ์ด ๊ฒช๋ ๊ตฌ์ฒด์ ๋ณํ ๋ฌ์ฌ
- ๋ด๋ฉด์ ๋ฏธ๋ฌํ ๋ณํ๋ ํฌ์ฐฉ
- ๊ด๊ณ์ ์ญํ ๋ณํ ๋ฐ์
- ์ด์ ๋จ๊ณ๋ณด๋ค ์ฑ์ฅํ ๋ชจ์ต ๋ณด์ฌ์ฃผ๊ธฐ
4. **๋ฌธ์ฒด์ ๊ธฐ๋ฒ**
- ํ๊ตญ ํ๋ ๋ฌธํ์ ์ฌ์ธํ ์ฌ๋ฆฌ ๋ฌ์ฌ
- ์ผ์ ์ ์ฌํ์ ๋งฅ๋ฝ ๋ น์ฌ๋ด๊ธฐ
- ๊ฐ๊ฐ์ ๋ํ ์ผ๊ณผ ๋ด๋ฉด ์์์ ๊ท ํ
5. **์ฐ์์ฑ ์ ์ง**
- ์ธ๋ฌผ์ ๋ชฉ์๋ฆฌ์ ๋งํฌ ์ผ๊ด์ฑ
- ๊ณต๊ฐ๊ณผ ์๊ฐ์ ์ฐ์์ฑ
- ์์ง๊ณผ ๋ชจํฐํ์ ๋ฐ์ 
- ์ฃผ์ธ๊ณต ์ด๋ฆ ์ผ๊ด์ฑ (๋ฐ๋์ ํ์ธ)
6. **๋ฌธํ์ ๊ธฐ๋ฒ ํ์ ์ฌ์ฉ**
- "๋ณด์ฌ์ฃผ๊ธฐ(showing)" ๊ธฐ๋ฒ: ์ง์ ์ค๋ช ๋์ ๊ฐ๊ฐ์ ๋ฌ์ฌ
- ์์ ์ ์์ง: ๊ตฌ์ฒด์ ์ฌ๋ฌผ์ ํตํ ์ถ์์ ์๋ฏธ ์ ๋ฌ
- ๋ํ๋ฅผ ํตํ ์ฑ๊ฒฉ ๋๋ฌ๋ด๊ธฐ
- ๋ด์ ๋ ๋ฐฑ๊ณผ ์์์ ํ๋ฆ ๊ธฐ๋ฒ
7. **์ฒ ํ์ ์ฑ์ฐฐ ํฌํจ**
- ๊ฐ ๋จ๊ณ๋ง๋ค ์ธ๊ฐ ์กด์ฌ์ ๋ํ ์๋ก์ด ํต์ฐฐ 1๊ฐ ์ด์
- ๊ตฌ์ฒด์ ์ฌ๊ฑด ์์์ ๋ณดํธ์ ์ง๋ฆฌ ๋ฐ๊ฒฌ
8. **์๋ก์ด ์์ ํ์**
- ์๋ก์ด ์ธ๋ฌผ, ์ฅ์, ๋๋ ์ํฉ ์ค ์ต์ 1๊ฐ
- ์ด์ ๊ณผ ๋ค๋ฅธ ์๊ฐ๋๋ ๊ณต๊ฐ
- ๊ฐ๋ฑ์ ์๋ก์ด ์ธก๋ฉด ๋๋ฌ๋ด๊ธฐ
**โ ์งํ ์ฒดํฌ๋ฆฌ์คํธ:**
โก ์ด์ ๋จ๊ณ์ ๊ฒฐ๊ณผ๊ฐ ๋ช ํํ ๋๋ฌ๋๋๊ฐ?
โก ํ๋กฏ์ด ์ค์ ๋ก ์ ์งํ๋๊ฐ?
โก ์ธ๋ฌผ์ ๋ณํ๊ฐ ๊ตฌ์ฒด์ ์ธ๊ฐ?
โก ์๋ก์ด ์์๊ฐ ์ถ๊ฐ๋์๋๊ฐ?
โก ๋ฐ๋ณต๋๋ ์ํฉ์ด ์๋๊ฐ?
**์ ๋ ๊ธ์ง:**
- ์ด์ ๊ณผ ๋์ผํ ์ํฉ ๋ฐ๋ณต
- ์์ฌ์ ์ ์ฒด๋ ํํด
- ๋ถ๋ ๋ฏธ๋ฌ (์ต์ {target_words}๋จ์ด)
- "~์ ๋๊ผ๋ค", "~์๋ค"์ ๊ฐ์ ์ง์ ์ ์ค๋ช 
- ์ด๋ฏธ ์ป์ ๊นจ๋ฌ์ ์๊ธฐ
์ด์ ์ ํ๋ฆ์ ์ด์ด๋ฐ์ ์๋ก์ด ๊ตญ๋ฉด์ผ๋ก ๋ฐ์ ์ํค์ธ์.""",
        "English": f"""You are Writer #{writer_number}.
**Current Phase: {phase_name}**
**Overall Narrative Structure:**
{director_plan}
**Story So Far:**
{narrative_summary}
**Previous Content (immediately before):**
{previous_content[-1500:] if previous_content else "Beginning"}
**Phase Requirements:**
{phase_requirements}
**โ Absolutely Forbidden Expressions/Situations (already used):**
{chr(10).join(f"- {elem[:80]}..." for elem in used_elements[-15:])}
**Writing Guidelines:**
1. **Length**: {target_words}-900 words (mandatory)
- Use interior description and concrete details
- Fully develop scenes with depth
2. **Narrative Progression (Most Important)**
- Start as direct result of previous phase
- Add new events/awareness/changes to advance story
- Create natural connection to next phase
- Accumulated insights, not reset
3. **Character Change**
- Concrete changes in this phase
- Capture subtle interior shifts
- Reflect relationship dynamics
- Show growth from previous phase
4. **Style and Technique**
- Delicate psychological portrayal
- Social context in daily life
- Balance sensory details with consciousness
5. **Continuity**
- Consistent character voices
- Spatial/temporal continuity
- Symbol/motif development
- Consistent protagonist naming
6. **Literary Techniques Required**
- "Showing" not telling
- Metaphors and symbols
- Character through dialogue
- Stream of consciousness
7. **Philosophical Reflection**
- New insights about human existence
- Universal truths in specific events
8. **New Elements Required**
- At least 1 new character, location, or situation
- Different time/space from before
- New aspect of conflict
**โ Progress Checklist:**
โก Clear results from previous phase?
โก Plot actually advancing?
โก Concrete character changes?
โก New elements added?
โก No repeated situations?
**Absolutely Forbidden:**
- Repeating previous situations
- Narrative stagnation/regression
- Under word count
- Direct explanations
- Forgetting gained insights
Continue the flow and develop into new phase."""
    }
    # Fall back to the Korean prompt for unsupported language keys.
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_critic_consistency_prompt_enhanced(self, all_content: str,
                                              narrative_tracker: "ProgressiveNarrativeTracker",
                                              user_query: str, language: str) -> str:
    """Build the mid-point critic prompt with repetition diagnostics.

    Runs two pre-checks whose results are embedded in the prompt:
    narrative-progression issues from the tracker, and near-duplicate
    paragraph pairs detected in the most recent part of the manuscript.

    Args:
        all_content: Everything written so far (paragraphs separated by
            blank lines).
        narrative_tracker: Tracker exposing ``phase_summaries``,
            ``check_narrative_progression`` and
            ``content_deduplicator.check_similarity``.
        user_query: Original theme requested by the user.
        language: Prompt language key ("Korean" or "English").

    Returns:
        The localized critic prompt; unknown languages fall back to Korean.
    """
    # How many narrative phases have been summarized so far.
    phase_count = len(narrative_tracker.phase_summaries)
    progression_ok, issues = narrative_tracker.check_narrative_progression(phase_count)
    # Duplicate detection over the 20 MOST RECENT paragraphs.
    # BUGFIX: previously scanned paragraphs[:20] (the first 20), so once the
    # novel grew past 20 paragraphs new repetitions were never detected.
    duplicates = []
    paragraphs = all_content.split('\n\n')
    recent = paragraphs[-20:]
    offset = len(paragraphs) - len(recent)  # absolute index of recent[0]
    for i, para1 in enumerate(recent):
        for j, para2 in enumerate(recent[i+1:]):
            if narrative_tracker.content_deduplicator.check_similarity(para1, para2) > 0.7:
                # Report 1-based absolute paragraph positions.
                duplicates.append(f"๋ฌธ๋จ {offset+i+1}๊ณผ ๋ฌธ๋จ {offset+i+j+2} ์ ์ฌ")
    lang_prompts = {
        "Korean": f"""์์ฌ ์งํ ์ ๋ฌธ ๋นํ๊ฐ๋ก์ ์ํ์ ์๊ฒฉํ ๊ฒํ ํ์ธ์.
**์ ์ฃผ์ :** {user_query}
**ํ์ฌ๊น์ง ์งํ๋ ์์ฌ ๋จ๊ณ:** {phase_count}/10
**๋ฐ๊ฒฌ๋ ์งํ ๋ฌธ์ :**
{chr(10).join(issues) if issues else "์์"}
**๋ฐ๊ฒฌ๋ ์ค๋ณต:**
{chr(10).join(duplicates) if duplicates else "์์"}
**์ํ ๋ด์ฉ (์ต๊ทผ ๋ถ๋ถ):**
{all_content[-4000:]}
**ํ์ ๊ฒ์ฆ ํญ๋ชฉ:**
1. **๋ฐ๋ณต ๊ฒ์ถ (์ต์ฐ์ )**
- ๋์ผ/์ ์ฌ ๋ฌธ์ฅ์ด 2ํ ์ด์ ๋ํ๋๋๊ฐ?
- ๊ฐ์ ์ํฉ์ด ๋ณ์ฃผ๋ง ๋ฌ๋ฆฌํด ๋ฐ๋ณต๋๋๊ฐ?
- ๊ฐ ๋จ๊ณ๋ณ๋ก ์ค์ ๋ก ์๋ก์ด ๋ด์ฉ์ด ์ถ๊ฐ๋์๋๊ฐ?
- "์ต๊ธฐ๊ฐ ์ฐฌ ์์นจ", "๋๋ผ๋ฏธ ์ดํ", "43๋ง์" ๋ฑ ๋ฐ๋ณต ํํ?
2. **์์ฌ ์งํ๋ ์ธก์ **
- 1๋จ๊ณ์ ํ์ฌ ๋จ๊ณ์ ์ํฉ์ด ๋ช ํํ ๋ค๋ฅธ๊ฐ?
- ์ฃผ์ธ๊ณต์ ์ฌ๋ฆฌ/์ธ์์ด ๋ณํํ๋๊ฐ?
- ๊ฐ๋ฑ์ด ์ฌํ/์ ํ/ํด๊ฒฐ ๋ฐฉํฅ์ผ๋ก ์์ง์๋๊ฐ?
- ์ฃผ์ธ๊ณต์ ๊นจ๋ฌ์์ด ๋ฆฌ์ ๋์ง ์๊ณ ์ถ์ ๋๋๊ฐ?
3. **์ค์ ์ผ๊ด์ฑ**
- ๋ชจ๋ ์บ๋ฆญํฐ ์ด๋ฆ์ด ์ผ๊ด๋๋๊ฐ? (ํนํ ์ฃผ์ธ๊ณต)
- ์๊ณต๊ฐ ์ค์ ์ด ๋ ผ๋ฆฌ์ ์ธ๊ฐ?
- ์ค์ ์ด ์ค๊ฐ์ ๋ฐ๋์ง ์๋๊ฐ?
4. **๋ถ๋๊ณผ ๋ฐ๋**
- ํ์ฌ๊น์ง ์ด ๋จ์ด ์ ํ์ธ
- ๋ชฉํ(8,000๋จ์ด)์ ๋๋ฌ ๊ฐ๋ฅํ๊ฐ?
5. **๋ฌธํ์ ์์ฑ๋**
- '๋ณด์ฌ์ฃผ๊ธฐ' ๊ธฐ๋ฒ์ด ์ ์ฌ์ฉ๋๊ณ ์๋๊ฐ?
- ์ฒ ํ์ ํต์ฐฐ์ด ์์ฐ์ค๋ฝ๊ฒ ๋ น์์๋๊ฐ?
- ์์ ์ ์์ง์ด ํจ๊ณผ์ ์ธ๊ฐ?
**๋ถํฉ๊ฒฉ ๊ธฐ์ค:**
- 2๊ฐ ์ด์ ๋จ๊ณ์์ ์ ์ฌ ๋ด์ฉ ๋ฐ๊ฒฌ ์
- ์์ฌ๊ฐ ์ ์๋ฆฌ๊ฑธ์ํ๋ ๊ตฌ๊ฐ 2๊ฐ ์ด์
- ์บ๋ฆญํฐ ์ด๋ฆ/์ค์ ์ค๋ฅ ๋ฐ๊ฒฌ ์
- ์ฃผ์ธ๊ณต ๊นจ๋ฌ์์ ๋ฐ๋ณต์ ๋ฆฌ์ 
**์์ ์ง์:**
๊ฐ ์๊ฐ์๊ฒ ๊ตฌ์ฒด์ ์ธ ์งํ ๋ฐฉํฅ๊ณผ ๊ธ์ง์ฌํญ ์ ์.
๋ฐ๊ฒฌ๋ ๋ฐ๋ณต์ ๋ชจ๋ ์ ๊ฑฐํ๋๋ก ๋ช ์.""",
        "English": f"""As a narrative progression critic, strictly review the work.
**Original Theme:** {user_query}
**Narrative Phases Completed:** {phase_count}/10
**Detected Progression Issues:**
{chr(10).join(issues) if issues else "None"}
**Detected Duplications:**
{chr(10).join(duplicates) if duplicates else "None"}
**Work Content (recent):**
{all_content[-4000:]}
**Mandatory Verification Items:**
1. **Duplication Detection (Top Priority)**
- Same/similar sentences appearing 2+ times?
- Same situations with only variations?
- Actually new content in each phase?
- Repeated expressions like specific phrases?
2. **Narrative Progression Measurement**
- Clear difference between phase 1 and current?
- Protagonist's psychology/perception changed?
- Conflict deepening/turning/resolving?
- Insights accumulating, not resetting?
3. **Setting Consistency**
- All character names consistent? (especially protagonist)
- Logical space/time settings?
- Settings not changing mid-story?
4. **Length and Density**
- Current total word count
- Can reach 8,000 word target?
5. **Literary Completion**
- "Showing" technique well used?
- Philosophical insights naturally integrated?
- Effective metaphors and symbols?
**Failure Criteria:**
- Similar content in 2+ phases
- 2+ sections of narrative stagnation
- Character name/setting errors
- Repeated resetting of insights
**Revision Instructions:**
Specific progression directions and prohibitions for each writer.
All detected repetitions must be removed."""
    }
    # Fall back to the Korean prompt for unsupported language keys.
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_writer_revision_prompt(self, writer_number: int, initial_content: str,
                                  critic_feedback: str, language: str) -> str:
    """Build the revision prompt sent back to a writer after critique.

    BUGFIX: the ``language`` parameter was previously ignored and the
    Korean prompt was always returned; an English variant is now provided,
    matching every sibling prompt builder. The Korean text is unchanged.

    Args:
        writer_number: Writer index 1-10.
        initial_content: The writer's first-pass draft.
        critic_feedback: The mid-point critic's feedback text.
        language: Prompt language key ("Korean" or "English").

    Returns:
        The localized revision prompt; unknown languages fall back to Korean.
    """
    target_words = MIN_WORDS_PER_WRITER
    lang_prompts = {
        "Korean": f"""์๊ฐ {writer_number}๋ฒ, ๋นํ์ ๋ฐ์ํ์ฌ ์์ ํ์ธ์.
**์ด์:**
{initial_content}
**๋นํ ํผ๋๋ฐฑ:**
{critic_feedback}
**์์ ํต์ฌ:**
1. ์์ฌ ์งํ์ฑ ๊ฐํ - ๋ฐ๋ณต ์ ๊ฑฐ, ์๋ก์ด ์ ๊ฐ ์ถ๊ฐ
2. ์ธ๋ฌผ ๋ณํ ๊ตฌ์ฒดํ - ์ด์ ๊ณผ ๋ฌ๋ผ์ง ๋ชจ์ต ๋ช ํํ
3. ๋ถ๋ ํ๋ณด - ์ต์ {target_words}๋จ์ด ์ ์ง
4. ๋ด๋ฉด ๋ฌ์ฌ์ ์ฌํ์ ๋งฅ๋ฝ ์ฌํ
5. '๋ณด์ฌ์ฃผ๊ธฐ' ๊ธฐ๋ฒ ๊ฐํ - ์ง์ ์ค๋ช ์ ๊ฐ๊ฐ์ ๋ฌ์ฌ๋ก ๋์ฒด
6. ์ฒ ํ์ ํต์ฐฐ ์์ฐ์ค๋ฝ๊ฒ ํฌํจ
7. ๋ฐ๋ณต๋ ํํ/์ํฉ ์์ ์ ๊ฑฐ
8. ์ฃผ์ธ๊ณต ์ด๋ฆ ์ผ๊ด์ฑ ํ์ธ
**ํน๋ณ ์ฃผ์์ฌํญ:**
- ์ด๋ฏธ ์ฌ์ฉ๋ "์ต๊ธฐ๊ฐ ์ฐฌ ์์นจ", "๋๋ผ๋ฏธ ์ดํ", "43๋ง์" ๋ฑ์ ํํ ๋ณ๊ฒฝ
- ์ฃผ์ธ๊ณต์ ๊นจ๋ฌ์์ด ์ด์ ๋ณด๋ค ๋ฐ์ ๋ ํํ๋ก ํํ
- ์๋ก์ด ๋ํ ์ผ๊ณผ ๊ฐ๊ฐ์ ๋ฌ์ฌ ์ถ๊ฐ
์ ๋ฉด ์ฌ์์ฑ์ด ํ์ํ๋ฉด ๊ณผ๊ฐํ ์์ ํ์ธ์.
์์ ๋ณธ๋ง ์ ์ํ์ธ์.""",
        "English": f"""Writer #{writer_number}, revise your draft reflecting the critique.
**Initial Draft:**
{initial_content}
**Critic Feedback:**
{critic_feedback}
**Revision Priorities:**
1. Strengthen narrative progression - remove repetition, add new development
2. Concretize character change - clearly different from before
3. Secure length - keep at least {target_words} words
4. Deepen interior description and social context
5. Reinforce 'showing' technique - replace direct explanation with sensory description
6. Naturally include philosophical insight
7. Completely remove repeated expressions/situations
8. Verify protagonist name consistency
**Special Cautions:**
- Change any expressions already used elsewhere in the manuscript
- Express the protagonist's insights in a more developed form than before
- Add new details and sensory description
Revise boldly if a full rewrite is needed.
Present only the revised version."""
    }
    # Fall back to the Korean prompt for unsupported language keys.
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_editor_prompt(self, complete_novel: str, issues: List[str], language: str) -> str:
    """Build the editor's prompt focused on removing repetitions.

    Args:
        complete_novel: The assembled manuscript. NOTE: embedded only via
            ``issues`` context in the prompt text below, not verbatim.
        issues: Problems detected upstream, listed one per line.
        language: Prompt language key ("Korean" or "English").

    Returns:
        The localized editor prompt; unknown languages fall back to Korean.
    """
    lang_prompts = {
        "Korean": f"""๋น์ ์ ์ ๋ฌธ ํธ์ง์์ ๋๋ค.
์์ฑ๋ ์๊ณ ์์ ๋ฐ๋ณต์ ์ ๊ฑฐํ๊ณ ์์ฌ๋ฅผ ๋งค๋๋ฝ๊ฒ ์ฐ๊ฒฐํ์ธ์.
**๋ฐ๊ฒฌ๋ ๋ฌธ์ :**
{chr(10).join(issues)}
**ํธ์ง ์ง์นจ:**
1. **๋ฐ๋ณต ์ ๊ฑฐ (์ต์ฐ์ )**
- ๋์ผํ๊ฑฐ๋ ์ ์ฌํ ๋ฌธ๋จ์ ๊ฐ์ฅ ํจ๊ณผ์ ์ธ ๊ฒ ํ๋๋ง ๋จ๊ธฐ๊ณ ์ญ์ 
- "์ต๊ธฐ๊ฐ ์ฐฌ ์์นจ", "๋๋ผ๋ฏธ ์ดํ 43๋ง์" ๋ฑ ๋ฐ๋ณต ํํ ์ค ํ๋๋ง ์ ์ง
- ๋น์ทํ ์ฅ๋ฉด(์ฐ๋ชป ์์, ๊ณ๋ ๋์ง๊ธฐ ๋ฑ)์ ๊ฐ์ฅ ๊ฐ๋ ฌํ ๊ฒ๋ง ์ ํ
2. **์์ฌ ์ฌ๊ตฌ์ฑ**
- ๋จ์ ์ฅ๋ฉด๋ค์ ์ธ๊ณผ๊ด๊ณ์ ๋ฐ๋ผ ์ฌ๋ฐฐ์ด
- ์๊ฐ ์์์ ๊ฐ์ ์ ํ๋ฆ์ด ์์ฐ์ค๋ฝ๊ฒ ์ฐ๊ฒฐ๋๋๋ก
- ํ์์ ์งง์ ์ ํ ๋ฌธ๋จ ์ถ๊ฐ (2-3๋ฌธ์ฅ)
3. **์บ๋ฆญํฐ ์ผ๊ด์ฑ**
- ์ฃผ์ธ๊ณต ์ด๋ฆ์ '๋๋ผ๋ฏธ'๋ก ํต์ผ
- ๋ค๋ฅธ ์ธ๋ฌผ๋ค์ ์ด๋ฆ๋ ์ผ๊ด์ฑ ํ์ธ
- ์ธ๋ฌผ์ ์ฑ๊ฒฉ๊ณผ ๋งํฌ ์ผ๊ด์ฑ ์ ์ง
4. **๊นจ๋ฌ์์ ๋์ **
- ์ฃผ์ธ๊ณต์ ๊ฐ ๊นจ๋ฌ์์ด ์ด์ ๋ณด๋ค ๋ฐ์ ๋ ํํ๋ก ํํ๋๋๋ก
- ๋์ผํ ์์ค์ ์ธ์ ๋ฐ๋ณต ์ ๊ฑฐ
- ๋ง์ง๋ง์ผ๋ก ๊ฐ์๋ก ๋ ๊น์ ํต์ฐฐ์ด ๋๋๋ก
5. **๋ถ๋ ์กฐ์ **
- ๋ฐ๋ณต ์ ๊ฑฐ ํ์๋ 8,000๋จ์ด ์ด์ ์ ์ง
- ํ์์ ๋จ์ ์ฅ๋ฉด๋ค์ ์ฝ๊ฐ ํ์ฅ (๋ฌ์ฌ ์ถ๊ฐ)
**ํธ์ง ๊ท์น:**
- ์๊ฐ์ ์๋ฌธ ์คํ์ผ๊ณผ ๋ฌธ์ฒด๋ ์ต๋ํ ๋ณด์กด
- ์๋ก์ด ์ฌ๊ฑด์ด๋ ์ธ๋ฌผ ์ถ๊ฐ ๊ธ์ง
- ํต์ฌ ์์ง๊ณผ ์ฃผ์ ์์ ๊ธ์ง
- ์๋ฌธ์ ์ฒ ํ์ ๊น์ด ์ ์ง
**๊ฒฐ๊ณผ๋ฌผ:**
๋ฐ๋ณต์ด ์์ ํ ์ ๊ฑฐ๋๊ณ ์์ฐ์ค๋ฝ๊ฒ ํ๋ฅด๋ ์ต์ข ์๊ณ ๋ฅผ ์ ์ํ์ธ์.
ํธ์ง ์ ํ์ ์ฃผ์ ๋ณ๊ฒฝ์ฌํญ๋ ๊ฐ๋จํ ์์ฝํ์ธ์.""",
        "English": f"""You are a professional editor.
Remove repetitions and smooth narrative connections in the completed manuscript.
**Identified Issues:**
{chr(10).join(issues)}
**Editing Guidelines:**
1. **Repetition Removal (Top Priority)**
- Keep only most effective version of similar paragraphs
- Retain only one instance of repeated expressions
- Select most powerful version of similar scenes
2. **Narrative Reconstruction**
- Rearrange remaining scenes by causality
- Natural flow of time and emotion
- Add brief transitions if needed (2-3 sentences)
3. **Character Consistency**
- Unify protagonist name
- Check other character name consistency
- Maintain character personality/voice
4. **Insight Accumulation**
- Each insight more developed than previous
- Remove same-level recognition repetitions
- Deeper insights toward the end
5. **Length Adjustment**
- Maintain 8,000+ words after cuts
- Slightly expand remaining scenes if needed
**Editing Rules:**
- Preserve original style and voice
- No new events or characters
- Protect core symbols and themes
- Maintain philosophical depth
**Output:**
Present final manuscript with repetitions removed and natural flow.
Briefly summarize major changes."""
    }
    # Fall back to the Korean prompt for unsupported language keys.
    return lang_prompts.get(language, lang_prompts["Korean"])
def create_critic_final_prompt(self, complete_novel: str, word_count: int, language: str) -> str:
    """Build the final evaluation prompt (AGI Turing-test style rubric).

    Only the last 3000 characters of the manuscript are embedded to keep
    the prompt within token limits.

    Args:
        complete_novel: The final (ideally edited) manuscript.
        word_count: Total word count reported to the evaluator.
        language: Prompt language key ("Korean" or "English").

    Returns:
        The localized evaluation prompt; unknown languages fall back to Korean.
    """
    lang_prompts = {
        "Korean": f"""์์ฑ๋ ์์ค์ AGI ํ๋งํ ์คํธ ๊ธฐ์ค์ผ๋ก ํ๊ฐํ์ธ์.
**์ํ ์ ๋ณด:**
- ์ด ๋ถ๋: {word_count}๋จ์ด
- ๋ชฉํ ๋ถ๋: 8,000๋จ์ด ์ด์
**์ํ (๋ง์ง๋ง ๋ถ๋ถ):**
{complete_novel[-3000:]}
**ํ๊ฐ ๊ธฐ์ค (AGI ํ๋งํ ์คํธ):**
1. **์ฅํธ์์ค๋ก์์ ์์ฑ๋ (30์ )**
- ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ (๋ฐ๋ณต ์์)
- ์ธ๋ฌผ์ ๋ช ํํ ๋ณํ arc
- ํ๋กฏ์ ์ถ์ ๊ณผ ํด๊ฒฐ
- 8,000๋จ์ด ์ด์ ๋ถ๋
- ์ค์ ์ผ๊ด์ฑ (ํนํ ์ธ๋ฌผ ์ด๋ฆ)
2. **๋ฌธํ์ ์ฑ์ทจ (35์ )**
- ์ฃผ์ ์์์ ๊น์ด
- ์ธ๋ฌผ ์ฌ๋ฆฌ์ ์ค๋๋ ฅ
- ๋ฌธ์ฒด์ ์ผ๊ด์ฑ๊ณผ ์๋ฆ๋ค์
- ์์ง๊ณผ ์์ ์ ํจ๊ณผ
- '๋ณด์ฌ์ฃผ๊ธฐ' ๊ธฐ๋ฒ์ ํ์ฉ๋
3. **์ฌํ์ ํต์ฐฐ (25์ )**
- ํ๋ ์ฌํ ๋ฌธ์  ํฌ์ฐฉ
- ๊ฐ์ธ๊ณผ ๊ตฌ์กฐ์ ์ฐ๊ฒฐ
- ๋ณดํธ์ฑ๊ณผ ํน์์ฑ ๊ท ํ
- ์ธ๊ฐ ์กฐ๊ฑด์ ๋ํ ํต์ฐฐ
4. **๋ ์ฐฝ์ฑ๊ณผ ์ธ๊ฐ์ฑ (10์ )**
- AI๊ฐ ์๋ ์ธ๊ฐ ์๊ฐ์ ๋๋
- ๋ ์ฐฝ์ ํํ๊ณผ ํต์ฐฐ
- ๊ฐ์ ์ ์ง์ ์ฑ
- ์ฒ ํ์ ๊น์ด
**ํน๋ณ ๊ฐ์ ์์ธ:**
- ๋ด์ฉ/๋ฌธ์ฅ ๋ฐ๋ณต (-5์  per ์ค๋ ๋ฐ๋ณต)
- ์บ๋ฆญํฐ ์ด๋ฆ ๋ถ์ผ์น (-3์ )
- ์์ฌ ์ ์ฒด/์ํ (-5์ )
- ๊นจ๋ฌ์ ๋ฆฌ์  ํ์ (-3์ )
**์ด์ : /100์ **
ํนํ '๋ฐ๋ณต ๊ตฌ์กฐ' ๋ฌธ์ ๊ฐ ์์๋์ง ์๊ฒฉํ ํ๊ฐํ์ธ์.
'๊ฐ๊ตฌ๋ฆฌ์' ๊ฐ์ ๊ฐ๋ ฌํ ์ค์ฌ ์์ง์ด ์๋์ง ํ์ธํ์ธ์.
ํธ์ง ํ์๋ ๋จ์ ๋ฐ๋ณต์ด ์๋์ง ์ธ๋ฐํ ๊ฒํ ํ์ธ์.""",
        "English": f"""Evaluate the completed novel by AGI Turing Test standards.
**Work Information:**
- Total length: {word_count} words
- Target length: 8,000+ words
**Work (final portion):**
{complete_novel[-3000:]}
**Evaluation Criteria (AGI Turing Test):**
1. **Completion as Novel (30 points)**
- Integrated narrative structure (no repetition)
- Clear character transformation arcs
- Plot accumulation and resolution
- 8,000+ word length
- Setting consistency (especially names)
2. **Literary Achievement (35 points)**
- Depth of thematic consciousness
- Persuasiveness of character psychology
- Consistency and beauty of style
- Effectiveness of symbols and metaphors
- Use of "showing" technique
3. **Social Insight (25 points)**
- Capturing contemporary social issues
- Connection between individual and structure
- Balance of universality and specificity
- Insights into human condition
4. **Originality and Humanity (10 points)**
- Feeling of human author, not AI
- Original expressions and insights
- Emotional authenticity
- Philosophical depth
**Special Deductions:**
- Content/sentence repetition (-5 points per major repetition)
- Character name inconsistency (-3 points)
- Narrative stagnation/cycling (-5 points)
- Insight reset phenomenon (-3 points)
**Total Score: /100 points**
Strictly evaluate whether there are 'repetitive structure' issues.
Check for powerful central symbols like 'frog eggs'.
Carefully review for any remaining repetitions after editing."""
    }
    # Fall back to the Korean prompt for unsupported language keys.
    return lang_prompts.get(language, lang_prompts["Korean"])
# --- LLM call helpers ---
def call_llm_sync(self, messages: List[Dict[str, str]], role: str, language: str) -> str:
    """Collect the streamed LLM response into one string.

    Args:
        messages: Chat messages to send (system prompt is added downstream).
        role: Agent role used to pick the system prompt and token budget.
        language: Language key forwarded to the streaming call.

    Returns:
        The concatenated response text.

    Raises:
        Exception: If the response begins with the streaming error marker.
    """
    full_content = "".join(self.call_llm_streaming(messages, role, language))
    if full_content.startswith("โ"):
        raise Exception(f"LLM Call Failed: {full_content}")
    return full_content
def call_llm_streaming(self, messages: List[Dict[str, str]], role: str, language: str) -> Generator[str, None, None]:
    """Stream chat-completion text chunks from the Friendli API.

    Prepends the role-specific system prompt, then yields text fragments
    buffered to roughly 50 characters (or a newline). On any failure a
    single "โ ..." marker string is yielded instead of raising, which
    `call_llm_sync` translates back into an exception.

    Args:
        messages: Chat messages (without the system prompt).
        role: Agent role; selects system prompt and token budget.
        language: Language key for the system prompts.

    Yields:
        Text fragments of the streamed completion.
    """
    try:
        system_prompts = self.get_system_prompts(language)
        full_messages = [{"role": "system", "content": system_prompts.get(role, "")}, *messages]
        # Writers and the editor produce long prose, so they get a larger budget.
        max_tokens = 15000 if role.startswith("writer") or role == "editor" else 10000
        payload = {
            "model": self.model_id,
            "messages": full_messages,
            "max_tokens": max_tokens,
            "temperature": 0.8,
            "top_p": 0.95,
            "presence_penalty": 0.5,
            "frequency_penalty": 0.3,
            "stream": True
        }
        # FIX: use the response as a context manager so the streaming
        # connection is always released, even if the consumer stops early
        # or a parse error aborts the loop (previously it was never closed).
        with requests.post(
            self.api_url,
            headers=self.create_headers(),
            json=payload,
            stream=True,
            timeout=180
        ) as response:
            if response.status_code != 200:
                yield f"โ API ์ค๋ฅ (์ํ ์ฝ๋: {response.status_code})"
                return
            buffer = ""
            for line in response.iter_lines():
                if not line:
                    continue
                try:
                    line_str = line.decode('utf-8').strip()
                    # Server-sent events: only "data: ..." lines carry payload.
                    if not line_str.startswith("data: "):
                        continue
                    data_str = line_str[6:]
                    if data_str == "[DONE]":
                        break
                    data = json.loads(data_str)
                    choices = data.get("choices", [])
                    if choices and choices[0].get("delta", {}).get("content"):
                        content = choices[0]["delta"]["content"]
                        buffer += content
                        # Flush in ~50-char batches (or on newline) to limit
                        # the number of UI updates downstream.
                        if len(buffer) >= 50 or '\n' in buffer:
                            yield buffer
                            buffer = ""
                            time.sleep(0.01)  # brief throttle for the UI stream
                except Exception as e:
                    # Skip malformed chunks but keep consuming the stream.
                    logger.error(f"์ฒญํฌ ์ฒ๋ฆฌ ์ค๋ฅ: {str(e)}")
                    continue
            if buffer:
                yield buffer
    except Exception as e:
        logger.error(f"์คํธ๋ฆฌ๋ฐ ์ค๋ฅ: {type(e).__name__}: {str(e)}")
        yield f"โ ์ค๋ฅ ๋ฐ์: {str(e)}"
def get_system_prompts(self, language: str) -> Dict[str, str]:
    """Return the per-role system prompts for the given language.

    Roles provided: "director", "critic", "writer_base" (also aliased to
    "writer1".."writer10"), and "editor". Unknown languages fall back to
    the Korean prompt set.
    """
    base_prompts = {
        "Korean": {
            "director": """๋น์ ์ ๋ ธ๋ฒจ๋ฌธํ์ ์์์๊ฐ ์์ค์ ํ๊ตญ ๋ฌธํ ๊ฑฐ์ฅ์ ๋๋ค.
์ธ๊ฐ ์กด์ฌ์ ๋ณดํธ์  ์กฐ๊ฑด๊ณผ ํ๊ตญ ์ฌํ์ ํน์์ฑ์ ๋์์ ํฌ์ฐฉํ์ธ์.
'๊ฐ๊ตฌ๋ฆฌ์' ๊ฐ์ ๊ฐ๋ ฌํ๊ณ ๋ค์ธต์ ์ธ ์ค์ฌ ์์ง์ ์ฐฝ์กฐํ์ธ์.
์ฒ ํ์  ๊น์ด์ ๋ฌธํ์  ์๋ฆ๋ค์์ ๋์์ ์ถ๊ตฌํ์ธ์.
๋ฐ๋ณต์ด ์๋ ์งํ, ์ํ์ด ์๋ ๋ฐ์ ์ ํตํด ํ๋์ ๊ฐ๋ ฅํ ์์ฌ๋ฅผ ๊ตฌ์ถํ์ธ์.
์ฃผ์ธ๊ณต์ ์ด๋ฆ๊ณผ ์ค์ ์ ๋ช ํํ ํ๊ณ  ์ผ๊ด์ฑ์ ์ ์งํ์ธ์.""",
            "critic": """๋น์ ์ ์๊ฒฉํ ๋ฌธํ ๋นํ๊ฐ์ ๋๋ค.
ํนํ '๋ฐ๋ณต ๊ตฌ์กฐ'์ '์์ฌ ์ ์ฒด'๋ฅผ ์ฒ ์ ํ ๊ฐ์ํ์ธ์.
์ํ์ด ์ง์ ํ ์ฅํธ์์ค์ธ์ง, ์๋๋ฉด ๋ฐ๋ณต๋๋ ๋จํธ์ ์งํฉ์ธ์ง ๊ตฌ๋ณํ์ธ์.
๋ฌธํ์  ๊ธฐ๋ฒ์ ํจ๊ณผ์ฑ๊ณผ ์ฒ ํ์  ๊น์ด๋ฅผ ํ๊ฐํ์ธ์.
์บ๋ฆญํฐ ์ด๋ฆ๊ณผ ์ค์ ์ ์ผ๊ด์ฑ์ ๋ฐ๋์ ํ์ธํ์ธ์.
๋์ผํ ๋ฌธ์ฅ์ด๋ ์ํฉ์ ๋ฐ๋ณต์ ์ ๋ ์ฉ๋ฉํ์ง ๋ง์ธ์.""",
            "writer_base": """๋น์ ์ ํ๋ ํ๊ตญ ๋ฌธํ ์๊ฐ์ ๋๋ค.
'๋ณด์ฌ์ฃผ๊ธฐ' ๊ธฐ๋ฒ์ ์ฌ์ฉํ์ฌ ๋ ์์ ์์๋ ฅ์ ์๊ทนํ์ธ์.
์ง์ ์  ์ค๋ช ๋ณด๋ค ๊ฐ๊ฐ์  ๋ฌ์ฌ์ ํ๋์ผ๋ก ๊ฐ์ ์ ์ ๋ฌํ์ธ์.
๊ฐ ์ฅ๋ฉด์์ ์ธ๊ฐ ์กด์ฌ์ ๋ํ ์๋ก์ด ํต์ฐฐ์ ๋ด์ผ์ธ์.
์ด์  ๋จ๊ณ์ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ์ ์๋ก์ด ๊ตญ๋ฉด์ผ๋ก ๋ฐ์ ์ํค์ธ์.
์ต์ 800๋จ์ด๋ฅผ ์์ฑํ๋ฉฐ, ๋ด๋ฉด๊ณผ ์ฌํ๋ฅผ ๋์์ ํฌ์ฐฉํ์ธ์.
์ ๋ ์ด์ ๊ณผ ๊ฐ์ ์ํฉ์ ๋ฐ๋ณตํ์ง ๋ง์ธ์.
์ฃผ์ธ๊ณต์ ์ด๋ฆ๊ณผ ์ค์ ์ ์ผ๊ด๋๊ฒ ์ ์งํ์ธ์.
์ด๋ฏธ ์ป์ ๊นจ๋ฌ์์ ์์ง ๋ง๊ณ  ๋ฐ์ ์ํค์ธ์.""",
            "editor": """๋น์ ์ ๊ฒฝํ์ด ํ๋ถํ ๋ฌธํ ํธ์ง์์ ๋๋ค.
๋ฐ๋ณต์ ์ฒ ์ ํ ์ ๊ฑฐํ๊ณ  ์์ฌ์ ํ๋ฆ์ ๋งค๋๋ฝ๊ฒ ๋ง๋์ธ์.
์์์ ๋ฌธ์ฒด์ ์ฃผ์ ๋ ๋ณด์กดํ๋ฉด์ ๊ตฌ์กฐ์  ๋ฌธ์ ๋ฅผ ํด๊ฒฐํ์ธ์.
์บ๋ฆญํฐ ์ด๋ฆ๊ณผ ์ค์ ์ ์ผ๊ด์ฑ์ ํ๋ณดํ์ธ์.
๊นจ๋ฌ์์ด ๋์ ๋๊ณ  ๋ฐ์ ํ๋๋ก ํธ์งํ์ธ์."""
        },
        "English": {
            "director": """You are a Nobel Prize-winning master of contemporary literary fiction.
Capture both universal human condition and specific social realities.
Create intense, multilayered central symbols like 'frog eggs'.
Pursue both philosophical depth and literary beauty.
Build one powerful narrative through progression not repetition, development not cycles.
Establish protagonist's name and settings clearly with consistency.""",
            "critic": """You are a strict literary critic.
Vigilantly monitor for 'repetitive structure' and 'narrative stagnation'.
Distinguish whether this is a true novel or a collection of repeated episodes.
Evaluate effectiveness of literary techniques and philosophical depth.
Always check character name and setting consistency.
Never tolerate repetition of same sentences or situations.""",
            "writer_base": """You are a contemporary literary writer.
Use 'showing' technique to stimulate reader's imagination.
Convey emotions through sensory description and action rather than explanation.
Include new insights about human existence in each scene.
Take results from previous phase and develop into new territory.
Write minimum 800 words, capturing both interior and society.
Never repeat previous situations.
Maintain protagonist's name and settings consistently.
Don't forget gained insights, develop them further.""",
            "editor": """You are an experienced literary editor.
Thoroughly remove repetitions and smooth narrative flow.
Preserve original style and themes while solving structural issues.
Ensure character name and setting consistency.
Edit so insights accumulate and develop."""
        }
    }
    prompts = base_prompts.get(language, base_prompts["Korean"]).copy()
    # Every numbered writer role shares the same base writer prompt.
    for i in range(1, 11):
        prompts[f"writer{i}"] = prompts["writer_base"]
    return prompts
# --- Main process ---
def process_novel_stream(self, query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, List[Dict[str, Any]], str], None, None]:
    """Run the full novel-generation pipeline, yielding UI progress updates.

    Drives every stage in PROGRESSIVE_STAGES (planning, 10 writers, critique,
    10 revisions, editing, final review), persisting each stage and the
    narrative tracker to the database so a session can be resumed.

    Args:
        query: Theme for the novel (overridden by the saved session when
            resuming).
        language: Prompt language (also overridden on resume).
        session_id: Existing session to resume, or None for a new session.

    Yields:
        (status_message, stages, session_id) tuples; stages is the mutable
        list of per-stage dicts rendered by the UI.
    """
    try:
        resume_from_stage = 0
        if session_id:
            # Resume: restore query/language/stage position from the DB.
            self.current_session_id = session_id
            session = NovelDatabase.get_session(session_id)
            if session:
                query = session['user_query']
                language = session['language']
                resume_from_stage = session['current_stage'] + 1
                # Restore the narrative tracker saved with this session.
                saved_tracker = NovelDatabase.load_narrative_tracker(session_id)
                if saved_tracker:
                    self.narrative_tracker = saved_tracker
        else:
            self.current_session_id = NovelDatabase.create_session(query, language)
            logger.info(f"Created new session: {self.current_session_id}")
        stages = []
        if resume_from_stage > 0:
            # Rebuild the UI stage list from persisted rows.
            stages = [{
                "name": s['stage_name'],
                "status": s['status'],
                "content": s.get('content', ''),
                "word_count": s.get('word_count', 0),
                "progression_score": s.get('progression_score', 0.0),
                "repetition_score": s.get('repetition_score', 0.0)
            } for s in NovelDatabase.get_stages(self.current_session_id)]
        # Running word total across all writer stages.
        total_words = NovelDatabase.get_total_words(self.current_session_id)
        for stage_idx in range(resume_from_stage, len(PROGRESSIVE_STAGES)):
            role, stage_name = PROGRESSIVE_STAGES[stage_idx]
            if stage_idx >= len(stages):
                stages.append({
                    "name": stage_name,
                    "status": "active",
                    "content": "",
                    "word_count": 0,
                    "progression_score": 0.0,
                    "repetition_score": 0.0
                })
            else:
                stages[stage_idx]["status"] = "active"
            yield f"๐ ์งํ ์ค... (ํ์ฌ {total_words:,}๋จ์ด)", stages, self.current_session_id
            prompt = self.get_stage_prompt(stage_idx, role, query, language, stages)
            stage_content = ""
            # Stream the LLM output, updating the UI after every chunk.
            for chunk in self.call_llm_streaming([{"role": "user", "content": prompt}], role, language):
                stage_content += chunk
                stages[stage_idx]["content"] = stage_content
                stages[stage_idx]["word_count"] = len(stage_content.split())
                yield f"๐ {stage_name} ์์ฑ ์ค... ({total_words + stages[stage_idx]['word_count']:,}๋จ์ด)", stages, self.current_session_id
            # Score progression/repetition for writer stages only.
            if role.startswith("writer"):
                writer_num = int(re.search(r'\d+', role).group())
                previous_content = self.get_previous_writer_content(stages, writer_num)
                # Average the per-criterion progression scores into one value.
                progression_scores = self.narrative_tracker.progression_monitor.calculate_progression_score(
                    writer_num, stage_content, previous_content
                )
                progression_score = sum(progression_scores.values()) / len(progression_scores)
                stages[stage_idx]["progression_score"] = progression_score
                # Repetition score: 10 minus repetition count, floored at 0.
                repetition_score = 10.0 - self.narrative_tracker.progression_monitor.count_repetitions(stage_content)
                stages[stage_idx]["repetition_score"] = max(0, repetition_score)
                # Feed this stage into the narrative tracker.
                self.update_narrative_tracker(stage_content, writer_num)
                self.narrative_tracker.extract_used_elements(stage_content)
            stages[stage_idx]["status"] = "complete"
            NovelDatabase.save_stage(
                self.current_session_id, stage_idx, stage_name, role,
                stage_content, "complete",
                stages[stage_idx].get("progression_score", 0.0),
                stages[stage_idx].get("repetition_score", 0.0)
            )
            # Persist the tracker so a resumed session keeps its state.
            NovelDatabase.save_narrative_tracker(self.current_session_id, self.narrative_tracker)
            # Refresh the running word total from the DB.
            total_words = NovelDatabase.get_total_words(self.current_session_id)
            yield f"โ {stage_name} ์๋ฃ (์ด {total_words:,}๋จ์ด)", stages, self.current_session_id
        # Assemble the final novel from the writer stages.
        final_novel = NovelDatabase.get_writer_content(self.current_session_id)
        # Prefer the editor's output when it exists.
        edited_content = self.get_edited_content(stages)
        if edited_content:
            final_novel = edited_content
        final_word_count = len(final_novel.split())
        final_report = self.generate_literary_report(final_novel, final_word_count, language)
        NovelDatabase.update_final_novel(self.current_session_id, final_novel, final_report)
        yield f"โ ์์ค ์์ฑ! ์ด {final_word_count:,}๋จ์ด (๋ชฉํ: {TARGET_WORDS:,}๋จ์ด)", stages, self.current_session_id
    except Exception as e:
        logger.error(f"์์ค ์์ฑ ํ๋ก์ธ์ค ์ค๋ฅ: {e}", exc_info=True)
        # 'stages' may not exist yet if the failure happened during setup.
        yield f"โ ์ค๋ฅ ๋ฐ์: {e}", stages if 'stages' in locals() else [], self.current_session_id
def get_stage_prompt(self, stage_idx: int, role: str, query: str, language: str, stages: List[Dict]) -> str:
    """Dispatch a pipeline stage index to the matching prompt builder.

    Stage layout: 0 director plan, 1 critic review, 2 director revision,
    3-12 writer drafts (phases 1-10), 13 mid-point critique, 14-23 writer
    revisions, 24 editor pass, 25 final critique.

    Returns:
        The prompt for the stage, or "" for an unknown index.
    """
    # Planning stages run before the master plan exists.
    if stage_idx == 0:
        return self.create_director_initial_prompt(query, language)
    if stage_idx == 1:
        return self.create_critic_director_prompt(stages[0]["content"], query, language)
    if stage_idx == 2:
        return self.create_director_revision_prompt(stages[0]["content"], stages[1]["content"], query, language)
    # From here on, the revised plan (stage 2) serves as the master plan.
    master_plan = stages[2]["content"]
    if 3 <= stage_idx <= 12:
        # Initial drafting pass: one writer per narrative phase.
        phase_no = stage_idx - 2
        prior_text = self.get_previous_writer_content(stages, phase_no)
        requirements = self.narrative_tracker.generate_phase_requirements(phase_no)
        summary = self.generate_narrative_summary(stages, phase_no)
        banned = list(self.narrative_tracker.used_expressions)
        return self.create_writer_prompt_enhanced(
            phase_no, master_plan, prior_text,
            requirements, summary, language, banned
        )
    if stage_idx == 13:
        # Mid-point critique over all drafts produced so far.
        drafts = self.get_all_writer_content(stages, 12)
        return self.create_critic_consistency_prompt_enhanced(
            drafts, self.narrative_tracker, query, language
        )
    if 14 <= stage_idx <= 23:
        # Revision pass: pair each writer's draft with the stage-13 feedback.
        phase_no = stage_idx - 13
        draft = stages[2 + phase_no]["content"]
        feedback = stages[13]["content"]
        return self.create_writer_revision_prompt(phase_no, draft, feedback, language)
    if stage_idx == 24:
        # Editor pass over the fully revised manuscript.
        manuscript = self.get_all_writer_content(stages, 23)
        detected = self.detect_issues(manuscript)
        return self.create_editor_prompt(manuscript, detected, language)
    if stage_idx == 25:
        # Final critique; prefer the edited text, fall back to raw drafts.
        manuscript = stages[24]["content"] or self.get_all_writer_content(stages, 23)
        return self.create_critic_final_prompt(manuscript, len(manuscript.split()), language)
    return ""
| def create_director_revision_prompt(self, initial_plan: str, critic_feedback: str, user_query: str, language: str) -> str: | |
| """๊ฐ๋ ์ ์์ ํ๋กฌํํธ""" | |
| return f"""๋นํ์ ๋ฐ์ํ์ฌ ํตํฉ๋ ์์ฌ ๊ตฌ์กฐ๋ฅผ ์์ฑํ์ธ์. | |
| **์ ์ฃผ์ :** {user_query} | |
| **์ด๊ธฐ ๊ธฐํ:** | |
| {initial_plan} | |
| **๋นํ:** | |
| {critic_feedback} | |
| **ํต์ฌ ์์ ์ฌํญ:** | |
| 1. ๋ฐ๋ณต ๊ตฌ์กฐ ์์ ์ ๊ฑฐ | |
| 2. 10๋จ๊ณ๊ฐ ํ๋์ ์ด์ผ๊ธฐ๋ก ์ฐ๊ฒฐ | |
| 3. ์ธ๋ฌผ์ ๋ช ํํ ๋ณํ ๊ถค์ | |
| 4. 8,000๋จ์ด ๋ถ๋ ๊ณํ | |
| 5. ์๋ํ ์ฌ์ฉ๊ธ์ง | |
| 6. ์ฒ ํ์ ๊น์ด์ ์ธ๊ฐ์ ํฌํจ | |
| 7. ๊ฐ๋ ฌํ ์ค์ฌ ์์ง ์ฐฝ์กฐ | |
| 8. ์ฃผ์ธ๊ณต ์ด๋ฆ ๋ช ํํ ์ค์ (์ผ๊ด์ฑ ์ ์ง) | |
| ๊ฐ ๋จ๊ณ๊ฐ ์ด์ ์ ํ์ฐ์ ๊ฒฐ๊ณผ๊ฐ ๋๋๋ก ์์ ํ์ธ์.""" | |
| def get_previous_writer_content(self, stages: List[Dict], current_writer: int) -> str: | |
| """์ด์ ์๊ฐ์ ๋ด์ฉ ๊ฐ์ ธ์ค๊ธฐ""" | |
| if current_writer == 1: | |
| return "" | |
| # ๋ฐ๋ก ์ด์ ์๊ฐ์ ๋ด์ฉ | |
| prev_idx = current_writer + 1 # stages ์ธ๋ฑ์ค๋ writer_num + 2 | |
| if prev_idx < len(stages) and stages[prev_idx]["content"]: | |
| return stages[prev_idx]["content"] | |
| return "" | |
| def get_all_writer_content(self, stages: List[Dict], up_to_stage: int) -> str: | |
| """ํน์ ๋จ๊ณ๊น์ง์ ๋ชจ๋ ์๊ฐ ๋ด์ฉ""" | |
| contents = [] | |
| for i, s in enumerate(stages): | |
| if i <= up_to_stage and "writer" in s.get("name", "") and s["content"]: | |
| contents.append(s["content"]) | |
| return "\n\n".join(contents) | |
| def get_edited_content(self, stages: List[Dict]) -> str: | |
| """ํธ์ง๋ ๋ด์ฉ ๊ฐ์ ธ์ค๊ธฐ""" | |
| for s in stages: | |
| if "ํธ์ง์" in s.get("name", "") and s["content"]: | |
| return s["content"] | |
| return "" | |
| def generate_narrative_summary(self, stages: List[Dict], up_to_writer: int) -> str: | |
| """ํ์ฌ๊น์ง์ ์์ฌ ์์ฝ""" | |
| if up_to_writer == 1: | |
| return "์ฒซ ์์์ ๋๋ค." | |
| summary_parts = [] | |
| for i in range(1, up_to_writer): | |
| if i in self.narrative_tracker.phase_summaries: | |
| summary_parts.append(f"[{NARRATIVE_PHASES[i-1]}]: {self.narrative_tracker.phase_summaries[i]}") | |
| return "\n".join(summary_parts) if summary_parts else "์ด์ ๋ด์ฉ์ ์ด์ด๋ฐ์ ์งํํ์ธ์." | |
    def update_narrative_tracker(self, content: str, writer_num: int):
        """Record a rough phase summary plus detected philosophical and
        literary markers for this writer's output in the narrative tracker.

        Side effects: may write ``phase_summaries[writer_num]``, append to
        ``philosophical_insights``, and set ``literary_devices[writer_num]``.
        """
        # Crude summary: the first few "long" lines (>50 chars) stand in for
        # key events. NOTE(review): a real implementation would need proper
        # analysis, as the original comment already admitted.
        lines = content.split('\n')
        key_events = [line.strip() for line in lines if len(line.strip()) > 50][:3]
        if key_events:
            summary = " ".join(key_events[:2])[:200] + "..."
            self.narrative_tracker.phase_summaries[writer_num] = summary
        # Philosophical-insight extraction via simple keyword matching
        # (Korean + English keyword pairs); only the first hit is recorded.
        philosophical_keywords = ['์กด์ฌ', '์๋ฏธ', '์ถ', '์ฃฝ์', '์ธ๊ฐ', '๊ณ ํต', 'ํฌ๋ง', '์ฌ๋',
                                  'existence', 'meaning', 'life', 'death', 'human', 'suffering', 'hope', 'love']
        for keyword in philosophical_keywords:
            if keyword in content:
                self.narrative_tracker.philosophical_insights.append(f"Phase {writer_num}: {keyword} ํ๊ตฌ")
                break
        # Literary-device detection (rough heuristics: simile markers,
        # ellipses/dashes for stream of consciousness, quote count for dialogue).
        literary_devices = []
        if '์ฒ๋ผ' in content or 'like' in content or 'as if' in content:
            literary_devices.append('๋น์ ')
        if '...' in content or 'โ' in content:
            literary_devices.append('์์์ ํ๋ฆ')
        if content.count('"') > 4:
            literary_devices.append('๋ํ')
        if literary_devices:
            self.narrative_tracker.literary_devices[writer_num] = literary_devices
    def detect_issues(self, content: str) -> List[str]:
        """Scan the assembled novel for repetition and consistency problems.

        Returns a list of human-readable (Korean) issue descriptions, fed to
        the editor-stage prompt.
        """
        issues = []
        # Duplicate-paragraph detection delegated to the tracker's deduplicator.
        duplicates = self.narrative_tracker.content_deduplicator.count_repetitions(content)
        if duplicates > 0:
            issues.append(f"{duplicates}๊ฐ์ ๋ฐ๋ณต๋ ๋ฌธ๋จ ๋ฐ๊ฒฌ")
        # Known over-used phrases, flagged after the third occurrence.
        # NOTE(review): these phrases and the name list below look specific to
        # one particular generated story ("๋๋ผ๋ฏธ", "43๋ง์") — they will not
        # generalize to other prompts; consider deriving them from the session.
        repetitive_phrases = ["์ต๊ธฐ๊ฐ ์ฐฌ ์์นจ", "๋๋ผ๋ฏธ ์ดํ", "43๋ง์", "๊ฐ๊ตฌ๋ฆฌ์์ ๋ฐ๋ผ๋ณด์๋ค"]
        for phrase in repetitive_phrases:
            count = content.count(phrase)
            if count > 2:
                issues.append(f"'{phrase}' ํํ์ด {count}ํ ๋ฐ๋ณต๋จ")
        # Protagonist-name consistency: more than one candidate name in the
        # text suggests the writers drifted.
        name_variations = ["๋๋ผ๋ฏธ", "์์ ", "๋"]
        found_names = [name for name in name_variations if name in content]
        if len(found_names) > 1:
            issues.append(f"์ฃผ์ธ๊ณต ์ด๋ฆ ๋ถ์ผ์น: {', '.join(found_names)}")
        return issues
| def evaluate_progression(self, content: str, phase: int) -> float: | |
| """์์ฌ ์งํ๋ ํ๊ฐ""" | |
| score = 5.0 | |
| # ๋ถ๋ ์ฒดํฌ | |
| word_count = len(content.split()) | |
| if word_count >= MIN_WORDS_PER_WRITER: | |
| score += 2.0 | |
| # ์๋ก์ด ์์ ์ฒดํฌ | |
| if phase > 1: | |
| prev_summary = self.narrative_tracker.phase_summaries.get(phase-1, "") | |
| if prev_summary and len(set(content.split()) - set(prev_summary.split())) > 100: | |
| score += 1.5 | |
| # ๋ณํ ์ธ๊ธ ์ฒดํฌ | |
| change_keywords = ['๋ณํ', '๋ฌ๋ผ์ก', '์๋ก์ด', '์ด์ ๋', '๋ ์ด์', | |
| 'changed', 'different', 'new', 'now', 'no longer'] | |
| if any(keyword in content for keyword in change_keywords): | |
| score += 1.5 | |
| # ์ฒ ํ์ ๊น์ด ์ฒดํฌ | |
| philosophical_keywords = ['์กด์ฌ', '์๋ฏธ', '์ถ์', '์ธ๊ฐ์', '์', 'existence', 'meaning', 'life', 'human', 'why'] | |
| if any(keyword in content for keyword in philosophical_keywords): | |
| score += 0.5 | |
| # ๋ฌธํ์ ๊ธฐ๋ฒ ์ฒดํฌ | |
| if not any(phrase in content for phrase in ['๋๊ผ๋ค', '์๋ค', 'felt', 'was']): | |
| score += 0.5 # ๋ณด์ฌ์ฃผ๊ธฐ ๊ธฐ๋ฒ ์ฌ์ฉ | |
| return min(10.0, score) | |
| def generate_literary_report(self, complete_novel: str, word_count: int, language: str) -> str: | |
| """์ต์ข ๋ฌธํ์ ํ๊ฐ""" | |
| prompt = self.create_critic_final_prompt(complete_novel, word_count, language) | |
| try: | |
| report = self.call_llm_sync([{"role": "user", "content": prompt}], "critic", language) | |
| return report | |
| except Exception as e: | |
| logger.error(f"์ต์ข ๋ณด๊ณ ์ ์์ฑ ์คํจ: {e}") | |
| return "๋ณด๊ณ ์ ์์ฑ ์ค ์ค๋ฅ ๋ฐ์" | |
| # --- ์ ํธ๋ฆฌํฐ ํจ์๋ค --- | |
def process_query(query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, str, str, str], None, None]:
    """Drive one full novel-generation run and stream UI updates.

    Yields (stages_markdown, novel_markdown, status_text, session_id) tuples
    as the pipeline progresses; passing ``session_id`` resumes that session.
    """
    if not query.strip():
        yield "", "", "โ ์ฃผ์ ๋ฅผ ์ ๋ ฅํด์ฃผ์ธ์.", session_id
        return
    system = ProgressiveLiterarySystem()
    stages_markdown = ""
    novel_content = ""
    for status, stages, current_session_id in system.process_novel_stream(query, language, session_id):
        stages_markdown = format_stages_display(stages)
        # Assemble the novel only once the last ten stages are all complete
        # (i.e. the writing/revision phase has finished).
        if stages and all(s.get("status") == "complete" for s in stages[-10:]):
            novel_content = NovelDatabase.get_writer_content(current_session_id)
            # Prefer the editor's pass over raw writer output when available.
            edited = system.get_edited_content(stages)
            if edited:
                novel_content = edited
            novel_content = format_novel_display(novel_content)
        yield stages_markdown, novel_content, status or "๐ ์ฒ๋ฆฌ ์ค...", current_session_id
def get_active_sessions(language: str) -> List[str]:
    """Build human-readable dropdown labels for every in-progress session."""
    def _label(s: Dict) -> str:
        # "abcd1234... - first 50 chars of query... (timestamp) [word count]"
        return (f"{s['session_id'][:8]}... - {s['user_query'][:50]}... "
                f"({s['created_at']}) [{s['total_words']:,}๋จ์ด]")
    return [_label(s) for s in NovelDatabase.get_active_sessions()]
def auto_recover_session(language: str) -> Tuple[Optional[str], str]:
    """Pick the most recent active session; return (session_id, status message).

    Returns (None, message) when there is nothing to recover.
    """
    sessions = NovelDatabase.get_active_sessions()
    if not sessions:
        return None, "๋ณต๊ตฌํ ์ธ์ ์ด ์์ต๋๋ค."
    newest = sessions[0]
    return newest['session_id'], f"์ธ์ {newest['session_id'][:8]}... ๋ณต๊ตฌ๋จ"
def resume_session(session_id: str, language: str) -> Generator[Tuple[str, str, str, str], None, None]:
    """Resume a saved session by re-running process_query with its stored query."""
    if not session_id:
        yield "", "", "โ ์ธ์ ID๊ฐ ์์ต๋๋ค.", session_id
        return
    # Dropdown labels look like "abcd1234... - query..."; keep only the id part.
    if "..." in session_id:
        session_id = session_id.split("...")[0]
    session = NovelDatabase.get_session(session_id)
    if session is None:
        yield "", "", "โ ์ธ์ ์ ์ฐพ์ ์ ์์ต๋๋ค.", None
        return
    yield from process_query(session['user_query'], session['language'], session_id)
def download_novel(novel_text: str, format_type: str, language: str, session_id: str) -> Optional[str]:
    """Write the novel to disk in the requested format.

    Returns the created file path, or None when input is missing or the
    export fails (failure is logged).
    """
    if not (novel_text and session_id):
        return None
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    base_name = f"novel_{session_id[:8]}_{stamp}"
    try:
        if format_type == "DOCX" and DOCX_AVAILABLE:
            return export_to_docx(novel_text, base_name, language, session_id)
        return export_to_txt(novel_text, base_name)
    except Exception as e:
        logger.error(f"ํ์ผ ์์ฑ ์คํจ: {e}")
        return None
def format_stages_display(stages: List[Dict]) -> str:
    """Render the per-stage progress panel as markdown.

    Shows the running word total across writer stages, then one line per
    stage with a status icon, optional word count, progression/repetition
    scores, and a <=200-char content preview.
    """
    written = sum(s.get('word_count', 0) for s in stages if 'writer' in s.get('name', ''))
    parts = ["## ๐ฌ ์งํ ์ํฉ\n\n", f"**์ด ๋จ์ด ์: {written:,} / {TARGET_WORDS:,}**\n\n"]
    icon_by_status = {'complete': "โ ", 'active': "๐"}
    for stage in stages:
        icon = icon_by_status.get(stage['status'], "โณ")
        entry = f"{icon} **{stage['name']}**"
        words = stage.get('word_count', 0)
        if words > 0:
            entry += f" ({words:,}๋จ์ด)"
        if stage.get('progression_score', 0) > 0:
            entry += f" [์งํ๋: {stage['progression_score']:.1f}/10]"
        if stage.get('repetition_score', 0) > 0:
            entry += f" [๋ฐ๋ณต๋: {stage['repetition_score']:.1f}/10]"
        parts.append(entry + "\n")
        text = stage['content']
        if text:
            snippet = text[:200] + "..." if len(text) > 200 else text
            parts.append(f"> {snippet}\n\n")
    return "".join(parts)
def format_novel_display(novel_text: str) -> str:
    """Render the finished novel as markdown with a word-count header.

    Empty paragraphs are dropped; each remaining paragraph is re-emitted
    followed by a blank line.
    """
    if not novel_text:
        return "์์ง ์์ฑ๋ ๋ด์ฉ์ด ์์ต๋๋ค."
    word_total = len(novel_text.split())
    header = (
        "# ๐ ์์ฑ๋ ์์ค\n\n"
        f"**์ด ๋ถ๋: {word_total:,}๋จ์ด (๋ชฉํ: {TARGET_WORDS:,}๋จ์ด)**\n\n"
        "---\n\n"
    )
    body = "".join(
        f"{part}\n\n" for part in novel_text.split('\n\n') if part.strip()
    )
    return header + body
def export_to_docx(content: str, filename: str, language: str, session_id: str) -> str:
    """Export the novel to a DOCX file named ``<filename>.docx``.

    Args:
        content: Full novel text; paragraphs separated by blank lines.
        filename: Output file base name (without extension).
        language: UI language (currently unused here).
        session_id: Session whose stored query supplies the title page.

    Returns:
        The path of the saved .docx file.
    """
    doc = Document()
    # US-Letter page with book-ish margins.
    section = doc.sections[0]
    section.page_height = Inches(11)
    section.page_width = Inches(8.5)
    section.top_margin = Inches(1)
    section.bottom_margin = Inches(1)
    section.left_margin = Inches(1.25)
    section.right_margin = Inches(1.25)
    # Title page built from the stored session query (skipped if the
    # session record is gone).
    session = NovelDatabase.get_session(session_id)
    title_para = doc.add_paragraph()
    title_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
    if session:
        title_run = title_para.add_run(session["user_query"])
        title_run.font.size = Pt(24)
        title_run.bold = True
    # Metadata block: creation date and word count.
    doc.add_paragraph()
    meta_para = doc.add_paragraph()
    meta_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
    meta_para.add_run(f"์์ฑ์ผ: {datetime.now().strftime('%Y๋ %m์ %d์ผ')}\n")
    meta_para.add_run(f"์ด ๋จ์ด ์: {len(content.split()):,}๋จ์ด")
    doc.add_page_break()
    # Body style: 11pt, 1.5 line spacing.
    # NOTE(review): 'Calibri' may not cover Korean glyphs well — verify font.
    style = doc.styles['Normal']
    style.font.name = 'Calibri'
    style.font.size = Pt(11)
    style.paragraph_format.line_spacing = 1.5
    style.paragraph_format.space_after = Pt(6)
    # Body paragraphs (blank-line separated in the source text).
    for para_text in content.split('\n\n'):
        if para_text.strip():
            doc.add_paragraph(para_text.strip())
    # FIX: the path was previously hard-coded, ignoring the ``filename``
    # argument, so every export clobbered the same file. Use the caller's
    # unique timestamped base name.
    filepath = f"{filename}.docx"
    doc.save(filepath)
    return filepath
def export_to_txt(content: str, filename: str) -> str:
    """Export the novel as UTF-8 plain text to ``<filename>.txt``.

    Args:
        content: Full novel text to write.
        filename: Output file base name (without extension).

    Returns:
        The path of the written file.
    """
    # FIX: the path was previously hard-coded, ignoring the ``filename``
    # argument, so every export clobbered the same file.
    filepath = f"{filename}.txt"
    with open(filepath, 'w', encoding='utf-8') as f:
        f.write(content)
    return filepath
# Custom CSS for the Gradio UI: translucent "glass" panels over a blue
# gradient background, scrollable stage/novel output panes, and progress-bar
# plus score-badge styles (badge classes are referenced from generated HTML).
custom_css = """
.gradio-container {
    background: linear-gradient(135deg, #1e3c72 0%, #2a5298 50%, #1e3c72 100%);
    min-height: 100vh;
}
.main-header {
    background-color: rgba(255, 255, 255, 0.1);
    backdrop-filter: blur(10px);
    padding: 30px;
    border-radius: 12px;
    margin-bottom: 30px;
    text-align: center;
    color: white;
    border: 1px solid rgba(255, 255, 255, 0.2);
}
.progress-note {
    background-color: rgba(255, 223, 0, 0.1);
    border-left: 3px solid #ffd700;
    padding: 15px;
    margin: 20px 0;
    border-radius: 8px;
    color: #fff;
}
.improvement-note {
    background-color: rgba(0, 255, 127, 0.1);
    border-left: 3px solid #00ff7f;
    padding: 15px;
    margin: 20px 0;
    border-radius: 8px;
    color: #fff;
}
.input-section {
    background-color: rgba(255, 255, 255, 0.1);
    backdrop-filter: blur(10px);
    padding: 20px;
    border-radius: 12px;
    margin-bottom: 20px;
    border: 1px solid rgba(255, 255, 255, 0.2);
}
.session-section {
    background-color: rgba(255, 255, 255, 0.1);
    backdrop-filter: blur(10px);
    padding: 15px;
    border-radius: 8px;
    margin-top: 20px;
    color: white;
    border: 1px solid rgba(255, 255, 255, 0.2);
}
#stages-display {
    background-color: rgba(255, 255, 255, 0.95);
    padding: 20px;
    border-radius: 12px;
    max-height: 600px;
    overflow-y: auto;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}
#novel-output {
    background-color: rgba(255, 255, 255, 0.95);
    padding: 30px;
    border-radius: 12px;
    max-height: 700px;
    overflow-y: auto;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}
.download-section {
    background-color: rgba(255, 255, 255, 0.9);
    padding: 15px;
    border-radius: 8px;
    margin-top: 20px;
    box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
/* ์งํ ํ์๊ธฐ ์คํ์ผ */
.progress-bar {
    background-color: #e0e0e0;
    height: 20px;
    border-radius: 10px;
    overflow: hidden;
    margin: 10px 0;
}
.progress-fill {
    background-color: #4CAF50;
    height: 100%;
    transition: width 0.3s ease;
}
/* ์ ์ ํ์ ์คํ์ผ */
.score-badge {
    display: inline-block;
    padding: 2px 8px;
    border-radius: 12px;
    font-size: 0.9em;
    font-weight: bold;
    margin-left: 5px;
}
.score-high {
    background-color: #4CAF50;
    color: white;
}
.score-medium {
    background-color: #FF9800;
    color: white;
}
.score-low {
    background-color: #F44336;
    color: white;
}
"""
# Gradio interface construction
def create_interface():
    """Build and return the full Gradio Blocks UI for the novel generator.

    Layout: left column = theme input, run controls, and session management;
    right column = tabbed progress / finished-novel views plus downloads.
    All event wiring (submit, resume, auto-recover, clear, download) is done
    here against module-level handler functions.
    """
    with gr.Blocks(css=custom_css, title="AI ์งํํ ์ฅํธ์์ค ์์ฑ ์์คํ v2") as interface:
        gr.HTML("""
        <div class="main-header">
            <h1 style="font-size: 2.5em; margin-bottom: 10px;">
                ๐ AI ์งํํ ์ฅํธ์์ค ์์ฑ ์์คํ v2.0
            </h1>
            <h3 style="color: #ddd; margin-bottom: 20px;">
                ๋ฐ๋ณต ์๋ ์ง์ ํ ์ฅํธ ์์ฌ ๊ตฌ์กฐ ์คํ
            </h3>
            <p style="font-size: 1.1em; color: #eee; max-width: 800px; margin: 0 auto;">
                10๊ฐ์ ์ ๊ธฐ์ ์ผ๋ก ์ฐ๊ฒฐ๋ ๋จ๊ณ๋ฅผ ํตํด ํ๋์ ์์ ํ ์ด์ผ๊ธฐ๋ฅผ ๋ง๋ค์ด๋ ๋๋ค.
                <br>
                ๊ฐ ๋จ๊ณ๋ ์ด์ ๋จ๊ณ์ ํ์ฐ์ ๊ฒฐ๊ณผ๋ก ์ด์ด์ง๋ฉฐ, ์ธ๋ฌผ์ ๋ณํ์ ์ฑ์ฅ์ ์ถ์ ํฉ๋๋ค.
            </p>
            <div class="progress-note">
                โก ๋ฐ๋ณต์ด ์๋ ์ถ์ , ์ํ์ด ์๋ ์งํ์ ํตํ ์ง์ ํ ์ฅํธ ์์ฌ
            </div>
            <div class="improvement-note">
                ๐ v2.0 ๊ฐ์ ์ฌํญ: ๊ฐํ๋ ๋ฐ๋ณต ๊ฐ์ง ์์คํ , ํธ์ง์ ๋จ๊ณ ์ถ๊ฐ, ์ค์๊ฐ ์งํ๋ ๋ชจ๋ํฐ๋ง
            </div>
        </div>
        """)
        # Session id shared by all event handlers below.
        current_session_id = gr.State(None)
        with gr.Row():
            with gr.Column(scale=1):
                with gr.Group(elem_classes=["input-section"]):
                    query_input = gr.Textbox(
                        label="์์ค ์ฃผ์ / Novel Theme",
                        placeholder="์คํธ์์ค์ ์ฃผ์ ๋ฅผ ์ ๋ ฅํ์ธ์. ์ธ๋ฌผ์ ๋ณํ์ ์ฑ์ฅ์ด ์ค์ฌ์ด ๋๋ ์ด์ผ๊ธฐ...\nEnter the theme for your novella. Focus on character transformation and growth...",
                        lines=4
                    )
                    language_select = gr.Radio(
                        choices=["Korean", "English"],
                        value="Korean",
                        label="์ธ์ด / Language"
                    )
                    with gr.Row():
                        submit_btn = gr.Button("๐ ์์ค ์์ฑ ์์", variant="primary", scale=2)
                        clear_btn = gr.Button("๐๏ธ ์ด๊ธฐํ", scale=1)
                    status_text = gr.Textbox(
                        label="์ํ",
                        interactive=False,
                        value="๐ ์ค๋น ์๋ฃ"
                    )
                # Session management: list, resume, or auto-recover runs.
                with gr.Group(elem_classes=["session-section"]):
                    gr.Markdown("### ๐พ ์งํ ์ค์ธ ์ธ์ ")
                    session_dropdown = gr.Dropdown(
                        label="์ธ์ ์ ํ",
                        choices=[],
                        interactive=True
                    )
                    with gr.Row():
                        refresh_btn = gr.Button("๐ ๋ชฉ๋ก ์๋ก๊ณ ์นจ", scale=1)
                        resume_btn = gr.Button("โถ๏ธ ์ ํ ์ฌ๊ฐ", variant="secondary", scale=1)
                        auto_recover_btn = gr.Button("โป๏ธ ์ต๊ทผ ์ธ์ ๋ณต๊ตฌ", scale=1)
            with gr.Column(scale=2):
                with gr.Tab("๐ ์ฐฝ์ ์งํ"):
                    stages_display = gr.Markdown(
                        value="์ฐฝ์ ๊ณผ์ ์ด ์ฌ๊ธฐ์ ํ์๋ฉ๋๋ค...",
                        elem_id="stages-display"
                    )
                with gr.Tab("๐ ์์ฑ๋ ์์ค"):
                    novel_output = gr.Markdown(
                        value="์์ฑ๋ ์์ค์ด ์ฌ๊ธฐ์ ํ์๋ฉ๋๋ค...",
                        elem_id="novel-output"
                    )
                    with gr.Group(elem_classes=["download-section"]):
                        gr.Markdown("### ๐ฅ ์์ค ๋ค์ด๋ก๋")
                        with gr.Row():
                            format_select = gr.Radio(
                                choices=["DOCX", "TXT"],
                                value="DOCX" if DOCX_AVAILABLE else "TXT",
                                label="ํ์"
                            )
                            download_btn = gr.Button("โฌ๏ธ ๋ค์ด๋ก๋", variant="secondary")
                        download_file = gr.File(
                            label="๋ค์ด๋ก๋๋ ํ์ผ",
                            visible=False
                        )
        # Hidden state mirroring the rendered novel text (used for downloads).
        novel_text_state = gr.State("")
        # Example prompts.
        with gr.Row():
            gr.Examples(
                examples=[
                    ["์ค์งํ ์ค๋ ๋จ์ฑ์ด ์๋ก์ด ์ถ์ ์๋ฏธ๋ฅผ ์ฐพ์๊ฐ๋ ์ฌ์ "],
                    ["๋์์์ ์๊ณจ๋ก ์ด์ฃผํ ์ฒญ๋ ์ ์ ์๊ณผ ์ฑ์ฅ ์ด์ผ๊ธฐ"],
                    ["์ธ ์ธ๋๊ฐ ํจ๊ป ์ฌ๋ ๊ฐ์กฑ์ ๊ฐ๋ฑ๊ณผ ํํด"],
                    ["A middle-aged woman's journey to rediscover herself after divorce"],
                    ["The transformation of a cynical journalist through unexpected encounters"],
                    ["์์ ์์ ์ ์ด์ํ๋ ๋ ธ๋ถ๋ถ์ ๋ง์ง๋ง 1๋ "],
                    ["AI ์๋์ ์ผ์๋ฆฌ๋ฅผ ์์ ๋ฒ์ญ๊ฐ์ ์๋ก์ด ๋์ "],
                    ["๊ธฐ์ด์ํ์๊ธ์๊ฐ ๋ ์ฒญ๋ ์ ์์กด๊ณผ ์กด์์ฑ ์ฐพ๊ธฐ"]
                ],
                inputs=query_input,
                label="๐ก ์ฃผ์ ์์"
            )
        # Event handlers
        def refresh_sessions():
            """Reload the active-session dropdown; empty list on DB errors."""
            try:
                sessions = get_active_sessions("Korean")
                return gr.update(choices=sessions)
            except Exception as e:
                logger.error(f"Error refreshing sessions: {str(e)}")
                return gr.update(choices=[])
        def handle_auto_recover(language):
            """Recover the most recent session; returns (session_id, message)."""
            session_id, message = auto_recover_session(language)
            return session_id, message
        # Event wiring
        submit_btn.click(
            fn=process_query,
            inputs=[query_input, language_select, current_session_id],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        # Mirror rendered novel markdown into the hidden download state.
        novel_output.change(
            fn=lambda x: x,
            inputs=[novel_output],
            outputs=[novel_text_state]
        )
        # Resume: first strip the dropdown label down to the raw id, then run.
        resume_btn.click(
            fn=lambda x: x.split("...")[0] if x and "..." in x else x,
            inputs=[session_dropdown],
            outputs=[current_session_id]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        auto_recover_btn.click(
            fn=handle_auto_recover,
            inputs=[language_select],
            outputs=[current_session_id, status_text]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )
        refresh_btn.click(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )
        clear_btn.click(
            fn=lambda: ("", "", "๐ ์ค๋น ์๋ฃ", "", None),
            outputs=[stages_display, novel_output, status_text, novel_text_state, current_session_id]
        )
        def handle_download(format_type, language, session_id, novel_text):
            """Create the export file and reveal the File component on success."""
            if not session_id or not novel_text:
                return gr.update(visible=False)
            file_path = download_novel(novel_text, format_type, language, session_id)
            if file_path:
                return gr.update(value=file_path, visible=True)
            else:
                return gr.update(visible=False)
        download_btn.click(
            fn=handle_download,
            inputs=[format_select, language_select, current_session_id, novel_text_state],
            outputs=[download_file]
        )
        # Populate the session dropdown once on page load.
        interface.load(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )
    return interface
# Main entry point
if __name__ == "__main__":
    logger.info("AI ์งํํ ์ฅํธ์์ค ์์ฑ ์์คํ v2.0 ์์...")
    logger.info("=" * 60)
    # Environment / configuration summary.
    logger.info(f"API ์๋ํฌ์ธํธ: {API_URL}")
    logger.info(f"๋ชฉํ ๋ถ๋: {TARGET_WORDS:,}๋จ์ด")
    logger.info(f"์๊ฐ๋น ์ต์ ๋ถ๋: {MIN_WORDS_PER_WRITER:,}๋จ์ด")
    logger.info("์ฃผ์ ๊ฐ์ ์ฌํญ: ๋ฐ๋ณต ๊ฐ์ง ๊ฐํ, ํธ์ง์ ๋จ๊ณ ์ถ๊ฐ, ์งํ๋ ๋ชจ๋ํฐ๋ง")
    # Optional features depend on env vars / installed packages.
    if BRAVE_SEARCH_API_KEY:
        logger.info("์น ๊ฒ์์ด ํ์ฑํ๋์์ต๋๋ค.")
    else:
        logger.warning("์น ๊ฒ์์ด ๋นํ์ฑํ๋์์ต๋๋ค.")
    if DOCX_AVAILABLE:
        logger.info("DOCX ๋ด๋ณด๋ด๊ธฐ๊ฐ ํ์ฑํ๋์์ต๋๋ค.")
    else:
        logger.warning("DOCX ๋ด๋ณด๋ด๊ธฐ๊ฐ ๋นํ์ฑํ๋์์ต๋๋ค.")
    logger.info("=" * 60)
    # Initialize the SQLite schema before the UI can accept requests.
    logger.info("๋ฐ์ดํฐ๋ฒ ์ด์ค ์ด๊ธฐํ ์ค...")
    NovelDatabase.init_db()
    logger.info("๋ฐ์ดํฐ๋ฒ ์ด์ค ์ด๊ธฐํ ์๋ฃ.")
    # Build and launch the Gradio app, listening on all interfaces
    # (0.0.0.0:7860 — the standard HF Spaces port).
    interface = create_interface()
    interface.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        debug=True
    )