|
from dataclasses import dataclass |
|
|
|
from .prompts import ( |
|
MAPREDUCE_SYSTEM_PROMPT, |
|
QUERY_FORMAT_PROMPT, |
|
PLANNING_SYSTEM_PROMPT, |
|
MAP_PROMPT, |
|
REDUCE_PROMPT, |
|
COLLAPSE_PROMPT, |
|
SUMMARY_PROMPT, |
|
) |
|
|
|
|
|
@dataclass
class LongCepoConfig:
    """Configuration for the LongCepo long-context map-reduce pipeline.

    Bundles per-stage sampling temperatures, token/size budgets, and the
    prompt templates for each stage (planning, summary, map, collapse,
    reduce), with defaults imported from ``.prompts``.
    """

    # Sampling temperatures for each pipeline stage's LLM call.
    temperature_plan: float = 0.7
    temperature_map: float = 0.7
    temperature_collapse: float = 0.7
    temperature_reduce: float = 0.7

    # Size/budget settings. NOTE(review): presumably all counted in tokens of
    # `tokenizer_name` below — confirm against the code that consumes this config.
    chunk_size: int = 4096  # size of each context chunk
    max_output_tokens: int = 1024  # generation cap for the main stages
    max_context_window: int = 8192  # context window the pipeline assumes
    max_output_tokens_summary: int = 300  # generation cap for summaries
    num_neighbor_summaries: int = 5  # neighboring summaries to include per chunk

    # Prompt templates; defaults come from the sibling `.prompts` module.
    system_prompt: str = MAPREDUCE_SYSTEM_PROMPT
    summary_prompt: str = SUMMARY_PROMPT
    map_prompt: str = MAP_PROMPT
    collapse_prompt: str = COLLAPSE_PROMPT
    reduce_prompt: str = REDUCE_PROMPT
    query_format_prompt: str = QUERY_FORMAT_PROMPT
    planning_system_prompt: str = PLANNING_SYSTEM_PROMPT

    # Marker separating the long context from the query in combined input text.
    context_query_delimiter: str = "<CONTEXT_END>"
    # Hugging Face model id of the tokenizer — presumably used for token
    # counting against the budgets above; TODO confirm at the call site.
    tokenizer_name: str = "meta-llama/Llama-4-Maverick-17B-128E-Instruct"
|
|