from typing import Dict, Any, List, Optional
from openai import OpenAI
import json
from pydantic import BaseModel, Field
from prompts import SYSTEM_PROMPT, format_exploration_prompt, DEFAULT_RESPONSE


class ExplorationNode(BaseModel):
    id: Optional[str] = None
    title: str
    description: str
    connections: List[Dict[str, Any]] = Field(default_factory=list)
    depth: int = 0


class ExplorationPath(BaseModel):
    nodes: List[ExplorationNode]
    query: str
    domain: Optional[str] = None

    class Config:
        populate_by_name = True
        arbitrary_types_allowed = True
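
# Illustrative sketch (not part of the original module): ExplorationPath accepts
# plain dictionaries for its nodes, which is how generate_exploration_path below
# assembles its result. The field values here are invented placeholders.
#
#     path = ExplorationPath(
#         nodes=[{"title": "Exploration Overview", "description": "...", "depth": 0}],
#         query="example query",
#     )
#     path.model_dump()  # -> {"nodes": [...], "query": "example query", "domain": None}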


def transform_response_to_nodes(api_response: Dict[str, Any]) -> List[Dict[str, Any]]:
    """Transform the API response into a list of ExplorationNode-compatible dictionaries."""
    nodes = []

    # Add the main exploration summary as the root node
    if "exploration_summary" in api_response:
        nodes.append({
            "id": "root",
            "title": "Exploration Overview",
            "description": api_response["exploration_summary"]["current_context"],
            "depth": 0,
            "connections": []
        })

    # Transform standard axes into nodes
    if "knowledge_axes" in api_response and "standard_axes" in api_response["knowledge_axes"]:
        for axis in api_response["knowledge_axes"]["standard_axes"]:
            # Create a node for the axis itself
            axis_node = {
                "id": f"axis_{axis['name']}",
                "title": axis['name'],
                "description": f"Standard exploration axis: {axis['name']}",
                "depth": 1,
                "connections": []
            }
            nodes.append(axis_node)

            # Create nodes for potential values
            for idx, value in enumerate(axis.get("potential_values", [])):
                value_node = {
                    "id": f"value_{axis['name']}_{idx}",
                    "title": value["value"],
                    "description": value["contextual_rationale"],
                    "depth": 2,
                    "connections": []
                }
                nodes.append(value_node)

                # Connect the value node back to its axis node
                axis_node["connections"].append({
                    "target_id": value_node["id"],
                    "relevance_score": value["relevance_score"]
                })

    # Transform emergent axes into nodes
    if "knowledge_axes" in api_response and "emergent_axes" in api_response["knowledge_axes"]:
        for e_axis in api_response["knowledge_axes"]["emergent_axes"]:
            # Create a node for the emergent axis
            e_axis_node = {
                "id": f"emergent_{e_axis['name']}",
                "title": f"{e_axis['name']} (Emergent)",
                "description": f"Emergent axis derived from {e_axis['parent_axis']}",
                "depth": 2,
                "connections": []
            }
            nodes.append(e_axis_node)

            # Create nodes for innovative values
            for idx, value in enumerate(e_axis.get("innovative_values", [])):
                value_node = {
                    "id": f"innovative_{e_axis['name']}_{idx}",
                    "title": value["value"],
                    "description": value["discovery_potential"],
                    "depth": 3,
                    "connections": []
                }
                nodes.append(value_node)

                # Connect the value node back to its emergent axis node
                e_axis_node["connections"].append({
                    "target_id": value_node["id"],
                    "innovation_score": value["innovation_score"]
                })

    return nodes
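
# Illustrative sketch of the response shape transform_response_to_nodes expects,
# reconstructed from the keys accessed above; every value shown is an invented
# placeholder, not output from the real prompt:
#
#     {
#         "exploration_summary": {"current_context": "..."},
#         "knowledge_axes": {
#             "standard_axes": [
#                 {"name": "scale", "potential_values": [
#                     {"value": "micro", "contextual_rationale": "...", "relevance_score": 0.8}
#                 ]}
#             ],
#             "emergent_axes": [
#                 {"name": "hybrid", "parent_axis": "scale", "innovative_values": [
#                     {"value": "cross-scale", "discovery_potential": "...", "innovation_score": 0.6}
#                 ]}
#             ]
#         }
#     }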


class ExplorationPathGenerator:
    def __init__(self, api_key: str):
        # Groq exposes an OpenAI-compatible API, so the OpenAI client is pointed at Groq's base URL
        self.client = OpenAI(
            api_key=api_key,
            base_url="https://api.groq.com/openai/v1"
        )

    def generate_exploration_path(
        self,
        query: str,
        selected_path: Optional[List[Dict[str, Any]]] = None,
        exploration_parameters: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """Generate an exploration path based on the query and parameters."""
        try:
            print("\n=== Starting API Request ===")
            selected_path = selected_path or []
            exploration_parameters = exploration_parameters or {}

            formatted_prompt = format_exploration_prompt(
                user_query=query,
                selected_path=selected_path,
                exploration_parameters=exploration_parameters
            )

            print("\n=== Formatted Request ===")
            print("System Prompt:", SYSTEM_PROMPT[:200] + "...")
            print("\nFormatted Prompt (excerpt):", formatted_prompt[:200] + "...")

            response = self.client.chat.completions.create(
                model="mixtral-8x7b-32768",
                messages=[
                    {"role": "system", "content": SYSTEM_PROMPT},
                    {"role": "user", "content": formatted_prompt}
                ],
                temperature=0.7,
                max_tokens=2000
            )

            print("\n=== API Response ===")
            print("Raw response content:", response.choices[0].message.content)

            try:
                result = json.loads(response.choices[0].message.content)
                print("\n=== Parsed Response ===")
                print(json.dumps(result, indent=2))

                # Transform the API response into nodes
                nodes = transform_response_to_nodes(result)

                # Create an ExplorationPath from the transformed nodes
                exploration_path = ExplorationPath(
                    nodes=nodes,
                    query=query,
                    domain=exploration_parameters.get("domain")
                )

                final_result = exploration_path.model_dump()
                print("\n=== Final Result ===")
                print(json.dumps(final_result, indent=2))
                return final_result

            except json.JSONDecodeError as e:
                print(f"\n=== JSON Parse Error ===\n{str(e)}")
                print("Falling back to an empty exploration path")
                return {
                    "nodes": [],
                    "query": query,
                    "domain": exploration_parameters.get("domain")
                }

        except Exception as e:
            print(f"\n=== Error ===\n{str(e)}")
            return {
                "error": str(e),
                "status": "failed",
                "message": "Failed to generate exploration path",
                "default_response": DEFAULT_RESPONSE
            }
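

if __name__ == "__main__":
    # Minimal usage sketch, not part of the original module. It assumes the Groq
    # key is available in a GROQ_API_KEY environment variable; the variable name,
    # the query, and the "domain" value are illustrative placeholders.
    import os

    generator = ExplorationPathGenerator(api_key=os.environ["GROQ_API_KEY"])
    path = generator.generate_exploration_path(
        query="How do coral reefs adapt to warming oceans?",
        exploration_parameters={"domain": "marine biology"},
    )
    print(json.dumps(path, indent=2))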