baconnier committed
Commit df9f8ac · verified · 1 Parent(s): 4138588

Update art_explorer.py

Files changed (1)
  1. art_explorer.py +11 -9
art_explorer.py CHANGED
@@ -1,17 +1,19 @@
 from typing import Optional, List, Dict
 import json
+import instructor
 from openai import OpenAI
-from instructor_function_calling import InstructorFunctionCalling
 from prompts import SYSTEM_PROMPT, format_exploration_prompt, DEFAULT_RESPONSE
 from models import ExplorationResponse
 
+# Configure instructor for GROQ
+instructor.patch(backend="groq")
+
 class ExplorationPathGenerator:
     def __init__(self, api_key: str):
-        base_client = OpenAI(
+        self.client = OpenAI(
             base_url="https://api.groq.com/openai/v1",
             api_key=api_key
         )
-        self.client = InstructorFunctionCalling(client=base_client)
 
     def generate_exploration_path(
         self,
@@ -35,9 +37,9 @@ class ExplorationPathGenerator:
                 exploration_parameters=exploration_parameters
             )
 
-            # Use the function calling client to generate response
-            response = self.client.chat.completions.create(
-                model="mixtral-8x7b-32768",
+            # Use instructor with GROQ backend
+            response = instructor.llm_mode(mode="groq")(ExplorationResponse).from_response(
+                client=self.client,
                 messages=[
                     {
                         "role": "system",
@@ -48,12 +50,12 @@ class ExplorationPathGenerator:
                         "content": formatted_prompt
                     }
                 ],
+                model="mixtral-8x7b-32768",
                 temperature=0.7,
-                max_tokens=2000,
-                response_model=ExplorationResponse
+                max_tokens=2000
             )
 
-            # Response is already validated against the Pydantic model
+            # Convert to dict for JSON serialization
             return response.model_dump()
 
         except Exception as e:
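
For reference, instructor is more commonly bound directly to an OpenAI-compatible client via instructor.patch, with the Pydantic model passed as response_model on the patched create call. A minimal sketch along those lines, assuming the same Groq base URL and model name used above; the ExplorationResponse fields and the prompt strings are placeholders standing in for models.ExplorationResponse and prompts.SYSTEM_PROMPT:

import instructor
from openai import OpenAI
from pydantic import BaseModel

# Hypothetical stand-in for models.ExplorationResponse
class ExplorationResponse(BaseModel):
    summary: str
    next_topics: list[str]

# Patch the Groq-compatible OpenAI client so create() accepts response_model
client = instructor.patch(OpenAI(
    base_url="https://api.groq.com/openai/v1",
    api_key="YOUR_GROQ_API_KEY",  # assumption: key passed directly for this sketch
))

response = client.chat.completions.create(
    model="mixtral-8x7b-32768",
    messages=[
        {"role": "system", "content": "You guide art exploration."},  # stands in for SYSTEM_PROMPT
        {"role": "user", "content": "Suggest an exploration path for impressionism."},
    ],
    temperature=0.7,
    max_tokens=2000,
    response_model=ExplorationResponse,  # returns a validated Pydantic instance
)

print(response.model_dump())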