baconnier committed
Commit 61beaee · verified · 1 Parent(s): 02a2c7a

Update app.py

Files changed (1): app.py (+36 -52)
app.py CHANGED
@@ -3,21 +3,18 @@ import openai
 import gradio as gr
 import json
 import plotly.graph_objects as go
-from variables import CONTEXTUAL_ZOOM_PROMPT, CONTEXTUAL_ZOOM_default_response
+from variables import *
 
 class ArtExplorer:
     def __init__(self):
-        print("Initializing ArtExplorer...")
         self.client = openai.OpenAI(
             base_url="https://api.groq.com/openai/v1",
             api_key=os.environ.get("GROQ_API_KEY")
         )
-        print("OpenAI client initialized")
         self.current_state = {
             "zoom_level": 0,
             "selections": {}
         }
-        print("Current state initialized")
 
     def create_map(self, locations):
         """Create a Plotly map figure from location data"""
@@ -42,54 +39,41 @@ class ArtExplorer:
         )
         return fig
 
-    def get_llm_response(self, query: str, zoom_context: dict = None) -> dict:
-        try:
-            current_zoom_states = {
-                "temporal": {"level": self.current_state["zoom_level"], "selection": ""},
-                "geographical": {"level": self.current_state["zoom_level"], "selection": ""},
-                "style": {"level": self.current_state["zoom_level"], "selection": ""},
-                "subject": {"level": self.current_state["zoom_level"], "selection": ""}
-            }
-
-            if zoom_context:
-                for key, value in zoom_context.items():
-                    if key in current_zoom_states:
-                        current_zoom_states[key]["selection"] = value
-
-            messages = [
-                {"role": "system", "content": "You are an expert art historian specializing in interactive exploration."},
-                {"role": "user", "content": CONTEXTUAL_ZOOM_PROMPT.format(
-                    user_query=query,
-                    current_zoom_states=json.dumps(current_zoom_states, indent=2)
-                )}
-            ]
-
-            print("Messages sent to LLM:")
-            print(messages)
-
-            response = self.client.chat.completions.create(
-                model="mixtral-8x7b-32768",
-                messages=messages,
-                temperature=0.1,
-                max_tokens=2048
-            )
-
-            print("Raw response from LLM:")
-            print(response)
-
-            result = json.loads(response.choices[0].message.content)
-
-            print("Parsed result:")
-            print(result)
-
-            return result
-        except json.JSONDecodeError as e:
-            print(f"JSON decode error: {str(e)}")
-            print(f"Response content: {response.choices[0].message.content if 'response' in locals() else 'No response'}")
-            return self.get_default_response()
-        except Exception as e:
-            print(f"Error in LLM response: {str(e)}")
-            return self.get_default_response()
+    def get_llm_response(self, query: str, zoom_context: dict = None) -> dict:
+        try:
+            current_zoom_states = {
+                "temporal": {"level": self.current_state["zoom_level"], "selection": ""},
+                "geographical": {"level": self.current_state["zoom_level"], "selection": ""},
+                "style": {"level": self.current_state["zoom_level"], "selection": ""},
+                "subject": {"level": self.current_state["zoom_level"], "selection": ""}
+            }
+
+            if zoom_context:
+                for key, value in zoom_context.items():
+                    if key in current_zoom_states:
+                        current_zoom_states[key]["selection"] = value
+
+            messages = [
+                {"role": "system", "content": "You are an expert art historian specializing in interactive exploration."},
+                {"role": "user", "content": CONTEXTUAL_ZOOM_PROMPT.format(
+                    user_query=query,
+                    current_zoom_states=json.dumps(current_zoom_states, indent=2)
+                )}
+            ]
+
+            print(messages)
+            response = self.client.chat.completions.create(
+                model="mixtral-8x7b-32768",
+                messages=messages,
+                temperature=0.1,
+                max_tokens=2048
+            )
+            print(response)
+            result = json.loads(response.choices[0].message.content)
+            return result
+        except Exception as e:
+            print(f"Error in LLM response: {str(e)}")
+            return self.get_default_response()
 
     def get_default_response(self):
         return CONTEXTUAL_ZOOM_default_response
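Review note on the first hunk: `from variables import *` still resolves `CONTEXTUAL_ZOOM_PROMPT` and `CONTEXTUAL_ZOOM_default_response`, but it now pulls in whatever the module exposes. A minimal sketch of how variables.py could pin the wildcard surface with `__all__`; the constant bodies below are placeholders, not the repo's actual values:

# variables.py -- sketch only; the real prompt and default live in the repo
__all__ = ["CONTEXTUAL_ZOOM_PROMPT", "CONTEXTUAL_ZOOM_default_response"]

# Placeholder template; app.py formats it with exactly these two fields.
CONTEXTUAL_ZOOM_PROMPT = (
    "User query: {user_query}\n"
    "Current zoom states:\n{current_zoom_states}"
)

# Placeholder default; app.py returns this object when the LLM call fails.
CONTEXTUAL_ZOOM_default_response = {"zoom_level": 0, "selections": {}}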
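Review note on the second hunk: the rewrite drops the dedicated `json.JSONDecodeError` branch, so a model reply that is not valid JSON now falls through the generic `except Exception` with a less specific log line. A minimal sketch of keeping that distinction without the removed debug prints; `parse_llm_json` is a hypothetical helper, not part of this commit:

import json

def parse_llm_json(raw: str, default: dict) -> dict:
    """Parse an LLM reply as JSON, degrading to a default payload.

    Mirrors the behavior removed in this commit: malformed JSON is
    reported distinctly instead of being lumped in with API errors.
    """
    try:
        return json.loads(raw)
    except json.JSONDecodeError as e:
        print(f"JSON decode error: {e}; falling back to default response")
        return default

Inside the new get_llm_response this would replace the bare json.loads call, e.g. result = parse_llm_json(response.choices[0].message.content, self.get_default_response()).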