hadadrjt committed
Commit 083bf02 · Parent: 1c78115

ai: Switch to OpenAI requests style.

Files changed (2)
  1. jarvis.py +8 -13
  2. requirements.txt +1 -0
jarvis.py CHANGED
@@ -18,6 +18,8 @@ import fitz
 import io
 import uuid
 
+from openai import OpenAI
+
 from optillm.cot_reflection import cot_reflection
 from optillm.leap import leap
 from optillm.plansearch import plansearch
@@ -50,7 +52,6 @@ ALLOWED_EXTENSIONS = json.loads(os.getenv("ALLOWED_EXTENSIONS"))
 class SessionWithID(requests.Session):
     def __init__(self):
         super().__init__()
-        self.headers.update({"Connection": "keep-alive"})
         self.session_id = str(uuid.uuid4())
 
 def create_session():
@@ -125,20 +126,14 @@ def chat_with_model(history, user_input, selected_model_display, sess):
     messages = [{"role": "user", "content": user} for user, _ in history]
     messages += [{"role": "assistant", "content": assistant} for _, assistant in history if assistant]
     messages.append({"role": "user", "content": user_input})
-    data = {"model": selected_model, "messages": messages, **model_config, "session_id": sess.session_id}
     random.shuffle(LINUX_SERVER_PROVIDER_KEYS)
     random.shuffle(LINUX_SERVER_HOSTS)
-    for api_key in LINUX_SERVER_PROVIDER_KEYS:
-        for host in LINUX_SERVER_HOSTS:
-            try:
-                response = sess.post(host, json=data, headers={"Authorization": f"Bearer {api_key}"}, timeout=1)
-                if response.status_code < 400:
-                    ai_text = response.json().get("choices", [{}])[0].get("message", {}).get("content", RESPONSES["RESPONSE_2"])
-                    processed_text = process_ai_response(ai_text)
-                    return processed_text
-            except requests.exceptions.RequestException:
-                continue
-    return RESPONSES["RESPONSE_3"]
+    client = OpenAI(base_url=LINUX_SERVER_HOSTS[0], api_key=LINUX_SERVER_PROVIDER_KEYS[0])
+    data = {"model": selected_model, "messages": messages, **model_config}
+    response = client.chat.completions.create(extra_body={"optillm_approach": "rto|re2|cot_reflection|self_consistency|plansearch|leap|z3|bon|moa|mcts|mcp|router|privacy|executecode|json", "session_id": sess.session_id}, **data)
+    ai_text = response.choices[0].message.content if response.choices and response.choices[0].message and response.choices[0].message.content else RESPONSES["RESPONSE_2"]
+    processed_text = process_ai_response(ai_text)
+    return processed_text
 
 def respond(multi_input, history, selected_model_display, sess):
     message = {"text": multi_input.get("text", "").strip(), "files": multi_input.get("files", [])}
requirements.txt CHANGED
@@ -9,3 +9,4 @@ PyMuPDF
 Pillow
 optillm
 json5
+openai
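
The new import style (from openai import OpenAI) requires the 1.x SDK. The commit adds the dependency unpinned; if reproducing this environment, a version-pinned entry such as the one below may be safer (the exact pin is an assumption, not part of the commit):

openai>=1.0.0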