def chat_stream(self, message, history):
    messages = [{"role": "system", "content": self.system_prompt()}]
    for msg in history:
        if isinstance(msg, dict) and msg.get("role") in ["user", "assistant"]:
            messages.append(msg)
    messages.append({"role": "user", "content": message})
    self.session_log.append({"role": "user", "content": message})

    # First non-streamed call to check for tool calls
    response = self.openai.chat.completions.create(
        model="gpt-4o",
        messages=messages,
        tools=tools,
        stream=False  # Check for tool calls
    )
    reply = response.choices[0].message

    if reply.tool_calls:
        tool_results = self.handle_tool_call(reply.tool_calls)
        messages.append(reply)
        messages.extend(tool_results)
        # Stream the final response after the tool results are appended
        final_response = self.openai.chat.completions.create(
            model="gpt-4o",
            messages=messages,
            tools=tools,
            stream=True
        )
        full_response = ""
        for chunk in final_response:
            delta = chunk.choices[0].delta
            if hasattr(delta, "content") and delta.content:
                full_response += delta.content
                yield full_response
    else:
        # Normal streaming response (no tool call)
        stream = self.openai.chat.completions.create(
            model="gpt-4o",
            messages=messages,
            tools=tools,
            stream=True
        )
        full_response = ""
        for chunk in stream:
            delta = chunk.choices[0].delta
            if hasattr(delta, "content") and delta.content:
                full_response += delta.content
                yield full_response
    # Always add the follow-up message and yield it so the UI displays it
    full_response += "\n\n💬 Let me know if you’d like to follow up or need help connecting with Jacob."
    yield full_response
    self.session_log.append({"role": "assistant", "content": full_response})
    self.save_session_log()
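
# A minimal sketch (an assumption, not part of the original Space) of how chat_stream
# could be wired into a Gradio UI. The Assistant class name is hypothetical;
# gr.ChatInterface with type="messages" passes history as the same
# {"role": ..., "content": ...} dicts that chat_stream expects, and a generator
# function enables the streamed, incrementally updated reply.
import gradio as gr

assistant = Assistant()  # hypothetical class exposing chat_stream() and the helpers above

demo = gr.ChatInterface(
    fn=assistant.chat_stream,
    type="messages",  # history arrives as a list of role/content dicts
)

if __name__ == "__main__":
    demo.launch()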