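"""Gradio chat front end for an OpenAI Assistant.

Streams assistant replies token by token, dispatches requested tool calls to
the functions defined in bot_actions, and keeps one Assistants-API thread per
browser session. Intended to run as a Hugging Face Space.
"""
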
import json
import os

import gradio as gr
from openai import OpenAI

from bot_actions import functions_dictionary

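# Custom CSS that makes the chat column fill the viewport. The .svelte-* class
# selectors target Gradio's internal markup and may break across Gradio
# versions, so treat them as version-specific overrides.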
CSS = """
.contain { display: flex; flex-direction: column; }
.svelte-vt1mxs div:first-child { flex-grow: 1; overflow: auto; }
#chatbot { flex-grow: 1; overflow: auto; }
footer { display: none !important; }
.app.svelte-182fdeq.svelte-182fdeq {
    max-width: 100vw !important;
}
#main_container {
    height: 95vh;
}
#markup_container {
    height: 100%;
    overflow: auto;
}
"""

# Configuration is read from the environment (e.g. Hugging Face Space secrets).
openAIToken = os.environ["openAIToken"]
assistantId = os.environ["assistantId"]
initial_message = os.environ["initialMessage"]

client = OpenAI(api_key=openAIToken)

def handle_requires_action(data):
    """Run every tool call requested by the assistant and collect the outputs."""
    actions_results = []
    for tool in data.required_action.submit_tool_outputs.tool_calls:
        function_name = tool.function.name
        function_args = json.loads(tool.function.arguments)
        print(function_name)
        print(function_args)
        try:
            result = functions_dictionary[function_name](**function_args)
            print("Function result:", result)
            actions_results.append({"tool_output": {"tool_call_id": tool.id, "output": result["message"]}})
        except Exception as e:
            print(e)
            # Still report something for this call: the run stays blocked until
            # every requested tool_call_id has received an output.
            actions_results.append({"tool_output": {"tool_call_id": tool.id, "output": f"Error: {e}"}})
    # The caller submits all tool outputs at the same time.
    return actions_results

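# bot_actions is not shown in this file; from its usage above, functions_dictionary
# is assumed to map each assistant tool name to a callable that returns a dict
# with a "message" key, e.g. (illustrative sketch only):
#
#   functions_dictionary = {
#       "get_room_status": lambda room: {"message": f"Room {room} is free"},
#   }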
def create_thread_openai(sessionStorage):
    """Create a fresh Assistants-API thread and remember its id for this session."""
    streaming_thread = client.beta.threads.create()
    sessionStorage["threadId"] = streaming_thread.id
    return sessionStorage

def add_message_to_openai(text, threadId):
    print("User message: ", text)
    return client.beta.threads.messages.create(
        thread_id=threadId,
        role="user",
        content=text,
    )

def process_text_chunk(text, storage):
    """Echo a streamed chunk to stdout and append it to the running transcript."""
    print(text, end="", flush=True)
    storage["accumulative_string"] += text
    # Return only the new chunk; the caller appends it to the chat history.
    return text, storage

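# handle_events drives one assistant run over the streaming API:
#  - "thread.message.delta" events carry text chunks, which are appended to the
#    last chat message and re-yielded so Gradio updates the UI incrementally;
#  - "thread.run.requires_action" events mean the assistant requested tool
#    calls, which are executed locally and their outputs streamed back.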
def handle_events(threadId, chat_history, storage):
    storage.update({
        "accumulative_string": "",
        "markup_string": "",
    })
    try:
        with client.beta.threads.runs.stream(
            thread_id=threadId,
            assistant_id=assistantId,
        ) as stream:
            for event in stream:
                if event.event == "thread.message.delta" and event.data.delta.content:
                    text = event.data.delta.content[0].text.value
                    local_message, storage = process_text_chunk(text, storage)
                    if local_message is not None:
                        chat_history[-1][1] += local_message
                        yield [chat_history, storage]
                if event.event == "thread.run.requires_action":
                    result = handle_requires_action(event.data)
                    tool_outputs = [x["tool_output"] for x in result]
                    with client.beta.threads.runs.submit_tool_outputs_stream(
                        thread_id=stream.current_run.thread_id,
                        run_id=event.data.id,
                        tool_outputs=tool_outputs,
                    ) as action_stream:
                        for text in action_stream.text_deltas:
                            local_message, storage = process_text_chunk(text, storage)
                            if local_message is not None:
                                chat_history[-1][1] += local_message
                                yield [chat_history, storage]
            stream.until_done()
        print("")
    except Exception as e:
        print(e)
        chat_history[-1][1] = "An error occurred while processing your message. Please try again."
        yield [chat_history, storage]

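# Two entry points feed handle_events: initiate_chatting runs once when the
# page loads and seeds the thread with the configured initial message;
# respond_on_user_msg handles every subsequent user turn.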
def initiate_chatting(chat_history, storage):
    threadId = storage["threadId"]
    chat_history = [[None, ""]]
    add_message_to_openai(initial_message, threadId)
    for response in handle_events(threadId, chat_history, storage):
        yield response

def respond_on_user_msg(chat_history, storage):
    message = chat_history[-1][0]
    threadId = storage["threadId"]
    print("Responding for threadId: ", threadId)
    chat_history[-1][1] = ""
    add_message_to_openai(message, threadId)
    for response in handle_events(threadId, chat_history, storage):
        yield response

def create_tabs():
    # Stub: a multi-tab layout is not implemented yet.
    pass

def create_login_tab():
    # Currently unused: authentication is handled by demo.launch(auth=...) below.
    with gr.Blocks(fill_height=True) as login:
        with gr.Row():
            login_input = gr.Textbox(label="Login")
        with gr.Row():
            password_input = gr.Textbox(label="Password", type="password")
    return login

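# create_chat_tab builds the main UI: a full-height chat area, a row of six
# quick-reply buttons (created hidden and currently never made visible), and a
# textbox that is disabled while the assistant is responding.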
def create_chat_tab():
    with gr.Blocks(css=CSS, fill_height=True) as demo:
        storage = gr.State({"accumulative_string": ""})
        btn_list = []
        with gr.Row(elem_id="main_container"):
            with gr.Column(scale=4):
                chatbot = gr.Chatbot(label="Facility management bot", line_breaks=False, height=300, show_label=False, show_share_button=False, elem_id="chatbot")
                with gr.Row():
                    for i in range(6):
                        btn = gr.Button(visible=False, size="sm")
                        btn_list.append(btn)
                msg = gr.Textbox(label="Answer", interactive=False)

        def user(user_message, history):
            # Append the user's message to the history and clear the input box.
            return "", history + [[user_message, None]]

        def disable_msg():
            return gr.Textbox(value=None, interactive=False)

        def enable_msg():
            return gr.Textbox(value=None, interactive=True)

        add_user_message_flow = [user, [msg, chatbot], [msg, chatbot]]
        chat_response_flow = [respond_on_user_msg, [chatbot, storage], [chatbot, storage]]
        disable_msg_flow = [disable_msg, None, msg]
        enable_msg_flow = [enable_msg, None, msg]

        # On submit: record the message, lock the input, stream the reply, unlock.
        msg.submit(*add_user_message_flow
            ).then(*disable_msg_flow
            ).then(*chat_response_flow
            ).then(*enable_msg_flow)

        # On page load: create the thread, let the assistant speak first, unlock.
        demo.load(create_thread_openai, inputs=storage, outputs=storage
            ).then(initiate_chatting, inputs=[chatbot, storage], outputs=[chatbot, storage]
            ).then(*enable_msg_flow)
    return demo

demo = create_chat_tab()

if __name__ == "__main__":
    demo.launch(auth=(os.environ["login"], os.environ["password"]), auth_message="Enter your Username and Password")
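
# Required environment variables (illustrative values only; the names match the
# os.environ lookups above):
#
#   export openAIToken="sk-..."       # OpenAI API key
#   export assistantId="asst_..."     # id of a pre-configured Assistant
#   export initialMessage="Hello"     # message that seeds each new thread
#   export login="user"               # Gradio basic-auth username
#   export password="secret"          # Gradio basic-auth password
#   python app.py                     # assuming this file is saved as app.py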