oleksandrburlakov's picture
Implemented facility chat
c85385e
raw
history blame
4.34 kB
from openai import OpenAI
import gradio as gr
import os
# OpenAI client authenticated from the environment; both the API key and the
# pre-created Assistant id are expected to be set as env vars (e.g. HF Space secrets).
client = OpenAI(api_key=os.environ['openAIToken'])
assistantId = os.environ['assistantId']
def createThread():
    """Open a fresh OpenAI Assistants conversation thread and return it."""
    thread = client.beta.threads.create()
    return thread
def addMessage(text, threadId):
    """Append *text* as a user-role message to the thread identified by *threadId*.

    Returns the message object created by the OpenAI API.
    """
    created = client.beta.threads.messages.create(
        thread_id=threadId,
        role="user",
        content=text,
    )
    return created
# Module-level state shared between the streaming callback and the
# suggestion-button updater (see respond/update_suggestions below).
list_of_suggestions = []
# NOTE(review): latest_suggestions is never read or written anywhere in this
# file — it looks like dead state left over from an earlier revision.
latest_suggestions = ""
def handle_suggestions(string_of_suggestions):
    """Split a raw assistant chunk into (suggestions, trailing_message).

    The assistant is expected to prefix its reply with a JSON-ish list of
    suggested answers terminated by the literal marker '#s#', e.g.
    '["a", "b"]#s#Hello'. Everything before the marker is parsed into a
    list of strings; everything after it (if present) is returned as the
    visible message, otherwise None.
    """
    pieces = string_of_suggestions.split('#s#')
    raw_items = pieces[0].strip('][').split('", ')
    suggestions = [item.strip('"') for item in raw_items if item.strip('"')]
    print(suggestions)
    trailing = pieces[1] if len(pieces) > 1 else None
    return suggestions, trailing
def create_suggestions_list(suggestions):
    """Map *suggestions* onto the six fixed buttons.

    The first len(suggestions) buttons become visible with the suggestion
    text as their value; the remaining buttons (up to six total) are hidden
    and cleared. Returns the six gr.update dicts in button order.
    """
    shown = [gr.update(visible=True, value=text) for text in suggestions]
    hidden_count = 6 - len(suggestions)
    hidden = [gr.update(visible=False, value="") for _ in range(hidden_count)]
    return shown + hidden
# Suggestion buttons created inside the Blocks layout below.
btn_list = []
# NOTE(review): this rebinds list_of_suggestions (initialized to [] above) to
# an empty *string* — respond() resets it to a list before use, so it works,
# but the type flip-flop looks unintentional.
list_of_suggestions = ""
# Make the chat column fill the viewport and let only the chatbot scroll.
CSS ="""
.contain { display: flex; flex-direction: column; }
.gradio-container { height: 100vh !important; }
#chatbot { flex-grow: 1; overflow: auto;}
"""
with gr.Blocks(css=CSS, fill_height=True) as demo:
    # NOTE(review): a single assistant thread is created at import time and
    # shared by every visitor of this app instance — concurrent users will
    # interleave into one conversation. Verify this is intended.
    streaming_thread = createThread()
    # NOTE(review): "managment" is a typo in the user-visible label.
    chatbot = gr.Chatbot(label="Facility managment bot", elem_id="chatbot") #just to fit the notebook
    # Six always-present (initially hidden) suggestion buttons.
    with gr.Row():
        for i in range(6):
            btn = gr.Button(visible=False)
            btn_list.append(btn)
    msg = gr.Textbox(label="Answer")
    with gr.Row():
        clear_btn = gr.ClearButton(chatbot)
        btn = gr.Button("Submit")

    def user(user_message, history):
        # Append the user's message with a None bot-reply placeholder and
        # clear the textbox (first return value becomes the textbox value).
        return "", history + [[user_message, None]]

    def respond(chat_history):
        # Stream the assistant's reply for the last user message, yielding
        # partial chatbot updates. Text before the '#s#' marker is treated as
        # a suggestions list and withheld from the visible reply.
        print("Responding")
        global btn_list
        message = chat_history[-1][0]
        threadId = streaming_thread.id
        chat_history[-1][1] = ""
        addMessage(message, threadId)
        global list_of_suggestions
        list_of_suggestions = []
        string_of_suggestions = ""
        is_loading_suggestions = False
        is_it_first_response = True
        with client.beta.threads.runs.stream(
            thread_id=threadId,
            assistant_id=assistantId,
        ) as stream:
            for text in stream.text_deltas:
                print(text, end="")
                local_message = None
                # A '[' in the very first delta signals the suggestions
                # prefix; buffer deltas until the '#s#' terminator arrives.
                if "[" in text and is_it_first_response:
                    is_loading_suggestions = True
                    is_it_first_response = False
                if is_loading_suggestions != True:
                    local_message = text
                else:
                    string_of_suggestions = string_of_suggestions + text
                    if "#s#" in string_of_suggestions:
                        # Marker complete: parse suggestions and emit any
                        # message text that followed the marker.
                        is_loading_suggestions = False
                        list_of_suggestions, local_message = handle_suggestions(string_of_suggestions)
                if local_message is not None:
                    chat_history[-1][1] += local_message
                    # Yield a partial update so the reply streams in the UI.
                    yield {chatbot: chat_history}
            stream.until_done()

    def update_suggestions():
        # Reveal the buttons for the suggestions captured during streaming.
        # NOTE(review): btn_list here is a *local* variable shadowing the
        # global component list; it holds gr.update dicts, not buttons.
        global list_of_suggestions
        btn_list = create_suggestions_list(list_of_suggestions)
        return btn_list

    def hide_suggestions():
        # Hide and clear all six suggestion buttons.
        return [gr.update(visible=False, value="") for _ in range(6)]

    def disable_msg():
        # Lock the textbox while the assistant is responding.
        message_box = gr.Textbox(value=None, interactive=False)
        return message_box

    def enable_msg():
        # Re-enable the textbox once the response cycle finishes.
        message_box = gr.Textbox(value=None, interactive=True)
        return message_box

    # Reusable [fn, inputs, outputs] triples unpacked into the event wiring.
    add_user_message_flow = [user, [msg,chatbot], [msg,chatbot]]
    chat_response_flow = [respond, [chatbot], [chatbot]]
    update_suggestions_flow = [update_suggestions, None, btn_list]
    hide_suggestions_flow = [hide_suggestions, None, btn_list]
    disable_msg_flow = [disable_msg, None, msg]
    enable_msg_flow = [enable_msg, None, msg]
    # Same pipeline for button click and textbox submit:
    # add message -> hide suggestions -> lock input -> stream reply ->
    # show new suggestions -> unlock input.
    btn.click(*add_user_message_flow).then(*hide_suggestions_flow).then(*disable_msg_flow).then(*chat_response_flow).then(*update_suggestions_flow).then(*enable_msg_flow)
    msg.submit(*add_user_message_flow).then(*hide_suggestions_flow).then(*disable_msg_flow).then(*chat_response_flow).then(*update_suggestions_flow).then(*enable_msg_flow)
    # Clicking a suggestion button feeds its label in as the user message.
    for sug_btn in btn_list:
        add_suggestion_message_flow = [user, [sug_btn, chatbot], [msg, chatbot]]
        sug_btn.click(*add_suggestion_message_flow).then(*hide_suggestions_flow).then(*disable_msg_flow).then(*chat_response_flow).then(*update_suggestions_flow).then(*enable_msg_flow)
if __name__ == "__main__":
    demo.launch()
    # NOTE(review): the original file's indentation was lost; close_all() is
    # assumed to sit inside the guard, running after launch() returns.
    gr.close_all()