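# Gradio chat front-end for an OpenAI Assistant (beta Assistants API) that
# streams replies and offers clickable follow-up suggestion buttons.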
from openai import OpenAI
import gradio as gr
import os
 
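# The OpenAI API key and the Assistant ID are read from environment variables
# (assumed to be configured in the hosting environment).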
client = OpenAI(api_key=os.environ['openAIToken'])
assistantId = os.environ['assistantId']

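# Create a new Assistants API thread to hold the conversation state.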
def createThread():
  return client.beta.threads.create()

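# Append a user message to the given thread.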
def addMessage(text, threadId):
  return client.beta.threads.messages.create(
    thread_id=threadId,
    role="user",
    content=text
  )

list_of_suggestions = []

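# The assistant is expected to prefix its reply with a list of follow-up
# suggestions, e.g. ["Option A", "Option B"]#s#rest of the answer, where "#s#"
# separates the suggestions from the actual answer. Split and parse that prefix.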
def handle_suggestions(string_of_suggestions):
  local_message = None
  parts = string_of_suggestions.split('#s#')
  list_of_suggestions = [x.strip('"') for x in parts[0].strip('][').split('", ') if x.strip('"')]
  print(list_of_suggestions)
  if len(parts) > 1:
    local_message = parts[1]
  return list_of_suggestions, local_message

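# Turn the parsed suggestions into Gradio updates for up to six buttons,
# hiding any buttons that are not needed.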
def create_suggestions_list(suggestions):
  update_show = [gr.update(visible=True, value=w) for w in suggestions]
  update_hide = [gr.update(visible=False, value="") for _ in range(6-len(suggestions))]
  return update_show + update_hide

btn_list = []

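# CSS that keeps the chat area filling the available viewport height.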
CSS ="""
.contain { display: flex; flex-direction: column; }
.gradio-container { height: 100vh !important; }
#chatbot { flex-grow: 1; overflow: auto;}
"""

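# Build the UI: chatbot panel, six hidden suggestion buttons, a textbox and controls.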
with gr.Blocks(css=CSS, fill_height=True) as demo:
  streaming_thread = createThread()
  chatbot = gr.Chatbot(label="Facility management bot", elem_id="chatbot")  # elem_id ties into the #chatbot CSS rule
  with gr.Row():
      for i in range(6):
          btn = gr.Button(visible=False)
          btn_list.append(btn)
  msg = gr.Textbox(label="Answer")
  with gr.Row():
    clear_btn = gr.ClearButton(chatbot)
    btn = gr.Button("Submit")


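  # Move the submitted text into the chat history and clear the textbox.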
  def user(user_message, history):
      return "", history + [[user_message, None]]

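  # Stream the assistant's reply for the latest user message. Any leading
  # suggestions block is buffered until the "#s#" marker arrives; the rest of
  # the text is streamed into the chat history as it is generated.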
  def respond(chat_history):
    print("Responding")
    message = chat_history[-1][0]
    threadId = streaming_thread.id
    chat_history[-1][1] = ""
    addMessage(message, threadId)
    global list_of_suggestions
    list_of_suggestions = []
    string_of_suggestions = ""
    is_loading_suggestions = False
    is_it_first_response = True
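    # Run the assistant on the thread and consume the streamed text deltas.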
    with client.beta.threads.runs.stream(
      thread_id=threadId,
      assistant_id=assistantId,
    ) as stream:
      for text in stream.text_deltas:
        print(text, end="")
        local_message = None
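        # A "[" in the first streamed chunk signals that a suggestions list
        # precedes the actual answer.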
        if "[" in text and is_it_first_response:
          is_loading_suggestions = True

        is_it_first_response = False

        if not is_loading_suggestions:
          local_message = text
        else:
          string_of_suggestions = string_of_suggestions + text
          if "#s#" in string_of_suggestions:
            is_loading_suggestions = False
            list_of_suggestions, local_message = handle_suggestions(string_of_suggestions)
        if local_message is not None:
          chat_history[-1][1] += local_message
          yield {chatbot: chat_history}

      stream.until_done()

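  # Show the suggestion buttons parsed from the latest response.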
  def update_suggestions():
    global list_of_suggestions
    btn_list = create_suggestions_list(list_of_suggestions)
    return btn_list
  
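  # Hide all suggestion buttons while a new response is being generated.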
  def hide_suggestions(): 
    return [gr.update(visible=False, value="") for _ in range(6)]

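  # Disable the textbox while the assistant is responding; re-enable it afterwards.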
  def disable_msg():
    message_box = gr.Textbox(value=None, interactive=False)
    return message_box
  
  def enable_msg():
    message_box = gr.Textbox(value=None, interactive=True)
    return message_box

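  # Each flow is a (function, inputs, outputs) triple that is unpacked into the
  # Gradio event chains below.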
  add_user_message_flow = [user, [msg, chatbot], [msg, chatbot]]
  chat_response_flow = [respond, [chatbot], [chatbot]]
  update_suggestions_flow = [update_suggestions, None, btn_list]
  hide_suggestions_flow = [hide_suggestions, None, btn_list]
  disable_msg_flow = [disable_msg, None, msg]
  enable_msg_flow = [enable_msg, None, msg]

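  # On submit (button, Enter, or a suggestion click): add the user message, hide
  # the suggestions, lock the textbox, stream the reply, then show the new
  # suggestions and unlock the textbox.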
  btn.click(*add_user_message_flow).then(*hide_suggestions_flow).then(*disable_msg_flow).then(*chat_response_flow).then(*update_suggestions_flow).then(*enable_msg_flow)
  msg.submit(*add_user_message_flow).then(*hide_suggestions_flow).then(*disable_msg_flow).then(*chat_response_flow).then(*update_suggestions_flow).then(*enable_msg_flow)
  for sug_btn in btn_list:
    add_suggestion_message_flow = [user, [sug_btn, chatbot], [msg, chatbot]]
    sug_btn.click(*add_suggestion_message_flow).then(*hide_suggestions_flow).then(*disable_msg_flow).then(*chat_response_flow).then(*update_suggestions_flow).then(*enable_msg_flow)



if __name__ == "__main__":
  demo.launch()
  gr.close_all()