oleksandrburlakov committed on
Commit
ff2c5a2
·
1 Parent(s): 62c1c78

added more logs and initial message prompt

Browse files
Files changed (1) hide show
  1. app.py +38 -30
app.py CHANGED
@@ -4,6 +4,11 @@ import os
4
 
5
  client = OpenAI(api_key=os.environ['openAIToken'])
6
  assistantId = os.environ['assistantId']
 
 
 
 
 
7
 
8
  def createThread(sessionStorage):
9
  streaming_thread = client.beta.threads.create()
@@ -11,6 +16,7 @@ def createThread(sessionStorage):
11
  return sessionStorage
12
 
13
  def addMessage(text, threadId):
 
14
  return client.beta.threads.messages.create(
15
  thread_id=threadId,
16
  role="user",
@@ -24,7 +30,7 @@ def handle_suggestions(string_of_suggestions):
24
  local_message = None
25
  parts = string_of_suggestions.split('#s#')
26
  list_of_suggestions = [x.strip('"') for x in parts[0].strip('][').split('", ') if x.strip('"')]
27
- print(list_of_suggestions)
28
  if len(parts) > 1:
29
  local_message = parts[1]
30
  return list_of_suggestions, local_message
@@ -45,7 +51,7 @@ CSS ="""
45
 
46
  def initiate_chatting(storage):
47
  threadId = storage[0]
48
- addMessage("Hi", threadId)
49
  global list_of_suggestions
50
  list_of_suggestions = []
51
  string_of_suggestions = ""
@@ -57,7 +63,6 @@ def initiate_chatting(storage):
57
  assistant_id=assistantId,
58
  ) as stream:
59
  for text in stream.text_deltas:
60
- print(text, end="")
61
  local_message = None
62
  if "[" in text and is_it_first_response:
63
  is_loading_suggestions = True
@@ -80,7 +85,6 @@ def initiate_chatting(storage):
80
  def createDemo():
81
  with gr.Blocks(css=CSS, fill_height=True) as demo:
82
  storage = gr.State([])
83
- inital_response_message = ''
84
  chatbot = gr.Chatbot(label="Facility managment bot", elem_id="chatbot")
85
  with gr.Row():
86
  for i in range(6):
@@ -96,11 +100,10 @@ def createDemo():
96
  return "", history + [[user_message, None]]
97
 
98
  def respond(chat_history, storage):
99
- print("Responding")
100
  global btn_list
101
  message = chat_history[-1][0]
102
  threadId = storage[0]
103
- print("THREAD_ID:", threadId)
104
  chat_history[-1][1] = ""
105
  addMessage(message, threadId)
106
  global list_of_suggestions
@@ -108,30 +111,35 @@ def createDemo():
108
  string_of_suggestions = ""
109
  is_loading_suggestions = False
110
  is_it_first_response = True
111
- with client.beta.threads.runs.stream(
112
- thread_id=threadId,
113
- assistant_id=assistantId,
114
- ) as stream:
115
- for text in stream.text_deltas:
116
- print(text, end="")
117
- local_message = None
118
- if "[" in text and is_it_first_response:
119
- is_loading_suggestions = True
120
-
121
- is_it_first_response = False
122
-
123
- if is_loading_suggestions != True:
124
- local_message = text
125
- else:
126
- string_of_suggestions = string_of_suggestions + text
127
- if "#s#" in string_of_suggestions:
128
- is_loading_suggestions = False
129
- list_of_suggestions, local_message = handle_suggestions(string_of_suggestions)
130
- if local_message is not None:
131
- chat_history[-1][1] += local_message
132
- yield {chatbot: chat_history}
133
-
134
- stream.until_done()
 
 
 
 
 
135
 
136
  def update_suggestions():
137
  global list_of_suggestions
 
4
 
5
  client = OpenAI(api_key=os.environ['openAIToken'])
6
  assistantId = os.environ['assistantId']
7
+ initial_message = os.environ['initialMessage']
8
+
9
  # client = OpenAI(api_key="sk-proj-********REDACTED********")  # NOTE(review): hard-coded API key redacted from this record — the original credential was exposed and must be revoked
10
+ # assistantId = "asst_Rce5MptxfyEwp4NjImSDvXGD"
11
+ # initial_message = "Hello. Let's start defining new facility. Also provide options."
12
 
13
  def createThread(sessionStorage):
14
  streaming_thread = client.beta.threads.create()
 
16
  return sessionStorage
17
 
18
  def addMessage(text, threadId):
19
+ print("User message: ", text)
20
  return client.beta.threads.messages.create(
21
  thread_id=threadId,
22
  role="user",
 
30
  local_message = None
31
  parts = string_of_suggestions.split('#s#')
32
  list_of_suggestions = [x.strip('"') for x in parts[0].strip('][').split('", ') if x.strip('"')]
33
+ list_of_suggestions = [ x for x in list_of_suggestions if x]
34
  if len(parts) > 1:
35
  local_message = parts[1]
36
  return list_of_suggestions, local_message
 
51
 
52
  def initiate_chatting(storage):
53
  threadId = storage[0]
54
+ addMessage(initial_message, threadId)
55
  global list_of_suggestions
56
  list_of_suggestions = []
57
  string_of_suggestions = ""
 
63
  assistant_id=assistantId,
64
  ) as stream:
65
  for text in stream.text_deltas:
 
66
  local_message = None
67
  if "[" in text and is_it_first_response:
68
  is_loading_suggestions = True
 
85
  def createDemo():
86
  with gr.Blocks(css=CSS, fill_height=True) as demo:
87
  storage = gr.State([])
 
88
  chatbot = gr.Chatbot(label="Facility managment bot", elem_id="chatbot")
89
  with gr.Row():
90
  for i in range(6):
 
100
  return "", history + [[user_message, None]]
101
 
102
  def respond(chat_history, storage):
 
103
  global btn_list
104
  message = chat_history[-1][0]
105
  threadId = storage[0]
106
+ print("Responding for threadId: ", threadId)
107
  chat_history[-1][1] = ""
108
  addMessage(message, threadId)
109
  global list_of_suggestions
 
111
  string_of_suggestions = ""
112
  is_loading_suggestions = False
113
  is_it_first_response = True
114
+ try:
115
+ with client.beta.threads.runs.stream(
116
+ thread_id=threadId,
117
+ assistant_id=assistantId,
118
+ ) as stream:
119
+ for text in stream.text_deltas:
120
+ print(text, end="")
121
+ local_message = None
122
+ if "[" in text and is_it_first_response:
123
+ is_loading_suggestions = True
124
+
125
+ is_it_first_response = False
126
+
127
+ if is_loading_suggestions != True:
128
+ local_message = text
129
+ else:
130
+ string_of_suggestions = string_of_suggestions + text
131
+ if "#s#" in string_of_suggestions:
132
+ is_loading_suggestions = False
133
+ list_of_suggestions, local_message = handle_suggestions(string_of_suggestions)
134
+ if local_message is not None:
135
+ chat_history[-1][1] += local_message
136
+ yield {chatbot: chat_history}
137
+ stream.until_done()
138
+ print("")
139
+ except:
140
+ list_of_suggestions = []
141
+ chat_history[-1][1] = "Error occured during processing your message. Please try again"
142
+ yield {chatbot: chat_history}
143
 
144
  def update_suggestions():
145
  global list_of_suggestions