andriydovgal commited on
Commit
528e8ec
·
verified ·
1 Parent(s): f47f108

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +187 -177
app.py CHANGED
@@ -1,178 +1,188 @@
1
- from openai import OpenAI
2
- import gradio as gr
3
- import json
4
- from bot_actions import functions_dictionary
5
- import os
6
-
7
- CSS ="""
8
- .contain { display: flex; flex-direction: column; }
9
- .svelte-vt1mxs div:first-child { flex-grow: 1; overflow: auto;}
10
- #chatbot { flex-grow: 1; overflow: auto;}
11
- footer {display: none !important;}
12
- .app.svelte-182fdeq.svelte-182fdeq {
13
- max-width: 100vw !important;
14
- }
15
- #main_container {
16
- height: 95vh;
17
- }
18
- #markup_container {
19
- height: 100%;
20
- overflow:auto;
21
- }
22
- """
23
-
24
- openAIToken = os.environ['openAIToken']
25
- assistantId = os.environ['assistantId']
26
- initial_message = os.environ['initialMessage']
27
-
28
- client = OpenAI(api_key=openAIToken)
29
-
30
- def handle_requires_action(data):
31
- actions_results = []
32
- for tool in data.required_action.submit_tool_outputs.tool_calls:
33
- function_name = tool.function.name
34
- function_args = json.loads(tool.function.arguments)
35
- print(function_name)
36
- print(function_args)
37
- try:
38
- result = functions_dictionary[tool.function.name](**function_args)
39
- print("Function result:", result)
40
- actions_results.append({"tool_output" : {"tool_call_id": tool.id, "output": result["message"]}})
41
- except Exception as e:
42
- print(e)
43
-
44
-
45
- # Submit all tool_outputs at the same time
46
- return actions_results
47
-
48
-
49
- def create_thread_openai(sessionStorage):
50
- streaming_thread = client.beta.threads.create()
51
- sessionStorage["threadId"] = streaming_thread.id
52
- return sessionStorage
53
-
54
- def add_message_to_openai(text, threadId):
55
- print("User message: ", text)
56
- return client.beta.threads.messages.create(
57
- thread_id=threadId,
58
- role="user",
59
- content=text
60
- )
61
-
62
-
63
- def process_text_chunk(text, storage):
64
- print(text, end="", flush=True)
65
- local_message = None
66
- accumulative_string = storage["accumulative_string"] + text
67
- local_message = accumulative_string
68
- return local_message, storage
69
-
70
- def handle_events(threadId, chat_history, storage):
71
- storage.update({
72
- "accumulative_string" : "",
73
- "markup_string": "",
74
- })
75
- try:
76
- with client.beta.threads.runs.stream(
77
- thread_id=threadId,
78
- assistant_id=assistantId
79
- ) as stream:
80
- for event in stream:
81
- if event.event == "thread.message.delta" and event.data.delta.content:
82
- text = event.data.delta.content[0].text.value
83
- local_message, storage = process_text_chunk(text, storage)
84
- if local_message is not None:
85
- chat_history[-1][1] += local_message
86
- yield [chat_history, storage]
87
- if event.event == 'thread.run.requires_action':
88
- result = handle_requires_action(event.data)
89
- tool_outputs = [x["tool_output"] for x in result]
90
- with client.beta.threads.runs.submit_tool_outputs_stream(
91
- thread_id=stream.current_run.thread_id,
92
- run_id=event.data.id,
93
- tool_outputs=tool_outputs,
94
- ) as action_stream:
95
- for text in action_stream.text_deltas:
96
- local_message, storage = process_text_chunk(text, storage)
97
- if local_message is not None:
98
- chat_history[-1][1] += local_message
99
- yield [chat_history, storage]
100
- action_stream.close()
101
- stream.until_done()
102
- print("")
103
- return [chat_history, storage]
104
- except Exception as e:
105
- print(e)
106
- chat_history[-1][1] = "Error occured during processing your message. Please try again"
107
- yield [chat_history, storage]
108
-
109
- def initiate_chatting(chat_history, storage):
110
- threadId = storage["threadId"]
111
- chat_history = [[None, ""]]
112
- add_message_to_openai(initial_message, threadId)
113
- for response in handle_events(threadId, chat_history, storage):
114
- yield response
115
-
116
- def respond_on_user_msg(chat_history, storage):
117
- message = chat_history[-1][0]
118
- threadId = storage["threadId"]
119
- print("Responding for threadId: ", threadId)
120
- chat_history[-1][1] = ""
121
- add_message_to_openai(message, threadId)
122
- for response in handle_events(threadId, chat_history, storage):
123
- yield response
124
-
125
- def create_tabs():
126
-
127
- pass
128
-
129
- def create_login_tab():
130
- with gr.Blocks(fill_height=True) as login:
131
- with gr.Row():
132
- login_input = gr.Textbox(label="Login")
133
- with gr.Row():
134
- password_input = gr.Textbox(label="Password", type="password")
135
- return login
136
-
137
- def create_chat_tab():
138
- with gr.Blocks(css=CSS, fill_height=True) as demo:
139
- storage = gr.State({"accumulative_string": ""})
140
- btn_list = []
141
- with gr.Row(elem_id="main_container"):
142
- with gr.Column(scale=4):
143
- chatbot = gr.Chatbot(label="Facility managment bot", line_breaks=False, height=300, show_label=False, show_share_button=False, elem_id="chatbot")
144
- with gr.Row():
145
- for i in range(6):
146
- btn = gr.Button(visible=False, size="sm")
147
- btn_list.append(btn)
148
- msg = gr.Textbox(label="Answer", interactive=False)
149
-
150
- def user(user_message, history):
151
- return "", history + [[user_message, None]]
152
-
153
- def disable_msg():
154
- message_box = gr.Textbox(value=None, interactive=False)
155
- return message_box
156
-
157
- def enable_msg():
158
- message_box = gr.Textbox(value=None, interactive=True)
159
- return message_box
160
-
161
- add_user_message_flow = [user, [msg,chatbot], [msg,chatbot]]
162
- chat_response_flow = [respond_on_user_msg, [chatbot, storage], [chatbot, storage]]
163
- disable_msg_flow = [disable_msg, None, msg]
164
- enable_msg_flow = [enable_msg, None, msg]
165
-
166
- msg.submit(*add_user_message_flow
167
- ).then(*disable_msg_flow
168
- ).then(*chat_response_flow
169
- ).then(*enable_msg_flow)
170
-
171
- demo.load(create_thread_openai, inputs=storage, outputs=storage
172
- ).then(initiate_chatting, inputs=[chatbot, storage], outputs=[chatbot, storage]
173
- ).then(*enable_msg_flow)
174
- return demo
175
-
176
- demo = create_chat_tab()
177
- if __name__ == "__main__":
 
 
 
 
 
 
 
 
 
 
178
  demo.launch(auth=(os.environ['login'], os.environ['password']), auth_message="Enter your Username and Password")
 
1
+ from openai import OpenAI
2
+ import gradio as gr
3
+ import json
4
+ from bot_actions import functions_dictionary
5
+ import os
6
+
7
# Custom CSS: make the chat column fill the viewport, hide the gradio footer,
# and let the chatbot / markup containers scroll instead of overflowing.
CSS ="""
.contain { display: flex; flex-direction: column; }
.svelte-vt1mxs div:first-child { flex-grow: 1; overflow: auto;}
#chatbot { flex-grow: 1; overflow: auto;}
footer {display: none !important;}
.app.svelte-182fdeq.svelte-182fdeq {
    max-width: 100vw !important;
}
#main_container {
    height: 95vh;
}
#markup_container {
    height: 100%;
    overflow:auto;
}
"""

# Deployment configuration comes from environment variables (raises KeyError
# at import time if any is missing, which fails fast on misconfiguration).
openAIToken = os.environ['openAIToken']      # OpenAI API key
assistantId = os.environ['assistantId']      # id of the pre-created Assistant
initial_message = os.environ['initialMessage']  # greeting prompt sent on page load

# Single shared OpenAI client for the whole app.
client = OpenAI(api_key=openAIToken)
29
+
30
def handle_requires_action(data):
    """Execute the tool calls requested by an assistant run.

    Iterates over the run's required ``tool_calls``, dispatches each to the
    matching entry in ``functions_dictionary`` with its JSON-decoded
    arguments, and collects the outputs to be submitted back to the run.

    Args:
        data: the ``thread.run.requires_action`` event payload, exposing
            ``required_action.submit_tool_outputs.tool_calls``.

    Returns:
        list of dicts shaped ``{"tool_output": {"tool_call_id": ..., "output": ...}}``,
        one per tool call (including failed ones).
    """
    actions_results = []
    for tool in data.required_action.submit_tool_outputs.tool_calls:
        function_name = tool.function.name
        function_args = json.loads(tool.function.arguments)
        print(function_name)
        print(function_args)
        try:
            result = functions_dictionary[function_name](**function_args)
            print("Function result:", result)
            actions_results.append({"tool_output": {"tool_call_id": tool.id, "output": result["message"]}})
        except Exception as e:
            print(e)
            # BUG FIX: previously a failed tool call produced no output at all,
            # leaving the run stalled in "requires_action" (the API expects an
            # output for every tool_call_id). Report the failure instead.
            actions_results.append({"tool_output": {"tool_call_id": tool.id, "output": "Error: tool execution failed"}})
    # Submit all tool_outputs at the same time
    return actions_results
47
+
48
+
49
def create_thread_openai(sessionStorage):
    """Create a fresh assistant thread and record its id in the session state.

    Stores the new thread id under ``sessionStorage["threadId"]`` and returns
    the (mutated) session dict so gradio can persist it as component state.
    """
    thread = client.beta.threads.create()
    sessionStorage["threadId"] = thread.id
    return sessionStorage
53
+
54
def add_message_to_openai(text, threadId):
    """Post *text* as a user-role message on the given assistant thread.

    Returns the created message object from the OpenAI API.
    """
    print("User message: ", text)
    created = client.beta.threads.messages.create(
        thread_id=threadId, role="user", content=text
    )
    return created
61
+
62
+
63
def process_text_chunk(text, storage):
    """Echo a streamed text delta and combine it with the accumulated string.

    Prints the chunk without a newline (live console mirror of the stream),
    then returns ``storage["accumulative_string"] + text`` together with the
    untouched *storage* dict. Note: storage itself is not updated here.
    """
    print(text, end="", flush=True)
    combined = storage["accumulative_string"] + text
    return combined, storage
69
+
70
def handle_events(threadId, chat_history, storage):
    """Stream an assistant run, yielding incremental chat updates for gradio.

    Generator: starts a run on *threadId* with the configured assistant and,
    for every text delta, appends the text to the last chat entry and yields
    ``[chat_history, storage]`` so the UI re-renders progressively. When the
    run pauses for tool calls, executes them and streams the follow-up
    response the same way. On any exception the last bot message is replaced
    with a generic error string and yielded once.
    """
    # Reset the per-turn accumulation buffers before streaming begins.
    storage.update({
        "accumulative_string" : "",
        "markup_string": "",
    })
    try:
        with client.beta.threads.runs.stream(
            thread_id=threadId,
            assistant_id=assistantId
        ) as stream:
            for event in stream:
                # Plain text delta from the assistant: append and push to UI.
                if event.event == "thread.message.delta" and event.data.delta.content:
                    text = event.data.delta.content[0].text.value
                    local_message, storage = process_text_chunk(text, storage)
                    if local_message is not None:
                        chat_history[-1][1] += local_message
                        yield [chat_history, storage]
                # Run paused for tool execution: run the tools, then stream
                # the continuation produced by submit_tool_outputs_stream.
                if event.event == 'thread.run.requires_action':
                    result = handle_requires_action(event.data)
                    tool_outputs = [x["tool_output"] for x in result]
                    with client.beta.threads.runs.submit_tool_outputs_stream(
                        thread_id=stream.current_run.thread_id,
                        run_id=event.data.id,
                        tool_outputs=tool_outputs,
                    ) as action_stream:
                        for text in action_stream.text_deltas:
                            local_message, storage = process_text_chunk(text, storage)
                            if local_message is not None:
                                chat_history[-1][1] += local_message
                                yield [chat_history, storage]
                        action_stream.close()
            # Block until the run reaches a terminal state before returning.
            stream.until_done()
            print("")
            # NOTE: return value of a generator is only visible via
            # StopIteration.value; gradio consumes the yields above.
            return [chat_history, storage]
    except Exception as e:
        print(e)
        chat_history[-1][1] = "Error occured during processing your message. Please try again"
        yield [chat_history, storage]
108
+
109
def initiate_chatting(chat_history, storage):
    """Open the conversation: send the configured greeting and stream the reply.

    Resets the visible history to a single empty bot entry, posts the
    ``initial_message`` to the session's thread, and yields streaming
    ``[chat_history, storage]`` updates from handle_events.
    """
    thread_id = storage["threadId"]
    chat_history = [[None, ""]]
    add_message_to_openai(initial_message, thread_id)
    yield from handle_events(thread_id, chat_history, storage)
115
+
116
def respond_on_user_msg(chat_history, storage):
    """Stream the assistant's answer to the newest user message.

    Takes the last user message from *chat_history*, clears its bot slot,
    forwards the text to the session's thread, and yields streaming
    ``[chat_history, storage]`` updates from handle_events.
    """
    user_text = chat_history[-1][0]
    thread_id = storage["threadId"]
    print("Responding for threadId: ", thread_id)
    chat_history[-1][1] = ""
    add_message_to_openai(user_text, thread_id)
    yield from handle_events(thread_id, chat_history, storage)
124
+
125
def create_tabs():
    # Placeholder for a future multi-tab layout (e.g. login + chat tabs);
    # currently unused and intentionally a no-op.

    pass
128
+
129
def create_login_tab():
    """Build a simple login form (login + password fields).

    Currently unused — authentication is handled by demo.launch(auth=...).
    Returns the gr.Blocks container.
    """
    with gr.Blocks(fill_height=True) as login:
        with gr.Row():
            username_box = gr.Textbox(label="Login")
        with gr.Row():
            secret_box = gr.Textbox(label="Password", type="password")
    return login
136
+
137
def create_chat_tab():
    """Assemble the main chat UI and wire its event flows.

    Layout: a chatbot pane, a row of six hidden quick-reply buttons, the
    prompt textbox, and a row of example prompts. Submitting the prompt runs:
    append user message -> disable input -> stream assistant reply ->
    re-enable input. On page load a thread is created and the assistant's
    greeting is streamed. Returns the gr.Blocks app.
    """
    with gr.Blocks(css=CSS, fill_height=True) as demo:
        # Per-session state shared with the streaming handlers.
        storage = gr.State({"accumulative_string": ""})
        btn_list = []
        with gr.Row(elem_id="main_container"):
            with gr.Column(scale=4):
                chatbot = gr.Chatbot(label="Board of Advisors Assistant", line_breaks=False, height=300, show_label=False, show_share_button=False, elem_id="chatbot")
                with gr.Row():
                    # Hidden quick-reply buttons (reserved for future use).
                    for i in range(6):
                        btn = gr.Button(visible=False, size="sm")
                        btn_list.append(btn)
                msg = gr.Textbox(label="Prompt", interactive=False)
                with gr.Row():
                    examples = gr.Examples(
                        [
                            "I need someone that can help me with real estate in Texas",
                            "I'm looking for help with a payment system for my business",
                            "I need help to develop my leadership skills"
                        ],
                        # BUG FIX: was `inputs=[textbox]` — `textbox` is never
                        # defined (NameError at startup); the examples should
                        # populate the prompt textbox `msg`.
                        inputs=[msg],
                        label="Example inputs"
                    )

        def user(user_message, history):
            # Append the user's message with an empty bot slot; clear the box.
            return "", history + [[user_message, None]]

        def disable_msg():
            # Lock the prompt box while the assistant is responding.
            message_box = gr.Textbox(value=None, interactive=False)
            return message_box

        def enable_msg():
            # Re-open the prompt box once streaming finishes.
            message_box = gr.Textbox(value=None, interactive=True)
            return message_box

        add_user_message_flow = [user, [msg, chatbot], [msg, chatbot]]
        chat_response_flow = [respond_on_user_msg, [chatbot, storage], [chatbot, storage]]
        disable_msg_flow = [disable_msg, None, msg]
        enable_msg_flow = [enable_msg, None, msg]

        msg.submit(*add_user_message_flow
        ).then(*disable_msg_flow
        ).then(*chat_response_flow
        ).then(*enable_msg_flow)

        demo.load(create_thread_openai, inputs=storage, outputs=storage
        ).then(initiate_chatting, inputs=[chatbot, storage], outputs=[chatbot, storage]
        ).then(*enable_msg_flow)
    return demo
185
+
186
# Build the UI at import time so `demo` is available to the hosting runtime
# (e.g. HF Spaces); launch with basic auth only when run as a script.
demo = create_chat_tab()
if __name__ == "__main__":
    # Credentials are read from the `login` / `password` environment variables.
    demo.launch(auth=(os.environ['login'], os.environ['password']), auth_message="Enter your Username and Password")