andriydovgal committed on
Commit
c25e3b1
·
verified ·
1 Parent(s): e5c2637

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -157
app.py CHANGED
@@ -4,163 +4,14 @@ import json
4
  from bot_actions import functions_dictionary
5
  import os
6
 
7
# Custom CSS injected into the Gradio Blocks app: makes the chat column fill
# the viewport, hides the Gradio footer, and keeps the side panel scrollable.
CSS ="""
.contain { display: flex; flex-direction: column; }
.svelte-vt1mxs div:first-child { flex-grow: 1; overflow: auto;}
#chatbot { flex-grow: 1; overflow: auto;}
footer {display: none !important;}
.app.svelte-182fdeq.svelte-182fdeq {
    max-width: 100vw !important;
}
#main_container {
    height: 95vh;
}
#markup_container {
    height: 100%;
    overflow:auto;
}
"""

# Runtime configuration from environment variables (required; a missing key
# raises KeyError at import time, which fails fast on misconfiguration).
openAIToken = os.environ['openAIToken']      # OpenAI API key
assistantId = os.environ['assistantId']      # id of the pre-created Assistant
initial_message = os.environ['initialMessage']  # first (hidden) user prompt sent to seed the chat

# Module-level OpenAI client shared by all handlers below.
client = OpenAI(api_key=openAIToken)
 
 
 
 
29
 
30
def handle_requires_action(data):
    """Execute every tool call requested by a `thread.run.requires_action` event.

    Parameters:
        data: the run object from the event; tool calls are read from
              `data.required_action.submit_tool_outputs.tool_calls`.

    Returns:
        list of dicts shaped {"tool_output": {"tool_call_id": ..., "output": ...}},
        one entry per requested tool call.
    """
    actions_results = []
    for tool in data.required_action.submit_tool_outputs.tool_calls:
        function_name = tool.function.name
        # Arguments arrive as a JSON string chosen by the model.
        function_args = json.loads(tool.function.arguments)
        print(function_name)
        print(function_args)
        try:
            result = functions_dictionary[function_name](**function_args)
            print("Function result:", result)
            actions_results.append({"tool_output": {"tool_call_id": tool.id, "output": result["message"]}})
        except Exception as e:
            # BUGFIX: previously the exception was only printed and no output was
            # recorded, but submit_tool_outputs requires an output for EVERY
            # tool_call_id — a missing one leaves the run stuck/failed.
            # Report the failure back to the model instead.
            print(e)
            actions_results.append({"tool_output": {"tool_call_id": tool.id, "output": f"Tool execution failed: {e}"}})

    # Caller submits all tool_outputs at the same time.
    return actions_results
47
-
48
-
49
def create_thread_openai(sessionStorage):
    """Create a new OpenAI conversation thread and store its id under
    ``sessionStorage["threadId"]``; returns the (mutated) session storage."""
    thread = client.beta.threads.create()
    sessionStorage["threadId"] = thread.id
    return sessionStorage
53
-
54
def add_message_to_openai(text, threadId):
    """Append *text* as a user-role message to thread *threadId*.

    Returns the created message object from the OpenAI API.
    """
    print("User message: ", text)
    created = client.beta.threads.messages.create(
        thread_id=threadId, role="user", content=text,
    )
    return created
61
-
62
-
63
def process_text_chunk(text, storage):
    """Echo a streamed text chunk to stdout and return it for display.

    Returns a pair ``(message, storage)`` where *message* is
    ``storage["accumulative_string"] + text``. Note the storage value is read
    but never written here, so with the usual empty initial value the message
    is simply the chunk itself.
    """
    print(text, end="", flush=True)
    chunk_message = storage["accumulative_string"] + text
    return chunk_message, storage
69
-
70
def handle_events(threadId, chat_history, storage):
    """Stream an assistant run for *threadId*, yielding ``[chat_history, storage]``
    after each text delta so Gradio can live-update the chat.

    Text deltas are appended to ``chat_history[-1][1]`` (the bot side of the
    last chat pair). When the run requests tool calls, they are executed via
    handle_requires_action and the resulting output stream is relayed the
    same way. Any exception replaces the bot message with a generic error.
    """
    # Reset per-turn accumulation state before streaming starts.
    storage.update({
        "accumulative_string" : "",
        "markup_string": "",
    })
    try:
        with client.beta.threads.runs.stream(
            thread_id=threadId,
            assistant_id=assistantId
        ) as stream:
            for event in stream:
                # Plain text chunk from the assistant's reply.
                if event.event == "thread.message.delta" and event.data.delta.content:
                    text = event.data.delta.content[0].text.value
                    local_message, storage = process_text_chunk(text, storage)
                    if local_message is not None:
                        chat_history[-1][1] += local_message
                        yield [chat_history, storage]
                # The run paused waiting for tool results: execute the tools
                # and stream the continuation produced after submitting them.
                if event.event == 'thread.run.requires_action':
                    result = handle_requires_action(event.data)
                    tool_outputs = [x["tool_output"] for x in result]
                    with client.beta.threads.runs.submit_tool_outputs_stream(
                        thread_id=stream.current_run.thread_id,
                        run_id=event.data.id,
                        tool_outputs=tool_outputs,
                    ) as action_stream:
                        for text in action_stream.text_deltas:
                            local_message, storage = process_text_chunk(text, storage)
                            if local_message is not None:
                                chat_history[-1][1] += local_message
                                yield [chat_history, storage]
                        action_stream.close()
            # Block until the run reaches a terminal state.
            stream.until_done()
        print("")
        # NOTE: a `return` in a generator only sets StopIteration.value;
        # the Gradio .then() chain consumes the yielded values above.
        return [chat_history, storage]
    except Exception as e:
        print(e)
        # User-facing fallback message (typo "occured" is in the original
        # runtime string; left unchanged here).
        chat_history[-1][1] = "Error occured during processing your message. Please try again"
        yield [chat_history, storage]
108
-
109
def initiate_chatting(chat_history, storage):
    """Seed the conversation: reset the visible history to one empty bot turn,
    send the configured initial message, then relay streaming updates."""
    thread_id = storage["threadId"]
    chat_history = [[None, ""]]
    add_message_to_openai(initial_message, thread_id)
    yield from handle_events(thread_id, chat_history, storage)
115
-
116
def respond_on_user_msg(chat_history, storage):
    """Answer the newest user message (``chat_history[-1][0]``): clear the bot
    slot of that pair, forward the message to OpenAI, and stream the reply."""
    last_user_message = chat_history[-1][0]
    thread_id = storage["threadId"]
    print("Responding for threadId: ", thread_id)
    chat_history[-1][1] = ""
    add_message_to_openai(last_user_message, thread_id)
    yield from handle_events(thread_id, chat_history, storage)
124
-
125
def create_chat_tab():
    """Build and return the Gradio Blocks chat UI.

    The textbox is created BEFORE the Blocks context so it can be referenced
    by gr.Examples, then placed into the layout later via ``msg.render()``.
    """
    msg = gr.Textbox(label="Answer")
    with gr.Blocks(css=CSS, fill_height=True) as chat_view:
        # Per-session state holding the OpenAI thread id and streaming buffer.
        storage = gr.State({"accumulative_string": ""})
        with gr.Row(elem_id="main_container"):
            with gr.Column(scale=4):
                chatbot = gr.Chatbot(label="Board of Advisors Assistant", line_breaks=False, height=300, show_label=False, show_share_button=False, elem_id="chatbot")
                examples = gr.Examples(examples=["I need someone that can help me with real estate in Texas",
                                                 "I'm looking for help with a payment system for my business",
                                                 "I need help to develop my leadership skills"], inputs=msg)
                msg.render()

        def user(user_message, history):
            # Move the submitted text into the history as a new pair and
            # clear the input box.
            return "", history + [[user_message, None]]

        def disable_msg():
            # Lock the input while a response is streaming.
            message_box = gr.Textbox(value=None, interactive=False)
            return message_box

        def enable_msg():
            # Re-enable the input once streaming finishes.
            message_box = gr.Textbox(value=None, interactive=True)
            return message_box

        # Event-flow triples: (callback, inputs, outputs), splatted into
        # .submit()/.then() below.
        add_user_message_flow = [user, [msg,chatbot], [msg,chatbot]]
        chat_response_flow = [respond_on_user_msg, [chatbot, storage], [chatbot, storage]]
        disable_msg_flow = [disable_msg, None, msg]
        enable_msg_flow = [enable_msg, None, msg]

        # Submit: record user message -> lock input -> stream reply -> unlock.
        msg.submit(*add_user_message_flow
                   ).then(*disable_msg_flow
                   ).then(*chat_response_flow
                   ).then(*enable_msg_flow)

        # Page load: lock input -> create OpenAI thread -> send the seeded
        # initial message -> unlock.
        chat_view.load(*disable_msg_flow
                       ).then(create_thread_openai, inputs=storage, outputs=storage
                       ).then(initiate_chatting, inputs=[chatbot, storage], outputs=[chatbot, storage]
                       ).then(*enable_msg_flow)
    return chat_view
163
-
164
if __name__ == "__main__":
    # Launch the app behind simple username/password auth; credentials come
    # from the `login` / `password` environment variables.
    chat_view = create_chat_tab()
    chat_view.launch(auth=(os.environ['login'], os.environ['password']), auth_message="Enter your Username and Password")
 
4
  from bot_actions import functions_dictionary
5
  import os
6
 
7
def chatbot(input_text):
    """Placeholder Gradio callback: echoes the submitted text back unchanged."""
    response = input_text
    return response
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
 
 
 
 
10
 
11
# Build the single text-in / text-out Gradio interface backed by `chatbot`,
# then start the server.
input_box = gr.components.Textbox(lines=5, label="Enter your text", show_copy_button=True)
output_box = gr.components.Textbox(lines=5, label="Answer", show_copy_button=True)
iface = gr.Interface(
    fn=chatbot,
    inputs=input_box,
    outputs=output_box,
    examples=["What are the different types of 'work product' lifespan? Provide detailed answer", "Хто може бути наставником?", "Question3", "Question4", "Question5"],
    title="PMO Documents AI Chatbot",
)

iface.launch()