andriydovgal committed
Commit 8c31cdd · verified · 1 Parent(s): 760de1f

Upload folder using huggingface_hub
.github/workflows/update_space.yml ADDED
@@ -0,0 +1,28 @@
+name: Run Python script
+
+on:
+  push:
+    branches:
+      - main
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
+
+      - name: Install Gradio
+        run: python -m pip install gradio
+
+      - name: Log in to Hugging Face
+        run: python -c 'import huggingface_hub; huggingface_hub.login(token="${{ secrets.hf_token }}")'
+
+      - name: Deploy to Spaces
+        run: gradio deploy
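Note: the "Log in to Hugging Face" step reads a repository secret named hf_token, so the deploy will fail until that secret contains a Hugging Face token with write access. One way to add it, assuming the GitHub CLI is installed and authenticated (a sketch, not part of the committed workflow):

gh secret set hf_token --body "<your Hugging Face write token>"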
README.md CHANGED
@@ -1,12 +1,6 @@
 ---
-title: Board Of Advisors Assistant
-emoji: 🌍
-colorFrom: gray
-colorTo: blue
-sdk: gradio
-sdk_version: 4.44.1
+title: Board_of_Advisors_Assistant
 app_file: app.py
-pinned: false
+sdk: gradio
+sdk_version: 3.35.2
 ---
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py ADDED
@@ -0,0 +1,178 @@
+from openai import OpenAI
+import gradio as gr
+import json
+from bot_actions import functions_dictionary
+import os
+
+CSS = """
+.contain { display: flex; flex-direction: column; }
+.svelte-vt1mxs div:first-child { flex-grow: 1; overflow: auto;}
+#chatbot { flex-grow: 1; overflow: auto;}
+footer {display: none !important;}
+.app.svelte-182fdeq.svelte-182fdeq {
+    max-width: 100vw !important;
+}
+#main_container {
+    height: 95vh;
+}
+#markup_container {
+    height: 100%;
+    overflow: auto;
+}
+"""
+
+openAIToken = os.environ['openAIToken']
+assistantId = os.environ['assistantId']
+initial_message = os.environ['initialMessage']
+
+client = OpenAI(api_key=openAIToken)
+
+def handle_requires_action(data):
+    actions_results = []
+    for tool in data.required_action.submit_tool_outputs.tool_calls:
+        function_name = tool.function.name
+        function_args = json.loads(tool.function.arguments)
+        print(function_name)
+        print(function_args)
+        try:
+            result = functions_dictionary[tool.function.name](**function_args)
+            print("Function result:", result)
+            actions_results.append({"tool_output": {"tool_call_id": tool.id, "output": result["message"]}})
+        except Exception as e:
+            print(e)
+
+
+    # Submit all tool_outputs at the same time
+    return actions_results
+
+
+def create_thread_openai(sessionStorage):
+    streaming_thread = client.beta.threads.create()
+    sessionStorage["threadId"] = streaming_thread.id
+    return sessionStorage
+
+def add_message_to_openai(text, threadId):
+    print("User message: ", text)
+    return client.beta.threads.messages.create(
+        thread_id=threadId,
+        role="user",
+        content=text
+    )
+
+
+def process_text_chunk(text, storage):
+    print(text, end="", flush=True)
+    local_message = None
+    accumulative_string = storage["accumulative_string"] + text
+    local_message = accumulative_string
+    return local_message, storage
+
+def handle_events(threadId, chat_history, storage):
+    storage.update({
+        "accumulative_string": "",
+        "markup_string": "",
+    })
+    try:
+        with client.beta.threads.runs.stream(
+            thread_id=threadId,
+            assistant_id=assistantId
+        ) as stream:
+            for event in stream:
+                if event.event == "thread.message.delta" and event.data.delta.content:
+                    text = event.data.delta.content[0].text.value
+                    local_message, storage = process_text_chunk(text, storage)
+                    if local_message is not None:
+                        chat_history[-1][1] += local_message
+                        yield [chat_history, storage]
+                if event.event == 'thread.run.requires_action':
+                    result = handle_requires_action(event.data)
+                    tool_outputs = [x["tool_output"] for x in result]
+                    with client.beta.threads.runs.submit_tool_outputs_stream(
+                        thread_id=stream.current_run.thread_id,
+                        run_id=event.data.id,
+                        tool_outputs=tool_outputs,
+                    ) as action_stream:
+                        for text in action_stream.text_deltas:
+                            local_message, storage = process_text_chunk(text, storage)
+                            if local_message is not None:
+                                chat_history[-1][1] += local_message
+                                yield [chat_history, storage]
+                        action_stream.close()
+            stream.until_done()
+        print("")
+        return [chat_history, storage]
+    except Exception as e:
+        print(e)
+        chat_history[-1][1] = "Error occurred while processing your message. Please try again"
+        yield [chat_history, storage]
+
+def initiate_chatting(chat_history, storage):
+    threadId = storage["threadId"]
+    chat_history = [[None, ""]]
+    add_message_to_openai(initial_message, threadId)
+    for response in handle_events(threadId, chat_history, storage):
+        yield response
+
+def respond_on_user_msg(chat_history, storage):
+    message = chat_history[-1][0]
+    threadId = storage["threadId"]
+    print("Responding for threadId: ", threadId)
+    chat_history[-1][1] = ""
+    add_message_to_openai(message, threadId)
+    for response in handle_events(threadId, chat_history, storage):
+        yield response
+
+def create_tabs():
+
+    pass
+
+def create_login_tab():
+    with gr.Blocks(fill_height=True) as login:
+        with gr.Row():
+            login_input = gr.Textbox(label="Login")
+        with gr.Row():
+            password_input = gr.Textbox(label="Password", type="password")
+    return login
+
+def create_chat_tab():
+    with gr.Blocks(css=CSS, fill_height=True) as demo:
+        storage = gr.State({"accumulative_string": ""})
+        btn_list = []
+        with gr.Row(elem_id="main_container"):
+            with gr.Column(scale=4):
+                chatbot = gr.Chatbot(label="Facility management bot", line_breaks=False, height=300, show_label=False, show_share_button=False, elem_id="chatbot")
+                with gr.Row():
+                    for i in range(6):
+                        btn = gr.Button(visible=False, size="sm")
+                        btn_list.append(btn)
+                msg = gr.Textbox(label="Answer", interactive=False)
+
+        def user(user_message, history):
+            return "", history + [[user_message, None]]
+
+        def disable_msg():
+            message_box = gr.Textbox(value=None, interactive=False)
+            return message_box
+
+        def enable_msg():
+            message_box = gr.Textbox(value=None, interactive=True)
+            return message_box
+
+        add_user_message_flow = [user, [msg, chatbot], [msg, chatbot]]
+        chat_response_flow = [respond_on_user_msg, [chatbot, storage], [chatbot, storage]]
+        disable_msg_flow = [disable_msg, None, msg]
+        enable_msg_flow = [enable_msg, None, msg]
+
+        msg.submit(*add_user_message_flow
+        ).then(*disable_msg_flow
+        ).then(*chat_response_flow
+        ).then(*enable_msg_flow)
+
+        demo.load(create_thread_openai, inputs=storage, outputs=storage
+        ).then(initiate_chatting, inputs=[chatbot, storage], outputs=[chatbot, storage]
+        ).then(*enable_msg_flow)
+    return demo
+
+demo = create_chat_tab()
+if __name__ == "__main__":
+    demo.launch(auth=(os.environ['login'], os.environ['password']), auth_message="Enter your Username and Password")
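Note: app.py reads all of its configuration from environment variables (openAIToken, assistantId, initialMessage, login, password) and will raise a KeyError at startup if any is missing. On a Space these would normally be set as secrets; for a local run, a minimal sketch with placeholder values (assumed, not taken from the commit) could look like:

export openAIToken="sk-..."      # OpenAI API key
export assistantId="asst_..."    # ID of an existing OpenAI Assistant
export initialMessage="Hello"    # first message sent to the assistant on page load
export login="admin"             # Gradio basic-auth username
export password="change-me"      # Gradio basic-auth password
python app.py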
app.pyZone.Identifier ADDED
@@ -0,0 +1,4 @@
+[ZoneTransfer]
+ZoneId=3
+ReferrerUrl=https://huggingface.co/spaces/oleksandrburlakov/boardOfAdvisors/tree/main
+HostUrl=https://huggingface.co/spaces/oleksandrburlakov/boardOfAdvisors/resolve/main/app.py?download=true
bot_actions.py ADDED
@@ -0,0 +1,6 @@
+def method_example(arg_json):
+    pass
+
+functions_dictionary = {
+    "method_example": method_example,
+}
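Note: handle_requires_action in app.py looks up each tool call by name in functions_dictionary, calls it with the JSON arguments unpacked as keyword arguments, and expects a dict with a "message" key to forward as the tool output. A minimal sketch of a concrete entry (the function name and behaviour are hypothetical, and a matching function tool must also be declared on the OpenAI Assistant):

def get_company_overview(company_name):
    # Hypothetical action: a real implementation would query an internal data source.
    # Parameter names must match the properties declared in the Assistant's function schema.
    return {"message": f"No data found for {company_name}."}

functions_dictionary = {
    "get_company_overview": get_company_overview,
}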
bot_actions.pyZone.Identifier ADDED
@@ -0,0 +1,4 @@
+[ZoneTransfer]
+ZoneId=3
+ReferrerUrl=https://huggingface.co/spaces/oleksandrburlakov/boardOfAdvisors/tree/main
+HostUrl=https://huggingface.co/spaces/oleksandrburlakov/boardOfAdvisors/resolve/main/bot_actions.py?download=true
requirements.txt ADDED
@@ -0,0 +1,2 @@
+huggingface_hub==0.22.2
+openai==1.28.0