Spaces:
Sleeping
Sleeping
Commit
·
ca36ef4
1
Parent(s):
ece0caf
added dall-e into chat
Browse files- __pycache__/bot_actions.cpython-38.pyc +0 -0
- app.py +17 -12
- bot_actions.py +25 -2
- temp.txt +0 -0
__pycache__/bot_actions.cpython-38.pyc
CHANGED
|
Binary files a/__pycache__/bot_actions.cpython-38.pyc and b/__pycache__/bot_actions.cpython-38.pyc differ
|
|
|
app.py
CHANGED
|
@@ -10,26 +10,29 @@ CSS ="""
|
|
| 10 |
#chatbot { flex-grow: 1; overflow: auto;}
|
| 11 |
"""
|
| 12 |
|
| 13 |
-
|
| 14 |
assistantId = os.environ['assistantId']
|
| 15 |
initial_message = os.environ['initialMessage']
|
| 16 |
|
|
|
|
|
|
|
| 17 |
def handle_requires_action(data):
|
| 18 |
-
|
| 19 |
for tool in data.required_action.submit_tool_outputs.tool_calls:
|
| 20 |
function_name = tool.function.name
|
| 21 |
function_args = json.loads(tool.function.arguments)
|
| 22 |
print(function_name)
|
| 23 |
print(function_args)
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
|
|
|
| 30 |
|
| 31 |
# Submit all tool_outputs at the same time
|
| 32 |
-
return
|
| 33 |
|
| 34 |
|
| 35 |
def create_thread_openai(sessionStorage):
|
|
@@ -96,17 +99,19 @@ def handle_events(threadId, chat_history, storage):
|
|
| 96 |
chat_history[-1][1] += local_message
|
| 97 |
yield [ chat_history, storage]
|
| 98 |
if event.event == 'thread.run.requires_action':
|
| 99 |
-
|
|
|
|
| 100 |
with client.beta.threads.runs.submit_tool_outputs_stream(
|
| 101 |
thread_id=stream.current_run.thread_id,
|
| 102 |
run_id=event.data.id,
|
| 103 |
tool_outputs=tool_outputs,
|
| 104 |
-
) as
|
| 105 |
-
for text in
|
| 106 |
local_message, list_of_suggestions, string_of_suggestions, is_loading_suggestions = process_text_chunk(text, list_of_suggestions, string_of_suggestions, is_loading_suggestions)
|
| 107 |
if local_message is not None:
|
| 108 |
chat_history[-1][1] += local_message
|
| 109 |
yield [chat_history, storage]
|
|
|
|
| 110 |
stream.until_done()
|
| 111 |
print("")
|
| 112 |
storage["list_of_suggestions"] = list_of_suggestions
|
|
|
|
| 10 |
#chatbot { flex-grow: 1; overflow: auto;}
|
| 11 |
"""
|
| 12 |
|
| 13 |
+
openAIToken = os.environ['openAIToken']
|
| 14 |
assistantId = os.environ['assistantId']
|
| 15 |
initial_message = os.environ['initialMessage']
|
| 16 |
|
| 17 |
+
client = OpenAI(api_key=openAIToken)
|
| 18 |
+
|
| 19 |
def handle_requires_action(data):
    """Execute the tool calls requested by an Assistants-API run.

    data: a ``thread.run.requires_action`` event payload; its
    ``required_action.submit_tool_outputs.tool_calls`` list is iterated.

    Returns a list of ``{"tool_output": {"tool_call_id": ..., "output": ...}}``
    dicts, one per tool call, ready to be submitted back to the run.
    """
    actions_results = []
    for tool in data.required_action.submit_tool_outputs.tool_calls:
        function_name = tool.function.name
        function_args = json.loads(tool.function.arguments)
        print(function_name)
        print(function_args)
        try:
            result = functions_dictionary[function_name](**function_args)
            print("Function result:", result)
            actions_results.append({"tool_output" : {"tool_call_id": tool.id, "output": result["message"]}})
        except Exception as e:
            print(e)
            # The run stalls unless every tool_call_id receives *some* output,
            # so report the failure instead of silently dropping this call.
            actions_results.append({"tool_output" : {"tool_call_id": tool.id, "output": f"Error: {e}"}})

    # Submit all tool_outputs at the same time
    return actions_results
|
| 36 |
|
| 37 |
|
| 38 |
def create_thread_openai(sessionStorage):
|
|
|
|
| 99 |
chat_history[-1][1] += local_message
|
| 100 |
yield [ chat_history, storage]
|
| 101 |
if event.event == 'thread.run.requires_action':
|
| 102 |
+
result = handle_requires_action(event.data)
|
| 103 |
+
tool_outputs = [x["tool_output"] for x in result]
|
| 104 |
with client.beta.threads.runs.submit_tool_outputs_stream(
|
| 105 |
thread_id=stream.current_run.thread_id,
|
| 106 |
run_id=event.data.id,
|
| 107 |
tool_outputs=tool_outputs,
|
| 108 |
+
) as action_stream:
|
| 109 |
+
for text in action_stream.text_deltas:
|
| 110 |
local_message, list_of_suggestions, string_of_suggestions, is_loading_suggestions = process_text_chunk(text, list_of_suggestions, string_of_suggestions, is_loading_suggestions)
|
| 111 |
if local_message is not None:
|
| 112 |
chat_history[-1][1] += local_message
|
| 113 |
yield [chat_history, storage]
|
| 114 |
+
action_stream.close()
|
| 115 |
stream.until_done()
|
| 116 |
print("")
|
| 117 |
storage["list_of_suggestions"] = list_of_suggestions
|
bot_actions.py
CHANGED
|
@@ -6,8 +6,31 @@ def save_record(arg_json):
|
|
| 6 |
request_url = os.environ['facilityURL'] + "/facilities"
|
| 7 |
r = requests.post(request_url, json={'pk': round(time.time()), 'json': arg_json})
|
| 8 |
print(r.status_code, r.reason)
|
| 9 |
-
return "Done"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 10 |
|
| 11 |
functions_dictionary = {
|
| 12 |
-
"save_record": save_record
|
|
|
|
| 13 |
}
|
|
|
|
| 6 |
request_url = os.environ['facilityURL'] + "/facilities"
|
| 7 |
r = requests.post(request_url, json={'pk': round(time.time()), 'json': arg_json})
|
| 8 |
print(r.status_code, r.reason)
|
| 9 |
+
return { "message": "Done" }
|
| 10 |
+
|
| 11 |
+
# Prompt prefix describing the desired floor-plan rendering style.
# NOTE(review): currently unused — generate_image builds its own shorter
# prefix below; kept here, presumably for future prompt tuning — confirm.
instruction = '''black-and-white building plan, 2d, flat, house-building plan, top view. Rooms according to locations.
Write room-id in the center of room. Write room and floor size inside of room/floor at the top.
Write floor description outside above the top border with margin 5px. Sizes must be proportional. Information should be based on next json: '''


def generate_image(json_prompt):
    """Render a 2-D floor-plan image with DALL-E 3 from a JSON description.

    json_prompt: string of JSON describing the rooms/floor to draw; it is
    appended verbatim to a fixed style prefix.

    Returns {"message": "Done. Url:<image-url>"} on success, or
    {"message": "Error: ..."} on failure — always a dict, so callers that
    index result["message"] never crash on an implicit None.
    """
    prompt = 'black and white building plan, 2d, top view:' + json_prompt
    openAIToken = os.environ['openAIToken']
    client = OpenAI(api_key=openAIToken)
    try:
        response = client.images.generate(
            n=1,
            prompt=prompt,
            model="dall-e-3",
            quality='hd',
            style='natural',
        )
        done_msg ="Done. Url:" + response.data[0].url
        return { "message": done_msg}
    except Exception as e:
        # Previously this printed and fell through to an implicit None,
        # which made callers doing result["message"] raise TypeError;
        # surface the error in the same dict shape instead.
        print(e)
        return { "message": "Error: " + str(e) }
|
| 31 |
+
|
| 32 |
|
| 33 |
# Dispatch table: maps a tool-call name requested by the Assistant to the
# local Python handler that fulfils it.
functions_dictionary = dict(
    save_record=save_record,
    generate_image=generate_image,
)
|
temp.txt
ADDED
|
File without changes
|