Update app.py

app.py CHANGED
@@ -62,17 +62,36 @@ def fill_up_placeholders(txt):
         "" if len(placeholders) >= 1 else txt
     )

-def rollback_last(
-
+def rollback_last(
+    idx, local_data, chat_state,
+    global_context, res_temp, res_topk, res_rpen, res_mnts, res_sample, ctx_num_lconv
+):
+    res = [
+        chat_state["ppmanager_type"].from_json(json.dumps(ppm))
+        for ppm in local_data
+    ]
+
+    ppm = res[idx]
     last_user_message = res[idx].pingpongs[-1].ping
     res[idx].pingpongs = res[idx].pingpongs[:-1]

-    return (
-        last_user_message,
-        res[idx].build_uis(),
-        str(res),
-        gr.update(interactive=False)
+    ppm.add_pingpong(
+        PingPong(last_user_message, "")
     )
+    prompt = build_prompts(ppm, global_context, ctx_num_lconv)
+    async for result in gen_text(
+        prompt, hf_model=MODEL_ID, hf_token=TOKEN,
+        parameters={
+            'max_new_tokens': res_mnts,
+            'do_sample': res_sample,
+            'return_full_text': False,
+            'temperature': res_temp,
+            'top_k': res_topk,
+            'repetition_penalty': res_rpen
+        }
+    ):
+        ppm.append_pong(result)
+        yield "", prompt, ppm.build_uis(), str(res)

 def reset_chat(idx, ld, state):
     res = [state["ppmanager_type"].from_json(json.dumps(ppm_str)) for ppm_str in ld]
@@ -112,7 +131,7 @@ async def chat_stream(
        }
    ):
        ppm.append_pong(result)
-       yield prompt, ppm.build_uis(), str(res)
+       yield "", prompt, ppm.build_uis(), str(res)

 def channel_num(btn_title):
    choice = 0
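Note: the rewritten rollback_last above (and the matching change to chat_stream's yield) follow Gradio's streaming pattern, where an async-generator handler yields partial UI updates as tokens arrive, with a leading empty string routed to the input textbox so it is cleared when streaming starts. The snippet below is a minimal, self-contained sketch of that pattern only; fake_token_stream, regenerate_last, and the component names are hypothetical stand-ins for the app's real gen_text call and components, not code from this commit.

import asyncio
import gradio as gr

async def fake_token_stream(prompt):
    # Hypothetical stand-in for gen_text(prompt, hf_model=..., parameters={...}).
    for tok in ["a ", "regenerated ", "reply ", "to: ", prompt]:
        await asyncio.sleep(0.1)
        yield tok

async def regenerate_last(history):
    # Mirrors the idea of rollback_last: keep the last user message,
    # drop the previous bot reply, then stream a fresh reply.
    if not history:
        yield "", history
        return
    last_user_message = history[-1][0]
    history[-1] = [last_user_message, ""]
    async for tok in fake_token_stream(last_user_message):
        history[-1][1] += tok
        # The leading "" clears the textbox, like the added "" in the yields above.
        yield "", history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    regenerate = gr.Button("Regenerate")
    regenerate.click(regenerate_last, chatbot, [msg, chatbot])

if __name__ == "__main__":
    demo.queue().launch()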
@@ -281,33 +300,47 @@ with gr.Blocks(css=MODEL_SELECTION_CSS, theme='gradio/soft') as demo:
         chat_stream,
         [idx, local_data, instruction_txtbox, chat_state,
          global_context, res_temp, res_topk, res_rpen, res_mnts, res_sample, ctx_num_lconv],
-        [context_inspector, chatbot, local_data]
+        [instruction_txtbox, context_inspector, chatbot, local_data]
     )

-    regen_event1 = regenerate.click(
+    # regen_event1 = regenerate.click(
+    #     rollback_last,
+    #     [idx, local_data, chat_state],
+    #     [instruction_txtbox, chatbot, local_data, regenerate]
+    # )
+    # regen_event2 = regen_event1.then(
+    #     chat_stream,
+    #     [idx, local_data, instruction_txtbox, chat_state,
+    #      global_context, res_temp, res_topk, res_rpen, res_mnts, res_sample, ctx_num_lconv],
+    #     [context_inspector, chatbot, local_data]
+    # )
+    # regen_event3 = regen_event2.then(
+    #     lambda: gr.update(interactive=True),
+    #     None,
+    #     regenerate
+    # )
+    # regen_event4 = regen_event3.then(
+    #     None, local_data, None,
+    #     _js="(v)=>{ setStorage('local_data',v) }"
+    # )
+
+    regen_event = regenerate.click(
         rollback_last,
-        [idx, local_data, chat_state],
-        [instruction_txtbox, chatbot, local_data, regenerate]
-    )
-    regen_event2 = regen_event1.then(
-        chat_stream,
-        [idx, local_data, instruction_txtbox, chat_state,
+        [idx, local_data, chat_state,
          global_context, res_temp, res_topk, res_rpen, res_mnts, res_sample, ctx_num_lconv],
         [context_inspector, chatbot, local_data]
-    )
-    regen_event3 = regen_event2.then(
+    ).then(
         lambda: gr.update(interactive=True),
         None,
         regenerate
-    )
-    regen_event4 = regen_event3.then(
+    ).then(
         None, local_data, None,
         _js="(v)=>{ setStorage('local_data',v) }"
     )

     stop.click(
         None, None, None,
-        cancels=[send_event,
+        cancels=[send_event, regen_event]
     )

     for btn in channel_btns:
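Note: the wiring change above collapses the four separate regen_event1 through regen_event4 handlers into one click event whose follow-ups hang off .then(), so a single regen_event handle can be passed to stop.click's cancels list. Below is a minimal sketch of that chaining pattern; the button names and the slow_reply handler are hypothetical, and exact gr.update / cancels behaviour can differ across Gradio versions.

import asyncio
import gradio as gr

async def slow_reply(message):
    # Hypothetical stand-in for the app's streaming chat handler.
    text = ""
    for ch in f"echo: {message}":
        await asyncio.sleep(0.05)
        text += ch
        yield text

with gr.Blocks() as demo:
    box = gr.Textbox(label="message")
    out = gr.Textbox(label="reply")
    send = gr.Button("Send")
    stop = gr.Button("Stop")

    # One event chain, referenced by a single variable.
    send_event = send.click(
        lambda: gr.update(interactive=False), None, send  # disable while streaming
    ).then(
        slow_reply, box, out
    ).then(
        lambda: gr.update(interactive=True), None, send   # re-enable afterwards
    )

    # fn=None with cancels=[...] only interrupts the listed event(s), as in the diff.
    stop.click(None, None, None, cancels=[send_event])

if __name__ == "__main__":
    demo.queue().launch()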
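Design note: the new leading "" in the yield tuples pairs with instruction_txtbox being added to chat_stream's output list, so the input box is cleared as soon as streaming begins, and the old regen_event1 through regen_event4 wiring is left commented out for reference rather than deleted. Generator handlers and cancels generally require the queue to be enabled (demo.queue() in Gradio 3.x), and how a chained event handle interacts with cancels may depend on the Gradio release in use.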