Update app.py
app.py CHANGED
@@ -34,13 +34,14 @@ def generate(prompt,history,max_new_tokens,seed):
     formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
-
+    buf = ""
     for response in stream:
         if response.token.text == "\n":
-            yield from pp.stream_tts(
-
+            yield from pp.stream_tts(buf)
+            buf=""
+        buf += response.token.text
         output += response.token.text
-
+        print(output)
         #yield output
 
 with gr.Blocks() as iface:
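
For reference, a minimal sketch of the token-buffering pattern this change introduces, pulled out into a standalone generator. speak_stream is a hypothetical helper name for illustration only; it assumes stream yields objects with a .token.text string (as the diff's client.text_generation(..., stream=True, details=True) call produces) and that pp.stream_tts(text) is a generator yielding audio chunks for a piece of text, as used in app.py.

def speak_stream(stream, pp):
    # output: full transcript of the generation; buf: text accumulated
    # since the last newline token.
    output = ""
    buf = ""
    for response in stream:
        if response.token.text == "\n":
            # A line is complete: synthesize it, then start a new buffer.
            yield from pp.stream_tts(buf)
            buf = ""
        buf += response.token.text
        output += response.token.text
    # Note: the commit itself has no flush after the loop, so trailing text
    # without a final newline is never spoken; a flush like this would cover it.
    if buf.strip():
        yield from pp.stream_tts(buf)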