lemonteaa committed
Commit 5856dbe · verified · 1 Parent(s): 438d4d3

fix streaming None object (openai sdk)

Files changed (1)
  1. chat_demo.py +6 -5
chat_demo.py CHANGED
@@ -57,10 +57,11 @@ def openai_call(message, history, system_prompt, max_new_tokens):
     )
     reply = ""
     for chunk in response:
-        delta = chunk.choices[0].delta.content
-        if delta is not None:
-            reply = reply + delta
-        yield reply, None
+        if len(chunk.choices) > 0:
+            delta = chunk.choices[0].delta.content
+            if delta is not None:
+                reply = reply + delta
+            yield reply, None
     history.append({ "role": "assistant", "content": reply })
     yield reply, gr.State(history)

@@ -97,7 +98,7 @@ with gr.Blocks() as demo:
         type="messages",
         additional_inputs=[
             gr.Textbox("You are a helpful AI assistant.", label="System Prompt"),
-            gr.Slider(30, 8192, label="Max new tokens"),
+            gr.Slider(30, 8192, value=2048, label="Max new tokens"),
         ],
         additional_outputs=[conv_state],
         title="Edge level LLM Chat demo",