K00B404 committed on
Commit 268d349 · verified · 1 Parent(s): 71516c7

Update app.py

Files changed (1)
  1. app.py +25 -11
app.py CHANGED
@@ -7,7 +7,7 @@ client = InferenceClient("HuggingFaceH4/starchat2-15b-v0.1")
 
 def respond(
     message,
-    history: list[tuple[str, str]],
+    chat_history,
     system_message,
     max_tokens,
     temperature,
@@ -24,9 +24,9 @@ def respond(
     messages = [{"role": "system", "content": system_message}]
 
     # Build conversation history
-    for user_msg, assistant_msg in history:
-        if user_msg:
-            messages.append({"role": "user", "content": user_msg})
+    for human_msg, assistant_msg in chat_history:
+        if human_msg:
+            messages.append({"role": "user", "content": human_msg})
         if assistant_msg:
             messages.append({"role": "assistant", "content": assistant_msg})
 
@@ -43,9 +43,11 @@ def respond(
         ):
             token = message.choices[0].delta.content
            response += token
-            yield response
+            chat_history = chat_history + [(message, response)]
+            yield chat_history
     except Exception as e:
-        yield f"Error: {str(e)}"
+        chat_history = chat_history + [(message, f"Error: {str(e)}")]
+        yield chat_history
 
 def create_chat_interface():
     """
@@ -71,7 +73,8 @@ def create_chat_interface():
             height=600,
             show_label=False,
             container=True,
-            scale=2
+            scale=2,
+            type="messages"  # Set type to messages format
         )
         msg = gr.Textbox(
             show_label=False,
@@ -116,17 +119,24 @@ def create_chat_interface():
         with gr.Row():
             clear = gr.Button("Clear Chat")
             stop = gr.Button("Stop")
+
+        # Initialize chat history
+        state = gr.State([])
 
         # Handle sending messages
        msg.submit(
            respond,
-            [msg, chatbot, system_msg, max_tokens, temperature, top_p, model],
-            [chatbot],
+            [msg, state, system_msg, max_tokens, temperature, top_p, model],
+            [state],
            api_name="chat"
+        ).then(
+            lambda x: "",
+            [msg],
+            [msg]
        )
 
         # Clear chat history
-        clear.click(lambda: None, None, chatbot, queue=False)
+        clear.click(lambda: [], None, state, queue=False)
 
         # Example prompts
         gr.Examples(
@@ -144,4 +154,8 @@ def create_chat_interface():
 if __name__ == "__main__":
     demo = create_chat_interface()
     demo.queue()
-    demo.launch(share=True)
+    # Disable SSR and sharing for Spaces
+    demo.launch(
+        share=False,  # Disable sharing on Spaces
+        ssr=False  # Disable SSR to prevent warnings
+    )
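
For reference, a minimal sketch of how the updated respond() generator might read after this commit. The changed lines come from the hunks above; the append of the current user turn, the streaming client.chat_completion() call, and the model argument are assumed from the unchanged context and may differ from the actual file. The streaming loop variable is renamed to chunk here to avoid shadowing the message argument, and each yield appends a single in-progress (message, response) pair to the incoming history.

from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/starchat2-15b-v0.1")

def respond(message, chat_history, system_message, max_tokens, temperature, top_p, model):
    # model comes from the UI's model selector; how it is used is not shown in the diff
    messages = [{"role": "system", "content": system_message}]

    # Build conversation history from the (user, assistant) pairs held in gr.State
    for human_msg, assistant_msg in chat_history:
        if human_msg:
            messages.append({"role": "user", "content": human_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})

    # Current user turn (assumed unchanged code, not visible in the hunks)
    messages.append({"role": "user", "content": message})

    response = ""
    try:
        # Stream tokens and yield a growing history so the UI can update live
        for chunk in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = chunk.choices[0].delta.content or ""
            response += token
            yield chat_history + [(message, response)]
    except Exception as e:
        yield chat_history + [(message, f"Error: {str(e)}")]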
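The remaining hunks introduce the UI-side pattern: conversation history kept in a gr.State component, a chained .then() event that clears the textbox after each submit, and a clear button that resets the state. A self-contained sketch of that wiring follows; the echo-style add_message handler is hypothetical (not from the commit), and unlike the commit's submit handler it also routes the updated history to the Chatbot so the display refreshes.

import gradio as gr

def add_message(user_text, history):
    # Hypothetical handler: append a (user, assistant) pair to the stored history
    history = history + [(user_text, f"Echo: {user_text}")]
    # Return the history twice: once for the State, once for the Chatbot display
    return history, history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(height=400)
    msg = gr.Textbox(show_label=False, placeholder="Type a message and press Enter")
    clear = gr.Button("Clear Chat")

    # Conversation history lives in a State component, as in the commit
    state = gr.State([])

    # Chain events: update the history first, then clear the textbox
    msg.submit(
        add_message,
        [msg, state],
        [state, chatbot],
    ).then(
        lambda: "",
        None,
        [msg],
    )

    # Reset both the stored history and the visible chat
    clear.click(lambda: ([], []), None, [state, chatbot], queue=False)

if __name__ == "__main__":
    demo.queue()
    demo.launch()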