AbstractPhil committed
Commit 9864aee · verified · 1 Parent(s): f3fa540

Update app.py

Files changed (1): app.py (+4 -2)
app.py CHANGED
@@ -5,6 +5,7 @@ from tokenizers import Tokenizer
 from huggingface_hub import hf_hub_download
 from safetensors.torch import load_file as load_safetensors
 
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 # ----------------------------
 # 🔧 Model versions configuration
 # ----------------------------
@@ -41,7 +42,6 @@ config = {
     "tokenizer_path": "beeper.tokenizer.json"
 }
 
-device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
 # Global model and tokenizer variables
 infer = None
@@ -185,7 +185,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
     )
 
     # Chat interface
-    chatbot = gr.Chatbot(label="Chat with Beeper", type="messages", height=400)
+    chatbot = gr.Chatbot(label="Chat with Beeper", type="tuples", height=400)
     msg = gr.Textbox(label="Message", placeholder="Type your message here...")
 
     with gr.Row():
@@ -214,6 +214,8 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
 
     # Handle chat
     def respond(message, chat_history, model_version, temperature, top_k, top_p):
+        if not chat_history:
+            chat_history = []
         response = beeper_reply(message, chat_history, model_version, temperature, top_k, top_p)
         chat_history.append([message, response])
         return "", chat_history
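Note on the history format: switching the Chatbot from type="messages" to type="tuples" matches what respond() actually builds, a list of [user, assistant] pairs appended one per turn, rather than role/content dicts. Below is a minimal sketch of that wiring, assuming a Gradio 4.x release where gr.Chatbot still accepts type="tuples" (it is deprecated in newer releases), with beeper_reply stubbed out as a hypothetical echo:

import gradio as gr

def respond(message, chat_history):
    # Guard against a None/empty history, as the commit does.
    if not chat_history:
        chat_history = []
    reply = f"(echo) {message}"  # stand-in for beeper_reply(...)
    # "tuples" history: one [user, assistant] pair per turn.
    chat_history.append([message, reply])
    return "", chat_history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="Chat with Beeper", type="tuples", height=400)
    msg = gr.Textbox(label="Message")
    msg.submit(respond, [msg, chatbot], [msg, chatbot])

if __name__ == "__main__":
    demo.launch()

Under type="messages", the same turn would instead have to append two {"role": ..., "content": ...} dicts, which is why the old setting broke against respond()'s pair-style appends.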