Athspi committed on
Commit
e83210b
·
verified ·
1 Parent(s): f8b5f9f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -7
app.py CHANGED
@@ -5,8 +5,7 @@ import os
5
 
6
  # --- Configuration (Read from Environment Variables) ---
7
 
8
- # Get the model path from an environment variable. Default to a placeholder
9
- # if the environment variable is not set. This is important for deployment.
10
  model_path = os.environ.get("MODEL_PATH", "Athspi/Athspiv2new")
11
  deepseek_tokenizer_path = os.environ.get("TOKENIZER_PATH", "deepseek-ai/DeepSeek-R1")
12
  # Get the Hugging Face token from an environment variable (for gated models).
@@ -67,21 +66,26 @@ def chat_with_llm(prompt, history):
67
  except Exception as e:
68
  return f"Error during generation: {e}"
69
 
70
-
71
  # --- Gradio Interface ---
72
-
73
  def predict(message, history):
74
  history = history or []
75
  response = chat_with_llm(message, history)
76
- history.append((message, response))
 
 
77
  return "", history
78
 
79
  with gr.Blocks() as demo:
80
- chatbot = gr.Chatbot(label="Athspi Chat", height=500, show_label=True, value=[[None, "Hi! I'm Athspi. How can I help you today?"]])
 
 
81
  msg = gr.Textbox(label="Your Message", placeholder="Type your message here...")
82
  clear = gr.Button("Clear")
83
 
 
84
  msg.submit(predict, [msg, chatbot], [msg, chatbot])
85
- clear.click(lambda: None, None, chatbot, queue=False)
 
86
 
87
  demo.launch(share=True)
 
5
 
6
  # --- Configuration (Read from Environment Variables) ---
7
 
8
+ # Get the model path from an environment variable.
 
9
  model_path = os.environ.get("MODEL_PATH", "Athspi/Athspiv2new")
10
  deepseek_tokenizer_path = os.environ.get("TOKENIZER_PATH", "deepseek-ai/DeepSeek-R1")
11
  # Get the Hugging Face token from an environment variable (for gated models).
 
66
  except Exception as e:
67
  return f"Error during generation: {e}"
68
 
 
69
  # --- Gradio Interface ---
70
+ # Use the 'messages' format for chatbot
71
  def predict(message, history):
72
  history = history or []
73
  response = chat_with_llm(message, history)
74
+ # history.append((message, response)) # Old format
75
+ history.append({"role": "user", "content": message}) # Append user message in 'messages' format
76
+ history.append({"role": "assistant", "content": response}) # Append assistant (AI) message
77
  return "", history
78
 
79
  with gr.Blocks() as demo:
80
+ chatbot = gr.Chatbot(label="Athspi Chat", height=500, show_label=True,
81
+ value=[{"role": "assistant", "content": "Hi! I'm Athspi. How can I help you today?"}],
82
+ type="messages") # Set type to "messages"
83
  msg = gr.Textbox(label="Your Message", placeholder="Type your message here...")
84
  clear = gr.Button("Clear")
85
 
86
+
87
  msg.submit(predict, [msg, chatbot], [msg, chatbot])
88
+ # Clear needs to be updated for the messages format
89
+ clear.click(lambda: [], [], chatbot, queue=False) # Return empty list for history
90
 
91
  demo.launch(share=True)