Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -1,13 +1,14 @@
|
|
1 |
from llama_cpp import Llama
|
2 |
import gradio as gr
|
3 |
|
4 |
-
# Load
|
5 |
llm = Llama(
|
6 |
-
model_path="mental-health-chatbot-i1.Q4_K_M.gguf", #
|
7 |
n_ctx=2048,
|
8 |
-
n_threads=4,
|
9 |
)
|
10 |
|
|
|
11 |
def chat(message, history):
|
12 |
full_prompt = ""
|
13 |
for user, bot in history:
|
@@ -18,5 +19,7 @@ def chat(message, history):
|
|
18 |
reply = output["choices"][0]["text"].strip()
|
19 |
return reply
|
20 |
|
21 |
-
#
|
22 |
-
gr.ChatInterface(fn=chat).launch(
|
|
|
|
|
|
1 |
from llama_cpp import Llama
|
2 |
import gradio as gr
|
3 |
|
# Load the GGUF model via the llama.cpp Python bindings.
llm = Llama(
    model_path="mental-health-chatbot-i1.Q4_K_M.gguf",  # must match the uploaded filename exactly
    n_ctx=2048,  # context window size in tokens
    n_threads=4,  # CPU threads used for inference
)
10 |
|
11 |
+
# Chat logic
|
12 |
def chat(message, history):
|
13 |
full_prompt = ""
|
14 |
for user, bot in history:
|
|
|
19 |
reply = output["choices"][0]["text"].strip()
|
20 |
return reply
|
21 |
|
# Build the Gradio chat UI and serve it on all interfaces at port 7860
# (the port Hugging Face Spaces expects).
demo = gr.ChatInterface(fn=chat, title="Mental Health Llama Chatbot")
demo.launch(server_name="0.0.0.0", server_port=7860)