Update app.py
Browse files
app.py
CHANGED
@@ -12,7 +12,8 @@ model_file_path = hf_hub_download(
|
|
12 |
try:
|
13 |
llm_llama_cpp = Llama(
|
14 |
model_path=model_file_path, # Path where the model is downloaded
|
15 |
-
verbose=False # Suppress llama.cpp's own informational prints
|
|
|
16 |
)
|
17 |
|
18 |
# Function that generates a response using the Llama model
|
@@ -46,14 +47,13 @@ except Exception as e:
|
|
46 |
TITLE = "AI Copilot for Diabetes Patients"
|
47 |
DESCRIPTION = "I provide answers to concerns related to Diabetes"
|
48 |
|
49 |
-
# Design chatbot interface
|
50 |
demo = gr.ChatInterface(
|
51 |
fn=talk, # The function that processes user input and returns the response
|
52 |
chatbot=gr.Chatbot(
|
53 |
show_label=True,
|
54 |
show_share_button=True,
|
55 |
show_copy_button=True,
|
56 |
-
likeable=True,
|
57 |
layout="bubble", # Display messages in bubble format
|
58 |
bubble_full_width=False,
|
59 |
),
|
|
|
12 |
try:
|
13 |
llm_llama_cpp = Llama(
|
14 |
model_path=model_file_path, # Path where the model is downloaded
|
15 |
+
verbose=False, # Suppress llama.cpp's own informational prints
|
16 |
+
n_ctx=4096 # Set context window to match model's full capacity
|
17 |
)
|
18 |
|
19 |
# Function that generates a response using the Llama model
|
|
|
47 |
TITLE = "AI Copilot for Diabetes Patients"
|
48 |
DESCRIPTION = "I provide answers to concerns related to Diabetes"
|
49 |
|
50 |
+
# Design chatbot interface (removed unsupported `likeable` argument)
|
51 |
demo = gr.ChatInterface(
|
52 |
fn=talk, # The function that processes user input and returns the response
|
53 |
chatbot=gr.Chatbot(
|
54 |
show_label=True,
|
55 |
show_share_button=True,
|
56 |
show_copy_button=True,
|
|
|
57 |
layout="bubble", # Display messages in bubble format
|
58 |
bubble_full_width=False,
|
59 |
),
|