Update app.py
app.py CHANGED

@@ -8,17 +8,18 @@ def predict(input, history=[]):
         history (list, optional): List of previous inputs and outputs for context (default: []).

     Returns:
-        tuple: A tuple containing
+        tuple: A tuple containing a list of chatbot responses (wrapped in lists) and the updated history (optional).
     """

     # Replace with your actual Gemma prediction logic here
-    chatbot_response = "This is
+    chatbot_response = "This is your Gemma-powered chatbot response."

-
-    if history:
-        history.append((input, chatbot_response))
+    return [[chatbot_response]], history # Wrap response in a list of lists (Option A)

-
+    # Alternatively, for multiple responses:
+    # chatbot_response1 = "First Gemma response."
+    # chatbot_response2 = "Second Gemma response."
+    # return [[chatbot_response1], [chatbot_response2]], history

 # Create the Gradio interface
 interface = gr.Interface(
@@ -27,11 +28,12 @@ interface = gr.Interface(
     outputs=["chatbot", "state"] # Remove "state" output if history is not used
 )

-# Load
+# Load the model within the Gradio interface context
 try:
     gr.load("models/google/gemma-1.1-7b-it") # Assuming model weights are available
 except Exception as e:
-    print(f"
+    print(f"An error occurred while loading the model: {e}") # Improved error handling

 # Launch the Gradio interface
 interface.launch()
+
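For reference, the hunks above cover only the changed region of app.py; the imports, the opening of predict, and the Interface inputs sit outside the diff. Below is a minimal sketch of how the updated pieces could fit together, not the file as committed. The import gradio as gr line, the "textbox"/"state" inputs, and the history = history or [] guard are assumptions that do not appear in the commit, and the sketch returns the accumulated (user, bot) pairs instead of the single-element wrapping labelled "Option A" in the diff, since Gradio's Chatbot component (in its classic tuple format) renders a list of (user, bot) message pairs.

import gradio as gr  # assumed import; not visible in the diff

def predict(input, history=[]):
    """Generate a placeholder chatbot reply.

    Args:
        input (str): The user's message.
        history (list, optional): List of previous inputs and outputs for context (default: []).

    Returns:
        tuple: The (user, bot) message pairs to display and the updated history.
    """
    history = history or []  # the "state" input arrives as None on the first call

    # Replace with your actual Gemma prediction logic here
    chatbot_response = "This is your Gemma-powered chatbot response."

    # Keep (user, bot) pairs so the Chatbot component can render the conversation
    history.append((input, chatbot_response))
    return history, history

# Create the Gradio interface
interface = gr.Interface(
    fn=predict,
    inputs=["textbox", "state"],   # "state" carries the history between calls
    outputs=["chatbot", "state"],  # Remove "state" output if history is not used
)

# Launch the Gradio interface
interface.launch()

Note that gr.load("models/google/gemma-1.1-7b-it") builds and returns its own demo backed by the hosted model; as written in the diff, that return value is discarded inside the try/except, so the Space still serves the placeholder predict above. If the hosted model is all that is needed, launching the loaded demo directly, e.g. gr.load("models/google/gemma-1.1-7b-it").launch(), is the simpler route.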