Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,9 +1,11 @@
|
|
1 |
import gradio as gr
|
2 |
from huggingface_hub import InferenceClient
|
|
|
3 |
|
4 |
-
#
|
5 |
client = InferenceClient("xezpeleta/latxa-7b-v1-gguf")
|
6 |
|
|
|
7 |
def respond(
|
8 |
message,
|
9 |
history: list[tuple[str, str]],
|
@@ -35,6 +37,7 @@ def respond(
|
|
35 |
response += token
|
36 |
yield response
|
37 |
|
|
|
38 |
demo = gr.ChatInterface(
|
39 |
respond,
|
40 |
additional_inputs=[
|
@@ -61,13 +64,11 @@ demo = gr.ChatInterface(
|
|
61 |
maximum=1.0,
|
62 |
value=0.95,
|
63 |
step=0.05,
|
64 |
-
label="Top-p
|
65 |
),
|
66 |
],
|
67 |
-
title="Latxa LLM Chatbot"
|
68 |
-
description="Euskarazko chatbot adimentsua"
|
69 |
)
|
70 |
|
71 |
if __name__ == "__main__":
|
72 |
demo.launch()
|
73 |
-
|
|
|
1 |
import gradio as gr
|
2 |
from huggingface_hub import InferenceClient
|
3 |
+
import spaces
|
4 |
|
5 |
+
# Import spaces for ZeroGPU
|
6 |
client = InferenceClient("xezpeleta/latxa-7b-v1-gguf")
|
7 |
|
8 |
+
@spaces.GPU # Add the decorator for ZeroGPU
|
9 |
def respond(
|
10 |
message,
|
11 |
history: list[tuple[str, str]],
|
|
|
37 |
response += token
|
38 |
yield response
|
39 |
|
40 |
+
# Configure the interface
|
41 |
demo = gr.ChatInterface(
|
42 |
respond,
|
43 |
additional_inputs=[
|
|
|
64 |
maximum=1.0,
|
65 |
value=0.95,
|
66 |
step=0.05,
|
67 |
+
label="Top-p"
|
68 |
),
|
69 |
],
|
70 |
+
title="Latxa LLM Chatbot"
|
|
|
71 |
)
|
72 |
|
73 |
if __name__ == "__main__":
|
74 |
demo.launch()
|
|