Update app.py
app.py
CHANGED
@@ -1,8 +1,24 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
-client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
-
+models=[
+    "google/gemma-7b",
+    "google/gemma-7b-it",
+    "google/gemma-2b",
+    "google/gemma-2b-it"
+]
+clients=[
+    InferenceClient(models[0]),
+    InferenceClient(models[1]),
+    InferenceClient(models[2]),
+    InferenceClient(models[3]),
+]
+
+
+
+
+def chat_inf(system_prompt,prompt,history,client_choice):
+    client=clients[int(client_choice)-1]
     if not history:
         history = []
     hist_len=0
@@ -42,6 +58,6 @@ with gr.Blocks() as app:
     with gr.Group():
         stop_btn=gr.Button("Stop")
         clear_btn=gr.Button("Clear")
-        chatblock=gr.Dropdown(label="Chatblocks",choices=[c for c in
+        chatblock=gr.Dropdown(label="Chatblocks",type='index',choices=[c for c in models],value=1)
     btn.click(chat_inf,[sys_inp,inp,chat_b],chat_b)
 app.launch()
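
Net effect of the commit: the single hard-coded Mixtral client is replaced by four Gemma endpoints, the "Chatblocks" dropdown now lists those model names and returns the selection as an index (type='index'), and chat_inf uses that index to pick the matching InferenceClient. Below is a minimal standalone sketch of that selection pattern, not a drop-in for the app above; it assumes the 0-based index that Dropdown(type='index') emits, and it assumes the chosen client is then called via InferenceClient.text_generation (the generation call itself is not part of this diff).

import gradio as gr
from huggingface_hub import InferenceClient

models = ["google/gemma-7b", "google/gemma-7b-it", "google/gemma-2b", "google/gemma-2b-it"]
clients = [InferenceClient(m) for m in models]

def chat_inf(system_prompt, prompt, history, client_choice):
    # Dropdown(type='index') passes the 0-based position of the chosen model
    client = clients[int(client_choice)]
    history = history or []
    # Assumption: a plain text_generation call; the real app may stream tokens instead
    reply = client.text_generation(f"{system_prompt}\n{prompt}", max_new_tokens=256)
    history.append((prompt, reply))
    return history

For the index to actually reach chat_inf, the dropdown component has to be listed among the click event's inputs, e.g. btn.click(chat_inf, [sys_inp, inp, chat_b, chatblock], chat_b).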