FlameF0X committed on
Commit 3f01f23 · verified
1 Parent(s): 992cb1e

Update app.py

Files changed (1)
  1. app.py +1 -2
app.py CHANGED
@@ -4,7 +4,6 @@ import torch
 
 # Define available models
 model_options = {
-    "VML-1.1-K1-mini":"VortexIntelligence/VLM-1.1-K1-Preview",
     "VLM-1-K3": "VortexIntelligence/VLM-1-K3",
     "VLM-1-K2": "VortexIntelligence/VLM-1-K2",
     "VLM-1-K1": "VortexIntelligence/VLM-1-K1",
@@ -39,7 +38,7 @@ def generate_response(message, history, model_choice):
 
 # Create the Gradio interface
 with gr.Blocks() as demo:
-    model_choice = gr.Dropdown(choices=list(model_options.keys()), label="Select Model", value="VML-1.1-K1-mini")
+    model_choice = gr.Dropdown(choices=list(model_options.keys()), label="Select Model", value="VLM-1-K3")
     chatbot = gr.ChatInterface(
         lambda message, history: generate_response(message, history, model_choice.value),
         theme="soft",