Tech-Meld committed
Commit b366dea · verified · 1 parent: d69301c

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -4,7 +4,7 @@ import time
 import random
 
 # Load the model and tokenizer for GGUF
-model_id = "Tech-Meld/Hajax_Chat_1.0-Q3_K_S-GGUF"
+model_id = "nvidia/Llama3-ChatQA-1.5-8B"
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 model = AutoModelForCausalLM.from_pretrained(model_id)
 
@@ -78,10 +78,10 @@ iface = gr.Interface(
         gr.Slider(label="Max length", minimum=10, maximum=1000, step=10, value=250),
     ],
     outputs=[
-        gr.TextArea(label="AI Response:", lines=10),
+        gr.TextArea(label="Response:", lines=10),
         gr.Label(label="Text Analysis", elem_id="analysis"),
     ],
-    title="Chat with AI",
+    title="Chat with Hajax",
     description="Engage in a conversation with our advanced model. Customize the response using various parameters.",
     theme="default",  # Use a custom theme to override the default Gradio styling
     css=css,  # Apply the CSS styles defined earlier
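
For context, a minimal sketch of how the model-loading section of app.py reads after this commit. It assumes the standard `transformers` imports; the rest of app.py (the Gradio interface setup, generation function, and CSS) is not shown in this diff.

# Minimal sketch of the loading code as of this commit.
# Assumes `transformers` is installed; import names are an assumption,
# since the import lines are outside the diff hunks shown above.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "nvidia/Llama3-ChatQA-1.5-8B"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)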