Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -2,8 +2,8 @@ import gradio as gr
|
|
| 2 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
| 3 |
|
| 4 |
# Load tokenizer and model
|
| 5 |
-
tokenizer = AutoTokenizer.from_pretrained("TuringsSolutions/Gemma2LegalEdition")
|
| 6 |
-
model = AutoModelForCausalLM.from_pretrained("TuringsSolutions/Gemma2LegalEdition")
|
| 7 |
|
| 8 |
def predict(prompt, temperature, max_tokens):
|
| 9 |
inputs = tokenizer(prompt, return_tensors="pt")
|
|
|
|
| 2 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
| 3 |
|
| 4 |
# Load tokenizer and model
|
| 5 |
+
tokenizer = AutoTokenizer.from_pretrained("TuringsSolutions/Gemma2LegalEdition", trust_remote_code=True)
|
| 6 |
+
model = AutoModelForCausalLM.from_pretrained("TuringsSolutions/Gemma2LegalEdition", trust_remote_code=True)
|
| 7 |
|
| 8 |
def predict(prompt, temperature, max_tokens):
|
| 9 |
inputs = tokenizer(prompt, return_tensors="pt")
|