Update analysis.py
analysis.py  CHANGED  (+4 -1)
@@ -6,7 +6,10 @@ HF_TOKEN = os.getenv("HF_TOKEN")
 MODEL_NAME = "TheBloke/Mistral-7B-Instruct-v0.1-GGUF"  # Use a smaller model
 
 # Load tokenizer
-
+from transformers import AutoTokenizer
+
+# Load tokenizer from the official Mistral model
+tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct")
 
 # Load model (use torch.float16 if on GPU, otherwise use torch.float32 for CPU)
 model = AutoModelForCausalLM.from_pretrained(
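
For orientation, below is a minimal sketch of what this part of analysis.py plausibly looks like after the commit: the tokenizer is pulled from the official mistralai repository because the TheBloke GGUF repo does not ship tokenizer files. The imports, the dtype selection, and the arguments passed to AutoModelForCausalLM.from_pretrained after MODEL_NAME are assumptions for illustration only, since the diff cuts off before them.

import os

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

HF_TOKEN = os.getenv("HF_TOKEN")

MODEL_NAME = "TheBloke/Mistral-7B-Instruct-v0.1-GGUF"  # Use a smaller model

# Load tokenizer from the official Mistral model (the GGUF repo has no tokenizer files)
tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct")

# Load model (use torch.float16 if on GPU, otherwise use torch.float32 for CPU)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_NAME,
    # The arguments below are assumed; the diff does not show them.
    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
    token=HF_TOKEN,
)

Note that GGUF checkpoints are primarily intended for llama.cpp-style runtimes; whether AutoModelForCausalLM can load this repo id directly depends on the transformers version, so the model call above is kept exactly as the diff shows it rather than asserted to run unmodified.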