Update app.py
app.py CHANGED
@@ -3,7 +3,6 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
 from peft import PeftModel
 import torch
 import os
-from huggingface_hub import login
 
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
@@ -11,10 +10,8 @@ For more information on `huggingface_hub` Inference API support, please check th
 
 # Set your model and adapter paths
 API_KEY = os.environ.get("ACESS_TOKEN")
-BASE_MODEL = "
-PEFT_ADAPTER = "asdc/
-
-login(token=API_KEY)
+BASE_MODEL = "meta-llama/Meta-Llama-3-8B"
+PEFT_ADAPTER = "asdc/Llama-3-8B-multilingual-temporal-expression-normalization"
 
 tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL, token=API_KEY)
 base_model = AutoModelForCausalLM.from_pretrained(
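
The diff cuts off in the middle of the base-model loading call. For context, the remainder of a loading step like this usually follows the standard transformers + peft pattern shown below. This is a minimal sketch, not code from the Space itself: the dtype/device_map arguments, the PeftModel.from_pretrained call, and the pipeline setup are assumptions.

# Sketch only -- assumed continuation, not the Space's actual code.
base_model = AutoModelForCausalLM.from_pretrained(
    BASE_MODEL,
    token=API_KEY,
    torch_dtype=torch.float16,  # assumed dtype; the repo may use a different one
    device_map="auto",          # assumed; requires the accelerate package
)

# Attach the PEFT/LoRA adapter weights on top of the base model.
model = PeftModel.from_pretrained(base_model, PEFT_ADAPTER, token=API_KEY)

# Build a text-generation pipeline from the adapted model and tokenizer.
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)

With variables like these, the change in this commit simply fills in the previously empty BASE_MODEL and PEFT_ADAPTER identifiers and drops the explicit huggingface_hub login, relying on the token argument passed to from_pretrained instead.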