Update app.py
app.py CHANGED
@@ -7,13 +7,14 @@ import torch
 model_name = "meta-llama/Meta-Llama-3.1-8B-Instruct"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")
+# Get the token from the environment variable
+hf_token = os.environ.get("HF_TOKEN",None)
 
 # Hugging Face login (to fetch the token)
 from huggingface_hub import login
 import os
 
-
-hf_token = os.environ.get("llama3.31-8b-token-new")
+
 
 # Log in using the token
 if hf_token:
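For context, here is a minimal sketch of the flow this commit moves toward: read the access token from the HF_TOKEN environment variable (for example a Space secret), log in via huggingface_hub, and only then load the gated Llama 3.1 checkpoint. The secret name and the ordering (login before from_pretrained) are illustrative assumptions, not necessarily the final contents of the author's app.py.

import os

import torch
from huggingface_hub import login
from transformers import AutoModelForCausalLM, AutoTokenizer

# Read the access token from the environment (assumed Space secret named HF_TOKEN)
hf_token = os.environ.get("HF_TOKEN", None)

# Log in only when a token is present, so runs without a token can still start
if hf_token:
    login(token=hf_token)

# Loading a gated model such as Llama 3.1 requires an authorized account
model_name = "meta-llama/Meta-Llama-3.1-8B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,
    device_map="auto",
)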