Update app.py
Browse files
app.py
CHANGED
@@ -4,7 +4,7 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
|
|
4 |
import torch
|
5 |
|
6 |
# 모델과 토크나이저 로드
|
7 |
-
model_name = "meta-llama/Meta-Llama-3
|
8 |
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
9 |
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")
|
10 |
|
@@ -57,7 +57,7 @@ iface = gr.Interface(
|
|
57 |
fn=run_kmmlu_test,
|
58 |
inputs=gr.Dropdown(choices=subjects, label="주제 선택"),
|
59 |
outputs="text",
|
60 |
-
title="Llama 3
|
61 |
description="선택한 주제에 대해 KMMLU 테스트를 실행합니다."
|
62 |
)
|
63 |
|
|
|
4 |
import torch
|
5 |
|
6 |
# 모델과 토크나이저 로드
|
7 |
+
model_name = "meta-llama/Meta-Llama-3-8B-Instruct"
|
8 |
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
9 |
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")
|
10 |
|
|
|
57 |
fn=run_kmmlu_test,
|
58 |
inputs=gr.Dropdown(choices=subjects, label="주제 선택"),
|
59 |
outputs="text",
|
60 |
+
title="Llama 3을 이용한 KMMLU 테스트",
|
61 |
description="선택한 주제에 대해 KMMLU 테스트를 실행합니다."
|
62 |
)
|
63 |
|