wabang committed
Commit 6cc26cb · verified · 1 Parent(s): f2f721f

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -4,7 +4,7 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 import torch
 
 # Load the model and tokenizer
-model_name = "meta-llama/Meta-Llama-3.1-8B"
+model_name = "meta-llama/Meta-Llama-3-8B-Instruct"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")
 
@@ -57,7 +57,7 @@ iface = gr.Interface(
     fn=run_kmmlu_test,
     inputs=gr.Dropdown(choices=subjects, label="Select subject"),
     outputs="text",
-    title="KMMLU Test with Llama 3.1",
+    title="KMMLU Test with Llama 3",
     description="Runs the KMMLU test on the selected subject."
 )
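For orientation, below is a minimal, hypothetical sketch of how the pieces visible in this diff (the model-loading block and the gr.Interface definition) could fit together into a runnable app.py. Only the lines shown in the diff come from the repository; the subjects list, the run_kmmlu_test body, the prompt format, and the generation settings are illustrative assumptions, not the Space's actual KMMLU evaluation logic.

# Sketch only: reconstructs the surrounding app.py structure under stated assumptions.
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Model name as set by this commit (switches to the Llama 3 Instruct variant).
model_name = "meta-llama/Meta-Llama-3-8B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(
    model_name, torch_dtype=torch.float16, device_map="auto"
)

# Hypothetical placeholder list; the real Space defines its own KMMLU subjects.
subjects = ["Accounting", "Biology"]

def run_kmmlu_test(subject: str) -> str:
    # Hypothetical single-prompt probe; a real KMMLU run would iterate over
    # the benchmark's multiple-choice items for the chosen subject and score them.
    prompt = f"Answer the following {subject} question with A, B, C, or D.\n"
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    with torch.no_grad():
        output_ids = model.generate(**inputs, max_new_tokens=32)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

# Gradio interface matching the fields visible in the second diff hunk.
iface = gr.Interface(
    fn=run_kmmlu_test,
    inputs=gr.Dropdown(choices=subjects, label="Select subject"),
    outputs="text",
    title="KMMLU Test with Llama 3",
    description="Runs the KMMLU test on the selected subject.",
)

if __name__ == "__main__":
    iface.launch()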