wabang committed · Commit a0333c0 · verified · Parent: 6406ce3

Update app.py

Files changed (1): app.py (+2, -2)
app.py CHANGED
@@ -16,7 +16,7 @@ else:
     print("HF_TOKEN 환경 변수가 설정되지 않았습니다.")
 
 # model, tokenizer 셋팅
-model_name = "meta-llama/Llama-2-7b-chat-hf"
+model_name = "meta-llama/Meta-Llama-3.1-8B-Instruct"
 tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token)
 model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto", token=hf_token)
 
@@ -66,7 +66,7 @@ iface = gr.Interface(
     fn=run_kmmlu_test,
     inputs=gr.Dropdown(choices=subjects, label="주제 선택"),
     outputs="text",
-    title="Llama 2를 이용한 KMMLU 테스트",
+    title="Llama 3를 이용한 KMMLU 테스트",
     description="선택한 주제에 대해 KMMLU 테스트를 실행합니다."
 )
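
For reference, below is a minimal, self-contained sketch of how the updated setup fits together after this commit. The subjects list and the run_kmmlu_test stub are hypothetical placeholders standing in for the unchanged parts of app.py; only the model_name, the tokenizer/model loading calls, and the gr.Interface fields mirror the diff above.

```python
import os

import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Read the Hugging Face access token from the environment (the model repo is gated).
hf_token = os.environ.get("HF_TOKEN")
if not hf_token:
    print("HF_TOKEN 환경 변수가 설정되지 않았습니다.")  # "HF_TOKEN environment variable is not set."

# model, tokenizer setup (updated from Llama 2 to Llama 3.1 in this commit)
model_name = "meta-llama/Meta-Llama-3.1-8B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token)
model = AutoModelForCausalLM.from_pretrained(
    model_name, torch_dtype=torch.float16, device_map="auto", token=hf_token
)

# Hypothetical placeholders: the real subject list and test runner are defined
# in the unchanged portion of app.py.
subjects = ["Accounting", "Biology"]

def run_kmmlu_test(subject: str) -> str:
    return f"KMMLU test for '{subject}' would run here."

iface = gr.Interface(
    fn=run_kmmlu_test,
    inputs=gr.Dropdown(choices=subjects, label="주제 선택"),  # "Select subject"
    outputs="text",
    title="Llama 3를 이용한 KMMLU 테스트",  # "KMMLU test with Llama 3"
    description="선택한 주제에 대해 KMMLU 테스트를 실행합니다.",  # "Runs the KMMLU test for the selected subject."
)

if __name__ == "__main__":
    iface.launch()
```

Note that meta-llama/Meta-Llama-3.1-8B-Instruct is a gated repository on the Hugging Face Hub, so the HF_TOKEN used here must belong to an account that has been granted access to that model.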