ziwaixian009 committed on
Commit c8ccf65 (verified) · 1 parent: 88e0950

Update dam/model/language_model/builder.py

dam/model/language_model/builder.py CHANGED
@@ -73,9 +73,12 @@ def build_llm_and_tokenizer(
     llm_cfg.model_max_length = model_max_length
     if model_max_length is not None:
         context_length_extension(llm_cfg)
-
+    # model_name_or_path, config=llm_cfg, torch_dtype=eval(config.model_dtype), *args, **kwargs
     llm = AutoModelForCausalLM.from_pretrained(
-        model_name_or_path, config=llm_cfg, torch_dtype=eval(config.model_dtype), *args, **kwargs
+
+        llm_cfg["model_name_or_path"],
+        device_map="cpu",
+        torch_dtype=torch.float32  # avoid errors caused by loading fp16 weights
     )
 
     llm_path = model_name_or_path
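
For context, the change forces the language model to load entirely on CPU in float32 rather than with the checkpoint's fp16 weights. Below is a minimal standalone sketch of an equivalent AutoModelForCausalLM.from_pretrained call; the checkpoint name and the AutoConfig setup are illustrative assumptions and are not taken from builder.py.

import torch
from transformers import AutoConfig, AutoModelForCausalLM

# Illustrative placeholder; in builder.py the path comes from llm_cfg["model_name_or_path"].
model_name_or_path = "lmsys/vicuna-7b-v1.5"

llm_cfg = AutoConfig.from_pretrained(model_name_or_path)
llm_cfg.model_max_length = 4096  # assumed value for this sketch

# Force CPU placement and float32 so fp16-only weights do not trigger
# dtype/device errors on machines without half-precision support.
llm = AutoModelForCausalLM.from_pretrained(
    model_name_or_path,
    config=llm_cfg,
    device_map="cpu",
    torch_dtype=torch.float32,
)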