from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
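# Load the fine-tuned model and tokenizer from the local training checkpoint;
# bfloat16 keeps the memory footprint of the 32B model manageable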
output_dir = "./checkpoint-200"
model = AutoModelForCausalLM.from_pretrained(output_dir, torch_dtype=torch.bfloat16)
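# Quick sanity check: print the model architecture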
print(model)
tokenizer = AutoTokenizer.from_pretrained(output_dir)
# Push the updated model and tokenizer to the Hugging Face Hub
model.push_to_hub("homebrewltd/Ichigo-Qwen2.5-32B-s-base")
tokenizer.push_to_hub("homebrewltd/Ichigo-Qwen2.5-32B-s-base")
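# Note: push_to_hub assumes an authenticated Hugging Face token is available
# (e.g. via `huggingface-cli login` or the HF_TOKEN environment variable) with
# write access to the homebrewltd organization.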