File size: 519 Bytes
faabbd4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
from transformers import AutoModelForCausalLM, AutoTokenizer
from huggingface_hub import HfApi, HfFolder
import torch
import os
import glob
from tqdm import tqdm

# Load a fine-tuned checkpoint from disk and publish it to the Hugging Face Hub.
output_dir = "./checkpoint-200"  # local checkpoint directory produced by training
# Single source of truth for the target repo: the same id must be used for both
# the model and tokenizer pushes, or they end up in different repositories.
repo_id = "homebrewltd/Ichigo-Qwen2.5-32B-s-base"

# bfloat16 keeps the (presumed) training dtype and halves memory vs float32.
model = AutoModelForCausalLM.from_pretrained(output_dir, torch_dtype=torch.bfloat16)
print(model)  # sanity check: show the architecture before uploading
tokenizer = AutoTokenizer.from_pretrained(output_dir)

# Upload model weights and tokenizer to the Hub (requires an authenticated
# Hugging Face token, e.g. via `huggingface-cli login`).
model.push_to_hub(repo_id)
tokenizer.push_to_hub(repo_id)