|
|
|
|
|
import argparse
from pathlib import Path

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
|
|
|
|
|
def get_args():
    """Parse command-line arguments for the Hub-push script.

    Returns:
        argparse.Namespace with ``path`` (local checkpoint directory) and
        ``hub_name`` (target Hub repository id).
    """
    parser = argparse.ArgumentParser()
    # Both flags are plain strings; argparse maps --hub-name to args.hub_name.
    for flag in ("--path", "--hub-name"):
        parser.add_argument(flag, type=str)
    return parser.parse_args()
|
|
|
|
|
def main():
    """Push a trained checkpoint (tokenizer, model, aux files) to the HF Hub.

    Reads ``--path`` (local checkpoint directory) and ``--hub-name`` (target
    repository id) from the command line; everything is pushed as private.
    """
    args = get_args()
    print(f"Args: {args}")

    _push_tokenizer(args.path, args.hub_name)
    _push_model(args.path, args.hub_name)
    _upload_aux_files(args.path, args.hub_name)


def _push_tokenizer(path, hub_name):
    """Load the tokenizer from *path* and push it to *hub_name* as private."""
    print(f"Loading tokenizer from path: {path}")
    tokenizer = AutoTokenizer.from_pretrained(path)
    print(f"Pushing the tokenizer to the Hub at {hub_name}")
    tokenizer.push_to_hub(hub_name, private=True)


def _push_model(path, hub_name):
    """Load the causal-LM weights from *path* and push them to *hub_name*."""
    print(f"Loading model from path: {path}")
    model = AutoModelForCausalLM.from_pretrained(
        path,
        return_dict=True,
        torch_dtype=torch.bfloat16,  # load in bf16 to halve memory vs fp32
        device_map="auto",  # presumably to fit large checkpoints across devices — confirm
    )
    print(f"Pushing the model to the Hub at {hub_name}")
    model.push_to_hub(hub_name, private=True)


def _upload_aux_files(path, hub_name):
    """Best-effort upload of training metadata files sitting next to the weights.

    Not every run writes all of these files, so a failed upload is logged and
    skipped rather than aborting the whole push.
    """
    from huggingface_hub import HfApi  # local import: keep heavy dep at use site

    api = HfApi()
    for fname in ("training_args.bin", "all_results.json", "eval_results.json"):
        # Path join is robust to trailing slashes, unlike f"{path}/{fname}".
        src = Path(path) / fname
        try:
            api.upload_file(
                path_or_fileobj=str(src),
                path_in_repo=fname,
                repo_id=hub_name,
                repo_type="model",
            )
        except Exception as e:  # broad by design: best-effort, report and continue
            print(f"Failed to upload {fname}: {e}")
|
|
|
# Entry point: run only when executed as a script, not on import.
if __name__ == "__main__":

    main()