Update app.py
app.py CHANGED

@@ -50,7 +50,7 @@ DEVICE = 'cuda' if torch.cuda.is_available() else 'cpu'
 DEFAULT_MODEL_PATH = "Gen-Verse/MMaDA-8B-Base" # Default
 MASK_ID = 126336
 MODEL = MMadaModelLM.from_pretrained(DEFAULT_MODEL_PATH, trust_remote_code=True, torch_dtype=torch.bfloat16).to(DEVICE).eval()
-TOKENIZER =
+TOKENIZER = AutoTokenizer.from_pretrained(DEFAULT_MODEL_PATH, trust_remote_code=True)
 uni_prompting = UniversalPrompting(TOKENIZER, max_text_len=512, special_tokens=("<|soi|>", "<|eoi|>", "<|sov|>", "<|eov|>", "<|t2i|>", "<|mmu|>", "<|t2v|>", "<|v2v|>", "<|lvg|>"), ignore_id=-100, cond_dropout_prob=0.1, use_reserved_token=True)
 VQ_MODEL = MAGVITv2().from_pretrained("showlab/magvitv2").to(DEVICE)
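For context, below is a minimal sketch of what the changed lines rely on in order to run. The `torch` and `transformers.AutoTokenizer` imports are standard; the module paths for `MMadaModelLM`, `MAGVITv2`, and `UniversalPrompting` are assumptions based on the public MMaDA codebase and are not confirmed by this diff.

# Minimal sketch of the surrounding setup, under the assumptions stated above.
import torch
from transformers import AutoTokenizer
from models import MMadaModelLM, MAGVITv2                 # assumed module path
from training.prompting_utils import UniversalPrompting   # assumed module path

DEVICE = 'cuda' if torch.cuda.is_available() else 'cpu'
DEFAULT_MODEL_PATH = "Gen-Verse/MMaDA-8B-Base"

# The fix in this commit: load the tokenizer from the same checkpoint as the model,
# replacing the previously incomplete assignment that broke the app at import time.
TOKENIZER = AutoTokenizer.from_pretrained(DEFAULT_MODEL_PATH, trust_remote_code=True)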