update tokenizer
app_dialogue.py CHANGED (+9 -1)
@@ -26,18 +26,26 @@ from transformers import AutoImageProcessor, TextIteratorStreamer
 from transformers import AutoModelForVision2Seq
 
 
+# model_name_or_path = "Salesforce/xgen-mm-phi3-mini-instruct-interleave-r-v1.5"
+# model = AutoModelForVision2Seq.from_pretrained(model_name_or_path, trust_remote_code=True)
+# tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, trust_remote_code=True, use_fast=False, legacy=False)
+# image_processor = AutoImageProcessor.from_pretrained(model_name_or_path, trust_remote_code=True)
+# tokenizer = model.update_special_tokens(tokenizer)
+
 DEVICE = torch.device("cuda")
 MODELS = {
     "xgen-mm-phi3-mini-instruct-interleave-r-v1.5": AutoModelForVision2Seq.from_pretrained(
         "Salesforce/xgen-mm-phi3-mini-instruct-interleave-r-v1.5",
-        torch_dtype=torch.bfloat16,
+        # torch_dtype=torch.bfloat16,
         # _attn_implementation="flash_attention_2",
         trust_remote_code=True
     ).to(DEVICE),
 }
+TOKENIZER = AutoTokenizer.from_pretrained(model_name_or_path, trust_remote_code=True, use_fast=False, legacy=False)
 PROCESSOR = AutoImageProcessor.from_pretrained(
     "Salesforce/xgen-mm-phi3-mini-instruct-interleave-r-v1.5", trust_remote_code=True
 )
+TOKENIZER = model.update_special_tokens(TOKENIZER)
 
 SYSTEM_PROMPT = [
     {
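Note that the two added TOKENIZER lines reference model_name_or_path and model, which only exist in the commented-out block above, so as committed they would raise a NameError at import time. A minimal, self-contained sketch of the setup the commit appears to aim for, assuming the same checkpoint and the update_special_tokens helper exposed by its remote code (variable names here are illustrative, not the Space's actual code):

import torch
from transformers import AutoModelForVision2Seq, AutoTokenizer, AutoImageProcessor

MODEL_ID = "Salesforce/xgen-mm-phi3-mini-instruct-interleave-r-v1.5"
DEVICE = torch.device("cuda")

# Load the vision-to-sequence model under a named variable so the tokenizer
# update below has something to call.
MODEL = AutoModelForVision2Seq.from_pretrained(
    MODEL_ID,
    trust_remote_code=True,
).to(DEVICE)

# Slow tokenizer, as in the commented-out usage lines; legacy=False opts out of
# the old SentencePiece behaviour.
TOKENIZER = AutoTokenizer.from_pretrained(
    MODEL_ID, trust_remote_code=True, use_fast=False, legacy=False
)
PROCESSOR = AutoImageProcessor.from_pretrained(MODEL_ID, trust_remote_code=True)

# update_special_tokens is a helper defined by the checkpoint's remote code; it
# registers the model's special/image tokens on the tokenizer and returns it.
TOKENIZER = MODEL.update_special_tokens(TOKENIZER)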