Try to use the 1.3B version instead of the 7B
app.py CHANGED

@@ -3,7 +3,7 @@ from deepseek_vl.models import VLChatProcessor, MultiModalityCausalLM
 from deepseek_vl.utils.io import load_pil_images
 import torch
 
-model_path = "deepseek-ai/deepseek-vl-7b-chat"
+model_path = "deepseek-ai/deepseek-vl-1.3b-chat"
 vl_chat_processor = VLChatProcessor.from_pretrained(model_path)
 tokenizer = vl_chat_processor.tokenizer
 vl_gpt = MultiModalityCausalLM.from_pretrained(model_path, trust_remote_code=True).to("cpu")
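
For context, here is a minimal sketch of how the processor and model loaded in app.py are typically used for a single chat turn, following the DeepSeek-VL example usage. The prompt text, the ./cat.png image path, and the generation settings are placeholders and are not taken from this Space's code.

# Minimal usage sketch for the 1.3B chat model on CPU.
# Assumptions: the deepseek_vl package is installed and ./cat.png is a
# placeholder image path (replace with a real file).
import torch

from deepseek_vl.models import VLChatProcessor, MultiModalityCausalLM
from deepseek_vl.utils.io import load_pil_images

model_path = "deepseek-ai/deepseek-vl-1.3b-chat"
vl_chat_processor = VLChatProcessor.from_pretrained(model_path)
tokenizer = vl_chat_processor.tokenizer
vl_gpt = MultiModalityCausalLM.from_pretrained(
    model_path, trust_remote_code=True
).to("cpu").eval()

# A single-turn conversation; <image_placeholder> marks where the image goes.
conversation = [
    {
        "role": "User",
        "content": "<image_placeholder>Describe this image.",
        "images": ["./cat.png"],  # placeholder path
    },
    {"role": "Assistant", "content": ""},
]

# Load the PIL images referenced in the conversation and batch everything.
pil_images = load_pil_images(conversation)
prepare_inputs = vl_chat_processor(
    conversations=conversation, images=pil_images, force_batchify=True
).to(vl_gpt.device)

# Fuse image features and text tokens into input embeddings, then generate.
inputs_embeds = vl_gpt.prepare_inputs_embeds(**prepare_inputs)
outputs = vl_gpt.language_model.generate(
    inputs_embeds=inputs_embeds,
    attention_mask=prepare_inputs.attention_mask,
    pad_token_id=tokenizer.eos_token_id,
    bos_token_id=tokenizer.bos_token_id,
    eos_token_id=tokenizer.eos_token_id,
    max_new_tokens=256,  # placeholder value; tune for the use case
    do_sample=False,
    use_cache=True,
)
answer = tokenizer.decode(outputs[0].cpu().tolist(), skip_special_tokens=True)
print(answer)

Running the 1.3B checkpoint on CPU keeps memory use far below the 7B model, which is presumably the point of this change, though generation is still slow without a GPU.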