app.py
CHANGED
@@ -1,11 +1,11 @@
 import gradio as gr
 from PIL import Image
 import torch
-from transformers import
+from transformers import BlipProcessor, BlipForQuestionAnswering
 
-# Load
-processor =
-model =
+# Load processor and small BLIP VQA model
+processor = BlipProcessor.from_pretrained("Salesforce/blip-vqa-base")
+model = BlipForQuestionAnswering.from_pretrained("Salesforce/blip-vqa-base")
 
 # Use CPU explicitly
 device = torch.device("cpu")
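For context, a minimal sketch of how the processor and model loaded above answer a single visual question on CPU. The image path and question below are placeholders, and the snippet follows the standard transformers BLIP VQA usage pattern rather than code taken from this diff:

import torch
from PIL import Image
from transformers import BlipProcessor, BlipForQuestionAnswering

processor = BlipProcessor.from_pretrained("Salesforce/blip-vqa-base")
model = BlipForQuestionAnswering.from_pretrained("Salesforce/blip-vqa-base")

# Run inference on CPU, matching the app's explicit device choice
device = torch.device("cpu")
model.to(device)

image = Image.open("example.jpg").convert("RGB")  # placeholder image path
question = "What color is the cat?"               # placeholder question

# Preprocess, generate an answer, and decode it back to text
inputs = processor(image, question, return_tensors="pt").to(device)
output_ids = model.generate(**inputs)
print(processor.decode(output_ids[0], skip_special_tokens=True))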