Commit 5017c3c · Removed blip
Parent: cc31752

app.py CHANGED
@@ -43,8 +43,8 @@ from GroundingDINO.groundingdino.util.utils import clean_state_dict, get_phrases
 # segment anything
 from segment_anything import build_sam_vit_l, SamPredictor
 
-# BLIP
-from transformers import BlipProcessor, BlipForConditionalGeneration
+# # BLIP
+# from transformers import BlipProcessor, BlipForConditionalGeneration
 
 # Constants
 CONFIG_FILE = 'GroundingDINO/groundingdino/config/GroundingDINO_SwinT_OGC.py'
@@ -55,9 +55,7 @@ OUTPUT_DIR = "outputs"
 # Global variables for model caching
 _models = {
     'groundingdino': None,
-    'sam_predictor': None,
-    'blip_processor': None,
-    'blip_model': None
+    'sam_predictor': None
 }
 
 # Enable GPU if available with proper error handling
@@ -107,16 +105,16 @@ class ModelManager:
 
                 logger.info(f"SAM-HQ model loaded in {time.time() - start_time:.2f} seconds")
 
-            elif model_name == 'blip' and (_models['blip_processor'] is None or _models['blip_model'] is None):
-                logger.info("Loading BLIP model...")
-                start_time = time.time()
+            # elif model_name == 'blip' and (_models['blip_processor'] is None or _models['blip_model'] is None):
+            #     logger.info("Loading BLIP model...")
+            #     start_time = time.time()
 
-                _models['blip_processor'] = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-large")
-                _models['blip_model'] = BlipForConditionalGeneration.from_pretrained(
-                    "Salesforce/blip-image-captioning-large", torch_dtype=torch.float16
-                ).to(device)
+            #     _models['blip_processor'] = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-large")
+            #     _models['blip_model'] = BlipForConditionalGeneration.from_pretrained(
+            #         "Salesforce/blip-image-captioning-large", torch_dtype=torch.float16
+            #     ).to(device)
 
-                logger.info(f"BLIP model loaded in {time.time() - start_time:.2f} seconds")
+            #     logger.info(f"BLIP model loaded in {time.time() - start_time:.2f} seconds")
 
         except Exception as e:
             logger.error(f"Error loading {model_name} model: {e}")
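For reference, the code path disabled by this commit was a lazily loaded, cached BLIP captioning step. Below is a minimal sketch of that pattern using the standard Hugging Face transformers BLIP API and the same "Salesforce/blip-image-captioning-large" checkpoint referenced in the removed lines; the helper name caption_image and the module-level cache variables are illustrative and do not come from app.py.

# Minimal sketch of the lazy-loaded BLIP captioning path this commit disables.
# The helper name `caption_image` and the cache variables are illustrative only.
import torch
from PIL import Image
from transformers import BlipProcessor, BlipForConditionalGeneration

device = "cuda" if torch.cuda.is_available() else "cpu"

_blip_processor = None
_blip_model = None

def caption_image(image: Image.Image) -> str:
    """Load BLIP once, cache it, and generate a caption for a PIL image."""
    global _blip_processor, _blip_model
    if _blip_processor is None or _blip_model is None:
        _blip_processor = BlipProcessor.from_pretrained(
            "Salesforce/blip-image-captioning-large"
        )
        _blip_model = BlipForConditionalGeneration.from_pretrained(
            "Salesforce/blip-image-captioning-large",
            # float16 only makes sense on GPU; fall back to float32 on CPU
            torch_dtype=torch.float16 if device == "cuda" else torch.float32,
        ).to(device)
    inputs = _blip_processor(images=image, return_tensors="pt").to(
        device, _blip_model.dtype
    )
    out = _blip_model.generate(**inputs, max_new_tokens=30)
    return _blip_processor.decode(out[0], skip_special_tokens=True)

The first call pays the one-time load cost and later calls reuse the cached processor and model, which is the same caching idea the _models dictionary in app.py keeps for GroundingDINO and SAM.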