Update sam2_mask.py
sam2_mask.py CHANGED (+31 -35)
@@ -111,8 +111,8 @@ def show_masks(image, masks, scores, point_coords=None, box_coords=None, input_l
 @spaces.GPU()
 def sam_process(original_image, points, labels):
 
-    print(f"Points: {points}")
-    print(f"Labels: {labels}")
+    print(f"Points: {points.value}")
+    print(f"Labels: {labels.value}")
     image = Image.open(original_image)
     image = np.array(image.convert("RGB"))
 
@@ -120,7 +120,7 @@ def sam_process(original_image, points, labels):
         print("No points or labels provided, returning None")
         return None
     # Convert image to numpy array for SAM2 processing
-    image = np.array(original_image)
+    # image = np.array(original_image)
     predictor = SAM2ImagePredictor.from_pretrained("facebook/sam2.1-hiera-large")
     predictor.set_image(image)
     input_point = np.array(points.value)
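Note on the second hunk: the `image = np.array(original_image)` conversion is commented out rather than kept, because `image` is already an RGB NumPy array from the conversion at lines 116-117 in the first hunk. After `predictor.set_image(image)`, the function presumably calls `predict` with the collected prompts. A minimal sketch of the standard SAM2 point-prompt flow, assuming the `sam2` package is installed; the file name and click coordinates below are illustrative, not from the commit:

import numpy as np
import torch
from PIL import Image
from sam2.sam2_image_predictor import SAM2ImagePredictor

# Illustrative inputs: one RGB image and a single foreground click
image = np.array(Image.open("example.jpg").convert("RGB"))
input_point = np.array([[250, 180]])   # (N, 2) pixel (x, y) coordinates
input_label = np.array([1])            # (N,)  1 = foreground, 0 = background

predictor = SAM2ImagePredictor.from_pretrained("facebook/sam2.1-hiera-large")
with torch.inference_mode():
    predictor.set_image(image)
    masks, scores, _ = predictor.predict(
        point_coords=input_point,
        point_labels=input_label,
        multimask_output=True,  # return several candidate masks with scores
    )
best_mask = masks[np.argmax(scores)]   # keep the highest-scoring candidate

The third hunk, below, replaces a block of leftover fragments in `create_sam2_tab()` with the tab's event handlers.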
@@ -166,38 +166,34 @@ def create_sam2_tab():
         with gr.Column():
             output_image = gr.Image("Segmented Output")
             output_result_mask = gr.Image()
-
 
-
-
-
-
-
-
-
-
-
-            )
-
-
-
-
-
-
-
-
-            points_map
-
-
-
-
-
-
-
-
-            inputs=[sam_input_image, tracking_points, trackings_input_label],
-            outputs = [output_image, output_result_mask]
-            )
+        # Event handlers
+        points_map.upload(
+            fn=preprocess_image,
+            inputs=[points_map],
+            outputs=[sam_input_image, first_frame, tracking_points, trackings_input_label],
+            # outputs = [first_frame_path, tracking_points, trackings_input_label, input_image]
+            queue=False
+        )
+
+        clear_button.click(
+            lambda img: ([], [], img),
+            inputs=first_frame,
+            outputs=[tracking_points, trackings_input_label, points_map],
+            queue=False
+        )
+
+        points_map.select(
+            get_point,
+            inputs=[point_type, tracking_points, trackings_input_label, first_frame],
+            outputs=[tracking_points, trackings_input_label, points_map],
+            queue=False
+        )
+
+        submit_button.click(
+            sam_process,
+            inputs=[sam_input_image, tracking_points, trackings_input_label],
+            outputs=[output_image, output_result_mask]
+        )
 
     return sam_input_image, points_map, output_image
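The handlers wired above depend on `preprocess_image` and `get_point`, which are defined elsewhere in the file. For readers unfamiliar with the pattern: a Gradio `.select()` listener receives a `gr.SelectData` event whose `.index` holds the clicked (x, y) pixel, and `gr.State` components carry the accumulated clicks between events. A stripped-down sketch of that mechanism, not the Space's actual code; component names only mirror the diff:

import gradio as gr

def get_point(point_type, points, labels, img, evt: gr.SelectData):
    # evt.index is the (x, y) pixel position of the click on the image
    points.append(evt.index)
    labels.append(1 if point_type == "include" else 0)
    return points, labels, img

with gr.Blocks() as demo:
    point_type = gr.Radio(["include", "exclude"], value="include", label="Point type")
    points_map = gr.Image(type="filepath", label="Click to add points")
    tracking_points = gr.State([])
    trackings_input_label = gr.State([])

    points_map.select(
        get_point,
        inputs=[point_type, tracking_points, trackings_input_label, points_map],
        outputs=[tracking_points, trackings_input_label, points_map],
        queue=False,
    )

demo.launch()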
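`submit_button.click` routes the two return values of `sam_process` to `output_image` and `output_result_mask`. A hedged sketch of producing those two outputs from a boolean mask; the helper name and overlay color are illustrative, not part of the commit:

import numpy as np
from PIL import Image

def masks_to_outputs(image: np.ndarray, mask: np.ndarray):
    """Build (overlay, binary mask) images for the two Gradio outputs."""
    mask_bool = mask.astype(bool)
    # Binary mask rendered white-on-black
    mask_img = Image.fromarray(mask_bool.astype(np.uint8) * 255)
    # Blend a flat green into the masked region at 50% opacity
    overlay = image.copy()
    overlay[mask_bool] = (0.5 * overlay[mask_bool] + 0.5 * np.array([0, 255, 0])).astype(np.uint8)
    return Image.fromarray(overlay), mask_img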