LPX55 committed on
Commit
3db3041
·
verified ·
1 Parent(s): 45a6514

Update sam2_mask.py

Browse files
Files changed (1) hide show
  1. sam2_mask.py +31 -35
sam2_mask.py CHANGED
@@ -111,8 +111,8 @@ def show_masks(image, masks, scores, point_coords=None, box_coords=None, input_l
111
  @spaces.GPU()
112
  def sam_process(original_image, points, labels):
113
 
114
- print(f"Points: {points}")
115
- print(f"Labels: {labels}")
116
  image = Image.open(original_image)
117
  image = np.array(image.convert("RGB"))
118
 
@@ -120,7 +120,7 @@ def sam_process(original_image, points, labels):
120
  print("No points or labels provided, returning None")
121
  return None
122
  # Convert image to numpy array for SAM2 processing
123
- image = np.array(original_image)
124
  predictor = SAM2ImagePredictor.from_pretrained("facebook/sam2.1-hiera-large")
125
  predictor.set_image(image)
126
  input_point = np.array(points.value)
@@ -166,38 +166,34 @@ def create_sam2_tab():
166
  with gr.Column():
167
  output_image = gr.Image("Segmented Output")
168
  output_result_mask = gr.Image()
169
-
170
 
171
-
172
-
173
-
174
- # Event handlers
175
- points_map.upload(
176
- fn = preprocess_image,
177
- inputs = [points_map],
178
- outputs=[sam_input_image, first_frame, tracking_points, trackings_input_label],
179
- # outputs = [first_frame_path, tracking_points, trackings_input_label, input_image]
180
- queue=False
181
- )
182
-
183
- clear_button.click(
184
- lambda img: ([], [], img),
185
- inputs=first_frame,
186
- outputs=[tracking_points, trackings_input_label, points_map],
187
- queue=False
188
- )
189
-
190
- points_map.select(
191
- get_point,
192
- inputs=[point_type, tracking_points, trackings_input_label, first_frame],
193
- outputs=[tracking_points, trackings_input_label, points_map],
194
- queue = False
195
- )
196
-
197
- submit_button.click(
198
- sam_process,
199
- inputs=[sam_input_image, tracking_points, trackings_input_label],
200
- outputs = [output_image, output_result_mask]
201
- )
202
 
203
  return sam_input_image, points_map, output_image
 
111
  @spaces.GPU()
112
  def sam_process(original_image, points, labels):
113
 
114
+ print(f"Points: {points.value}")
115
+ print(f"Labels: {labels.value}")
116
  image = Image.open(original_image)
117
  image = np.array(image.convert("RGB"))
118
 
 
120
  print("No points or labels provided, returning None")
121
  return None
122
  # Convert image to numpy array for SAM2 processing
123
+ # image = np.array(original_image)
124
  predictor = SAM2ImagePredictor.from_pretrained("facebook/sam2.1-hiera-large")
125
  predictor.set_image(image)
126
  input_point = np.array(points.value)
 
166
  with gr.Column():
167
  output_image = gr.Image("Segmented Output")
168
  output_result_mask = gr.Image()
 
169
 
170
+ # Event handlers
171
+ points_map.upload(
172
+ fn = preprocess_image,
173
+ inputs = [points_map],
174
+ outputs=[sam_input_image, first_frame, tracking_points, trackings_input_label],
175
+ # outputs = [first_frame_path, tracking_points, trackings_input_label, input_image]
176
+ queue=False
177
+ )
178
+
179
+ clear_button.click(
180
+ lambda img: ([], [], img),
181
+ inputs=first_frame,
182
+ outputs=[tracking_points, trackings_input_label, points_map],
183
+ queue=False
184
+ )
185
+
186
+ points_map.select(
187
+ get_point,
188
+ inputs=[point_type, tracking_points, trackings_input_label, first_frame],
189
+ outputs=[tracking_points, trackings_input_label, points_map],
190
+ queue = False
191
+ )
192
+
193
+ submit_button.click(
194
+ sam_process,
195
+ inputs=[sam_input_image, tracking_points, trackings_input_label],
196
+ outputs = [output_image, output_result_mask]
197
+ )
 
 
 
198
 
199
  return sam_input_image, points_map, output_image