stalyn314 committed · verified
Commit c4bf26d · Parent: a8b50ec

Update app.py

Files changed (1): app.py (+20, -25)
app.py CHANGED
@@ -55,32 +55,30 @@ for model in models:
     model["cfg"].MODEL.DEVICE = "cpu"
 
 
-def inference(image_url, image, min_score, model_name):
-    if image_url:
-        r = requests.get(image_url)
-        if r:
-            im = np.frombuffer(r.content, dtype="uint8")
-            im = cv2.imdecode(im, cv2.IMREAD_COLOR_BGR2RGB)
-    else:
-        # Model expect BGR!
-        im = image[:,:,::-1]
-
+def inference(image, min_score, model_name):
+    results = []
+
     model_id = model_name_to_id[model_name]
-
     models[model_id]["cfg"].MODEL.ROI_HEADS.SCORE_THRESH_TEST = min_score
     predictor = DefaultPredictor(models[model_id]["cfg"])
 
-    outputs = predictor(im)
+    for img in image:
+        # Convert image from BGR to RGB if necessary
+        im = img[:,:,::-1]
 
-    v = Visualizer(im, models[model_id]["metadata"], scale=1.2)
-    out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
-
-    # Convert the output image from BGR to RGB
-    result_image = out.get_image()  # This is still in BGR
-    result_image_rgb = result_image[:, :, ::-1]  # Convert BGR to RGB
+        # Make the prediction
+        outputs = predictor(im)
 
-    return result_image_rgb
+        v = Visualizer(im, models[model_id]["metadata"], scale=1.2)
+        out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
+
+        # Convert the result from BGR to RGB
+        result_image = out.get_image()
+        result_image_rgb = result_image[:, :, ::-1]  # Convert BGR to RGB
 
+        results.append(result_image_rgb)  # Add the processed image to the list
+
+    return results  # Return all the results
 
 
 title = "# DBMDZ Detectron2 Model Demo"
 description = """
@@ -95,21 +93,18 @@ with gr.Blocks() as demo:
     gr.Markdown(title)
     gr.Markdown(description)
 
-    with gr.Tab("From URL"):
-        url_input = gr.Textbox(label="Image URL", placeholder="https://api.digitale-sammlungen.de/iiif/image/v2/bsb10483966_00008/full/500,/0/default.jpg")
-
     with gr.Tab("From Image"):
-        image_input = gr.Image(type="numpy", label="Input Image")
+        image_input = gr.Image(type="numpy", label="Input Images", elem_id="input_image", multiple=True)
 
     min_score = gr.Slider(minimum=0.0, maximum=1.0, value=0.5, label="Minimum score")
 
     model_name = gr.Radio(choices=[model["name"] for model in models], value=models[0]["name"], label="Select Detectron2 model")
 
-    output_image = gr.Image(type="pil", label="Output")
+    output_gallery = gr.Gallery(label="Output Images", elem_id="output_images").style(grid=2)
 
     inference_button = gr.Button("Submit")
 
-    inference_button.click(fn=inference, inputs=[url_input, image_input, min_score, model_name], outputs=output_image)
+    inference_button.click(fn=inference, inputs=[image_input, min_score, model_name], outputs=output_gallery)
 
     gr.Markdown(footer)
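For context, here is a minimal, self-contained sketch of the pattern this commit moves to: a list-based inference helper whose visualised results feed a gr.Gallery. It stands in a stock COCO Mask R-CNN config from the Detectron2 model zoo for the app's DBMDZ models (the models / model_name_to_id setup is defined earlier in app.py and is not part of this diff), and it keeps a single-image gr.Image input wrapped into a list, since gr.Image does not accept a multiple argument in every Gradio release. Treat it as an illustration of the change, not the app's exact code:

```python
# Illustrative sketch only: swaps the app's DBMDZ models for a model-zoo config.
import numpy as np
import gradio as gr
from detectron2 import model_zoo
from detectron2.config import get_cfg
from detectron2.data import MetadataCatalog
from detectron2.engine import DefaultPredictor
from detectron2.utils.visualizer import Visualizer

CONFIG = "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml"  # stand-in model

cfg = get_cfg()
cfg.merge_from_file(model_zoo.get_config_file(CONFIG))
cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(CONFIG)
cfg.MODEL.DEVICE = "cpu"
metadata = MetadataCatalog.get(cfg.DATASETS.TRAIN[0])


def inference(images, min_score):
    """Run detection on a list of RGB numpy arrays; return visualised RGB images."""
    cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = min_score
    predictor = DefaultPredictor(cfg)

    results = []
    for img in images:
        # DefaultPredictor expects BGR input by default, so flip the channel order.
        outputs = predictor(img[:, :, ::-1])
        # Visualizer draws on an RGB image; move predictions to CPU before drawing.
        v = Visualizer(img, metadata, scale=1.2)
        out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
        results.append(out.get_image())
    return results


with gr.Blocks() as demo:
    with gr.Tab("From Image"):
        # Single-image input; the click handler wraps it in a list so the
        # same list-based inference function can be reused.
        image_input = gr.Image(type="numpy", label="Input Image")
    min_score = gr.Slider(minimum=0.0, maximum=1.0, value=0.5, label="Minimum score")
    output_gallery = gr.Gallery(label="Output Images")
    inference_button = gr.Button("Submit")

    inference_button.click(
        fn=lambda img, score: inference([img], score),
        inputs=[image_input, min_score],
        outputs=output_gallery,
    )

if __name__ == "__main__":
    demo.launch()
```

gr.Gallery accepts a list of images as its output value, which is why the updated inference returns the whole results list instead of a single array.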