stalyn314 committed
Commit bf6140a (verified) · Parent(s): 6670c52

Update app.py

Files changed (1): app.py (+23, -19)
app.py CHANGED

@@ -55,27 +55,28 @@ for model in models:
     model["cfg"].MODEL.DEVICE = "cpu"
 
 
-def inference(images, min_score, model_name):
-    results = []
-    for image in images:
-        # If the image is a PIL image, convert it to NumPy
-        if isinstance(image, np.ndarray):
-            im = image[:, :, ::-1]  # Convert from RGB to BGR
-        else:
-            im = np.array(image)[:, :, ::-1]  # If PIL, convert to NumPy and switch to BGR
-
-        model_id = model_name_to_id[model_name]
-
-        models[model_id]["cfg"].MODEL.ROI_HEADS.SCORE_THRESH_TEST = min_score
-        predictor = DefaultPredictor(models[model_id]["cfg"])
-
-        outputs = predictor(im)
-
-        v = Visualizer(im, models[model_id]["metadata"], scale=1.2)
-        out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
-
-        results.append(out.get_image())
-    return results
+def inference(image_url, image, min_score, model_name):
+    if image_url:
+        r = requests.get(image_url)
+        if r:
+            im = np.frombuffer(r.content, dtype="uint8")
+            im = cv2.imdecode(im, cv2.IMREAD_COLOR_BGR2RGB)
+    else:
+        # Model expects BGR!
+        im = image[:, :, ::-1]
+
+    model_id = model_name_to_id[model_name]
+
+    models[model_id]["cfg"].MODEL.ROI_HEADS.SCORE_THRESH_TEST = min_score
+    predictor = DefaultPredictor(models[model_id]["cfg"])
+
+    outputs = predictor(im)
+
+    v = Visualizer(im, models[model_id]["metadata"], scale=1.2)
+    out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
+
+    return out.get_image()
 
 title = "# DBMDZ Detectron2 Model Demo"
 description = """
 
@@ -90,18 +91,21 @@ with gr.Blocks() as demo:
     gr.Markdown(title)
     gr.Markdown(description)
 
+    with gr.Tab("From URL"):
+        url_input = gr.Textbox(label="Image URL", placeholder="https://api.digitale-sammlungen.de/iiif/image/v2/bsb10483966_00008/full/500,/0/default.jpg")
+
     with gr.Tab("From Image"):
-        image_input = gr.Image(type="numpy", label="Input Image")
+        image_input = gr.Gallery(type="numpy", label="Input Image")
 
     min_score = gr.Slider(minimum=0.0, maximum=1.0, value=0.5, label="Minimum score")
 
     model_name = gr.Radio(choices=[model["name"] for model in models], value=models[0]["name"], label="Select Detectron2 model")
 
-    output_gallery = gr.Gallery(label="Output Images")
+    output_image = gr.Gallery(type="pil", label="Output")
 
     inference_button = gr.Button("Submit")
 
-    inference_button.click(fn=inference, inputs=[url_input, image_input, min_score, model_name], outputs=output_image)
+    inference_button.click(fn=inference, inputs=[url_input, image_input, min_score, model_name], outputs=output_image)
 
     gr.Markdown(footer)
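For reference, here is a self-contained sketch of the Blocks layout this hunk builds, with a stub inference function so it runs without Detectron2. It deliberately uses `gr.Image` for the upload input and the output instead of `gr.Gallery`: depending on the Gradio version, a Gallery used as an input passes a list rather than a single NumPy array, which would not match the `image[:, :, ::-1]` slicing in the new handler. The stub `models` list and `inference` body are illustrative only, not the committed code.

```python
import gradio as gr
import numpy as np

# Illustrative stand-in; the real app builds `models` from Detectron2 configs.
models = [{"name": "demo-model"}]


def inference(image_url, image, min_score, model_name):
    # Stub: the real handler runs a Detectron2 DefaultPredictor and returns a visualization.
    return image if image is not None else np.zeros((64, 64, 3), dtype="uint8")


with gr.Blocks() as demo:
    with gr.Tab("From URL"):
        url_input = gr.Textbox(label="Image URL")

    with gr.Tab("From Image"):
        image_input = gr.Image(type="numpy", label="Input Image")

    min_score = gr.Slider(minimum=0.0, maximum=1.0, value=0.5, label="Minimum score")
    model_name = gr.Radio(
        choices=[m["name"] for m in models],
        value=models[0]["name"],
        label="Select Detectron2 model",
    )
    output_image = gr.Image(label="Output")
    inference_button = gr.Button("Submit")

    # Wire the button to the handler, mirroring the inputs/outputs order in the diff.
    inference_button.click(
        fn=inference,
        inputs=[url_input, image_input, min_score, model_name],
        outputs=output_image,
    )

if __name__ == "__main__":
    demo.launch()
```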