aliabd (HF Staff) committed on

Commit 35826c6 · 1 Parent(s): 004cb04

Upload with huggingface_hub

Files changed (4):
  1. README.md +6 -7
  2. requirements.txt +2 -0
  3. run.ipynb +1 -0
  4. run.py +45 -0
README.md CHANGED
@@ -1,12 +1,11 @@
+
 ---
-title: Image Selections Main
-emoji:
+title: image_selections_main
+emoji: 🔥
 colorFrom: indigo
-colorTo: blue
+colorTo: indigo
 sdk: gradio
-sdk_version: 3.24.1
-app_file: app.py
+sdk_version: 3.25.1b1
+app_file: run.py
 pinned: false
 ---
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
requirements.txt ADDED
@@ -0,0 +1,2 @@
+
+https://gradio-main-build.s3.amazonaws.com/9682bc82db93f33acb68c41d37d4fdd3c80176e8/gradio-3.25.1b1-py3-none-any.whl
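
Note (not part of the commit): the new requirements line pins the same 3.25.1b1 pre-release that the README's sdk_version now declares, served as a wheel built from what appears to be main-branch commit 9682bc82 rather than a PyPI release. The Space installs whatever is listed here with pip at build time, and with app_file switched to run.py it serves that script; the presumable local equivalent is `pip install -r requirements.txt` followed by `python run.py`.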
run.ipynb ADDED
@@ -0,0 +1 @@
+{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: image_selections"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import numpy as np\n", "\n", "with gr.Blocks() as demo:\n", "    tolerance = gr.Slider(label=\"Tolerance\", info=\"How different colors can be in a segment.\", minimum=0, maximum=256*3, value=50)\n", "    with gr.Row():\n", "        input_img = gr.Image(label=\"Input\")\n", "        output_img = gr.Image(label=\"Selected Segment\")\n", "\n", "    def get_select_coords(img, tolerance, evt: gr.SelectData):\n", "        visited_pixels = set()\n", "        pixels_in_queue = set()\n", "        pixels_in_segment = set()\n", "        start_pixel = img[evt.index[1], evt.index[0]]\n", "        pixels_in_queue.add((evt.index[1], evt.index[0]))\n", "        while len(pixels_in_queue) > 0:\n", "            pixel = pixels_in_queue.pop()\n", "            visited_pixels.add(pixel)\n", "            neighbors = []\n", "            if pixel[0] > 0:\n", "                neighbors.append((pixel[0] - 1, pixel[1]))\n", "            if pixel[0] < img.shape[0] - 1:\n", "                neighbors.append((pixel[0] + 1, pixel[1]))\n", "            if pixel[1] > 0:\n", "                neighbors.append((pixel[0], pixel[1] - 1))\n", "            if pixel[1] < img.shape[1] - 1:\n", "                neighbors.append((pixel[0], pixel[1] + 1))\n", "            for neighbor in neighbors:\n", "                if neighbor in visited_pixels:\n", "                    continue\n", "                neighbor_pixel = img[neighbor[0], neighbor[1]]\n", "                if np.abs(neighbor_pixel - start_pixel).sum() < tolerance:\n", "                    pixels_in_queue.add(neighbor)\n", "                    pixels_in_segment.add(neighbor)\n", "\n", "        out = img.copy() * 0.2\n", "        out = out.astype(np.uint8)\n", "        for pixel in pixels_in_segment:\n", "            out[pixel[0], pixel[1]] = img[pixel[0], pixel[1]]\n", "        return out\n", "\n", "    input_img.select(get_select_coords, [input_img, tolerance], output_img)\n", "\n", "if __name__ == \"__main__\":\n", "    demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
run.py ADDED
@@ -0,0 +1,45 @@
+import gradio as gr
+import numpy as np
+
+with gr.Blocks() as demo:
+    tolerance = gr.Slider(label="Tolerance", info="How different colors can be in a segment.", minimum=0, maximum=256*3, value=50)
+    with gr.Row():
+        input_img = gr.Image(label="Input")
+        output_img = gr.Image(label="Selected Segment")
+
+    def get_select_coords(img, tolerance, evt: gr.SelectData):
+        visited_pixels = set()
+        pixels_in_queue = set()
+        pixels_in_segment = set()
+        start_pixel = img[evt.index[1], evt.index[0]]
+        pixels_in_queue.add((evt.index[1], evt.index[0]))
+        while len(pixels_in_queue) > 0:
+            pixel = pixels_in_queue.pop()
+            visited_pixels.add(pixel)
+            neighbors = []
+            if pixel[0] > 0:
+                neighbors.append((pixel[0] - 1, pixel[1]))
+            if pixel[0] < img.shape[0] - 1:
+                neighbors.append((pixel[0] + 1, pixel[1]))
+            if pixel[1] > 0:
+                neighbors.append((pixel[0], pixel[1] - 1))
+            if pixel[1] < img.shape[1] - 1:
+                neighbors.append((pixel[0], pixel[1] + 1))
+            for neighbor in neighbors:
+                if neighbor in visited_pixels:
+                    continue
+                neighbor_pixel = img[neighbor[0], neighbor[1]]
+                if np.abs(neighbor_pixel - start_pixel).sum() < tolerance:
+                    pixels_in_queue.add(neighbor)
+                    pixels_in_segment.add(neighbor)
+
+        out = img.copy() * 0.2
+        out = out.astype(np.uint8)
+        for pixel in pixels_in_segment:
+            out[pixel[0], pixel[1]] = img[pixel[0], pixel[1]]
+        return out
+
+    input_img.select(get_select_coords, [input_img, tolerance], output_img)
+
+if __name__ == "__main__":
+    demo.launch()
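
Reviewer's note, not part of the commit: the added script relies on Gradio's .select() event, which passes a gr.SelectData object whose .index holds the clicked (x, y) pixel for an Image component. That is why get_select_coords reads img[evt.index[1], evt.index[0]] (row, column) before flood-filling every connected neighbour whose summed colour difference to the start pixel stays under the tolerance. A minimal, self-contained sketch of just the event wiring, using illustrative names of my own choosing:

import gradio as gr

with gr.Blocks() as sketch:
    img_in = gr.Image(label="Click a pixel")
    readout = gr.Textbox(label="Clicked pixel")

    # The handler receives its declared inputs plus a gr.SelectData argument;
    # evt.index is the clicked (x, y), while numpy indexes arrays as [row, col].
    def on_click(img, evt: gr.SelectData):
        x, y = evt.index
        return f"x={x}, y={y}, value={img[y, x]}"

    img_in.select(on_click, img_in, readout)

if __name__ == "__main__":
    sketch.launch()

One caveat worth flagging in the committed handler: gr.Image hands it a uint8 array, so neighbor_pixel - start_pixel wraps modulo 256 when the neighbour is darker than the start pixel; casting start_pixel to a signed integer array would be one way to make the colour distance symmetric, but that change is not part of this upload.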