Commit · bd34ae9 · 0 Parent(s)

Duplicate from verkaDerkaDerk/face-image-to-face-obj

Files changed:
- .gitattributes +35 -0
- README.md +17 -0
- app.py +339 -0
- examples/blonde-00081-399357008.png +0 -0
- examples/character.png +0 -0
- examples/converted/README.md +5 -0
- examples/converted/blonde.mtl +3 -0
- examples/converted/blonde.obj +0 -0
- examples/converted/character.mtl +3 -0
- examples/converted/character.obj +0 -0
- examples/converted/converted.blend.gz +3 -0
- examples/converted/dude.mtl +3 -0
- examples/converted/dude.obj +0 -0
- examples/converted/granny.mtl +3 -0
- examples/converted/granny.obj +0 -0
- examples/converted/in-blonde.obj +0 -0
- examples/converted/in-character.obj +0 -0
- examples/converted/in-dude.obj +0 -0
- examples/converted/in-granny.obj +0 -0
- examples/converted/in-tuffie.obj +0 -0
- examples/converted/movie-gallery.mp4 +0 -0
- examples/converted/tuffie.mtl +3 -0
- examples/converted/tuffie.obj +0 -0
- examples/dude-00110-1227390728.png +0 -0
- examples/granny-00056-1867315302.png +0 -0
- examples/tuffie-00039-499759385.png +0 -0
- meshin-around.sh +37 -0
- quads.py +90 -0
- requirements.txt +25 -0
- utils.py +128 -0
.gitattributes
ADDED
@@ -0,0 +1,35 @@
+*.7z filter=lfs diff=lfs merge=lfs -text
+*.arrow filter=lfs diff=lfs merge=lfs -text
+*.bin filter=lfs diff=lfs merge=lfs -text
+*.bz2 filter=lfs diff=lfs merge=lfs -text
+*.ckpt filter=lfs diff=lfs merge=lfs -text
+*.ftz filter=lfs diff=lfs merge=lfs -text
+*.gz filter=lfs diff=lfs merge=lfs -text
+*.h5 filter=lfs diff=lfs merge=lfs -text
+*.joblib filter=lfs diff=lfs merge=lfs -text
+*.lfs.* filter=lfs diff=lfs merge=lfs -text
+*.mlmodel filter=lfs diff=lfs merge=lfs -text
+*.model filter=lfs diff=lfs merge=lfs -text
+*.msgpack filter=lfs diff=lfs merge=lfs -text
+*.npy filter=lfs diff=lfs merge=lfs -text
+*.npz filter=lfs diff=lfs merge=lfs -text
+*.onnx filter=lfs diff=lfs merge=lfs -text
+*.ot filter=lfs diff=lfs merge=lfs -text
+*.parquet filter=lfs diff=lfs merge=lfs -text
+*.pb filter=lfs diff=lfs merge=lfs -text
+*.pickle filter=lfs diff=lfs merge=lfs -text
+*.pkl filter=lfs diff=lfs merge=lfs -text
+*.pt filter=lfs diff=lfs merge=lfs -text
+*.pth filter=lfs diff=lfs merge=lfs -text
+*.rar filter=lfs diff=lfs merge=lfs -text
+*.safetensors filter=lfs diff=lfs merge=lfs -text
+saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+*.tar.* filter=lfs diff=lfs merge=lfs -text
+*.tar filter=lfs diff=lfs merge=lfs -text
+*.tflite filter=lfs diff=lfs merge=lfs -text
+*.tgz filter=lfs diff=lfs merge=lfs -text
+*.wasm filter=lfs diff=lfs merge=lfs -text
+*.xz filter=lfs diff=lfs merge=lfs -text
+*.zip filter=lfs diff=lfs merge=lfs -text
+*.zst filter=lfs diff=lfs merge=lfs -text
+*tfevents* filter=lfs diff=lfs merge=lfs -text
README.md
ADDED
@@ -0,0 +1,17 @@
+---
+title: Face Image to Face Quad Mesh
+emoji: 🐢
+colorFrom: red
+colorTo: pink
+sdk: gradio
+sdk_version: 3.35.2
+app_file: app.py
+pinned: false
+duplicated_from: verkaDerkaDerk/face-image-to-face-obj
+---
+
+Uses MediaPipe to detect a face in an image and convert it to a (mostly) quad mesh.
+Currently saves to OBJ, hopefully glb at some point with color data.
+The 3d viewer has Y pointing the opposite direction from Blender, so ya hafta spin it.
+
+Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
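For orientation, the detection step this Space builds on is MediaPipe's FaceMesh solution; app.py wraps it in a Gradio UI and then converts the landmarks to an OBJ. Below is a minimal, self-contained sketch of just that detection step, not the Space's full pipeline; `face.png` is a placeholder input path and the 0.5 confidence simply mirrors the app's slider default.

```python
# Minimal sketch: run MediaPipe FaceMesh on a single image and count its landmarks.
# Assumes mediapipe and opencv-python are installed; 'face.png' is a placeholder path.
import cv2
import mediapipe as mp

image = cv2.imread('face.png')  # BGR image from disk

with mp.solutions.face_mesh.FaceMesh(
        static_image_mode=True,
        max_num_faces=1,
        min_detection_confidence=0.5) as face_mesh:
    results = face_mesh.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))

if results.multi_face_landmarks:
    landmarks = results.multi_face_landmarks[0].landmark
    print(f'found a face with {len(landmarks)} landmarks')  # FaceMesh emits 468 landmarks
else:
    print('no face detected')
```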
app.py
ADDED
@@ -0,0 +1,339 @@
+########################################################################################
+import gradio as gr
+
+import cv2
+import matplotlib
+import matplotlib.cm
+import mediapipe as mp
+import numpy as np
+import os
+import struct
+import tempfile
+import torch
+
+from mediapipe.framework.formats import landmark_pb2
+from mediapipe.python.solutions.drawing_utils import _normalized_to_pixel_coordinates
+from PIL import Image
+from quads import QUADS
+from typing import List, Mapping, Optional, Tuple, Union
+from utils import colorize, get_most_recent_subdirectory
+
+class face_image_to_face_mesh:
+    def __init__(self):
+        self.zoe_me = True
+        self.uvwrap = not True
+
+    def demo(self):
+        if self.zoe_me:
+            DEVICE = 'cuda' if torch.cuda.is_available() else 'cpu'
+            self.zoe = torch.hub.load('isl-org/ZoeDepth', "ZoeD_N", pretrained=True).to(DEVICE).eval()
+
+        demo = gr.Blocks(css=self.css(), cache_examples=True)
+        with demo:
+            gr.Markdown(self.header())
+
+            with gr.Row():
+                with gr.Column():
+                    upload_image = gr.Image(label="Input image", type="numpy", source="upload")
+                    self.temp_dir = get_most_recent_subdirectory( upload_image.DEFAULT_TEMP_DIR )
+                    print( f'The temp_dir is {self.temp_dir}' )
+
+                    gr.Examples( examples=[
+                        'examples/blonde-00081-399357008.png',
+                        'examples/dude-00110-1227390728.png',
+                        'examples/granny-00056-1867315302.png',
+                        'examples/tuffie-00039-499759385.png',
+                        'examples/character.png',
+                    ], inputs=[upload_image] )
+                    upload_image_btn = gr.Button(value="Detect faces")
+                    if self.zoe_me:
+                        with gr.Group():
+                            zoe_scale = gr.Slider(label="Mix the ZoeDepth with the MediaPipe Depth", value=1, minimum=0, maximum=1, step=.01)
+                            flat_scale = gr.Slider(label="Depth scale, smaller is flatter and possibly more flattering", value=1, minimum=0, maximum=1, step=.01)
+                            min_detection_confidence = gr.Slider(label="Minimum face detection confidence", value=.5, minimum=0, maximum=1.0, step=0.01)
+                    else:
+                        use_zoe = False
+                        zoe_scale = 0
+                    with gr.Group():
+                        gr.Markdown(self.footer())
+
+                with gr.Column():
+                    with gr.Group():
+                        output_mesh = gr.Model3D(clear_color=3*[0], label="3D Model",elem_id='mesh-display-output')
+                        output_image = gr.Image(label="Output image",elem_id='img-display-output')
+                        depth_image = gr.Image(label="Depth image",elem_id='img-display-output')
+                        num_faces_detected = gr.Number(label="Number of faces detected", value=0)
+
+            upload_image_btn.click(
+                fn=self.detect,
+                inputs=[upload_image, min_detection_confidence,zoe_scale,flat_scale],
+                outputs=[output_mesh, output_image, depth_image, num_faces_detected]
+            )
+        demo.launch()
+
+
+    def detect(self, image, min_detection_confidence, zoe_scale, flat_scale):
+        width = image.shape[1]
+        height = image.shape[0]
+        ratio = width / height
+
+        mp_drawing = mp.solutions.drawing_utils
+        mp_drawing_styles = mp.solutions.drawing_styles
+        mp_face_mesh = mp.solutions.face_mesh
+
+        mesh = "examples/converted/in-granny.obj"
+
+        if self.zoe_me and 0 < zoe_scale:
+            depth = self.zoe.infer_pil(image)
+            idepth = colorize(depth, cmap='gray_r')
+        else:
+            depth = None
+            idepth = image
+
+        drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1)
+        with mp_face_mesh.FaceMesh(
+            static_image_mode=True,
+            max_num_faces=1,
+            min_detection_confidence=min_detection_confidence) as face_mesh:
+            results = face_mesh.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
+            if not results.multi_face_landmarks:
+                return mesh, image, idepth, 0
+
+            annotated_image = image.copy()
+            for face_landmarks in results.multi_face_landmarks:
+                (mesh,mtl,png) = self.toObj(image=image, width=width, height=height, ratio=ratio, landmark_list=face_landmarks, depth=depth, zoe_scale=zoe_scale, flat_scale=flat_scale)
+
+                mp_drawing.draw_landmarks(
+                    image=annotated_image,
+                    landmark_list=face_landmarks,
+                    connections=mp_face_mesh.FACEMESH_TESSELATION,
+                    landmark_drawing_spec=None,
+                    connection_drawing_spec=mp_drawing_styles
+                    .get_default_face_mesh_tesselation_style())
+                mp_drawing.draw_landmarks(
+                    image=annotated_image,
+                    landmark_list=face_landmarks,
+                    connections=mp_face_mesh.FACEMESH_CONTOURS,
+                    landmark_drawing_spec=None,
+                    connection_drawing_spec=mp_drawing_styles
+                    .get_default_face_mesh_contours_style())
+
+            return mesh, annotated_image, idepth, 1
+
+    def toObj( self, image: np.ndarray, width:int, height:int, ratio: float, landmark_list: landmark_pb2.NormalizedLandmarkList, depth: np.ndarray, zoe_scale: float, flat_scale: float):
+        print( f'you have such pretty hair', self.temp_dir )
+
+        hf_hack = True
+        if hf_hack:
+            obj_file = tempfile.NamedTemporaryFile(suffix='.obj', delete=False)
+            mtl_file = tempfile.NamedTemporaryFile(suffix='.mtl', delete=False)
+            png_file = tempfile.NamedTemporaryFile(suffix='.png', delete=False)
+        else:
+            obj_file = tempfile.NamedTemporaryFile(suffix='.obj', dir=self.temp_dir, delete=False)
+            mtl_file = tempfile.NamedTemporaryFile(suffix='.mtl', dir=self.temp_dir, delete=False)
+            png_file = tempfile.NamedTemporaryFile(suffix='.png', dir=self.temp_dir, delete=False)
+
+        ############################################
+        (points,coordinates,colors) = self.landmarksToPoints( image, width, height, ratio, landmark_list, depth, zoe_scale, flat_scale )
+        ############################################
+
+        lines = []
+
+        lines.append( f'o MyMesh' )
+
+        if hf_hack:
+            # the 'file=' is a gradio hack
+            lines.append( f'#mtllib file={mtl_file.name}' )
+        else:
+            # the 'file=' is a gradio hack
+            lines.append( f'mtllib file={mtl_file.name}' )
+
+        for index, point in enumerate(points):
+            color = colors[index]
+            scaled_color = [value / 255 for value in color] # Scale colors down to 0-1 range
+            flipped = [-value for value in point]
+            flipped[ 0 ] = -flipped[ 0 ]
+            lines.append( "v " + " ".join(map(str, flipped + color)) )
+
+        for coordinate in coordinates:
+            lines.append( "vt " + " ".join([str(value) for value in coordinate]) )
+
+        for quad in QUADS:
+            #quad = list(reversed(quad))
+            normal = self.totallyNormal( points[ quad[ 0 ] -1 ], points[ quad[ 1 ] -1 ], points[ quad[ 2 ] -1 ] )
+            lines.append( "vn " + " ".join([str(value) for value in normal]) )
+
+        lines.append( 'usemtl MyMaterial' )
+
+        quadIndex = 0
+        for quad in QUADS:
+            quadIndex = 1 + quadIndex
+            face_uv = "f " + " ".join([f'{vertex}/{vertex}/{quadIndex}' for vertex in quad])
+            face_un = "f " + " ".join([str(vertex) for vertex in quad])
+            if self.uvwrap:
+                lines.append( face_uv )
+            else:
+                lines.append( f'#{face_uv}' )
+                lines.append( f'{face_un}' )
+            #"f " + " ".join([str(vertex) for vertex in quad]) )
+
+        out = open( obj_file.name, 'w' )
+        out.write( '\n'.join( lines ) + '\n' )
+        out.close()
+
+        ############################################
+
+        lines = []
+        lines.append( 'newmtl MyMaterial' )
+        lines.append( f'map_Kd file={png_file.name}' ) # the 'file=' is a gradio hack
+
+        out = open( mtl_file.name, 'w' )
+        out.write( '\n'.join( lines ) + '\n' )
+        out.close()
+
+        ############################################
+
+        cv2.imwrite(png_file.name, cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
+
+        ############################################
+
+        print( f'I know it is special to you so I saved it to {obj_file.name} since we are friends' )
+        return (obj_file.name,mtl_file.name,png_file.name)
+
+    def landmarksToPoints( self, image:np.ndarray, width: int, height: int, ratio: float, landmark_list: landmark_pb2.NormalizedLandmarkList, depth: np.ndarray, zoe_scale: float, flat_scale: float ):
+        points = [] # 3d vertices
+        coordinates = [] # 2d texture coordinates
+        colors = [] # 3d rgb info
+
+        mins = [+np.inf] * 3
+        maxs = [-np.inf] * 3
+
+        mp_scale = 1 - zoe_scale
+        print( f'zoe_scale:{zoe_scale}, mp_scale:{mp_scale}' )
+
+        for idx, landmark in enumerate(landmark_list.landmark):
+            x, y = _normalized_to_pixel_coordinates(landmark.x,landmark.y,width,height)
+            color = image[y,x]
+            colors.append( [value / 255 for value in color ] )
+            coordinates.append( [x/width,1-y/height] )
+
+            if depth is not None:
+                landmark.z = depth[y, x] * zoe_scale + mp_scale * landmark.z
+
+            landmark.z = landmark.z * flat_scale
+
+            point = [landmark.x * ratio, landmark.y, landmark.z]
+            for pidx,value in enumerate( point ):
+                mins[pidx] = min(mins[pidx],value)
+                maxs[pidx] = max(maxs[pidx],value)
+            points.append( point )
+
+        mids = [(min_val + max_val) / 2 for min_val, max_val in zip(mins, maxs)]
+        for idx,point in enumerate( points ):
+            points[idx] = [(val-mid) for val, mid in zip(point,mids)]
+
+        print( f'mins: {mins}' )
+        print( f'mids: {mids}' )
+        print( f'maxs: {maxs}' )
+        return (points,coordinates,colors)
+
+
+    def totallyNormal(self, p0, p1, p2):
+        v1 = np.array(p1) - np.array(p0)
+        v2 = np.array(p2) - np.array(p0)
+        normal = np.cross(v1, v2)
+        normal = normal / np.linalg.norm(normal)
+        return normal.tolist()
+
+
+    def header(self):
+        return ("""
+# Image to Quad Mesh
+
+Uses MediaPipe to detect a face in an image and convert it to a quad mesh.
+Saves to OBJ since gltf does not support quad faces. The 3d viewer has Y pointing the opposite direction from Blender, so ya hafta spin it.
+
+The face depth with Zoe can be a bit much and without it is a bit generic. In blender you can fix this just by snapping to the high poly model. For photos turning it down to .4 helps, but may still need cleanup...
+
+Highly recommend running it locally. The 3D model has uv values in the faces, but you will have to either use the script or do some manual tomfoolery.
+
+Quick import result is in examples/converted/movie-gallery.mp4 under files
+""")
+
+
+    def footer(self):
+        return ( """
+# Using the Textured Mesh in Blender
+
+There are a couple of annoying steps atm after you download the obj from the 3d viewer.
+
+You can use the script meshin-around.sh in the files section to do the conversion or manually:
+
+1. edit the file and change the mtllib line to use fun.mtl
+2. replace / delete all lines that start with 'f', eg :%s,^f.*,,
+3. uncomment all the lines that start with '#f', eg: :%s,^#f,f,
+4. save and exit
+5. create fun.mtl to point to the texture like:
+
+```
+newmtl MyMaterial
+map_Kd fun.png
+```
+
+Make sure the obj, mtl and png are all in the same directory
+
+Now the import will have the texture data: File -> Import -> Wavefront (obj) -> fun.obj
+
+This is all a workaround for a weird hf+gradio+babylonjs bug which seems to be related to the version
+of babylonjs being used... It works fine in a local babylonjs sandbox...
+
+# Suggested Workflows
+
+Here are some workflow ideas.
+
+## retopologize high poly face mesh
+
+1. sculpt high poly mesh in blender
+2. snapshot the face
+3. generate the mesh using the mediapipe stuff
+4. import the low poly mediapipe face
+5. snap the mesh to the high poly model
+6. model the rest of the low poly model
+7. bake the normal / etc maps to the low poly face model
+8. it's just that easy 😛
+
+Ideally it would be a plugin...
+
+## stable diffusion integration
+
+1. generate a face in sd
+2. generate the mesh
+3. repose it and use it for further generation
+
+May need to expand the generated mesh to cover more, maybe with
+<a href="https://github.com/shunsukesaito/PIFu" target="_blank">PIFu model</a>.
+
+""")
+
+
+    def css(self):
+        return ("""
+#mesh-display-output {
+    max-height: 44vh;
+    max-width: 44vh;
+    width:auto;
+    height:auto
+}
+#img-display-output {
+    max-height: 28vh;
+    max-width: 28vh;
+    width:auto;
+    height:auto
+}
+""")
+
+
+face_image_to_face_mesh().demo()
+
+# EOF
+########################################################################################
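Since toObj() writes a plain-text OBJ whose quad faces appear twice (uv-indexed faces commented out as '#f ...' plus plain 'f ...' lines), a quick way to sanity-check a downloaded mesh is to tally the record types. This is a small illustrative sketch, not part of the Space; 'download.obj' is a placeholder for whatever the 3D viewer saved.

```python
# Sketch: tally the record types that toObj() emits into the downloaded OBJ.
# 'download.obj' is a placeholder name for the file saved from the 3D viewer.
from collections import Counter

counts = Counter()
with open('download.obj') as obj:
    for line in obj:
        token = line.split(maxsplit=1)[0] if line.strip() else ''
        counts[token] += 1

# Typically 468 'v' records (one per FaceMesh landmark, with RGB appended),
# 468 'vt', one 'vn' and one plain 'f' per quad, and the '#f' uv-indexed faces.
print(counts['v'], counts['vt'], counts['vn'], counts['f'], counts['#f'])
```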
examples/blonde-00081-399357008.png
ADDED
examples/character.png
ADDED
examples/converted/README.md
ADDED
@@ -0,0 +1,5 @@
+
+1. downloaded all the obj files
+2. for i in in-*obj ; do o=$( echo ${i} | cut -f2- -d- ) ; ../../meshin-around.sh ${i} ${o} ; done
+3. for i in ../*png ; do o=$(basename ${i} | sed 's,-[^.]*\.,.,' ) ; cp -i ${i} ${o} ; done
+
examples/converted/blonde.mtl
ADDED
@@ -0,0 +1,3 @@
+newmtl MyMaterial
+illum 2
+map_Kd blonde.png
examples/converted/blonde.obj
ADDED
The diff for this file is too large to render.
examples/converted/character.mtl
ADDED
@@ -0,0 +1,3 @@
+newmtl MyMaterial
+illum 2
+map_Kd character.png
examples/converted/character.obj
ADDED
The diff for this file is too large to render.
examples/converted/converted.blend.gz
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8886653880134a39d43a39bd5b7a2b2d7554fed15a749d98962477272dc5cef9
+size 310421
examples/converted/dude.mtl
ADDED
@@ -0,0 +1,3 @@
+newmtl MyMaterial
+illum 2
+map_Kd dude.png
examples/converted/dude.obj
ADDED
The diff for this file is too large to render.
examples/converted/granny.mtl
ADDED
@@ -0,0 +1,3 @@
+newmtl MyMaterial
+illum 2
+map_Kd granny.png
examples/converted/granny.obj
ADDED
The diff for this file is too large to render.
examples/converted/in-blonde.obj
ADDED
The diff for this file is too large to render.
examples/converted/in-character.obj
ADDED
The diff for this file is too large to render.
examples/converted/in-dude.obj
ADDED
The diff for this file is too large to render.
examples/converted/in-granny.obj
ADDED
The diff for this file is too large to render.
examples/converted/in-tuffie.obj
ADDED
The diff for this file is too large to render.
examples/converted/movie-gallery.mp4
ADDED
Binary file (340 kB).
examples/converted/tuffie.mtl
ADDED
@@ -0,0 +1,3 @@
+newmtl MyMaterial
+illum 2
+map_Kd tuffie.png
examples/converted/tuffie.obj
ADDED
The diff for this file is too large to render.
examples/dude-00110-1227390728.png
ADDED
examples/granny-00056-1867315302.png
ADDED
examples/tuffie-00039-499759385.png
ADDED
meshin-around.sh
ADDED
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+_meshin_around_main() {
+    local mesh="${1}" ; shift
+    local mash="${1-fun.obj}" ; shift
+
+    if [ "" = "${mesh}" ] ; then
+        echo "usage: meshin-around.sh <download.obj>"
+        return 1
+    fi
+
+    local name=$( basename ${mash} | sed 's,\.obj$,,' )
+    local mtl="${name}.mtl"
+    local png="${name}.png"
+
+    if [ -f ${mash} ] ; then
+        echo "${mash} already exists"
+    else
+        echo "creating ${mash} for ${mesh}"
+        sed "s,^f.*,,;s,#f,f,;s,.*mtllib.*,mtllib ${mtl}," ${mesh} > ${mash} || exit ${?}
+    fi
+
+    if [ -f "${mtl}" ] ; then
+        echo "${mtl} already exists"
+    else
+        echo "creating ${mtl} for ${mash}"
+        echo -e "newmtl MyMaterial\nmap_Kd ${png}" > ${mtl} || exit ${?}
+    fi
+
+    if [ -f "${png}" ] ; then
+        echo "${png} looks good"
+    else
+        echo "be sure your texture is in pwd and named ${png} or edit ${mtl}"
+    fi
+}
+
+_meshin_around_main ${*}
quads.py
ADDED
@@ -0,0 +1,90 @@
+# This was created by importing a MediaPipe tesselated mesh and manually converting it in blender
+
+QUADS = [
+[300, 334, 333, 298] , [ 1, 12, 303, 268] , [234, 233, 122, 129] , [270, 304, 305, 271] , [246, 129, 115, 189] ,
+[112, 118, 229, 32] , [104, 55, 69, 105] , [228, 35, 128, 235] , [120, 102, 101, 121] , [ 74, 73, 38, 40] ,
+[ 71, 47, 54, 64] , [135, 132, 116, 221] , [335, 294, 299, 334] , [ 73, 12, 1, 38] , [ 42, 43, 81, 82] ,
+[166, 93, 41, 40] , [122, 233, 232, 121] , [215, 213, 217, 208] , [183, 84, 85, 182] , [376, 308, 321, 322] ,
+[ 30, 161, 160, 28] , [ 57, 29, 159, 158] , [ 84, 202, 201, 19] , [117, 144, 35, 228] , [204, 207, 93, 166] ,
+[139, 216, 59, 173] , [276, 282, 6, 5] , [ 25, 145, 164, 111] , [292, 307, 308, 376] , [143, 127, 48, 101] ,
+[419, 422, 429, 263] , [147, 44, 107, 92] , [ 17, 86, 85, 18] , [ 78, 77, 62, 147] , [127, 210, 199, 218] ,
+[397, 378, 401, 370] , [166, 40, 38, 168] , [245, 234, 129, 246] , [ 31, 248, 247, 162] , [ 34, 247, 248, 131] ,
+[175, 218, 199, 237] , [418, 352, 413, 466] , [125, 114, 226, 47] , [225, 224, 53, 54] , [ 99, 65, 103, 130] ,
+[193, 215, 208, 188] , [219, 80, 240, 238] , [134, 156, 113, 244] , [345, 361, 364, 441] , [141, 171, 150, 177] ,
+[400, 413, 352, 420] , [119, 230, 229, 118] , [282, 276, 441, 364] , [ 71, 64, 69, 72] , [315, 314, 407, 406] ,
+[222, 190, 194, 56] , [114, 248, 31, 226] , [106, 53, 66, 67] , [236, 60, 167, 220] , [108, 56, 9, 10] ,
+[ 67, 66, 56, 108] , [ 69, 64, 106, 105] , [120, 119, 51, 102] , [242, 126, 45, 238] , [ 6, 196, 4, 52] ,
+[143, 130, 210, 127] , [ 34, 131, 26, 8] , [323, 271, 410, 411] , [ 33, 195, 205, 212] , [ 37, 102, 51, 206] ,
+[195, 202, 84, 183] , [238, 240, 239, 242] , [ 26, 111, 164, 8] , [225, 54, 47, 226] , [154, 146, 24, 23] ,
+[211, 203, 213, 215] , [246, 194, 190, 245] , [425, 336, 407, 419] , [318, 317, 404, 403] , [ 33, 212, 171, 141] ,
+[ 12, 73, 39, 13] , [208, 217, 207, 206] , [238, 221, 116, 219] , [ 46, 221, 238, 45] , [184, 43, 75, 185] ,
+[209, 202, 195, 33] , [269, 272, 304, 303] , [214, 148, 178, 216] , [235, 94, 138, 228] , [ 67, 108, 109, 70] ,
+[ 7, 352, 418, 169] , [193, 188, 148, 214] , [ 97, 63, 77, 78] , [125, 47, 71, 157] , [317, 16, 17, 316] ,
+[180, 87, 88, 179] , [106, 64, 54, 53] , [119, 118, 124, 51] , [146, 145, 25, 24] , [325, 319, 320, 326] ,
+[123, 189, 175, 197] , [293, 309, 325, 326] , [150, 171, 170, 151] , [178, 138, 94, 133] , [328, 295, 456, 461] ,
+[361, 421, 457, 364] , [336, 274, 376, 322] , [396, 395, 431, 432] , [ 13, 39, 83, 14] , [278, 330, 350, 351] ,
+[191, 57, 158, 174] , [117, 112, 36, 144] , [224, 223, 66, 53] , [140, 72, 22, 163] , [163, 128, 35, 140] ,
+[366, 365, 395, 380] , [219, 116, 49, 220] , [430, 359, 372, 356] , [157, 144, 36, 125] , [377, 353, 281, 412] ,
+[125, 36, 227, 114] , [355, 20, 95, 371] , [120, 231, 230, 119] , [249, 457, 400, 420] , [162, 161, 30, 31] ,
+[ 46, 45, 2, 5] , [141, 172, 209, 33] , [394, 392, 328, 327] , [ 32, 26, 131, 227] , [300, 298, 339, 338] ,
+[395, 396, 379, 380] , [102, 37, 143, 101] , [217, 213, 58, 187] , [327, 3, 165, 394] , [242, 239, 21, 243] ,
+[186, 41, 93, 187] , [269, 303, 12, 13] , [192, 81, 43, 184] , [140, 35, 144, 157] , [223, 222, 56, 66] ,
+[189, 115, 218, 175] , [323, 427, 424, 392] , [ 37, 204, 130, 143] , [280, 430, 421, 361] , [ 2, 275, 276, 5] ,
+[134, 244, 191, 174] , [241, 76, 60, 236] , [108, 10, 152, 109] , [ 27, 155, 154, 23] , [211, 215, 136, 170] ,
+[355, 275, 2, 20] , [ 90, 89, 96, 97] , [321, 320, 404, 405] , [316, 315, 406, 405] , [107, 44, 203, 205] ,
+[201, 422, 314, 19] , [153, 176, 172, 149] , [376, 274, 288, 292] , [292, 288, 411, 410] , [130, 204, 166, 99] ,
+[115, 48, 127, 218] , [327, 328, 461, 329] , [105, 106, 67, 70] , [236, 65, 99, 241] , [200, 201, 202, 209] ,
+[332, 295, 328, 359] , [100, 61, 76, 241] , [243, 142, 126, 242] , [329, 463, 371, 327] , [220, 167, 80, 219] ,
+[233, 27, 23, 232] , [190, 222, 57, 191] , [223, 29, 57, 222] , [244, 113, 234, 245] , [ 32, 229, 111, 26] ,
+[226, 31, 30, 225] , [232, 23, 24, 231] , [225, 30, 28, 224] , [114, 227, 131, 248] , [ 32, 227, 36, 112] ,
+[234, 113, 27, 233] , [230, 25, 111, 229] , [224, 28, 29, 223] , [ 95, 20, 126, 142] , [239, 240, 80, 21] ,
+[243, 21, 61, 100] , [157, 71, 72, 140] , [ 76, 61, 167, 60] , [189, 123, 194, 246] , [231, 24, 25, 230] ,
+[232, 231, 120, 121] , [121, 101, 48, 122] , [208, 206, 51, 188] , [332, 280, 279, 295] , [196, 249, 420, 198] ,
+[199, 210, 50, 132] , [177, 149, 172, 141] , [117, 124, 118, 112] , [ 28, 160, 159, 29] , [245, 190, 191, 244] ,
+[379, 396, 370, 401] , [268, 303, 304, 270] , [351, 453, 454, 358] , [ 75, 74, 40, 41] , [169, 418, 286, 9] ,
+[283, 444, 445, 284] , [397, 176, 153, 378] , [110, 68, 70, 109] , [301, 277, 354, 384] , [186, 62, 77, 185] ,
+[299, 294, 301, 302] , [ 50, 49, 116, 132] , [422, 201, 200, 429] , [304, 272, 273, 305] , [271, 323, 392, 270] ,
+[296, 443, 444, 283] , [427, 437, 428, 426] , [336, 322, 406, 407] , [ 19, 314, 315, 18] , [387, 388, 260, 258] ,
+[255, 374, 375, 254] , [314, 422, 419, 407] , [297, 335, 334, 300] , [313, 312, 272, 269] , [ 55, 22, 72, 69] ,
+[221, 46, 52, 135] , [391, 374, 255, 340] , [315, 316, 17, 18] , [372, 267, 331, 330] , [423, 274, 336, 425] ,
+[ 58, 44, 147, 62] , [ 91, 78, 147, 92] , [182, 85, 86, 181] , [423, 425, 432, 431] , [357, 265, 448, 455] ,
+[268, 270, 392, 394] , [358, 454, 465, 466] , [264, 360, 468, 467] , [264, 250, 256, 360] , [421, 430, 356, 438] ,
+[194, 123, 7, 169] , [449, 450, 348, 347] , [277, 284, 445, 446] , [241, 99, 98, 100] , [281, 331, 267, 426] ,
+[307, 292, 410, 409] , [260, 388, 389, 261] , [364, 457, 249, 282] , [338, 339, 11, 152] , [438, 344, 413, 400] ,
+[349, 451, 452, 350] , [345, 279, 280, 361] , [402, 377, 434, 436] , [367, 324, 455, 448] , [182, 92, 107, 183] ,
+[418, 414, 442, 286] , [360, 256, 262, 447] , [284, 277, 301, 294] , [291, 251, 463, 329] , [344, 358, 466, 413] ,
+[179, 89, 90, 180] , [266, 341, 346, 373] , [429, 397, 370, 263] , [296, 283, 335, 297] , [275, 355, 462, 458] ,
+[ 4, 237, 135, 52] , [359, 424, 267, 372] , [386, 387, 258, 259] , [394, 165, 1, 268] , [207, 217, 187, 93] ,
+[278, 356, 372, 330] , [ 44, 58, 213, 203] , [459, 460, 458, 462] , [381, 382, 257, 253] , [266, 447, 262, 341] ,
+[399, 385, 287, 415] , [437, 433, 435, 428] , [447, 266, 354, 343] , [183, 107, 205, 195] , [ 43, 42, 74, 75] ,
+[302, 301, 384, 369] , [425, 419, 263, 432] , [295, 279, 440, 456] , [ 49, 50, 103, 65] , [ 74, 42, 39, 73] ,
+[433, 423, 431, 435] , [311, 273, 272, 312] , [353, 367, 448, 346] , [252, 302, 369, 390] , [209, 172, 176, 200] ,
+[ 56, 194, 169, 9] , [377, 412, 417, 434] , [ 90, 97, 78, 91] , [330, 331, 349, 350] , [180, 90, 91, 181] ,
+[281, 348, 349, 331] , [265, 373, 346, 448] , [324, 367, 402, 362] , [308, 326, 320, 321] , [ 16, 15, 88, 87] ,
+[266, 373, 384, 354] , [353, 347, 348, 281] , [363, 399, 415, 464] , [318, 15, 16, 317] , [356, 278, 344, 438] ,
+[ 96, 79, 63, 97] , [ 11, 110, 109, 152] , [398, 368, 365, 366] , [ 2, 45, 126, 20] , [313, 269, 13, 14] ,
+[237, 199, 132, 135] , [187, 58, 62, 186] , [152, 10, 337, 338] , [ 42, 82, 83, 39] , [414, 418, 466, 465] ,
+[467, 468, 261, 389] , [ 9, 286, 337, 10] , [446, 343, 354, 277] , [265, 357, 390, 369] , [436, 434, 417, 368] ,
+[170, 136, 137, 151] , [458, 441, 276, 275] , [212, 205, 203, 211] , [347, 353, 346, 341] , [284, 294, 335, 283] ,
+[452, 453, 351, 350] , [ 95, 3, 327, 371] , [450, 451, 349, 348] , [197, 4, 196, 198] , [254, 375, 381, 253] ,
+[345, 441, 458, 439] , [367, 353, 377, 402] , [449, 347, 341, 262] , [360, 447, 343, 468] , [136, 139, 173, 137] ,
+[289, 436, 368, 398] , [281, 426, 428, 412] , [288, 433, 437, 411] , [ 99, 166, 168, 98] , [142, 243, 100, 98] ,
+[175, 237, 4, 197] , [185, 75, 41, 186] , [307, 293, 326, 308] , [396, 432, 263, 370] , [286, 442, 443, 296] ,
+[428, 435, 417, 412] , [411, 437, 427, 323] , [421, 438, 400, 457] , [165, 3, 98, 168] , [279, 345, 439, 440] ,
+[391, 340, 256, 250] , [306, 291, 329, 461] , [373, 265, 369, 384] , [386, 259, 287, 385] , [435, 365, 368, 417] ,
+[251, 459, 462, 463] , [320, 319, 403, 404] , [ 17, 16, 87, 86] , [322, 321, 405, 406] , [ 85, 84, 19, 18] ,
+[433, 288, 274, 423] , [362, 402, 436, 289] , [185, 77, 63, 184] , [293, 307, 409, 408] , [392, 424, 359, 328] ,
+[352, 7, 198, 420] , [228, 138, 124, 117] , [393, 290, 456, 440] , [176, 397, 429, 200] , [220, 49, 65, 236] ,
+[424, 427, 426, 267] , [332, 359, 430, 280] , [365, 435, 431, 395] , [310, 251, 291, 393] , [355, 371, 463, 462] ,
+[ 98, 3, 95, 142] , [255, 254, 451, 450] , [415, 414, 465, 464] , [254, 253, 452, 451] , [261, 468, 343, 446] ,
+[260, 261, 446, 445] , [258, 260, 445, 444] , [454, 342, 464, 465] , [198, 7, 123, 197] , [259, 258, 444, 443] ,
+[287, 442, 414, 415] , [340, 449, 262, 256] , [340, 255, 450, 449] , [257, 342, 454, 453] , [ 61, 21, 80, 167] ,
+[310, 393, 440, 439] , [338, 337, 297, 300] , [310, 460, 459, 251] , [ 51, 124, 148, 188] , [253, 257, 453, 452] ,
+[215, 193, 139, 136] , [351, 358, 344, 278] , [113, 156, 155, 27] , [ 6, 52, 46, 5] , [206, 207, 204, 37] ,
+[249, 196, 6, 282] , [216, 178, 133, 59] , [286, 296, 297, 337] , [382, 383, 342, 257] , [287, 259, 443, 442] ,
+[211, 170, 171, 212] , [306, 461, 456, 290] , [104, 105, 70, 68] , [271, 305, 409, 410] , [460, 310, 439, 458] ,
+[214, 216, 139, 193] , [317, 316, 405, 404] , [181, 91, 92, 182] , [ 1, 165, 168, 38] , [363, 464, 342, 383] ,
+[210, 130, 103, 50] , [305, 273, 408, 409] , [311, 416, 408, 273] , [309, 293, 408, 416] , [184, 63, 79, 192] ,
+[115, 129, 122, 48] , [148, 124, 138, 178] , [181, 86, 87, 180] , [290, 393, 291, 306] , [252, 285, 299, 302] ,
+[285, 333, 334, 299]
+]
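The README hopes for glb export eventually; glTF, like most runtime formats, only accepts triangles, so these quads would first need triangulating. Below is a minimal sketch of that conversion, splitting each quad along one diagonal; it assumes quads.py from this repo is importable and is not part of the Space itself.

```python
# Sketch: split the 1-based QUADS faces into triangles for triangle-only formats.
# Indices are kept 1-based, matching the OBJ output in app.py.
from quads import QUADS

def quads_to_triangles(quads):
    triangles = []
    for a, b, c, d in quads:
        triangles.append([a, b, c])  # first half of the quad
        triangles.append([a, c, d])  # second half, sharing the a-c diagonal
    return triangles

TRIANGLES = quads_to_triangles(QUADS)
print(f'{len(QUADS)} quads -> {len(TRIANGLES)} triangles')
```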
requirements.txt
ADDED
@@ -0,0 +1,25 @@
+########################################################################################
+# :-D
+
+############################################
+# mediapipe up on this!
+
+opencv-python>=4.7.0.72
+mediapipe>=0.10.1
+
+#
+############################################
+
+############################################
+# zoe depth requirements
+
+#--extra-index-url https://download.pytorch.org/whl/cu113
+torch
+torchvision>=0.11.2
+timm==0.6.11
+
+#
+############################################
+
+# EOF
+########################################################################################
utils.py
ADDED
@@ -0,0 +1,128 @@
+# from https://huggingface.co/spaces/shariqfarooq/ZoeDepth/raw/main/utils.py
+
+# MIT License
+
+# Copyright (c) 2022 Intelligent Systems Lab Org
+
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+# File author: Shariq Farooq Bhat
+
+import matplotlib
+import matplotlib.cm
+import numpy as np
+import torch
+
+def colorize(value, vmin=None, vmax=None, cmap='magma_r', invalid_val=-99, invalid_mask=None, background_color=(128, 128, 128, 255), gamma_corrected=False, value_transform=None):
+    """Converts a depth map to a color image.
+
+    Args:
+        value (torch.Tensor, numpy.ndarray): Input depth map. Shape: (H, W) or (1, H, W) or (1, 1, H, W). All singular dimensions are squeezed
+        vmin (float, optional): vmin-valued entries are mapped to start color of cmap. If None, value.min() is used. Defaults to None.
+        vmax (float, optional): vmax-valued entries are mapped to end color of cmap. If None, value.max() is used. Defaults to None.
+        cmap (str, optional): matplotlib colormap to use. Defaults to 'magma_r'.
+        invalid_val (int, optional): Specifies value of invalid pixels that should be colored as 'background_color'. Defaults to -99.
+        invalid_mask (numpy.ndarray, optional): Boolean mask for invalid regions. Defaults to None.
+        background_color (tuple[int], optional): 4-tuple RGB color to give to invalid pixels. Defaults to (128, 128, 128, 255).
+        gamma_corrected (bool, optional): Apply gamma correction to colored image. Defaults to False.
+        value_transform (Callable, optional): Apply transform function to valid pixels before coloring. Defaults to None.
+
+    Returns:
+        numpy.ndarray, dtype - uint8: Colored depth map. Shape: (H, W, 4)
+    """
+    if isinstance(value, torch.Tensor):
+        value = value.detach().cpu().numpy()
+
+    value = value.squeeze()
+    if invalid_mask is None:
+        invalid_mask = value == invalid_val
+    mask = np.logical_not(invalid_mask)
+
+    # normalize
+    vmin = np.percentile(value[mask],2) if vmin is None else vmin
+    vmax = np.percentile(value[mask],85) if vmax is None else vmax
+    if vmin != vmax:
+        value = (value - vmin) / (vmax - vmin)  # vmin..vmax
+    else:
+        # Avoid 0-division
+        value = value * 0.
+
+    # squeeze last dim if it exists
+    # grey out the invalid values
+
+    value[invalid_mask] = np.nan
+    cmapper = matplotlib.cm.get_cmap(cmap)
+    if value_transform:
+        value = value_transform(value)
+        # value = value / value.max()
+    value = cmapper(value, bytes=True)  # (nxmx4)
+
+    # img = value[:, :, :]
+    img = value[...]
+    img[invalid_mask] = background_color
+
+    # return img.transpose((2, 0, 1))
+    if gamma_corrected:
+        # gamma correction
+        img = img / 255
+        img = np.power(img, 2.2)
+        img = img * 255
+        img = img.astype(np.uint8)
+    return img
+
+
+import os
+
+# bard...
+def find_most_recently_created_directory(temp_dir):
+    """Finds the most recently created directory in a directory.
+
+    Args:
+        temp_dir: The directory to search.
+
+    Returns:
+        The path to the most recently created directory.
+    """
+
+    directories = os.listdir(temp_dir)
+    most_recently_created_directory = None
+    for directory in directories:
+        path = os.path.join(temp_dir, directory)
+        st = os.stat(path)
+        if most_recently_created_directory is None or st.st_mtime > os.stat(most_recently_created_directory).st_mtime:
+            most_recently_created_directory = path
+
+    if most_recently_created_directory is None:
+        most_recently_created_directory = temp_dir
+
+    return most_recently_created_directory
+
+
+#chatgpt
+def get_most_recent_subdirectory(path):
+    if not os.path.isdir(path):
+        return path
+
+    subdirectories = [f for f in os.listdir(path) if os.path.isdir(os.path.join(path, f))]
+    if not subdirectories:
+        return path
+
+    most_recent_subdirectory = max(subdirectories, key=lambda d: os.path.getctime(os.path.join(path, d)))
+    return os.path.join(path, most_recent_subdirectory)
+
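app.py calls colorize(depth, cmap='gray_r') to turn the ZoeDepth prediction into the depth preview image. Here is a short usage sketch of that helper in isolation; the random array below stands in for a real depth map.

```python
# Sketch: render a depth array the way app.py builds its depth preview.
# A random array stands in for a real ZoeDepth prediction.
import numpy as np
from PIL import Image
from utils import colorize

depth = np.random.uniform(0.5, 3.0, size=(256, 256)).astype(np.float32)  # fake depth values
colored = colorize(depth, cmap='gray_r')  # uint8 RGBA array, shape (256, 256, 4)
Image.fromarray(colored).save('depth-preview.png')
```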