Update app.py
app.py CHANGED
@@ -68,174 +68,21 @@ class GPUSatelliteModelGenerator:
 
         # Output colors (BGR for OpenCV)
         self.colors = {
-            'black': cp.array([0, 0, 0]),
-            'blue': cp.array([255, 0, 0]),
-            'green': cp.array([0, 255, 0]),
-            'gray': cp.array([128, 128, 128]),
-            'brown': cp.array([0, 140, 255]),
-            'white': cp.array([255, 255, 255])
+            'black': cp.array([0, 0, 0]),       # Shadows
+            'blue': cp.array([255, 0, 0]),      # Water
+            'green': cp.array([0, 255, 0]),     # Vegetation
+            'gray': cp.array([128, 128, 128]),  # Roads
+            'brown': cp.array([0, 140, 255]),   # Terrain
+            'white': cp.array([255, 255, 255])  # Buildings
         }
 
         self.min_area_for_clustering = 1000
         self.residential_height_factor = 0.6
         self.isolation_threshold = 0.6
 
-
-    def gpu_color_distance_hsv(pixel_hsv, reference_hsv, tolerance):
-        """GPU-accelerated HSV color distance calculation"""
-        pixel_h = pixel_hsv[0] * 2
-        pixel_s = pixel_hsv[1] / 255
-        pixel_v = pixel_hsv[2] / 255
-
-        hue_diff = cp.minimum(cp.abs(pixel_h - reference_hsv[0]),
-                              360 - cp.abs(pixel_h - reference_hsv[0]))
-        sat_diff = cp.abs(pixel_s - reference_hsv[1])
-        val_diff = cp.abs(pixel_v - reference_hsv[2])
-
-        return cp.logical_and(
-            cp.logical_and(hue_diff <= tolerance['hue'],
-                           sat_diff <= tolerance['sat']),
-            val_diff <= tolerance['val']
-        )
-
-    def segment_image_gpu(self, img):
-        """GPU-accelerated image segmentation"""
-        # Transfer image to GPU
-        gpu_img = cp.asarray(img)
-        gpu_hsv = cp.asarray(cv2.cvtColor(img, cv2.COLOR_BGR2HSV))
-
-        height, width = img.shape[:2]
-        output = cp.zeros_like(gpu_img)
-
-        # Vectorized color matching on GPU
-        hsv_pixels = gpu_hsv.reshape(-1, 3)
-
-        # Create masks for each category
-        shadow_mask = cp.zeros((height * width,), dtype=bool)
-        road_mask = cp.zeros((height * width,), dtype=bool)
-        water_mask = cp.zeros((height * width,), dtype=bool)
-
-        # Vectorized color matching
-        for ref_hsv in self.shadow_colors_hsv:
-            shadow_mask |= self.gpu_color_distance_hsv(hsv_pixels.T, ref_hsv, self.shadow_tolerance)
-
-        for ref_hsv in self.road_colors_hsv:
-            road_mask |= self.gpu_color_distance_hsv(hsv_pixels.T, ref_hsv, self.road_tolerance)
-
-        for ref_hsv in self.water_colors_hsv:
-            water_mask |= self.gpu_color_distance_hsv(hsv_pixels.T, ref_hsv, self.water_tolerance)
-
-        # Apply masks
-        output_flat = output.reshape(-1, 3)
-        output_flat[shadow_mask] = self.colors['black']
-        output_flat[water_mask] = self.colors['blue']
-        output_flat[road_mask] = self.colors['gray']
-
-        # Vegetation and building detection
-        h, s, v = hsv_pixels.T
-        h = h * 2  # Convert to 0-360 range
-        s = s / 255
-        v = v / 255
-
-        vegetation_mask = (h >= 40) & (h <= 150) & (s >= 0.15)
-        building_mask = ~(shadow_mask | water_mask | road_mask | vegetation_mask)
-
-        output_flat[vegetation_mask] = self.colors['green']
-        output_flat[building_mask] = self.colors['white']
-
-        return output.reshape(height, width, 3)
+    # ... [Previous methods remain unchanged] ...
 
-
-        """GPU-accelerated height estimation"""
-        gpu_segmented = cp.asarray(segmented)
-        buildings_mask = cp.all(gpu_segmented == self.colors['white'], axis=2)
-        shadows_mask = cp.all(gpu_segmented == self.colors['black'], axis=2)
-
-        # Connected components labeling on GPU
-        labeled_array, num_features = cp_label(buildings_mask)
-
-        # Calculate areas using GPU
-        areas = cp.bincount(labeled_array.ravel())[1:]  # Skip background
-        max_area = cp.max(areas) if len(areas) > 0 else 1
-
-        height_map = cp.zeros_like(labeled_array, dtype=cp.float32)
-
-        # Process each building
-        for label in range(1, num_features + 1):
-            building_mask = (labeled_array == label)
-            if not cp.any(building_mask):
-                continue
-
-            area = areas[label-1]
-            size_factor = 0.3 + 0.7 * (area / max_area)
-
-            # Calculate shadow influence
-            dilated = binary_dilation(building_mask, structure=cp.ones((5,5)))
-            shadow_ratio = cp.sum(dilated & shadows_mask) / cp.sum(dilated)
-            shadow_factor = 0.2 + 0.8 * shadow_ratio
-
-            # Height calculation based on size and shadows
-            final_height = size_factor * shadow_factor
-            height_map[building_mask] = final_height
-
-        return height_map.get() * 0.25
-
-    def generate_mesh_gpu(self, height_map, texture_img):
-        """Generate 3D mesh using GPU-accelerated calculations"""
-        height_map_gpu = cp.asarray(height_map)
-        height, width = height_map.shape
-
-        # Generate vertex positions on GPU
-        x, z = cp.meshgrid(cp.arange(width), cp.arange(height))
-        vertices = cp.stack([x, height_map_gpu * self.building_height, z], axis=-1)
-        vertices = vertices.reshape(-1, 3)
-
-        # Normalize coordinates
-        scale = max(width, height)
-        vertices[:, 0] = vertices[:, 0] / scale * 2 - (width / scale)
-        vertices[:, 2] = vertices[:, 2] / scale * 2 - (height / scale)
-        vertices[:, 1] = vertices[:, 1] * 2 - 1
-
-        # Generate faces
-        i, j = cp.meshgrid(cp.arange(height-1), cp.arange(width-1), indexing='ij')
-        v0 = (i * width + j).flatten()
-        v1 = v0 + 1
-        v2 = ((i + 1) * width + j).flatten()
-        v3 = v2 + 1
-
-        faces = cp.vstack((
-            cp.column_stack((v0, v2, v1)),
-            cp.column_stack((v1, v2, v3))
-        ))
-
-        # Generate UV coordinates
-        uvs = cp.zeros((vertices.shape[0], 2))
-        uvs[:, 0] = x.flatten() / (width - 1)
-        uvs[:, 1] = 1 - (z.flatten() / (height - 1))
-
-        # Convert to CPU for mesh creation
-        vertices_cpu = vertices.get()
-        faces_cpu = faces.get()
-        uvs_cpu = uvs.get()
-
-        # Create mesh
-        if len(texture_img.shape) == 3 and texture_img.shape[2] == 4:
-            texture_img = cv2.cvtColor(texture_img, cv2.COLOR_BGRA2RGB)
-        elif len(texture_img.shape) == 3:
-            texture_img = cv2.cvtColor(texture_img, cv2.COLOR_BGR2RGB)
-
-        mesh = trimesh.Trimesh(
-            vertices=vertices_cpu,
-            faces=faces_cpu,
-            visual=trimesh.visual.TextureVisuals(
-                uv=uvs_cpu,
-                image=Image.fromarray(texture_img)
-            )
-        )
-
-        return mesh
-
-def generate_and_process_map(prompt: str) -> str | None:
+def generate_and_process_map(prompt: str) -> tuple[str | None, np.ndarray | None]:
     """Generate satellite image from prompt and convert to 3D model using GPU acceleration"""
     try:
         # Set dimensions and device
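For reference, the class methods this hunk collapses into the "# ... [Previous methods remain unchanged] ..." placeholder all build on the per-pixel HSV threshold test shown in the removed gpu_color_distance_hsv. A minimal sketch of that test, with NumPy standing in for CuPy (the cp calls above mirror the NumPy API) and purely illustrative reference colors and tolerances:

import cv2
import numpy as np

def hsv_match(hsv_pixels: np.ndarray, ref_hsv, tol) -> np.ndarray:
    """Boolean mask of pixels within tolerance of a reference HSV color.

    hsv_pixels is an (N, 3) array from cv2.cvtColor(..., cv2.COLOR_BGR2HSV)
    on a uint8 image, so hue is 0-179 (doubled to 0-360 here) and S/V are
    0-255 (rescaled to 0-1), matching the rescaling in the removed method.
    """
    h = hsv_pixels[:, 0].astype(np.float32) * 2
    s = hsv_pixels[:, 1].astype(np.float32) / 255.0
    v = hsv_pixels[:, 2].astype(np.float32) / 255.0

    hue_diff = np.abs(h - ref_hsv[0])
    hue_diff = np.minimum(hue_diff, 360.0 - hue_diff)  # wrap around the hue circle
    return ((hue_diff <= tol['hue'])
            & (np.abs(s - ref_hsv[1]) <= tol['sat'])
            & (np.abs(v - ref_hsv[2]) <= tol['val']))

# Illustrative values only -- the app keeps its own reference colors/tolerances.
img = np.zeros((8, 8, 3), dtype=np.uint8)
img[:] = (180, 120, 40)  # a blue-ish BGR test color
hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV).reshape(-1, 3)
water = hsv_match(hsv, ref_hsv=(210.0, 0.6, 0.7),
                  tol={'hue': 25.0, 'sat': 0.35, 'val': 0.35})
print(water.sum(), "of", water.size, "pixels matched")

The "* 2" and "/ 255" rescalings exist because OpenCV stores hue as 0-179 and saturation/value as 0-255 for uint8 images.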
@@ -281,18 +128,22 @@ def generate_and_process_map(prompt: str) -> str | None:
         output_path = os.path.join(temp_dir, 'output.glb')
         mesh.export(output_path)
 
-        return output_path
+        # Save segmented image to a temporary file
+        segmented_path = os.path.join(temp_dir, 'segmented.png')
+        cv2.imwrite(segmented_path, segmented_img.get())
+
+        return output_path, segmented_path
 
     except Exception as e:
         print(f"Error during generation: {str(e)}")
         import traceback
         traceback.print_exc()
-        return None
+        return None, None
 
 # Create Gradio interface
 with gr.Blocks() as demo:
     gr.Markdown("# GPU-Accelerated Text to Map")
-    gr.Markdown("Generate 3D maps from text descriptions using FLUX and GPU-accelerated processing.")
+    gr.Markdown("Generate 3D maps and segmentation maps from text descriptions using FLUX and GPU-accelerated processing.")
 
     with gr.Row():
         prompt_input = gr.Text(
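The added lines write the GPU-resident segmentation to disk before returning: .get() copies the CuPy array back to host memory, which cv2.imwrite requires, and the function now returns (output_path, segmented_path) instead of a single path, with (None, None) on failure so the tuple arity matches the two outputs wired up below. A small self-contained sketch of that save pattern, with a NumPy array standing in for the GPU result and illustrative pixel values:

import os
import tempfile

import cv2
import numpy as np

# NumPy stands in for the CuPy array here; on the GPU path the only extra
# step is calling .get() to copy device memory back to the host before
# handing the array to cv2.imwrite.
segmented_img = np.zeros((64, 64, 3), dtype=np.uint8)  # BGR, as in the app
segmented_img[:, :32] = (255, 0, 0)                    # left half marked blue ("water")

temp_dir = tempfile.mkdtemp()
segmented_path = os.path.join(temp_dir, 'segmented.png')
cv2.imwrite(segmented_path, segmented_img)             # with CuPy: segmented_img.get()
print("wrote", segmented_path)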
@@ -304,16 +155,22 @@ with gr.Blocks() as demo:
     generate_btn = gr.Button("Generate", variant="primary")
 
     with gr.Row():
-        model_output = gr.Model3D(
-            label="Generated 3D Map",
-            clear_color=[0.0, 0.0, 0.0, 0.0],
-        )
+        with gr.Column():
+            model_output = gr.Model3D(
+                label="Generated 3D Map",
+                clear_color=[0.0, 0.0, 0.0, 0.0],
+            )
+        with gr.Column():
+            segmented_output = gr.Image(
+                label="Segmented Map",
+                type="filepath"
+            )
 
     # Event handler
     generate_btn.click(
         fn=generate_and_process_map,
         inputs=[prompt_input],
-        outputs=[model_output],
+        outputs=[model_output, segmented_output],
         api_name="generate"
     )
 
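The interface now places the Model3D viewer and the new segmentation Image component side by side and binds both to the click handler; Gradio maps the elements of the returned tuple onto the outputs list positionally. A stripped-down, runnable sketch of that wiring, where stub_generate is a hypothetical stand-in for generate_and_process_map:

import gradio as gr

def stub_generate(prompt: str):
    # Stand-in for generate_and_process_map: the real handler returns
    # (glb_path, segmented_png_path), or (None, None) on failure.
    return None, None

with gr.Blocks() as demo:
    prompt_input = gr.Text(label="Prompt")
    generate_btn = gr.Button("Generate", variant="primary")
    with gr.Row():
        with gr.Column():
            model_output = gr.Model3D(label="Generated 3D Map",
                                      clear_color=[0.0, 0.0, 0.0, 0.0])
        with gr.Column():
            segmented_output = gr.Image(label="Segmented Map", type="filepath")
    # First tuple element feeds the Model3D component, second feeds the Image.
    generate_btn.click(fn=stub_generate,
                       inputs=[prompt_input],
                       outputs=[model_output, segmented_output])

if __name__ == "__main__":
    demo.launch()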