import os
from functools import partial
from typing import Callable, List, Optional

import numpy as np
import torch
from scipy.interpolate import UnivariateSpline
from scipy.spatial import KDTree


def smooth_3d_array(points, num=None, **kwargs):
    """Resample a 3D polyline by fitting a smoothing spline to each axis.

    Extra keyword arguments (e.g. the smoothing factor ``s``) are passed
    through to scipy's UnivariateSpline.
    """
    x, y, z = points[:, 0], points[:, 1], points[:, 2]
    if num is None:
        num = len(x)
    # Fit one spline per coordinate, parameterized by point index.
    w = np.arange(0, len(x), 1)
    sx = UnivariateSpline(w, x, **kwargs)
    sy = UnivariateSpline(w, y, **kwargs)
    sz = UnivariateSpline(w, z, **kwargs)
    # Evaluate on a uniform grid over the fitted parameter range
    # (up to len(x) - 1, so the splines are never extrapolated).
    wnew = np.linspace(0, len(x) - 1, num)
    smoothed = np.zeros((num, 3))
    smoothed[:, 0] = sx(wnew)
    smoothed[:, 1] = sy(wnew)
    smoothed[:, 2] = sz(wnew)
    return smoothed
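
# Example (hypothetical data): resample a noisy 50-point polyline to 500
# smoothed points; ``s`` is UnivariateSpline's smoothing factor.
#
#   noisy = np.cumsum(np.random.randn(50, 3), axis=0)
#   smooth = smooth_3d_array(noisy, num=500, s=10.0)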


def calculate_tnb_frame(curve, epsilon=1e-8):
    """Compute a Frenet-like tangent/normal/binormal frame along a 3D curve.

    Straight segments, where the normal is undefined, are filled in by
    interpolating the normals of the neighbouring curved segments.
    """
    curve = np.asarray(curve)

    # Tangent: normalized first derivative of the curve.
    T = np.gradient(curve, axis=0)
    T_norms = np.linalg.norm(T, axis=1)
    # Clamp the norms to avoid division by zero on degenerate samples.
    T = T / np.maximum(T_norms, epsilon)[:, np.newaxis]

    is_straight = T_norms < epsilon

    # Normal: derivative of the tangent, with the tangential part projected out.
    dT = np.gradient(T, axis=0)
    N = dT - np.sum(dT * T, axis=1)[:, np.newaxis] * T
    N_norms = np.linalg.norm(N, axis=1)

    # The normal is undefined wherever the curve is locally straight.
    undefined_N = (N_norms < epsilon) | is_straight

    if np.all(undefined_N):
        # Entirely straight curve: pick an arbitrary vector orthogonal to T.
        N = np.zeros_like(T)
        N[:, 0] = T[:, 1]
        N[:, 1] = -T[:, 0]
        # (T[1], -T[0], 0) degenerates when T is parallel to the z-axis;
        # fall back to the x-axis there.
        degenerate = np.linalg.norm(N, axis=1) < epsilon
        N[degenerate] = np.array([1.0, 0.0, 0.0])
        N = N / np.linalg.norm(N, axis=1)[:, np.newaxis]
    elif np.any(undefined_N):
        # Split the curve into runs of defined/undefined normals and fill
        # each undefined run from its curved neighbours.
        segment_changes = np.where(np.diff(undefined_N))[0] + 1
        segments = np.split(np.arange(len(curve)), segment_changes)

        for segment in segments:
            if undefined_N[segment[0]]:
                left_curved = np.where(~undefined_N[: segment[0]])[0]
                right_curved = (
                    np.where(~undefined_N[segment[-1] + 1 :])[0] + segment[-1] + 1
                )

                if len(left_curved) > 0 and len(right_curved) > 0:
                    # Straight run between two curved runs: blend linearly.
                    left_N = N[left_curved[-1]]
                    right_N = N[right_curved[0]]
                    t = np.linspace(0, 1, len(segment))
                    N[segment] = (1 - t[:, np.newaxis]) * left_N + t[
                        :, np.newaxis
                    ] * right_N
                elif len(left_curved) > 0:
                    # Straight tail: extend the last defined normal.
                    N[segment] = N[left_curved[-1]]
                elif len(right_curved) > 0:
                    # Straight head: extend the first defined normal.
                    N[segment] = N[right_curved[0]]
                else:
                    # No curved neighbours at all (unreachable in practice,
                    # since np.all(undefined_N) is handled above).
                    N[segment] = np.array([T[segment[0]][1], -T[segment[0]][0], 0])

                # Re-orthogonalize the filled normals against T and normalize.
                N[segment] = (
                    N[segment]
                    - np.sum(N[segment] * T[segment], axis=1)[:, np.newaxis]
                    * T[segment]
                )
                N[segment] = (
                    N[segment] / np.linalg.norm(N[segment], axis=1)[:, np.newaxis]
                )
    # Otherwise the normal is well-defined everywhere; nothing to fill in.

    # Binormal completes the right-handed frame.
    B = np.cross(T, N)

    # Final Gram-Schmidt pass to keep the frame orthonormal.
    N = N - np.sum(N * T, axis=1)[:, np.newaxis] * T
    N = N / np.linalg.norm(N, axis=1)[:, np.newaxis]

    B = B - np.sum(B * T, axis=1)[:, np.newaxis] * T
    B = B - np.sum(B * N, axis=1)[:, np.newaxis] * N
    B = B / np.linalg.norm(B, axis=1)[:, np.newaxis]

    return T, N, B
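
# Example (hypothetical data): on a helix the returned frame should stay
# mutually orthonormal at every sample:
#
#   s = np.linspace(0, 4 * np.pi, 200)
#   helix = np.column_stack((np.cos(s), np.sin(s), s / (2 * np.pi)))
#   T, N, B = calculate_tnb_frame(helix)
#   assert np.allclose(np.einsum("ij,ij->i", T, N), 0, atol=1e-6)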


def get_closest(pc_a, pc_b):
    """
    For each point in pc_a, find the closest point in pc_b.

    Parameters
    ----------
    pc_a : [Mx3]
    pc_b : [Nx3]

    Returns
    -------
    dist : [M] distance to the closest point in pc_b for each point in pc_a
    idx : [M] index of that closest point in pc_b
    """
    tree = KDTree(pc_b)
    dist, idx = tree.query(pc_a, workers=-1)

    # Sanity check: a query without a distance bound always returns valid indices.
    if np.max(idx) >= pc_b.shape[0]:
        raise ValueError("idx is out of range")

    return dist, idx
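
# Example (hypothetical arrays):
#
#   dist, idx = get_closest(np.random.rand(100, 3), np.random.rand(500, 3))
#   # dist[i] is the distance from pc_a[i] to its nearest neighbour pc_b[idx[i]].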


def straighten_using_frenet(helix, points):
    """
    Straighten the structure based on the helix (skeleton) using the Frenet frame.

    Args:
    - helix (numpy array): Points forming the helix (skeleton).
    - points (numpy array): Points surrounding the helix.

    Returns:
    - straightened_helix (numpy array): Straightened version of the helix.
    - straightened_points (numpy array): Transformed surrounding points.
    """
    T, N, B = calculate_tnb_frame(helix)

    # Arc length along the skeleton; the straightened skeleton lies on the
    # z-axis with z equal to cumulative arc length.
    deltas = np.diff(helix, axis=0)
    distances = np.linalg.norm(deltas, axis=1)
    cumulative_distances = np.insert(np.cumsum(distances), 0, 0)

    straightened_helix = np.column_stack(
        (
            np.zeros_like(cumulative_distances),
            np.zeros_like(cumulative_distances),
            cumulative_distances,
        )
    )

    # Express each surrounding point in spherical coordinates relative to the
    # local (T, N, B) frame at its closest skeleton point.
    distances_to_helix, closest_idxs = get_closest(points, helix)
    vectors = points - helix[closest_idxs]
    r = distances_to_helix
    T_closest = T[closest_idxs]
    N_closest = N[closest_idxs]
    B_closest = B[closest_idxs]
    theta = np.arctan2(
        np.einsum("ij,ij->i", vectors, N_closest),
        np.einsum("ij,ij->i", vectors, B_closest),
    )
    # Guard against points coinciding with a skeleton sample (r == 0) and
    # against floating-point values just outside arccos's domain.
    cos_phi = np.einsum("ij,ij->i", vectors, T_closest) / np.maximum(r, 1e-12)
    phi = np.arccos(np.clip(cos_phi, -1.0, 1.0))
    x = r * np.sin(phi) * np.cos(theta)
    y = r * np.sin(phi) * np.sin(theta)
    z = cumulative_distances[closest_idxs] + r * np.cos(phi)
    straightened_points = np.column_stack((x, y, z))

    return straightened_helix, straightened_points
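
# Example (hypothetical data): straightening a helix against itself maps the
# skeleton onto the z-axis, with z increasing by arc length, while nearby
# points keep their radial offset:
#
#   s = np.linspace(0, 4 * np.pi, 500)
#   helix = np.column_stack((np.cos(s), np.sin(s), s))
#   straight, moved = straighten_using_frenet(helix, helix + 0.1)
#   assert np.allclose(straight[:, :2], 0)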


def frenet_transformation(pc, skel, lb):
    # Upsample the skeleton 100x and smooth it heavily before straightening;
    # the labels (lb) pass through unchanged.
    skel_smooth = smooth_3d_array(skel, num=skel.shape[0] * 100, s=200000)
    skel_trans, pc_trans = straighten_using_frenet(skel_smooth, pc)
    return pc_trans, skel_trans, lb


def transformation(trunk_id, pc, trunk_pc, label, frenet: bool):
    """
    Optionally apply the Frenet transformation, then normalize the point
    cloud to the unit sphere.

    Parameters
    ----------
    trunk_id : int
    pc
    trunk_pc
    label
    frenet : whether to apply the Frenet transformation
    """
    unmodified_pc = pc.copy()
    if frenet:
        pc, trunk_pc, label = frenet_transformation(pc, trunk_pc, label)

    # Center the cloud, then scale so the farthest point lies on the unit sphere.
    pc = pc - np.mean(pc, axis=0)
    m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
    pc = pc / m

    label = label.astype(int)

    return trunk_id, pc, label, unmodified_pc
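
# Example (hypothetical arrays): without the Frenet step, the output cloud is
# centered and fits inside the unit sphere:
#
#   _, pc_n, lbl, _ = transformation(
#       0, np.random.rand(1024, 3) * 50, None, np.zeros(1024), frenet=False
#   )
#   assert np.sqrt((pc_n**2).sum(axis=1)).max() <= 1.0 + 1e-6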


class CachedDataset:
    """Dataset over pre-generated ``{i}.npz`` caches, filtered by fold."""

    def __init__(
        self,
        output_path: str,
        num_points: int,
        folds: List[List[int]],
        fold: int,
        is_train: bool,
        transform: Optional[Callable] = None,
    ):
        self.num_points = num_points
        self.transform = transform
        self.spanning_paths = np.load(
            os.path.join(output_path, "spanning_paths.npz"), allow_pickle=True
        )["spanning_paths"].item()

        if fold == -1:
            print("Loading all folds, ignoring is_train")
            trunk_ids = self.spanning_paths.keys()
        else:
            if is_train:
                # Training uses every fold except the held-out one.
                trunk_ids = [
                    item
                    for idx, sublist in enumerate(folds)
                    if idx != fold
                    for item in sublist
                ]
            else:
                trunk_ids = folds[fold]
        self.trunk_ids = sorted(trunk_ids)

        # Cache files are numbered globally over all trunks and paths, so the
        # counter advances even for trunks outside the selected folds.
        files = []
        i = 0
        for trunk_id in sorted(self.spanning_paths.keys()):
            for _ in self.spanning_paths[trunk_id]:
                if trunk_id in self.trunk_ids:
                    files.append(os.path.join(output_path, f"{i}.npz"))
                    assert os.path.exists(files[-1])
                i += 1
        self.files = files

    def __len__(self):
        return len(self.files)

    def __getitem__(self, idx):
        data = np.load(self.files[idx])
        trunk_id, pc, trunk_pc, label = (
            data["trunk_id"],
            data["pc"],
            data["trunk_pc"],
            data["label"],
        )
        assert trunk_id in self.trunk_ids

        # Randomly subsample num_points points, keeping labels aligned.
        random_permutation = np.random.permutation(pc.shape[0])
        pc = pc[random_permutation[: self.num_points]]
        label = label[random_permutation[: self.num_points]]

        if self.transform is None:
            return trunk_id, pc, trunk_pc, label
        else:
            return self.transform(trunk_id, pc, trunk_pc, label)
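
# Example (hypothetical cache directory): evaluation split of fold 0, raw
# samples with no transform; ``seg_den_folds`` here stands for the 5-fold
# split defined in get_dataloader below:
#
#   ds = CachedDataset("seg_den_1000000_10000", num_points=1024,
#                      folds=seg_den_folds, fold=0, is_train=False)
#   trunk_id, pc, trunk_pc, label = ds[0]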


def get_dataloader(
    species: str,
    num_points: int,
    fold: int,
    is_train: bool,
    batch_size: int,
    num_workers: int,
    frenet: bool,
    distributed: bool = False,
    collate_fn: Optional[Callable] = None,
    path_length=10000,
):
    """
    Returns a FreSeg dataloader for the given species and fold.

    Parameters
    ----------
    species: one of ["seg_den", "mouse", "human"]
    num_points: number of points to sample from the point cloud
    fold: -1 to fetch all folds, 0-4 for seg_den
    is_train: bool
    batch_size
    num_workers
    frenet: whether to apply the Frenet transformation
    distributed: bool
    collate_fn
    path_length: skeleton path length used when the cache was generated;
        it selects the cache directory, so it must match an existing cache
    """
    assert species in ["seg_den", "mouse", "human"]
    seg_den_folds = [
        [3, 5, 11, 12, 23, 28, 29, 32, 39, 42],
        [8, 15, 19, 27, 30, 34, 35, 36, 46, 49],
        [9, 14, 16, 17, 21, 26, 31, 33, 43, 44],
        [2, 6, 7, 13, 18, 24, 25, 38, 41, 50],
        [1, 4, 10, 20, 22, 37, 40, 45, 47, 48],
    ]

    if species != "seg_den":
        assert (
            fold == -1
        ), "Fold must be -1 for mouse and human datasets, since there are no splits"

    dataset = CachedDataset(
        f"{species}_1000000_{path_length}",
        num_points=num_points,
        folds=seg_den_folds if species == "seg_den" else [],
        fold=fold,
        is_train=is_train,
        transform=partial(transformation, frenet=frenet),
    )

    dataloader = torch.utils.data.DataLoader(
        dataset,
        batch_size=batch_size,
        # The DistributedSampler handles shuffling in distributed training.
        shuffle=is_train and not distributed,
        num_workers=num_workers,
        pin_memory=True,
        drop_last=is_train,
        sampler=torch.utils.data.DistributedSampler(dataset) if distributed else None,
        collate_fn=collate_fn,
    )

    return dataloader, dataset.files
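
# Example (hypothetical cache): a training loader for fold 0 of seg_den with
# the Frenet transformation enabled:
#
#   loader, files = get_dataloader("seg_den", num_points=2048, fold=0,
#                                  is_train=True, batch_size=16,
#                                  num_workers=4, frenet=True)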


if __name__ == "__main__":
    human_loader, _ = get_dataloader(
        species="human",
        num_points=1024,
        fold=-1,
        is_train=True,
        batch_size=32,
        num_workers=8,
        frenet=False,
    )
    for i, data in enumerate(human_loader):
        trunk_id, pc, label, original_pc = data
        """
        trunk_id: array of trunk ids of length batch_size
        pc: point cloud in isotropic coordinates, modified using transformation(), shape [batch, num_points, 3]
        label: corresponding value of seg volume at that point, shape [batch, num_points]
            will be 0 if part of trunk, unique spine segment id otherwise
        original_pc: point cloud in isotropic coordinates, unmodified, shape [batch, num_points, 3]
        """
        pass