File size: 1,195 Bytes
78c0046
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
import torch
from torch.utils.data import Dataset
import glob
import numpy as np
import os
from tqdm import tqdm



class Robo360(Dataset):
    """LLFF-style dataset reader for Robo360 captures.

    Loads camera poses and depth bounds from ``poses_bounds.npy`` in the
    dataset root, rescales the focal length to the (downsampled) training
    resolution, and converts the rotation convention from
    "down right back" to "right up back".

    Attributes set by :meth:`read_meta`:
        near_fars: (N_images, 2) near/far depth bounds per camera.
        img_wh:    (2,) int array, downsampled [width, height].
        focal:     (N_images,) focal lengths at the training resolution.
        poses:     (N_images, 3, 4) camera-to-world matrices, "right up back".
    """

    def __init__(self, datadir, downsample=4):
        """
        Args:
            datadir: dataset root directory containing ``poses_bounds.npy``.
            downsample: integer factor by which images are shrunk.
        """
        self.root_dir = datadir
        self.downsample = downsample
        self.read_meta()

    def read_meta(self):
        """Parse ``poses_bounds.npy`` into poses, bounds, focal and size."""
        meta_path = os.path.join(self.root_dir, 'poses_bounds.npy')
        bounds_arr = np.load(meta_path)  # (N_images, 17)

        raw_poses = bounds_arr[:, :15].reshape(-1, 3, 5)  # (N_images, 3, 5)
        self.near_fars = bounds_arr[:, -2:]               # (N_images, 2)

        # The last column of each 3x5 pose stores [H, W, focal].
        height, width, _ = raw_poses[0, :, -1]
        per_cam_focal = raw_poses[:, -1, -1]

        # Step 1: rescale focal length according to training resolution.
        self.img_wh = np.array(
            [int(width / self.downsample), int(height / self.downsample)]
        )
        self.focal = per_cam_focal * self.img_wh[0] / width

        # Step 2: correct poses. The original rotation is in "down right
        # back" form; swapping the first two columns and negating the new
        # second one yields "right up back" (and drops the hwf column).
        # See https://github.com/bmild/nerf/issues/34
        self.poses = np.concatenate(
            [raw_poses[..., 1:2], -raw_poses[..., :1], raw_poses[..., 2:4]],
            axis=-1,
        )

    def __len__(self):
        # Image loading is not implemented in this chunk; dataset is empty.
        return 0

    def __getitem__(self, idx):
        # Placeholder: no per-sample data is produced yet.
        return None