|
| 1 | +import os |
| 2 | +import sys |
| 3 | +import argparse |
| 4 | +import multiprocessing |
| 5 | +import cv2 |
| 6 | +import numpy as np |
| 7 | +import tqdm |
| 8 | +from functools import partial |
| 9 | + |
| 10 | + |
def merge_depths(data, nframes):
    """Merge the rendered depth map with the original sensor depth map for one frame.

    Reads ``<frame>.rendered_depth.png`` and ``<frame>.depth.pgm`` (both
    16-bit, raw values in millimeters — assumed from the /1000 scaling, TODO
    confirm), merges them in meters, and writes the result to
    ``<frame>.merged_depth.png`` scaled by 5000 units per meter.

    Args:
        data: ``[frame_index, frame_path_prefix]`` pair as built in ``main``.
        nframes: total number of frames; unused here, but kept because
            ``main`` binds it via ``functools.partial``.

    Raises:
        FileNotFoundError: if either depth image cannot be read.
    """
    num, frame = data  # num (global frame index) is currently unused

    # Load both depth maps as raw 16-bit values.
    depth_rendered = cv2.imread('{}.rendered_depth.png'.format(frame), cv2.IMREAD_ANYDEPTH)
    depth_original = cv2.imread('{}.depth.pgm'.format(frame), cv2.IMREAD_ANYDEPTH)

    # cv2.imread returns None on failure; fail loudly here rather than with
    # an opaque AttributeError inside np.where below.
    if depth_rendered is None:
        raise FileNotFoundError('could not read {}.rendered_depth.png'.format(frame))
    if depth_original is None:
        raise FileNotFoundError('could not read {}.depth.pgm'.format(frame))

    # The faulty rendering assigns 1 (meter, i.e. raw value 1000 mm —
    # presumably, given the /1000 scale below) to missing depth values;
    # mark those pixels as invalid (0) before merging.
    depth_rendered = np.where(depth_rendered == 1000, 0, depth_rendered)

    # millimeters -> meters
    depth_rendered = depth_rendered.astype(np.float32) / 1000.0
    depth_original = depth_original.astype(np.float32) / 1000.0

    depth_merged = _merge_depth_arrays(depth_rendered, depth_original)

    # Save result as 16-bit PNG, 5000 units per meter (TUM-style depth
    # encoding — TODO confirm against downstream consumers).
    cv2.imwrite('{}.merged_depth.png'.format(frame), (depth_merged * 5000.0).astype(np.uint16))


def _merge_depth_arrays(depth_rendered, depth_original):
    """Combine rendered and original depth arrays (meters).

    Prefer the original measurement wherever the two disagree by more than
    0.1 m; otherwise keep the rendered value. Wherever that choice yields 0
    (invalid), fall back to the rendered value.
    """
    depth_merged = np.where(np.abs(depth_original - depth_rendered) > 0.1,
                            depth_original,
                            depth_rendered)
    return np.where(depth_merged == 0, depth_rendered, depth_merged)
| 45 | + |
def main():
    """Enumerate all frames in every sequence under --data_dir and merge
    their depth maps in parallel."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--data_dir', help='Path to directory containing sequences to preprocess', required=True)
    # type=int is required: argparse yields strings by default, and
    # multiprocessing.Pool(processes=...) rejects a string count.
    parser.add_argument('--nproc', type=int, default=8, help='Number of processes to spawn')
    args = parser.parse_args()

    # Generate all frame names: one [global_index, path_prefix] entry per frame.
    print('Generating frame list')
    all_frames = list()
    sequences = sorted(next(os.walk(args.data_dir))[1])
    nframes = 0
    for seq in sequences:
        infofile = os.path.join(args.data_dir, seq, '_info.txt')
        # The last whitespace-separated token of _info.txt's last line is
        # taken as the sequence's frame count (assumed format — TODO confirm).
        with open(infofile, 'r') as f:  # close the handle instead of leaking it
            seqlen = int(f.readlines()[-1].split(' ')[-1])
        all_frames += [[nframes + i, '{}/{}/frame-{:06d}'.format(args.data_dir, seq, i)]
                       for i in range(seqlen)]
        nframes += seqlen

    nframes = len(all_frames)

    print('Number of sequences: ', len(sequences))
    print('Number of frames: ', nframes)

    # Fan the per-frame work out over a process pool; tqdm only drives the
    # iterator to display progress, results are discarded.
    pool = multiprocessing.Pool(processes=args.nproc)
    do_work = partial(merge_depths, nframes=nframes)
    for _ in tqdm.tqdm(pool.imap_unordered(do_work, all_frames), total=len(all_frames)):
        pass

    pool.close()
    pool.join()
    print('Done')
| 76 | + |
| 77 | + |
| 78 | +if __name__ == "__main__": |
| 79 | + main() |
0 commit comments