dataset_gen.py
import os
import sys
from pathlib import Path

import bpy
import numpy as np
import yaml
from tqdm import tqdm

# Make the repository root and this file's directory importable so the
# local modules below resolve when the script runs inside Blender.
file = Path(__file__).resolve()
parent = file.parents[1]
sys.path.append(str(parent))
sys.path.append(str(file.parents[0]))

from params import DAGParams
from paramgen import DAGParamGenerator
from paramload import DAGParamLoader
from render import DAGRenderer


class DAGDatasetGenerator():
    '''
    Generates a dataset of (parameter YAML, rendered image) pairs under
    dataset_root_path/<dataset_name>/.
    '''
    def __init__(self,
                 dataset_name: str,
                 dataset_root_path: str = "./datasets",
                 mkdir: bool = True) -> None:
        self.dataset_name = dataset_name
        self.dataset_root_path = dataset_root_path
        self.dataset_path = os.path.join(self.dataset_root_path, self.dataset_name)
        self.dataset_images_folder = os.path.join(self.dataset_path, "images")
        self.dataset_params_folder = os.path.join(self.dataset_path, "params")
        if mkdir:
            self._mkdirs()
        self.param_generator = DAGParamGenerator()
        self.param_loader = DAGParamLoader()
        self.param_renderer = DAGRenderer()

    def _mkdirs(self):
        os.makedirs(self.dataset_path, exist_ok=True)
        os.makedirs(self.dataset_images_folder, exist_ok=True)
        os.makedirs(self.dataset_params_folder, exist_ok=True)
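
    # Resulting on-disk layout (shown for a hypothetical dataset
    # "DAGDataset10_10_5"; only the folder and file names come from the code):
    #
    #   datasets/DAGDataset10_10_5/
    #       images/     one PNG per sample, rendered by DAGRenderer
    #       params/     one YAML per sample, saved by DAGParams
    #       meta.yml    written once by write_metadata() below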

    def populate_dataset_wrt_batches(self, num_batches: int, batch_size: int = 10,
                                     num_varying_params: int = 5, render: bool = True,
                                     distortion: bool = True):
        '''
        Generates num_batches batches of batch_size samples each. Sample j of
        batch i is saved as params/batch{i}_sample{j}.yml and, if render is
        True, rendered to images/batch{i}_sample{j}.png.
        '''
        batch_cam_angles = {}
        for i in tqdm(range(num_batches)):
            batch = self.param_generator.generate_batch_params(
                num_varying_params=num_varying_params, count=batch_size
            )
            # choose one camera angle shared by every sample in this batch
            cam_angle = int(np.random.choice(self.param_generator.cam_angles))
            batch_cam_angles[f"batch{i}"] = cam_angle
            self.param_renderer.update_lr_angle(cam_angle)
            for j, param in enumerate(batch):
                sample_id = f"batch{i}_sample{j}"
                sample_params_path = os.path.join(self.dataset_params_folder, f"{sample_id}.yml")
                sample_param = DAGParams()
                sample_param.set_params(param)
                sample_param.save_params(sample_params_path)
                if render:
                    # load shape params into Blender
                    self.param_loader.load_dag_params_with_return_part(sample_param, 0)
                    # render image
                    sample_image_path = os.path.join(self.dataset_images_folder, f"{sample_id}.png")
                    if distortion:
                        self.param_renderer.render(sample_image_path, distortion=True)
                    else:
                        self.param_renderer.render(sample_image_path)
        # write meta only after the whole run so that all entries are present
        self.write_metadata(batch_cam_angles)

    def write_metadata(self, batch_cam_angles):
        '''
        Writes meta.yml, combining the original ranges, decoders, switches,
        and the per-batch camera angles.
        '''
        ranges = self.param_generator.save_ranges()
        decoders = self.param_generator.save_decoders()
        switches = self.param_generator.save_switches()
        meta = {
            "dataset": self.dataset_name,
            "ranges": ranges,
            "decoders": decoders,
            "switches": switches,
            "batch_cam_angles": batch_cam_angles,
        }
        meta_path = os.path.join(self.dataset_path, "meta.yml")
        with open(meta_path, "w") as f:
            yaml.dump(meta, f)
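
    # A sketch of the resulting meta.yml; the top-level keys come from the
    # dict above, while the values shown here are purely illustrative:
    #
    #   dataset: DAGDataset10_10_5
    #   ranges: {...}       # from DAGParamGenerator.save_ranges()
    #   decoders: {...}     # from DAGParamGenerator.save_decoders()
    #   switches: {...}     # from DAGParamGenerator.save_switches()
    #   batch_cam_angles:
    #     batch0: 30        # hypothetical angle drawn from cam_angles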

    # NOTE: the two writers below predate write_metadata(), which now bundles
    # the same information into meta.yml; they are kept as standalone helpers.
    def write_decoders(self):
        '''
        Output one extra file along with ranges.yml.
        decoders.yml:
            decoder_name: [param_names]
        '''
        decoders = self.param_generator.save_decoders()
        decoders_path = os.path.join(self.dataset_path, "decoders.yml")
        with open(decoders_path, "w") as f:
            yaml.dump(decoders, f)

    def write_switches(self):
        switches = self.param_generator.save_switches()
        switches_path = os.path.join(self.dataset_path, "switches.yml")
        with open(switches_path, "w") as f:
            yaml.dump(switches, f)

    def write_batch_cam_angles(self, batch_cam_angles):
        batch_cam_angles_path = os.path.join(self.dataset_path, "batch_cam_angles.yml")
        with open(batch_cam_angles_path, "w") as f:
            yaml.dump(batch_cam_angles, f)

    def use_device(self, device: int):
        '''
        -1: CPU
        0 (or another integer index): the GPU with that index
        '''
        self.param_renderer.use_device(device)
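

# Minimal programmatic usage sketch. This assumes the script runs inside
# Blender's Python, where bpy and the scene that DAGRenderer expects are
# available; the dataset name here is just an example:
#
#   generator = DAGDatasetGenerator("DAGDatasetDemo_2_4_3")
#   generator.use_device(-1)  # CPU
#   generator.populate_dataset_wrt_batches(2, 4, 3, render=True, distortion=False)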

if __name__ == "__main__":
    # User arguments start at sys.argv[5] when invoked as
    #   blender -b -P dataset_gen.py -- <args...>
    # The "--" separator matters: without it Blender tries to open the trailing
    # arguments as files, which is the likely cause of the previously observed
    # "Error: Cannot read file '/mnt/c/ZSY/BuildingDAG/1': No such file or directory".
    args = sys.argv[5:]
    if len(args) == 5:
        num_batches = int(args[0])
        batch_size = int(args[1])
        num_varying_params = int(args[2])
        device = int(args[3])
        # cast through int first: bool("0") on a raw string would be True
        distortion = bool(int(args[4]))
        prefix = "DAGDataset" if not distortion else "DAGDatasetDistorted"
        cwd = os.getcwd()
        print(f"Current working directory: {cwd}")
        generator = DAGDatasetGenerator(
            f"{prefix}{num_batches}_{batch_size}_{num_varying_params}",
            dataset_root_path=os.path.join(cwd, "datasets"),
        )
        generator.use_device(device)
        print(f"Using {num_batches} batches, {batch_size} samples per batch, {num_varying_params} varying params")
        print(f"Using device: {device}")
        print(f"Using distortion: {distortion}")
        generator.param_renderer.check_devices()
        generator.populate_dataset_wrt_batches(num_batches, batch_size, num_varying_params, distortion=distortion)
    elif len(args) == 0:
        print("No args received, using default values and CPU")
        generator = DAGDatasetGenerator("DAGDataset_10_10_5")
        generator.use_device(-1)
        generator.param_renderer.check_devices()
        generator.populate_dataset_wrt_batches(10, 10, 5)
    else:
        print("Usage: blender -b -P dataset_gen.py -- <num_batches> <batch_size> <num_varying_params> <device> <distortion>")
        sys.exit(1)