
Commit 04fed4b

committed
added random object generation augmentation
1 parent 78f58ca commit 04fed4b


10 files changed (+298 -49 lines)


.gitignore

Lines changed: 3 additions & 0 deletions
@@ -138,6 +138,9 @@ multirun/
 wandb/
 *.out
 *.sl
+*.pcd
+*.ply
+
 speed-tree-outputs/
 smart_tree/conf/tree-split-test.json
 smart_tree/conf/apple-trellis-split-test.json

smart_tree/data_types/cloud.py

Lines changed: 9 additions & 4 deletions
@@ -75,12 +75,17 @@ def max_xyz(self):
     def min_xyz(self):
         return torch.min(self.xyz, 0)[0]
 
+    @property
+    def centre(self):
+        return self.min_xyz + (self.max_xyz - self.min_xyz) / 2
+
+    @property
+    def dimensions(self):
+        return self.max_xyz - self.min_xyz
+
     @property
     def bbox(self):
-        # defined by centre coordinate, x/2, y/2, z/2
-        dimensions = (self.max_xyz - self.min_xyz) / 2
-        centre = self.min_xyz + dimensions
-        return centre, dimensions
+        return self.centre, self.dimensions
 
     @staticmethod
     def from_numpy(xyz, rgb, device=torch.device("cpu")):

smart_tree/dataset/augmentations.py

Lines changed: 80 additions & 26 deletions
@@ -1,17 +1,25 @@
+import os
 import random
 
 import numpy as np
 import torch
 import random
 import open3d as o3d
 
+from tqdm import tqdm
 from pathlib import Path
 from abc import ABC, abstractmethod
 from typing import List
 
 from smart_tree.data_types.cloud import Cloud, LabelledCloud
 from smart_tree.util.math.maths import euler_angles_to_rotation
-from smart_tree.util.file import load_o3d_mesh
+from smart_tree.util.file import (
+    load_o3d_mesh,
+    save_o3d_mesh,
+    save_o3d_cloud,
+    load_o3d_cloud,
+)
+from smart_tree.util.mesh.geometries import o3d_cloud, o3d_sphere
 from hydra.utils import call, get_original_cwd, instantiate, to_absolute_path
 
 
@@ -81,45 +89,81 @@ class RandomMesh(Augmentation):
     def __init__(
         self,
         mesh_directory: Path,
+        preprocessed_path: Path,
         voxel_size: float,
         number_meshes: int,
         min_size: float,
         max_size: float,
+        max_pts: int,
     ):
-        self.mesh_paths = list(mesh_directory.glob("*"))
+        """We want to preprocess the meshes by getting them to the right scale,
+        which is done by first converting them from mm to metres, we then
+        scale them up to the max_size and then do a point sample, based on target voxel_size
+        (ensure we have enough point density at the max size), then revert
+        the scale back normal scale in metres. During inference we randomly scale based on the
+        min_size and max_size and then translate the points and merge with the input cloud
+        """
+
         self.voxel_size = voxel_size
         self.number_meshes = number_meshes
         self.min_size = min_size
         self.max_size = max_size
-        self.class_number = 3
+        self.class_number = 2
+        self.preprocessed_path = preprocessed_path
+
+        if not (os.path.exists(preprocessed_path)):
+            os.makedirs(preprocessed_path)
+
+        for mesh_path in tqdm(
+            Path(mesh_directory).glob("*.stl"),
+            desc="Preprocessing Meshes",
+            leave=False,
+        ):
+            if os.path.isfile(f"{preprocessed_path}/{mesh_path.stem}.pcd"):
+                continue
+            try:
+                mesh = load_o3d_mesh(str(mesh_path))
+                pcd = (
+                    mesh.scale(0.001, mesh.get_center())
+                    .translate(-mesh.get_center())
+                    .paint_uniform_color(np.random.rand(3))
+                    .scale(max_size, mesh.get_center())
+                    .sample_points_uniformly(
+                        min(
+                            max(int(mesh.get_surface_area() / (voxel_size**2)), 10),
+                            max_pts,
+                        )
+                    )
+                    .scale(1 / max_size, mesh.get_center())
+                )
+                save_o3d_cloud(f"{preprocessed_path}/{mesh_path.stem}.pcd", pcd)
+            except:
+                print(f"Cloud Generation Failed on {mesh_path}")
 
     def __call__(self, cloud):
+        centre, dimensions = cloud.bbox
+
         for i in range(self.number_meshes):
-            mesh = load_o3d_mesh(
-                str(self.mesh_paths[random.randint(0, len(self.mesh_paths))])
+            random_pcd_path = random.choice(list(self.preprocessed_path.glob("*.pcd")))
+            pcd = load_o3d_cloud(str(random_pcd_path))
+            scaled_pcd = pcd.scale(
+                random.uniform(self.min_size, self.max_size), pcd.get_center()
             )
 
-            mesh = mesh.scale(0.01, center=mesh.get_center())
-
-            mesh = mesh.translate(-mesh.get_center())
-
-            mesh_pts = mesh.sample_points_uniformly(
-                int(1000 * mesh.get_surface_area() / self.voxel_size),
-            ).paint_uniform_color(np.random.rand(3))
-
             lc = LabelledCloud.from_o3d_cld(
-                mesh_pts,
-                class_l=torch.ones(np.asarray(mesh_pts.points).shape[0])
-                * self.class_number,
+                pcd,
+                class_l=torch.ones(np.asarray(pcd.points).shape[0]) * self.class_number,
            )
 
-            cloud += lc
+            lc = lc.rotate(
+                euler_angles_to_rotation(torch.rand(3) * torch.pi * 2).to(
+                    cloud.xyz.device
+                )
+            )
+            lc = lc.translate(cloud.min_xyz - lc.centre)
+            lc = lc.translate(dimensions * torch.rand(3))
 
-            # load random mesh
-            # voxelize mesh
-            # create labelled cloud
-            # randomly translate / rotate it
-            # return new cloud
+            cloud += lc
 
         return cloud
 
@@ -183,10 +227,14 @@ def from_cfg(cfg):
 
     mesh_adder = RandomMesh(
         mesh_directory=Path("/local/Datasets/Thingi10K/raw_meshes/"),
-        voxel_size=0.01,
-        number_meshes=5,
+        preprocessed_path=Path(
+            "/local/uc-vision/smart-tree/data/things10K_sampled_1mm/"
+        ),
+        voxel_size=0.001,
+        number_meshes=20,
         min_size=0.01,
-        max_size=0.5,
+        max_size=20,
+        max_pts=50000,
     )
 
     cld, _ = load_data_npz(Path("/local/_smart-tree/evaluation-data/gt/apple_12.npz"))
@@ -195,7 +243,13 @@ def from_cfg(cfg):
 
     cld.view()
 
-    # o3d_viewer([cld])
+    o3d_viewer(
+        [
+            cld.to_o3d_cld(),
+            o3d_sphere(cld.min_xyz, radius=0.1, colour=(0, 1, 0)),
+            o3d_sphere(cld.max_xyz, radius=0.1),
+        ]
+    )
 
     quit()
     centre = CentreCloud()
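
The new docstring and __call__ above describe the augmentation in two stages: each mesh is pre-sampled once into a .pcd cloud with a point budget of roughly surface_area / voxel_size**2 (clamped between 10 and max_pts), and at call time a random preprocessed cloud is rescaled, rotated by random Euler angles, anchored at the target cloud's minimum corner, and offset uniformly within the bounding-box dimensions before being merged with class label 2. Below is a minimal standalone sketch of that call-time placement on plain (N, 3) torch tensors; euler_to_rotation and place_object are illustrative stand-ins, not smart_tree APIs.

import math
import torch

def euler_to_rotation(angles: torch.Tensor) -> torch.Tensor:
    # XYZ Euler angles -> 3x3 rotation matrix (illustrative stand-in for
    # smart_tree's euler_angles_to_rotation helper).
    rx, ry, rz = angles.tolist()
    cx, sx = math.cos(rx), math.sin(rx)
    cy, sy = math.cos(ry), math.sin(ry)
    cz, sz = math.cos(rz), math.sin(rz)
    Rx = torch.tensor([[1.0, 0.0, 0.0], [0.0, cx, -sx], [0.0, sx, cx]])
    Ry = torch.tensor([[cy, 0.0, sy], [0.0, 1.0, 0.0], [-sy, 0.0, cy]])
    Rz = torch.tensor([[cz, -sz, 0.0], [sz, cz, 0.0], [0.0, 0.0, 1.0]])
    return Rz @ Ry @ Rx

def place_object(cloud_xyz: torch.Tensor, obj_xyz: torch.Tensor) -> torch.Tensor:
    # Rotate the object randomly, then drop it uniformly at random inside the
    # axis-aligned bounding box of the target cloud, mirroring RandomMesh.__call__.
    min_xyz = cloud_xyz.min(0)[0]
    dimensions = cloud_xyz.max(0)[0] - min_xyz

    obj_xyz = obj_xyz @ euler_to_rotation(torch.rand(3) * 2 * math.pi).T

    obj_centre = obj_xyz.min(0)[0] + (obj_xyz.max(0)[0] - obj_xyz.min(0)[0]) / 2
    obj_xyz = obj_xyz + (min_xyz - obj_centre)     # anchor the object centre at the min corner
    return obj_xyz + dimensions * torch.rand(3)    # uniform offset within the box extent

# Usage sketch: a made-up "tree" cloud plus a small object dropped into its box.
tree = torch.rand(5000, 3) * torch.tensor([4.0, 4.0, 10.0])
obj = torch.rand(200, 3) * 0.3
merged = torch.cat([tree, place_object(tree, obj)], dim=0)

The random rescaling between min_size and max_size is omitted here for brevity; in the commit it is applied with Open3D's PointCloud.scale before the cloud is converted to a LabelledCloud.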

smart_tree/dataset/dataset.py

Lines changed: 3 additions & 3 deletions
@@ -51,9 +51,6 @@ def __getitem__(self, idx):
 
         labelled_cld = labelled_cld.to_device(torch.device("cuda"))
 
-        if self.augmentation != None:
-            labelled_cld = self.augmentation(labelled_cld)
-
         if self.blocking:
             block_center_idx = torch.randint(
                 labelled_cld.xyz.shape[0], size=(1,), device=labelled_cld.xyz.device
@@ -66,6 +63,9 @@
             )
             labelled_cld = labelled_cld.filter(block_filter)
 
+        if self.augmentation != None:
+            labelled_cld = self.augmentation(labelled_cld)
+
         xyzmin, _ = torch.min(labelled_cld.xyz, axis=0)
         xyzmax, _ = torch.max(labelled_cld.xyz, axis=0)
         make_voxel_gen = time.time()

smart_tree/scripts/sample-meshes.py

Lines changed: 69 additions & 0 deletions
@@ -0,0 +1,69 @@
+""" Script to sample meshes into point clouds.
+
+usage:
+
+python smart_tree/scripts/sample-meshes.py --read_directory=/local/Datasets/Thingi10K/raw_meshes/ --save_directory=/local/Datasets/Thingi10K/point_clouds/ --scale=0.001 --voxel_size=0.01
+
+
+"""
+
+import json
+import os
+import random
+from pathlib import Path
+
+import click
+import numpy as np
+
+from tqdm import tqdm
+
+from smart_tree.util.file import load_o3d_mesh, load_data_npz
+from smart_tree.util.visualizer.view import o3d_viewer
+
+
+@click.command()
+@click.option(
+    "--read_directory",
+    type=click.Path(exists=True),
+    prompt="read directory?",
+)
+@click.option(
+    "--save_directory",
+    type=click.Path(exists=True),
+    prompt="save directory?",
+)
+@click.option(
+    "--scale",
+    type=float,
+    prompt="scale?",
+)
+@click.option(
+    "--voxel_size",
+    type=float,
+    prompt="voxel_size?",
+)
+def main(read_directory, save_directory, scale, voxel_size):
+    files_paths = [str(path) for path in Path(read_directory).glob("*.stl")]
+
+    pcd, skeleton = load_data_npz("/local/synthetic-trees/dataset/apple/apple_1.npz")
+
+    for file_path in tqdm(files_paths):
+        try:
+            mesh = load_o3d_mesh(file_path)
+            mesh = (
+                mesh.scale(scale, mesh.get_center())
+                .translate(-mesh.get_center())
+                .paint_uniform_color(np.random.rand(3))
+                .scale(100, mesh.get_center())
+                .sample_points_uniformly(
+                    int(mesh.get_surface_area() / (voxel_size**2))
+                )
+            )
+        except:
+            pass
+
+        # o3d_viewer([mesh, pcd.to_o3d_cld()])
+
+
+if __name__ == "__main__":
+    main()
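
Both this script and the RandomMesh preprocessing choose the sample count as int(mesh.get_surface_area() / voxel_size**2), i.e. roughly one point per voxel_size x voxel_size patch of surface, so an object keeps the target density even at its largest allowed scale. A small worked sketch of that budget with made-up numbers (Open3D only; the sphere is purely illustrative):

import open3d as o3d

voxel_size = 0.001  # 1 mm target spacing, matching the commit's config
mesh = o3d.geometry.TriangleMesh.create_sphere(radius=0.05)  # illustrative 5 cm object

area = mesh.get_surface_area()             # about 0.031 m^2 for a 5 cm sphere
n_points = int(area / voxel_size**2)       # about 31,000 points, one per ~1 mm^2 of surface
n_points = min(max(n_points, 10), 50_000)  # clamp as RandomMesh does via max_pts

pcd = mesh.sample_points_uniformly(number_of_points=n_points)
print(len(pcd.points), "points sampled")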

smart_tree/scripts/split-data.py

Lines changed: 82 additions & 0 deletions
@@ -0,0 +1,82 @@
+""" Script to split dataset into train and test sets
+
+usage:
+
+python smart_tree/scripts/split-data.py --read_directory=/speed-tree/speed-tree-outputs/processed_vines/ --json_save_path=/smart-tree/smart_tree/conf/vine-split.json --sample_type=random
+
+
+"""
+
+import json
+import os
+import random
+from pathlib import Path
+
+import click
+import numpy as np
+
+
+def flatten_list(l):
+    return [item for sublist in l for item in sublist]
+
+
+def random_sample(read_dir, json_save_path):
+    items = [str(path.name) for path in Path(read_dir).rglob("*.npz")]
+    random.shuffle(items)
+
+    data = {}
+
+    data["train"] = sorted(items[: int(0.8 * len(items))])
+    data["test"] = sorted(items[int(0.8 * len(items)) : int(0.9 * len(items))])
+    data["validation"] = sorted(items[int(0.9 * len(items)) :])
+
+    with open(json_save_path, "w") as outfile:
+        json.dump(data, outfile, indent=4, sort_keys=False)
+
+
+def strattified_sample(read_dir, json_save_path):
+    dirs = os.listdir(read_dir)
+
+    train_paths = []
+    test_paths = []
+    val_paths = []
+
+    for directory in dirs:
+        items = [
+            str(path.resolve())
+            for path in Path(f"{read_dir}/{directory}").rglob("*.npz")
+        ]
+        random.shuffle(items)
+
+        train_paths.append(items[: int(0.8 * len(items))])
+        test_paths.append(
+            items[int(0.8 * len(items)) : int(0.8 * len(items) + int(0.1 * len(items)))]
+        )
+        val_paths.append(items[int(0.8 * len(items)) + int(0.1 * len(items)) :])
+
+    data = {}
+
+    data["train"] = sorted(flatten_list(train_paths))
+    data["test"] = sorted(flatten_list(test_paths))
+    data["validation"] = sorted(flatten_list(val_paths))
+
+    with open(json_save_path, "w") as outfile:
+        json.dump(data, outfile, indent=4, sort_keys=False)
+
+
+@click.command()
+@click.option(
+    "--read_directory", type=click.Path(exists=True), prompt="read directory?"
+)
+@click.option("--json_save_path", prompt="json path?")
+@click.option("--sample_type", type=str, default=False, required=False)
+def main(read_directory, json_save_path, sample_type):
+    if sample_type == "random":
+        random_sample(read_directory, json_save_path)
+
+    if sample_type == "strattified":
+        strattified_sample(read_directory, json_save_path)
+
+
+if __name__ == "__main__":
+    main()
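
random_sample shuffles the .npz file names and writes an 80/10/10 train/test/validation split (100 clouds would give 80/10/10), while strattified_sample applies the same ratio per subdirectory so every sub-dataset is represented in each split. Both write a JSON file with the keys train, test and validation; a short sketch of consuming it (the vine-split.json path is taken from the usage example above):

import json
from pathlib import Path

# Path from the usage string above; point this at whatever --json_save_path was used.
split_path = Path("smart_tree/conf/vine-split.json")

with open(split_path) as f:
    split = json.load(f)

for name in ("train", "test", "validation"):
    print(f"{name}: {len(split[name])} clouds")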

smart_tree/tests/dataloader.py

Lines changed: 7 additions & 4 deletions
@@ -17,15 +17,18 @@
     config_name="test-dataloader",
 )
 def main(cfg: DictConfig):
-    train_dataloader = instantiate(
-        cfg.data_loader, dataset=instantiate(cfg.dataset, mode="train")
-    )
+    dataset = instantiate(cfg.dataset, mode="train")
+    train_dataloader = instantiate(cfg.data_loader, dataset)
 
     start_time = time.time()
     for data in tqdm(train_dataloader):
        pass
 
-    print(time.time() - start_time)
+    # times = [
+    #     s.elapsed_time(e) for s, e in zip(dataset.start_events, dataset.end_events)
+    # ]
+
+    # print(times)
 
 
 if __name__ == "__main__":
