add tests for SBDataset (#3467)
Summary:
Co-authored-by: Francisco Massa <fvsmassa@gmail.com>

Reviewed By: fmassa

Differential Revision: D26756273

fbshipit-source-id: 53f4cb3022a0d434104624fc26b7b6ad3dfbd8ae
zhangguanheng66 authored and facebook-github-bot committed Mar 4, 2021
1 parent 44f4834 commit 7029839
Showing 2 changed files with 69 additions and 0 deletions.
1 change: 1 addition & 0 deletions test/datasets_utils.py
@@ -56,6 +56,7 @@ class LazyImporter:
"pycocotools",
"requests",
"scipy.io",
"scipy.sparse",
)

def __init__(self):
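For context: LazyImporter defers importing optional third-party packages until a test actually touches them, so registering "scipy.sparse" here is what makes datasets_utils.lazy_importer.scipy.sparse resolvable in the new test below. A minimal sketch of the idea, assuming a simplified __getattr__-based design rather than torchvision's exact implementation:

import importlib


class LazyImporter:
    # Optional test dependencies, resolved on first attribute access so that
    # a missing package only fails the tests that actually use it.
    MODULES = ("scipy.io", "scipy.sparse")

    def __getattr__(self, name):
        matches = [m for m in self.MODULES if m.split(".")[0] == name]
        if not matches:
            raise AttributeError(name)
        # Import the registered submodules first so attribute chains like
        # lazy_importer.scipy.sparse resolve, then return the package itself.
        for module in matches:
            importlib.import_module(module)
        return importlib.import_module(name)


lazy_importer = LazyImporter()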
68 changes: 68 additions & 0 deletions test/test_datasets.py
@@ -1193,5 +1193,73 @@ def inject_fake_data(self, tmpdir, config):
         return num_images


+class SBDatasetTestCase(datasets_utils.ImageDatasetTestCase):
+    DATASET_CLASS = datasets.SBDataset
+    FEATURE_TYPES = (PIL.Image.Image, (np.ndarray, PIL.Image.Image))
+
+    REQUIRED_PACKAGES = ("scipy.io", "scipy.sparse")
+
+    CONFIGS = datasets_utils.combinations_grid(
+        image_set=("train", "val", "train_noval"), mode=("boundaries", "segmentation")
+    )
+
+    _NUM_CLASSES = 20
+
+    def inject_fake_data(self, tmpdir, config):
+        num_images, num_images_per_image_set = self._create_split_files(tmpdir)
+
+        sizes = self._create_target_folder(tmpdir, "cls", num_images)
+
+        datasets_utils.create_image_folder(
+            tmpdir, "img", lambda idx: f"{self._file_stem(idx)}.jpg", num_images, size=lambda idx: sizes[idx]
+        )
+
+        return num_images_per_image_set[config["image_set"]]
+
+    def _create_split_files(self, root):
+        root = pathlib.Path(root)
+
+        splits = dict(train=(0, 1, 2), train_noval=(0, 2), val=(3,))
+
+        for split, idcs in splits.items():
+            self._create_split_file(root, split, idcs)
+
+        num_images = max(itertools.chain(*splits.values())) + 1
+        num_images_per_split = {split: len(idcs) for split, idcs in splits.items()}
+        return num_images, num_images_per_split
+
+    def _create_split_file(self, root, name, idcs):
+        with open(root / f"{name}.txt", "w") as fh:
+            fh.writelines(f"{self._file_stem(idx)}\n" for idx in idcs)
+
+    def _create_target_folder(self, root, name, num_images):
+        io = datasets_utils.lazy_importer.scipy.io
+
+        target_folder = pathlib.Path(root) / name
+        os.makedirs(target_folder)
+
+        sizes = [torch.randint(1, 4, size=(2,)).tolist() for _ in range(num_images)]
+        for idx, size in enumerate(sizes):
+            content = dict(
+                GTcls=dict(Boundaries=self._create_boundaries(size), Segmentation=self._create_segmentation(size))
+            )
+            io.savemat(target_folder / f"{self._file_stem(idx)}.mat", content)
+
+        return sizes
+
+    def _create_boundaries(self, size):
+        sparse = datasets_utils.lazy_importer.scipy.sparse
+        return [
+            [sparse.csc_matrix(torch.randint(0, 2, size=size, dtype=torch.uint8).numpy())]
+            for _ in range(self._NUM_CLASSES)
+        ]
+
+    def _create_segmentation(self, size):
+        return torch.randint(0, self._NUM_CLASSES + 1, size=size, dtype=torch.uint8).numpy()
+
+    def _file_stem(self, idx):
+        return f"2008_{idx:06d}"
+
+
 if __name__ == "__main__":
     unittest.main()
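For reference, a usage sketch of the dataset these tests exercise; "./sbd" is a placeholder root and assumes an already-extracted SBD archive:

from torchvision import datasets

# Hypothetical root directory; set download=True to fetch the archive instead.
ds = datasets.SBDataset("./sbd", image_set="train", mode="boundaries", download=False)
img, target = ds[0]
# img is a PIL image. Matching FEATURE_TYPES above, target is a NumPy array
# (one channel per class) in "boundaries" mode and a PIL image mask in
# "segmentation" mode.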
