Merge pull request #493 from Hakuyume/add-attr-disk
Add attr.disk
yuyu2172 authored Dec 16, 2017
2 parents 4e29c71 + a93d33c commit 6f6858d
Showing 9 changed files with 29 additions and 9 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
@@ -49,4 +49,4 @@ script:
   - autopep8 -r . | tee check_autopep8
   - test ! -s check_autopep8
   - python style_checker.py .
-  - MPLBACKEND="agg" nosetests -a '!gpu,!slow' tests
+  - MPLBACKEND="agg" nosetests -a '!gpu,!slow,!disk' tests
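
(Note: the -a option is nose's attribute-based test selection; '!gpu,!slow,!disk' deselects any test carrying a truthy gpu, slow, or disk attribute, so tests marked with the new disk attribute are skipped on CI alongside the gpu and slow ones.)
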
Empty file added chainercv/testing/__init__.py
11 changes: 11 additions & 0 deletions chainercv/testing/attr.py
@@ -0,0 +1,11 @@
+from chainer.testing.attr import cudnn  # NOQA
+from chainer.testing.attr import gpu  # NOQA
+from chainer.testing.attr import multi_gpu  # NOQA
+from chainer.testing.attr import slow  # NOQA
+
+try:
+    import pytest
+    disk = pytest.mark.disk
+except ImportError:
+    from chainer.testing.attr import _dummy_callable
+    disk = _dummy_callable
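
Usage of the new attribute mirrors the existing slow and gpu markers: decorate tests that need large datasets already present on disk, and they can then be deselected with the attribute filter added to .travis.yml above. A minimal sketch of how a dataset test adopts the marker (the class and test names below are hypothetical, not part of this commit):

import unittest

from chainercv.testing import attr


class TestSomeDiskBackedDataset(unittest.TestCase):
    # Hypothetical illustration of a test adopting the new marker.

    @attr.slow
    @attr.disk  # the test reads a dataset that must already exist on disk
    def test_dataset(self):
        # real tests call helpers such as assert_is_semantic_segmentation_dataset
        pass
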
6 changes: 4 additions & 2 deletions tests/datasets_tests/ade20k_tests/test_ade20k.py
@@ -1,12 +1,12 @@
-import numpy as np
 import unittest
 
 from chainer import testing
-from chainer.testing import attr
+import numpy as np
 
 from chainercv.datasets import ade20k_semantic_segmentation_label_names
 from chainercv.datasets import ADE20KSemanticSegmentationDataset
 from chainercv.datasets import ADE20KTestImageDataset
+from chainercv.testing import attr
 from chainercv.utils import assert_is_semantic_segmentation_dataset
 from chainercv.utils.testing.assertions.assert_is_image import assert_is_image

@@ -21,6 +21,7 @@ def setUp(self):
         self.dataset = ADE20KSemanticSegmentationDataset(split=self.split)
 
     @attr.slow
+    @attr.disk
     def test_ade20k_dataset(self):
         assert_is_semantic_segmentation_dataset(
             self.dataset, len(ade20k_semantic_segmentation_label_names),
@@ -33,6 +34,7 @@ def setUp(self):
         self.dataset = ADE20KTestImageDataset()
 
     @attr.slow
+    @attr.disk
     def test_ade20k_dataset(self):
         indices = np.random.permutation(np.arange(len(self.dataset)))
         for i in indices[:10]:
3 changes: 2 additions & 1 deletion tests/datasets_tests/camvid_tests/test_camvid_dataset.py
@@ -1,10 +1,10 @@
 import unittest
 
 from chainer import testing
-from chainer.testing import attr
 
 from chainercv.datasets import camvid_label_names
 from chainercv.datasets import CamVidDataset
+from chainercv.testing import attr
 from chainercv.utils import assert_is_semantic_segmentation_dataset
 

@@ -19,6 +19,7 @@ def setUp(self):
         self.dataset = CamVidDataset(split=self.split)
 
     @attr.slow
+    @attr.disk
     def test_camvid_dataset(self):
         assert_is_semantic_segmentation_dataset(
             self.dataset, len(camvid_label_names), n_example=10)
6 changes: 4 additions & 2 deletions tests/datasets_tests/cityscapes_tests/test_cityscapes.py
@@ -1,15 +1,15 @@
-import numpy as np
 import os
 import shutil
 import tempfile
 import unittest
 
 from chainer import testing
-from chainer.testing import attr
+import numpy as np
 
 from chainercv.datasets.cityscapes.cityscapes_utils import cityscapes_labels
 from chainercv.datasets import CityscapesSemanticSegmentationDataset
 from chainercv.datasets import CityscapesTestImageDataset
+from chainercv.testing import attr
 from chainercv.utils import assert_is_semantic_segmentation_dataset
 from chainercv.utils.testing.assertions.assert_is_image import assert_is_image
 from chainercv.utils import write_image
@@ -59,6 +59,7 @@ def tearDown(self):
         shutil.rmtree(self.temp_dir)
 
     @attr.slow
+    @attr.disk
     def test_cityscapes_semantic_segmentation_dataset(self):
         assert_is_semantic_segmentation_dataset(
             self.dataset, self.n_class, n_example=10)
@@ -83,6 +84,7 @@ def tearDown(self):
         shutil.rmtree(self.temp_dir)
 
     @attr.slow
+    @attr.disk
     def test_cityscapes_dataset(self):
         indices = np.random.permutation(np.arange(len(self.dataset)))
         for i in indices[:10]:
3 changes: 2 additions & 1 deletion tests/datasets_tests/cub_tests/test_cub_label_dataset.py
@@ -3,10 +3,10 @@
 import numpy as np
 
 from chainer import testing
-from chainer.testing import attr
 
 from chainercv.datasets import cub_label_names
 from chainercv.datasets import CUBLabelDataset
+from chainercv.testing import attr
 from chainercv.utils import assert_is_bbox
 from chainercv.utils import assert_is_label_dataset
 
@@ -22,6 +22,7 @@ def setUp(self):
             return_bb=self.return_bb, return_prob_map=self.return_prob_map)
 
     @attr.slow
+    @attr.disk
     def test_cub_label_dataset(self):
         assert_is_label_dataset(
             self.dataset, len(cub_label_names), n_example=10)
4 changes: 3 additions & 1 deletion tests/datasets_tests/voc_tests/test_voc_bbox_dataset.py
@@ -3,11 +3,11 @@
 import numpy as np
 
 from chainer import testing
-from chainer.testing import attr
 from chainer.testing import condition
 
 from chainercv.datasets import voc_bbox_label_names
 from chainercv.datasets import VOCBboxDataset
+from chainercv.testing import attr
 from chainercv.utils import assert_is_bbox_dataset
 

@@ -37,11 +37,13 @@ def setUp(self):
         self.n_out = 4 if self.return_difficult else 3
 
     @attr.slow
+    @attr.disk
     def test_as_bbox_dataset(self):
         assert_is_bbox_dataset(
             self.dataset, len(voc_bbox_label_names), n_example=10)
 
     @attr.slow
+    @attr.disk
     @condition.repeat(10)
     def test_difficult(self):
         if not self.return_difficult:
3 changes: 2 additions & 1 deletion tests/datasets_tests/voc_tests/test_voc_semantic_segmentation_dataset.py
@@ -1,10 +1,10 @@
 import unittest
 
 from chainer import testing
-from chainer.testing import attr
 
 from chainercv.datasets import voc_semantic_segmentation_label_names
 from chainercv.datasets import VOCSemanticSegmentationDataset
+from chainercv.testing import attr
 from chainercv.utils import assert_is_semantic_segmentation_dataset
 

@@ -19,6 +19,7 @@ def setUp(self):
         self.dataset = VOCSemanticSegmentationDataset(split=self.split)
 
     @attr.slow
+    @attr.disk
     def test_voc_semantic_segmentation_dataset(self):
         assert_is_semantic_segmentation_dataset(
             self.dataset,
