=================================== FAILURES ===================================
___________________ TestBatchLoader.test_basic_functionality ___________________

self =

    def test_basic_functionality(self):
        def _load_func():
            for _ in sm.xrange(20):
                yield ia.Batch(images=np.zeros((2, 4, 4, 3), dtype=np.uint8))

        warnings.simplefilter("always")
        with warnings.catch_warnings(record=True) as caught_warnings:
            for nb_workers in [1, 2]:
                # repeat these tests many times to catch rarer race conditions
                for _ in sm.xrange(5):
                    loader = multicore.BatchLoader(
                        _load_func, queue_size=2, nb_workers=nb_workers,
                        threaded=True)
                    loaded = []
                    counter = 0
                    while ((not loader.all_finished()
                            or not loader.queue.empty())
                           and counter < 1000):
                        try:
                            batch = loader.queue.get(timeout=0.001)
                            loaded.append(batch)
                        except:
                            pass
                        counter += 1
                    assert len(loaded) == 20*nb_workers, \
                        "Expected %d to be loaded by threads, got %d for %d " \
                        "workers at counter %d." % (
                            20*nb_workers, len(loaded), nb_workers, counter
                        )

                    loader = multicore.BatchLoader(
                        _load_func, queue_size=200, nb_workers=nb_workers,
                        threaded=True)
                    loader.terminate()
                    assert loader.all_finished()

                    loader = multicore.BatchLoader(
                        _load_func, queue_size=2, nb_workers=nb_workers,
>                       threaded=False)

test/test_multicore.py:820:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
imgaug/imgaug.py:183: in wrapped
    return func(*args, **kwargs)
imgaug/multicore.py:558: in __init__
    worker.start()
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/process.py:112: in start
    self._popen = self._Popen(self)
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/context.py:223: in _Popen
    return _default_context.get_context().Process._Popen(process_obj)
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/context.py:284: in _Popen
    return Popen(process_obj)
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/popen_spawn_posix.py:32: in __init__
    super().__init__(process_obj)
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/popen_fork.py:20: in __init__
    self._launch(process_obj)
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/popen_spawn_posix.py:47: in _launch
    reduction.dump(process_obj, fp)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

obj = , file = <_io.BytesIO object at 0x7fffc8b21110>, protocol = None

    def dump(obj, file, protocol=None):
        '''Replacement for pickle.dump() using ForkingPickler.'''
>       ForkingPickler(file, protocol).dump(obj)
E       AttributeError: Can't pickle local object 'TestBatchLoader.test_basic_functionality.<locals>._load_func'

/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/reduction.py:60: AttributeError
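The failure above is a pickling problem, not a race condition: the traceback runs through popen_spawn_posix.py, so this Python build starts worker processes with the "spawn" method, which must pickle the process object, and a function defined inside a test method (_load_func) cannot be pickled by reference. A minimal standard-library sketch of the same error and the usual workaround; the names below are illustrative, not taken from imgaug:

import multiprocessing


def module_level_func():
    # Importable by its qualified name, so pickle (and therefore "spawn")
    # can reconstruct it in the child process.
    return 42


def demo():
    def local_func():
        return 42

    ctx = multiprocessing.get_context("spawn")  # same start method as in the log

    p = ctx.Process(target=module_level_func)
    p.start()  # works: the target is pickled by reference
    p.join()

    p = ctx.Process(target=local_func)
    try:
        p.start()  # fails while pickling the process object
    except AttributeError as exc:
        print(exc)  # Can't pickle local object 'demo.<locals>.local_func'


if __name__ == "__main__":
    demo()

Where the platform supports it, multiprocessing.set_start_method("fork") avoids the pickling step when starting workers; otherwise the load function has to live at module level.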
_ TestAugmenter_augment_batches.test_augment_batches_with_many_different_augmenters _

self =

    def test_augment_batches_with_many_different_augmenters(self):
        image = np.array([[0, 0, 1, 1, 1],
                          [0, 0, 1, 1, 1],
                          [0, 1, 1, 1, 1]], dtype=np.uint8)
        keypoint = ia.Keypoint(x=2, y=1)
        keypoints = [ia.KeypointsOnImage([keypoint], shape=image.shape + (1,))]

        def _lambda_func_images(images, random_state, parents, hooks):
            return images

        def _lambda_func_keypoints(keypoints_on_images, random_state, parents,
                                   hooks):
            return keypoints_on_images

        def _assertlambda_func_images(images, random_state, parents, hooks):
            return True

        def _assertlambda_func_keypoints(keypoints_on_images, random_state,
                                         parents, hooks):
            return True

        augs = [
            iaa.Sequential([iaa.Fliplr(1.0), iaa.Flipud(1.0)]),
            iaa.SomeOf(1, [iaa.Fliplr(1.0), iaa.Flipud(1.0)]),
            iaa.OneOf([iaa.Fliplr(1.0), iaa.Flipud(1.0)]),
            iaa.Sometimes(1.0, iaa.Fliplr(1)),
            iaa.WithColorspace("HSV", children=iaa.Add((-50, 50))),
            iaa.WithChannels([0], iaa.Add((-50, 50))),
            iaa.Identity(name="Identity-nochange"),
            iaa.Lambda(
                func_images=_lambda_func_images,
                func_keypoints=_lambda_func_keypoints,
                name="Lambda-nochange"
            ),
            iaa.AssertLambda(
                func_images=_assertlambda_func_images,
                func_keypoints=_assertlambda_func_keypoints,
                name="AssertLambda-nochange"
            ),
            iaa.AssertShape(
                (None, 64, 64, 3),
                check_keypoints=False,
                name="AssertShape-nochange"
            ),
            iaa.Resize((0.5, 0.9)),
            iaa.CropAndPad(px=(-50, 50)),
            iaa.Pad(px=(1, 50)),
            iaa.Crop(px=(1, 50)),
            iaa.Fliplr(1.0),
            iaa.Flipud(1.0),
            iaa.Superpixels(p_replace=(0.25, 1.0), n_segments=(16, 128)),
            iaa.ChangeColorspace(to_colorspace="GRAY"),
            iaa.Grayscale(alpha=(0.1, 1.0)),
            iaa.GaussianBlur(1.0),
            iaa.AverageBlur(5),
            iaa.MedianBlur(5),
            iaa.Convolve(np.array([[0, 1, 0],
                                   [1, -4, 1],
                                   [0, 1, 0]])),
            iaa.Sharpen(alpha=(0.1, 1.0), lightness=(0.8, 1.2)),
            iaa.Emboss(alpha=(0.1, 1.0), strength=(0.8, 1.2)),
            iaa.EdgeDetect(alpha=(0.1, 1.0)),
            iaa.DirectedEdgeDetect(alpha=(0.1, 1.0), direction=(0.0, 1.0)),
            iaa.Add((-50, 50)),
            iaa.AddElementwise((-50, 50)),
            iaa.AdditiveGaussianNoise(scale=(0.1, 1.0)),
            iaa.Multiply((0.6, 1.4)),
            iaa.MultiplyElementwise((0.6, 1.4)),
            iaa.Dropout((0.3, 0.5)),
            iaa.CoarseDropout((0.3, 0.5), size_percent=(0.05, 0.2)),
            iaa.Invert(0.5),
            iaa.Affine(
                scale=(0.7, 1.3),
                translate_percent=(-0.1, 0.1),
                rotate=(-20, 20),
                shear=(-20, 20),
                order=ia.ALL,
                mode=ia.ALL,
                cval=(0, 255)),
            iaa.PiecewiseAffine(scale=(0.1, 0.3)),
            iaa.ElasticTransformation(alpha=0.5)
        ]

        nb_iterations = 100
        image = ia.quokka(size=(64, 64))
        batches = [ia.Batch(images=[np.copy(image)],
                            keypoints=[keypoints[0].deepcopy()])
                   for _ in sm.xrange(nb_iterations)]
        for aug in augs:
            nb_changed = 0
>           batches_aug = list(aug.augment_batches(batches, background=True))

test/augmenters/test_meta.py:2846:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
imgaug/augmenters/meta.py:506: in augment_batches
    with multicore.Pool(self) as pool:
imgaug/multicore.py:362: in __enter__
    _ = self.pool  # initialize internal multiprocessing pool instance
imgaug/multicore.py:177: in pool
    maxtasksperchild=self.maxtasksperchild)
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/context.py:119: in Pool
    context=self.get_context())
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/pool.py:176: in __init__
    self._repopulate_pool()
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/pool.py:241: in _repopulate_pool
    w.start()
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/process.py:112: in start
    self._popen = self._Popen(self)
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/context.py:284: in _Popen
    return Popen(process_obj)
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/popen_spawn_posix.py:32: in __init__
    super().__init__(process_obj)
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/popen_fork.py:20: in __init__
    self._launch(process_obj)
/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/popen_spawn_posix.py:47: in _launch
    reduction.dump(process_obj, fp)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

obj = , file = <_io.BytesIO object at 0x7fffc7d23470>, protocol = None

    def dump(obj, file, protocol=None):
        '''Replacement for pickle.dump() using ForkingPickler.'''
>       ForkingPickler(file, protocol).dump(obj)
E       AttributeError: Can't pickle local object 'TestAugmenter_augment_batches.test_augment_batches_with_many_different_augmenters.<locals>._lambda_func_images'

/nix/store/drr8qcgiccfc5by09r5zc30flgwh1mbx-python3-3.7.5/lib/python3.7/multiprocessing/reduction.py:60: AttributeError
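This second failure has the same root cause one layer up: augment_batches(..., background=True) opens a multicore.Pool(self), and pickling the spawned pool workers drags in the augmenter, which still references the _lambda_func_*/_assertlambda_func_* callbacks defined inside the test method. A hedged sketch of the usual fix, with the callbacks moved to module scope so they pickle by reference (the signatures are copied from the test code above; image shape and batch count are placeholders):

import numpy as np
import imgaug as ia
import imgaug.augmenters as iaa


def _lambda_func_images(images, random_state, parents, hooks):
    return images


def _lambda_func_keypoints(keypoints_on_images, random_state, parents, hooks):
    return keypoints_on_images


if __name__ == "__main__":
    # Module-level callbacks keep the augmenter picklable, so background
    # (multiprocess) augmentation can start its workers under "spawn".
    aug = iaa.Lambda(func_images=_lambda_func_images,
                     func_keypoints=_lambda_func_keypoints)
    batches = [ia.Batch(images=[np.zeros((64, 64, 3), dtype=np.uint8)])
               for _ in range(4)]
    batches_aug = list(aug.augment_batches(batches, background=True))
    print(len(batches_aug))

If the callbacks must stay local to the test, running augment_batches with background=False sidesteps the pool and the pickling step entirely.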
=============================== warnings summary ===============================
test/test_dtypes.py::Test_change_dtype_::test_clip_and_round
  /build/imgaug-58fac47309fb094269fe581412746f236a848f12/imgaug/imgaug.py:308: DeprecationWarning: Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since Python 3.3,and in 3.9 it will stop working
    return isinstance(val, collections.Iterable)
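The DeprecationWarning in the summary is unrelated to the two failures: imgaug/imgaug.py:308 still takes the Iterable ABC from collections, an alias that newer Python versions remove. A sketch of the non-deprecated form of the same check (the function name here is assumed, mirroring only the line quoted in the warning):

try:
    from collections.abc import Iterable  # Python 3.3+
except ImportError:  # Python 2 fallback
    from collections import Iterable


def is_iterable(val):
    # Same check as the line quoted from imgaug/imgaug.py:308, but via
    # collections.abc, so the DeprecationWarning no longer fires.
    return isinstance(val, Iterable)


if __name__ == "__main__":
    print(is_iterable([1, 2, 3]), is_iterable(17))  # True False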