Commit

Merge branch 'dev'
petrbel committed Oct 5, 2018
2 parents d35caad + 8bbcb9b commit 580574f
Showing 7 changed files with 37 additions and 25 deletions.
10 changes: 5 additions & 5 deletions .circleci/config.yml
@@ -14,7 +14,7 @@ references:
run:
name: Install dependencies on Arch Linux.
command: |
- pacman -S --noconfirm python python-pip git base-devel curl
+ pacman -Syu --noconfirm python python-pip git base-devel curl
pip install coveralls coverage
install: &install
@@ -53,7 +53,7 @@ references:
command: |
coverage run setup.py test
coverage report
- COVERALLS_REPO_TOKEN=IcZuMtRg3p15AtsuOoGQSDaSQxjpVF1tP coveralls
+ COVERALLS_REPO_TOKEN=5fdgMuuCkpjBJBJBEm3VueXvxSMN4eS12 coveralls
jobs:

@@ -79,7 +79,7 @@ jobs:

test_archlinux:
docker:
- - image: pritunl/archlinux:2018-01-13
+ - image: archimg/base-devel
working_directory: ~/emloop
steps:
- *arch_deps
@@ -89,7 +89,7 @@ jobs:

coverage:
docker:
- - image: pritunl/archlinux:2018-01-13
+ - image: archimg/base-devel
working_directory: ~/emloop
steps:
- *arch_deps
@@ -110,7 +110,7 @@ jobs:

deploy:
docker:
- - image: pritunl/archlinux:2018-01-13
+ - image: archimg/base-devel
working_directory: ~/emloop
steps:
- *arch_deps
2 changes: 1 addition & 1 deletion docs/_base
Submodule _base updated 2 files
+0 −1 LICENSE
+1 −1 _templates/related.html
13 changes: 7 additions & 6 deletions emloop/tests/datasets/base_dataset_test.py
@@ -5,11 +5,12 @@
import numpy as np
import tabulate

+ import emloop.datasets.base_dataset
from emloop.datasets.base_dataset import BaseDataset
from emloop.types import Stream


- class TestDataset(BaseDataset):
+ class MockDataset(BaseDataset):
"""Create testing dataset."""

def __init__(self, config_str: str):
@@ -68,9 +69,9 @@ def make_table(self, stream_name: str) -> Tuple:

def test_check_dataset(caplog):
"""Test logging of source names, dtypes and shapes of all the streams available in given dataset."""
- empty_table_logging = tuple(map(lambda line: ('root', logging.INFO, line), TestDataset(None).make_table('empty')))
- ragged_table_logging = tuple(map(lambda line: ('root', logging.INFO, line), TestDataset(None).make_table('ragged')))
- regular_table_logging = tuple(map(lambda line: ('root', logging.INFO, line), TestDataset(None).make_table('regular')))
+ empty_table_logging = tuple(map(lambda line: ('root', logging.INFO, line), MockDataset(None).make_table('empty')))
+ ragged_table_logging = tuple(map(lambda line: ('root', logging.INFO, line), MockDataset(None).make_table('ragged')))
+ regular_table_logging = tuple(map(lambda line: ('root', logging.INFO, line), MockDataset(None).make_table('regular')))

complete_logging = (
(('root', logging.INFO, "Found 4 stream candidates: ['empty_stream', "
@@ -87,12 +88,12 @@ def test_check_dataset(caplog):
+ (('root', logging.WARNING, 'Exception was raised during checking stream '
'`undefined_stream`, (stack trace is displayed only with --verbose flag)'),)
+ (('root', logging.DEBUG, 'Traceback (most recent call last):\n'
- ' File "/root/emloop/emloop/datasets/base_dataset.py", line 61, in '
+ f' File "{emloop.datasets.base_dataset.__file__}", line 61, in '
'stream_info\n'
' batch = next(iter(stream_fn()))\n'
"TypeError: 'NoneType' object is not iterable\n"),)
)

caplog.set_level(logging.DEBUG)
- TestDataset(None).stream_info()
+ MockDataset(None).stream_info()
assert caplog.record_tuples == list(complete_logging)
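
Two points worth noting in this file: the helper class is renamed from TestDataset to MockDataset, presumably so that pytest (which collects any class whose name matches "Test*" and warns when such a class defines __init__) leaves it alone, and the expected traceback line is now built from the module's __file__ attribute instead of a hardcoded /root/emloop/... path, so the assertion holds wherever the package is installed. A minimal sketch of both ideas, using hypothetical names not taken from the diff:

import emloop.datasets.base_dataset


class MockDataset:  # the name avoids pytest's default "Test*" collection pattern
    """Hypothetical helper; pytest ignores it even though it defines __init__."""

    def __init__(self, config_str=None):
        self.config_str = config_str


# Build the expected " File ..." traceback line from the module itself, so the
# assertion no longer depends on the package living under /root/emloop.
expected_line = f' File "{emloop.datasets.base_dataset.__file__}", line 61, in '
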
16 changes: 8 additions & 8 deletions emloop/tests/hooks/save_cm_test.py
@@ -9,7 +9,7 @@
from ..main_loop_test import SimpleDataset


- class TestDataset(SimpleDataset):
+ class MockDataset(SimpleDataset):

@staticmethod
def num_classes():
@@ -53,7 +53,7 @@ def run_hook(hook,
@pytest.mark.parametrize('params, error', _WRONG_INPUTS)
def test_wrong_inputs(params, error, tmpdir):
with pytest.raises(error):
- hook = SaveConfusionMatrix(dataset=TestDataset(), output_dir=tmpdir, **params)
+ hook = SaveConfusionMatrix(dataset=MockDataset(), output_dir=tmpdir, **params)
run_hook(hook)


@@ -66,33 +66,33 @@ def test_wrong_inputs(params, error, tmpdir):

@pytest.mark.parametrize('params', _CORRECT_INPUTS)
def test_correct_inputs(params, tmpdir):
- hook = SaveConfusionMatrix(dataset=TestDataset(), output_dir=tmpdir, **params)
+ hook = SaveConfusionMatrix(dataset=MockDataset(), output_dir=tmpdir, **params)
run_hook(hook)


def test_after_epoch(tmpdir):

# test saving .png
- hook = SaveConfusionMatrix(dataset=TestDataset(), output_dir=tmpdir)
+ hook = SaveConfusionMatrix(dataset=MockDataset(), output_dir=tmpdir)
run_hook(hook)
assert os.path.exists(os.path.join(tmpdir, 'confusion_matrix_epoch_0_train.png'))
# test storing .png
- hook = SaveConfusionMatrix(dataset=TestDataset(), output_dir='', figure_action='store')
+ hook = SaveConfusionMatrix(dataset=MockDataset(), output_dir='', figure_action='store')
epoch_data = run_hook(hook)
assert tuple(epoch_data['train']['confusion_heatmap'].shape) == (480, 640, 3)

# test changing figure size
- hook = SaveConfusionMatrix(dataset=TestDataset(), output_dir='', figure_action='store', figsize=(10, 15))
+ hook = SaveConfusionMatrix(dataset=MockDataset(), output_dir='', figure_action='store', figsize=(10, 15))
epoch_data = run_hook(hook)
dpi = matplotlib.rcParams['figure.dpi']
assert tuple(epoch_data['train']['confusion_heatmap'].shape) == (15*dpi, 10*dpi, 3)

# test whether using mask_name does not crash
- hook = SaveConfusionMatrix(dataset=TestDataset(), output_dir=tmpdir,
+ hook = SaveConfusionMatrix(dataset=MockDataset(), output_dir=tmpdir,
classes_names=['first', 'second'], mask_name='masks')
run_hook(hook)

# test correct input parameters with batch data
- hook = SaveConfusionMatrix(dataset=TestDataset(), output_dir=tmpdir,
+ hook = SaveConfusionMatrix(dataset=MockDataset(), output_dir=tmpdir,
labels_name='special_labels', predictions_name='special_predictions')
run_hook(hook, batch_data={'special_labels': [0, 1], 'special_predictions': [0, 1]})
4 changes: 2 additions & 2 deletions emloop/tests/utils/download_test.py
@@ -5,7 +5,7 @@

import emloop.utils.download as download

- _URL_ZIP = 'https://github.com/iterait/emloop-examples/releases/download/example-files/emloop-0.12.0.zip'
+ _URL_ZIP = 'https://github.com/iterait/emloop-examples/releases/download/example-files/emloop-0.1.0.zip'
_URL_ZIP_BASE = os.path.basename(_URL_ZIP)
_URL_RAR = 'https://github.com/iterait/emloop-examples/releases/download/example-files/anomalousTrafficTest.rar'
_URL_RAR_BASE = os.path.basename(_URL_RAR)
@@ -33,7 +33,7 @@ def test_download_and_unpack_successful(url, tmpdir, caplog):

assert caplog.record_tuples == [('root', logging.INFO, '\tdownloading ' + os.path.join(tmpdir, _URL_ZIP_BASE))]
assert os.path.exists(os.path.join(tmpdir, _URL_ZIP_BASE))
- assert os.path.exists(os.path.join(tmpdir, 'emloop-0.12.0/setup.py'))
+ assert os.path.exists(os.path.join(tmpdir, 'emloop-0.1.0/setup.py'))


@pytest.mark.parametrize('url, url_base, path', UNPACK_FAILURE)
4 changes: 2 additions & 2 deletions emloop/utils/config.py
@@ -5,7 +5,7 @@

import yaml

- from .yaml import load_yaml
+ from .yaml import load_yaml, reload


def parse_arg(arg: str) -> typing.Tuple[str, typing.Any]:
@@ -45,7 +45,7 @@ def load_config(config_file: str, additional_args: typing.Iterable[str]=()) -> d
conf = conf[key_part]
conf[key] = value

- return config
+ return reload(config)


__all__ = []
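
For context, load_config applies the dotted command-line overrides to the parsed mapping and, with this change, re-dumps and re-parses the result via reload before returning it. A hypothetical usage sketch (the file name is made up, and the "section.key=value" override form is inferred from parse_arg and the key-splitting loop above):

from emloop.utils.config import load_config

# Override a nested key from the command line.
config = load_config('config.yaml', additional_args=['model.learning_rate=0.001'])
# Because the edited document is dumped and re-parsed, values referenced through
# YAML anchors/aliases also reflect the override in the returned config.
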
13 changes: 12 additions & 1 deletion emloop/utils/yaml.py
@@ -52,7 +52,18 @@ def make_simple(data: Any) -> Any:
:param data: data to be made simple (dict instead of CommentedMap etc.)
:return: simplified data
"""
- return yaml.load(yaml.dump(data, Dumper=ruamel.yaml.RoundTripDumper), ruamel.yaml.Loader)
+ return yaml.load(yaml.dump(data, Dumper=ruamel.yaml.RoundTripDumper), Loader=ruamel.yaml.Loader)


+ def reload(data: Any) -> Any:
+ """
+ Dump and load yaml data.
+ This is useful to avoid many anchor parsing bugs. When you edit a yaml config, reload it to make sure
+ the changes are propagated to anchor expansions.
+ :param data: data to be reloaded
+ :return: reloaded data
+ """
+ return yaml.load(yaml.dump(data, Dumper=ruamel.yaml.RoundTripDumper), Loader=ruamel.yaml.RoundTripLoader)

__all__ = []
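
A minimal sketch of what the new reload buys, under assumed behaviour of ruamel.yaml's round-trip API (this is an illustration, not code from the repository): after editing a value inside an anchored section, dumping and re-parsing the document guarantees that every alias/merge expansion sees the edited value.

import ruamel.yaml as yaml

DOC = """
defaults: &defaults
  batch_size: 16
train:
  <<: *defaults
"""

config = yaml.load(DOC, Loader=yaml.RoundTripLoader)
config['defaults']['batch_size'] = 32  # e.g. a CLI override such as defaults.batch_size=32

# reload(): dump with the round-trip dumper, parse again with the round-trip loader.
reloaded = yaml.load(yaml.dump(config, Dumper=yaml.RoundTripDumper), Loader=yaml.RoundTripLoader)
print(reloaded['train']['batch_size'])  # 32: the merge-key expansion reflects the edit
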
