diff --git a/examples/nas/classic_nas/mnist.py b/examples/nas/classic_nas/mnist.py
index 55c9601cd4..629e04ca28 100644
--- a/examples/nas/classic_nas/mnist.py
+++ b/examples/nas/classic_nas/mnist.py
@@ -149,7 +149,7 @@ def get_params():
     # Training settings
     parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
     parser.add_argument("--data_dir", type=str,
-                        default='/tmp/tensorflow/mnist/input_data', help="data directory")
+                        default='./data', help="data directory")
     parser.add_argument('--batch_size', type=int, default=64, metavar='N',
                         help='input batch size for training (default: 64)')
     parser.add_argument("--hidden_size", type=int, default=512, metavar='N',
diff --git a/examples/nas/darts/search.py b/examples/nas/darts/search.py
index 9800c349b3..691e6c71a9 100644
--- a/examples/nas/darts/search.py
+++ b/examples/nas/darts/search.py
@@ -22,12 +22,13 @@
     parser.add_argument("--batch-size", default=64, type=int)
     parser.add_argument("--log-frequency", default=10, type=int)
     parser.add_argument("--epochs", default=50, type=int)
+    parser.add_argument("--channels", default=16, type=int)
     parser.add_argument("--unrolled", default=False, action="store_true")
     args = parser.parse_args()

     dataset_train, dataset_valid = datasets.get_dataset("cifar10")

-    model = CNN(32, 3, 16, 10, args.layers)
+    model = CNN(32, 3, args.channels, 10, args.layers)
     criterion = nn.CrossEntropyLoss()

     optim = torch.optim.SGD(model.parameters(), 0.025, momentum=0.9, weight_decay=3.0E-4)
diff --git a/examples/nas/enas/search.py b/examples/nas/enas/search.py
index 38096a3fe6..44c870bb00 100644
--- a/examples/nas/enas/search.py
+++ b/examples/nas/enas/search.py
@@ -24,16 +24,17 @@
     parser.add_argument("--batch-size", default=128, type=int)
     parser.add_argument("--log-frequency", default=10, type=int)
     parser.add_argument("--search-for", choices=["macro", "micro"], default="macro")
+    parser.add_argument("--epochs", default=None, type=int, help="Number of epochs (default: macro 310, micro 150)")
     args = parser.parse_args()

     dataset_train, dataset_valid = datasets.get_dataset("cifar10")
     if args.search_for == "macro":
         model = GeneralNetwork()
-        num_epochs = 310
+        num_epochs = args.epochs or 310
         mutator = None
     elif args.search_for == "micro":
         model = MicroNetwork(num_layers=6, out_channels=20, num_nodes=5, dropout_rate=0.1, use_aux_heads=True)
-        num_epochs = 150
+        num_epochs = args.epochs or 150
         mutator = enas.EnasMutator(model, tanh_constant=1.1, cell_exit_extra_step=True)
     else:
         raise AssertionError
diff --git a/examples/nas/pdarts/search.py b/examples/nas/pdarts/search.py
index 4e5fdfad98..9f3fea234f 100644
--- a/examples/nas/pdarts/search.py
+++ b/examples/nas/pdarts/search.py
@@ -25,23 +25,28 @@

 if __name__ == "__main__":
     parser = ArgumentParser("pdarts")
-    parser.add_argument('--add_layers', action='append',
-                        default=[0, 6, 12], help='add layers')
-    parser.add_argument('--dropped_ops', action='append',
-                        default=[3, 2, 1], help='drop ops')
+    parser.add_argument('--add_layers', action='append', type=int,
+                        help='add layers, default: [0, 6, 12]')
+    parser.add_argument('--dropped_ops', action='append', type=int,
+                        help='drop ops, default: [3, 2, 1]')
     parser.add_argument("--nodes", default=4, type=int)
     parser.add_argument("--init_layers", default=5, type=int)
+    parser.add_argument("--channels", default=16, type=int)
     parser.add_argument("--batch-size", default=64, type=int)
     parser.add_argument("--log-frequency", default=1, type=int)
     parser.add_argument("--epochs", default=50, type=int)
     parser.add_argument("--unrolled", default=False, action="store_true")
     args = parser.parse_args()
+    if args.add_layers is None:
+        args.add_layers = [0, 6, 12]
+    if args.dropped_ops is None:
+        args.dropped_ops = [3, 2, 1]

     logger.info("loading data")
     dataset_train, dataset_valid = datasets.get_dataset("cifar10")

     def model_creator(layers):
-        model = CNN(32, 3, 16, 10, layers, n_nodes=args.nodes)
+        model = CNN(32, 3, args.channels, 10, layers, n_nodes=args.nodes)
         criterion = nn.CrossEntropyLoss()

         optim = torch.optim.SGD(model.parameters(), 0.025, momentum=0.9, weight_decay=3.0E-4)
diff --git a/test/pipelines/pipelines-it-local.yml b/test/pipelines/pipelines-it-local.yml
index 941dcaedb8..4606795274 100644
--- a/test/pipelines/pipelines-it-local.yml
+++ b/test/pipelines/pipelines-it-local.yml
@@ -26,6 +26,10 @@ jobs:
       cd test
       PATH=$HOME/.local/bin:$PATH python3 nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts local
     displayName: 'Integration test'
+  - script: |
+      cd test
+      PATH=$HOME/.local/bin:$PATH source scripts/nas.sh
+    displayName: 'NAS test'
   - script: |
       cd test
       source scripts/model_compression.sh
diff --git a/test/scripts/nas.sh b/test/scripts/nas.sh
new file mode 100644
index 0000000000..ca4d3fd1cc
--- /dev/null
+++ b/test/scripts/nas.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+set -e
+CWD=${PWD}
+
+echo ""
+echo "===========================Testing: NAS==========================="
+EXAMPLE_DIR=${CWD}/../examples/nas
+
+echo "testing classic nas..."
+cd $EXAMPLE_DIR/classic_nas
+SEARCH_SPACE_JSON=nni_auto_gen_search_space.json
+if [ -f $SEARCH_SPACE_JSON ]; then
+    rm $SEARCH_SPACE_JSON
+fi
+nnictl ss_gen -t "python3 mnist.py"
+if [ ! -f $SEARCH_SPACE_JSON ]; then
+    echo "Search space file not found!"
+    exit 1
+fi
+
+echo "testing darts..."
+cd $EXAMPLE_DIR/darts
+python3 search.py --epochs 1 --channels 2 --layers 4
+python3 retrain.py --arc-checkpoint ./checkpoints/epoch_0.json --layers 4 --epochs 1
+
+echo "testing enas..."
+cd $EXAMPLE_DIR/enas
+python3 search.py --search-for macro --epochs 1
+python3 search.py --search-for micro --epochs 1
+
+echo "testing naive..."
+cd $EXAMPLE_DIR/naive
+python3 train.py
+
+echo "testing pdarts..."
+cd $EXAMPLE_DIR/pdarts
+python3 search.py --epochs 1 --channels 4 --nodes 2 --log-frequency 10 --add_layers 0 --add_layers 1 --dropped_ops 3 --dropped_ops 3
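
Usage sketch, assuming a local NNI checkout with the example dependencies
installed (commands are the same ones the pipeline step and test script in
this patch execute): the NAS smoke tests can be run locally with

    cd test
    source scripts/nas.sh

Note that --add_layers and --dropped_ops now use action='append' with no
argparse default, so each repeated flag appends one integer (e.g.
--add_layers 0 --add_layers 1 yields [0, 1], as in the pdarts invocation
above), and omitting the flags falls back to [0, 6, 12] and [3, 2, 1] via
the post-parse defaults in search.py.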