Skip to content
This repository has been archived by the owner on Sep 18, 2024. It is now read-only.

Integration tests for NAS #2231

Merged
merged 5 commits into from
Mar 27, 2020
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion examples/nas/classic_nas/mnist.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@ def get_params():
# Training settings
parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
parser.add_argument("--data_dir", type=str,
default='/tmp/tensorflow/mnist/input_data', help="data directory")
default='./data', help="data directory")
parser.add_argument('--batch_size', type=int, default=64, metavar='N',
help='input batch size for training (default: 64)')
parser.add_argument("--hidden_size", type=int, default=512, metavar='N',
Expand Down
3 changes: 2 additions & 1 deletion examples/nas/darts/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,13 @@
parser.add_argument("--batch-size", default=64, type=int)
parser.add_argument("--log-frequency", default=10, type=int)
parser.add_argument("--epochs", default=50, type=int)
parser.add_argument("--channels", default=16, type=int)
parser.add_argument("--unrolled", default=False, action="store_true")
args = parser.parse_args()

dataset_train, dataset_valid = datasets.get_dataset("cifar10")

model = CNN(32, 3, 16, 10, args.layers)
model = CNN(32, 3, args.channels, 10, args.layers)
criterion = nn.CrossEntropyLoss()

optim = torch.optim.SGD(model.parameters(), 0.025, momentum=0.9, weight_decay=3.0E-4)
Expand Down
5 changes: 3 additions & 2 deletions examples/nas/enas/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,16 +24,17 @@
parser.add_argument("--batch-size", default=128, type=int)
parser.add_argument("--log-frequency", default=10, type=int)
parser.add_argument("--search-for", choices=["macro", "micro"], default="macro")
parser.add_argument("--epochs", default=None, type=int, help="Number of epochs (default: macro 310, micro 150)")
args = parser.parse_args()

dataset_train, dataset_valid = datasets.get_dataset("cifar10")
if args.search_for == "macro":
model = GeneralNetwork()
num_epochs = 310
num_epochs = args.epochs or 310
mutator = None
elif args.search_for == "micro":
model = MicroNetwork(num_layers=6, out_channels=20, num_nodes=5, dropout_rate=0.1, use_aux_heads=True)
num_epochs = 150
num_epochs = args.epochs or 150
mutator = enas.EnasMutator(model, tanh_constant=1.1, cell_exit_extra_step=True)
else:
raise AssertionError
Expand Down
15 changes: 10 additions & 5 deletions examples/nas/pdarts/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,23 +25,28 @@

if __name__ == "__main__":
parser = ArgumentParser("pdarts")
parser.add_argument('--add_layers', action='append',
default=[0, 6, 12], help='add layers')
parser.add_argument('--dropped_ops', action='append',
default=[3, 2, 1], help='drop ops')
parser.add_argument('--add_layers', action='append', type=int,
help='add layers, default: [0, 6, 12]')
parser.add_argument('--dropped_ops', action='append', type=int,
help='drop ops, default: [3, 2, 1]')
parser.add_argument("--nodes", default=4, type=int)
parser.add_argument("--init_layers", default=5, type=int)
parser.add_argument("--channels", default=16, type=int)
parser.add_argument("--batch-size", default=64, type=int)
parser.add_argument("--log-frequency", default=1, type=int)
parser.add_argument("--epochs", default=50, type=int)
parser.add_argument("--unrolled", default=False, action="store_true")
args = parser.parse_args()
if args.add_layers is None:
args.add_layers = [0, 6, 12]
if args.dropped_ops is None:
args.dropped_ops = [3, 2, 1]

logger.info("loading data")
dataset_train, dataset_valid = datasets.get_dataset("cifar10")

def model_creator(layers):
model = CNN(32, 3, 16, 10, layers, n_nodes=args.nodes)
model = CNN(32, 3, args.channels, 10, layers, n_nodes=args.nodes)
criterion = nn.CrossEntropyLoss()

optim = torch.optim.SGD(model.parameters(), 0.025, momentum=0.9, weight_decay=3.0E-4)
Expand Down
4 changes: 4 additions & 0 deletions test/pipelines/pipelines-it-local.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,10 @@ jobs:
cd test
PATH=$HOME/.local/bin:$PATH python3 nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts local
displayName: 'Integration test'
- script: |
cd test
PATH=$HOME/.local/bin:$PATH source scripts/nas.sh
displayName: 'NAS test'
- script: |
cd test
source scripts/model_compression.sh
Expand Down
37 changes: 37 additions & 0 deletions test/scripts/nas.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
#!/bin/bash
# Smoke-test driver for the NAS examples (classic NAS, DARTS, ENAS, naive, P-DARTS).
# Runs each example for a minimal number of epochs / reduced model size so the
# whole suite finishes quickly in CI. Exits non-zero on the first failure.
set -e
CWD=${PWD}

echo ""
echo "===========================Testing: NAS==========================="
# Script is expected to be invoked from the test/ directory, so the examples
# live one level up.
EXAMPLE_DIR="${CWD}/../examples/nas"

echo "testing classic nas..."
cd "${EXAMPLE_DIR}/classic_nas"
SEARCH_SPACE_JSON=nni_auto_gen_search_space.json
# Remove any stale generated search space so the existence check below
# validates THIS run's output rather than a leftover from a previous run.
if [ -f "${SEARCH_SPACE_JSON}" ]; then
    rm "${SEARCH_SPACE_JSON}"
fi
nnictl ss_gen -t "python3 mnist.py"
if [ ! -f "${SEARCH_SPACE_JSON}" ]; then
    echo "Search space file not found!"
    exit 1
fi

echo "testing darts..."
cd "${EXAMPLE_DIR}/darts"
# Tiny model (2 channels, 4 layers), 1 epoch: exercises search + retrain paths.
python3 search.py --epochs 1 --channels 2 --layers 4
python3 retrain.py --arc-checkpoint ./checkpoints/epoch_0.json --layers 4 --epochs 1

echo "testing enas..."
cd "${EXAMPLE_DIR}/enas"
# Cover both search modes.
python3 search.py --search-for macro --epochs 1
python3 search.py --search-for micro --epochs 1

echo "testing naive..."
cd "${EXAMPLE_DIR}/naive"
python3 train.py

echo "testing pdarts..."
cd "${EXAMPLE_DIR}/pdarts"
# Small search config; repeated --add_layers / --dropped_ops flags accumulate
# into lists via argparse action='append'.
python3 search.py --epochs 1 --channels 4 --nodes 2 --log-frequency 10 --add_layers 0 --add_layers 1 --dropped_ops 3 --dropped_ops 3