This repository has been archived by the owner on Sep 18, 2024. It is now read-only.

Merge from dogfood branch to master #5

Merged: 3 commits merged from dogfood into master on Sep 7, 2018
128 changes: 101 additions & 27 deletions Makefile
@@ -1,10 +1,10 @@
BIN_PATH ?= /usr/bin
NODE_PATH ?= /usr/share
EXAMPLE_PATH ?= /usr/share/nni/examples

SRC_DIR := ${PWD}

.PHONY: build install uninstall
BIN_PATH ?= ${HOME}/.local/bin
INSTALL_PREFIX ?= ${HOME}/.local
PIP_MODE ?= --user
EXAMPLES_PATH ?= ${HOME}/nni/examples
WHOAMI := $(shell whoami)
.PHONY: build install uninstall dev-install
YARN := $(INSTALL_PREFIX)/yarn/bin/yarn

build:
### Building NNI Manager ###
@@ -21,50 +21,124 @@ build:


install:
mkdir -p $(NODE_PATH)/nni
mkdir -p $(EXAMPLE_PATH)
ifneq ('$(HOME)', '/root')
ifeq (${WHOAMI}, root)
### Sorry, sudo make install is not supported ###
exit 1
endif
endif

mkdir -p $(BIN_PATH)
mkdir -p $(INSTALL_PREFIX)/nni
mkdir -p $(EXAMPLES_PATH)

### Installing NNI Manager ###
cp -rT src/nni_manager/dist $(NODE_PATH)/nni/nni_manager
cp -rT src/nni_manager/node_modules $(NODE_PATH)/nni/nni_manager/node_modules
cp -rT src/nni_manager/dist $(INSTALL_PREFIX)/nni/nni_manager
cp -rT src/nni_manager/node_modules $(INSTALL_PREFIX)/nni/nni_manager/node_modules

### Installing Web UI ###
cp -rT src/webui/build $(NODE_PATH)/nni/webui
ln -sf $(NODE_PATH)/nni/nni_manager/node_modules/serve/bin/serve.js $(BIN_PATH)/serve
cp -rT src/webui/build $(INSTALL_PREFIX)/nni/webui
ln -sf $(INSTALL_PREFIX)/nni/nni_manager/node_modules/serve/bin/serve.js $(BIN_PATH)/serve

### Installing Python SDK dependencies ###
pip3 install -r src/sdk/pynni/requirements.txt
pip3 install $(PIP_MODE) -r src/sdk/pynni/requirements.txt
### Installing Python SDK ###
cd src/sdk/pynni && python3 setup.py install
cd src/sdk/pynni && python3 setup.py install $(PIP_MODE)

### Installing nnictl ###
cd tools && python3 setup.py install
cd tools && python3 setup.py install $(PIP_MODE)

echo '#!/bin/sh' > $(BIN_PATH)/nnimanager
echo 'cd $(NODE_PATH)/nni/nni_manager && node main.js $$@' >> $(BIN_PATH)/nnimanager
echo 'cd $(INSTALL_PREFIX)/nni/nni_manager && node main.js $$@' >> $(BIN_PATH)/nnimanager
chmod +x $(BIN_PATH)/nnimanager

install -m 755 tools/nnictl $(BIN_PATH)/nnictl
echo '#!/bin/sh' > $(BIN_PATH)/nnictl
echo 'NNI_MANAGER=$(BIN_PATH)/nnimanager WEB_UI_FOLDER=$(INSTALL_PREFIX)/nni/webui python3 -m nnicmd.nnictl $$@' >> $(BIN_PATH)/nnictl
chmod +x $(BIN_PATH)/nnictl

### Installing examples ###
cp -rT examples $(EXAMPLE_PATH)
cp -rT examples $(EXAMPLES_PATH)


pip-install:
ifneq ('$(HOME)', '/root')
ifeq (${WHOAMI}, root)
### Sorry, sudo make install is not supported ###
exit 1
endif
endif

### Prepare Node.js ###
wget https://nodejs.org/dist/v10.9.0/node-v10.9.0-linux-x64.tar.xz
tar xf node-v10.9.0-linux-x64.tar.xz
cp -rT node-v10.9.0-linux-x64 $(INSTALL_PREFIX)/node

### Prepare Yarn 1.9.4 ###
wget https://github.com/yarnpkg/yarn/releases/download/v1.9.4/yarn-v1.9.4.tar.gz
tar xf yarn-v1.9.4.tar.gz
cp -rT yarn-v1.9.4 $(INSTALL_PREFIX)/yarn

### Building NNI Manager ###
cd src/nni_manager && $(YARN) && $(YARN) build

### Building Web UI ###
cd src/webui && $(YARN) && $(YARN) build

mkdir -p $(BIN_PATH)
mkdir -p $(INSTALL_PREFIX)/nni

### Installing NNI Manager ###
cp -rT src/nni_manager/dist $(INSTALL_PREFIX)/nni/nni_manager
cp -rT src/nni_manager/node_modules $(INSTALL_PREFIX)/nni/nni_manager/node_modules
echo '#!/bin/sh' > $(BIN_PATH)/nnimanager
echo 'cd $(INSTALL_PREFIX)/nni/nni_manager && node main.js $$@' >> $(BIN_PATH)/nnimanager
chmod +x $(BIN_PATH)/nnimanager

### Installing Web UI ###
cp -rT src/webui/build $(INSTALL_PREFIX)/nni/webui
ln -sf $(INSTALL_PREFIX)/nni/nni_manager/node_modules/serve/bin/serve.js $(BIN_PATH)/serve

### Installing examples ###
cp -rT examples $(EXAMPLES_PATH)


dev-install:
mkdir -p $(BIN_PATH)
mkdir -p $(INSTALL_PREFIX)/nni

### Installing NNI Manager ###
ln -sf $(INSTALL_PREFIX)/nni/nni_manager $(PWD)/src/nni_manager/dist
ln -sf $(INSTALL_PREFIX)/nni/nni_manager/node_modules $(PWD)/src/nni_manager/node_modules

### Installing Web UI ###
ln -sf $(INSTALL_PREFIX)/nni/webui $(PWD)/src/webui
ln -sf $(INSTALL_PREFIX)/nni/nni_manager/node_modules/serve/bin/serve.js $(BIN_PATH)/serve

### Installing Python SDK dependencies ###
pip3 install --user -r src/sdk/pynni/requirements.txt
pip3 install $(PIP_MODE) -r src/sdk/pynni/requirements.txt
### Installing Python SDK ###
cd src/sdk/pynni && pip3 install --user -e .
cd src/sdk/pynni && pip3 install $(PIP_MODE) -e .

### Installing nnictl ###
cd tools && pip3 install --user -e .
cd tools && pip3 install $(PIP_MODE) -e .

echo '#!/bin/sh' > $(BIN_PATH)/nnimanager
echo 'cd $(INSTALL_PREFIX)/nni/nni_manager && node main.js $$@' >> $(BIN_PATH)/nnimanager
chmod +x $(BIN_PATH)/nnimanager

echo '#!/bin/sh' > $(BIN_PATH)/nnictl
echo 'NNI_MANAGER=$(BIN_PATH)/nnimanager python3 -m nnicmd.nnictl $$@' >> $(BIN_PATH)/nnictl
chmod +x $(BIN_PATH)/nnictl

### Installing examples ###
ln -sf $(EXAMPLES_PATH) $(PWD)/examples


uninstall:
-rm -r $(EXAMPLE_PATH)
-rm -r $(NODE_PATH)/nni
-pip3 uninstall -y nnictl
-pip3 uninstall -y nni
-rm $(BIN_PATH)/nnictl
-rm $(BIN_PATH)/nnimanager
-pip3 uninstall -y nnictl
-rm -r $(INSTALL_PREFIX)/nni
-rm -r $(EXAMPLES_PATH)
-rm $(BIN_PATH)/serve
-rm $(BIN_PATH)/nnimanager
-rm $(BIN_PATH)/nnictl
58 changes: 45 additions & 13 deletions README.md
@@ -1,21 +1,53 @@
# Introduction
Neural Network Intelligence(NNI) is a light package for supporting hyper-parameter tuning or neural architecture search.
It could easily run in different environments, such as: local/remote machine/cloud.
And it offers a new annotation language for user to conveniently design search space.
Also user could write code using any language or any machine learning framework.

# Getting Started
TODO: Guide users through getting your code up and running on their own system. In this section you can talk about:
1. Installation process
2. Software dependencies
3. Latest releases
4. API references
NNI (Neural Network Intelligence) is a toolkit to help users run automated machine learning experiments.
The tool dispatches and runs trial jobs generated by tuning algorithms to search for the best neural architecture and/or hyper-parameters in different environments (e.g. local machine, remote servers, cloud).

```
AutoML experiment                                Training Services
┌────────┐        ┌──────────────────────────┐   ┌────────────────┐
│ nnictl │ ─────> │  nni_manager             │   │ Local Machine  │
└────────┘        │    sdk/tuner             │   └────────────────┘
                  │      hyperopt_tuner      │
                  │      evolution_tuner     │   trial jobs   ┌────────────────┐
                  │      ...                 │  ───────────>  │ Remote Servers │
                  ├──────────────────────────┤                └────────────────┘
                  │  trial job source code   │
                  │  sdk/annotation          │                ┌────────────────┐
                  ├──────────────────────────┤                │ Yarn,K8s,      │
                  │  nni_board               │                │  ...           │
                  └──────────────────────────┘                └────────────────┘
```
## **Who should consider using NNI**
* You want to try different AutoML algorithms for your training code (model) locally
* You want to run AutoML trial jobs in different environments to speed up the search (e.g. remote servers, cloud)
* As a researcher or data scientist, you want to implement your own AutoML algorithms and compare them with other algorithms
* As an ML platform owner, you want to support AutoML in your platform

# Getting Started with NNI

## **Installation**
Install through Python pip:
* Requirements: Python >= 3.5
```
pip3 install -v --user git+https://github.com/Microsoft/NeuralNetworkIntelligence.git
source ~/.bashrc
```


## **Quick start: run an experiment locally**
Requirements:
* NNI installed on your machine.

Run the following command to create an experiment for the [mnist] example:
```bash
nnictl create --config ~/nni/examples/trials/mnist-annotation/config.yaml
```
This command will start the experiment and the WebUI. The WebUI endpoint will be shown in the output of this command (for example, `http://localhost:8080`). Open this URL in your browser. You can analyze your experiment through the WebUI, or open a trial's TensorBoard. Please refer to [here](docs/GetStarted.md) for the GetStarted tutorial.

# Build and Test
TODO: Describe and show how to build your code and run the tests.

# Contribute
TODO: Explain how other users and developers can contribute to make your code better.
NNI is designed as an automatic searching framework with high extensibility and a very clear modular design. Contributions of more tuner/assessor algorithms, training services, and SDKs are very welcome. Please refer to [here](docs/ToContribute.md) for how to contribute.

# Privacy Statement
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy practices of this software.
88 changes: 88 additions & 0 deletions docs/CustomizedTuner.md
@@ -0,0 +1,88 @@
# Customized Tuner for Experts

*The Tuner receives results from Trials as a metric to evaluate the performance of a specific parameter/architecture configuration, and sends the next hyper-parameter or architecture configuration to a Trial.*

So, if a user wants to implement a customized Tuner, he or she only needs to:

1) Inherit from the base Tuner class
2) Implement the receive_trial_result and generate_parameters functions
3) Configure the customized Tuner in the experiment YAML config file

Here is an example:

**1) Inherit from the base Tuner class**
```python
from nni.tuner import Tuner

class CustomizedTuner(Tuner):
    def __init__(self, ...):
        ...
```

**2) Implement the receive_trial_result and generate_parameters functions**
```python
from nni.tuner import Tuner

class CustomizedTuner(Tuner):
    def __init__(self, ...):
        ...

    def receive_trial_result(self, parameter_id, parameters, reward):
        '''
        Record an observation of the objective function.
        parameter_id: int
        parameters: object created by 'generate_parameters()'
        reward: object reported by the trial
        '''
        # your code goes here.
        ...

    def generate_parameters(self, parameter_id):
        '''
        Returns a set of trial (hyper-)parameters, as a serializable object.
        parameter_id: int
        '''
        # your code goes here.
        return your_parameters
    ...
```
```receive_trial_result``` receives ```parameter_id```, ```parameters```, and ```reward``` as input. The ```reward``` object the Tuner receives is exactly the same reward that the Trial sends.

The ```your_parameters``` object returned from the ```generate_parameters``` function will be packaged as a JSON object by the NNI SDK. The NNI SDK will unpack the JSON object, so the Trial receives the exact same ```your_parameters``` from the Tuner.
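
Because the parameters travel as JSON, a quick sanity check that an object returned from ```generate_parameters``` will survive the round trip (a minimal sketch using only the Python standard library; the SDK's actual serialization path may differ in detail) is:
```python
import json

params = {"dropout": 0.3, "learning_rate": 0.4}

# If this round trip succeeds and preserves the values, the object is
# JSON-serializable and the Trial will receive it unchanged.
assert json.loads(json.dumps(params)) == params
```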

For example, if you implement ```generate_parameters``` like this:
```python
def generate_parameters(self, parameter_id):
    '''
    Returns a set of trial (hyper-)parameters, as a serializable object.
    parameter_id: int
    '''
    # your code goes here.
    return {"dropout": 0.3, "learning_rate": 0.4}
```
This means your Tuner will always generate the parameters ```{"dropout": 0.3, "learning_rate": 0.4}```. The Trial will then receive ```{"dropout": 0.3, "learning_rate": 0.4}``` by calling the ```nni.get_parameters()``` API from the NNI SDK. After the Trial finishes training, it will send the result back to the Tuner by calling ```nni.report_final_result(0.93)```. Then ```receive_trial_result``` will be called with these parameters:
```
parameter_id = 82347
parameters = {"dropout": 0.3, "learning_rate": 0.4}
reward = 0.93
```
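
Seen from the Trial side, the same exchange is just two SDK calls. Below is a minimal sketch, assuming the ```nni.get_parameters()``` and ```nni.report_final_result()``` calls named above; a real trial would train an actual model in between:
```python
# Trial-side sketch of the exchange above; the placeholder value stands in
# for real training and evaluation.
import nni

params = nni.get_parameters()        # e.g. {"dropout": 0.3, "learning_rate": 0.4}
accuracy = 0.93                      # placeholder for a real evaluation metric
nni.report_final_result(accuracy)    # delivered to the Tuner as `reward`
```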

**3) Configure your customized tuner in the experiment YAML config file**

NNI needs to locate and instantiate your customized tuner class, so you need to specify its location and pass literal values as parameters to its \_\_init__ constructor.
```yaml
tuner:
  codeDir: /home/abc/mytuner
  classFileName: my_customized_tuner.py
  className: CustomizedTuner
  # Any parameter you need to pass to your tuner class's __init__ constructor
  # can be specified in this optional classArgs field, for example
  classArgs:
    arg1: value1
```
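
To illustrate how ```classArgs``` maps onto your class, a constructor matching the config above might look like the sketch below (```arg1``` is just the placeholder name from the example; the ```receive_trial_result``` and ```generate_parameters``` methods from step 2 are omitted):
```python
from nni.tuner import Tuner

class CustomizedTuner(Tuner):
    def __init__(self, arg1):
        # `arg1` receives the literal value `value1` from the classArgs field.
        self.arg1 = arg1
```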

For more detailed examples, see:
> * [evolution-tuner](../src/sdk/pynni/nni/evolution_tuner)
> * [hyperopt-tuner](../src/sdk/pynni/nni/hyperopt_tuner)
> * [evolution-based-customized-tuner](../examples/tuners/ga_customer_tuner)
46 changes: 29 additions & 17 deletions docs/EnableAssessor.md
@@ -18,17 +18,19 @@ trainingServicePlatform: local
# choice: true, false
useAnnotation: true
tuner:
tunerName: TPE
optimizationMode: Maximize
builtinTunerName: TPE
classArgs:
optimize_mode: maximize
assessor:
assessorName: Medianstop
optimizationMode: Maximize
builtinAssessorName: Medianstop
classArgs:
optimize_mode: maximize
trial:
trialCommand: python mnist.py
trialCodeDir: /usr/share/nni/examples/trials/mnist-annotation
trialGpuNum: 0
command: python mnist.py
codeDir: /usr/share/nni/examples/trials/mnist-annotation
gpuNum: 0
```
For our built-in assessors, you need to fill two fields: `assessorName` which chooses NNI provided assessors (refer to [here]() for built-in assessors), `optimizationMode` which includes Maximize and Minimize (you want to maximize or minimize your trial result).
For our built-in assessors, you need to fill in two fields: `builtinAssessorName`, which selects an NNI-provided assessor (refer to [here]() for built-in assessors), and `optimize_mode`, which can be maximize or minimize (whether you want to maximize or minimize your trial result).

## Using a customized Assessor
You can also write your own assessor following the guidance [here](). For example, suppose you wrote an assessor for `examples/trials/mnist-annotation`. You should prepare the yaml configuration below:
@@ -46,15 +48,25 @@ trainingServicePlatform: local
# choice: true, false
useAnnotation: true
tuner:
tunerName: TPE
optimizationMode: Maximize
# Possible values: TPE, Random, Anneal, Evolution
builtinTunerName: TPE
classArgs:
optimize_mode: maximize
assessor:
assessorCommand: your_command
assessorCodeDir: /path/of/your/asessor
assessorGpuNum: 0
# Your assessor code directory
codeDir:
# Name of the file which contains your assessor class
classFileName:
# Your assessor class name, must be a subclass of nni.Assessor
className:
# Parameter names and literal values you want to pass to
# the __init__ constructor of your assessor class
classArgs:
arg1: value1
gpuNum: 0
trial:
trialCommand: python mnist.py
trialCodeDir: /usr/share/nni/examples/trials/mnist-annotation
trialGpuNum: 0
command: python mnist.py
codeDir: /usr/share/nni/examples/trials/mnist-annotation
gpuNum: 0
```
You only need to fill three field: `assessorCommand`, `assessorCodeDir` and `assessorGpuNum`.
You need to fill in `codeDir`, `classFileName`, and `className`, and pass parameters to the \_\_init__ constructor through the `classArgs` field if the \_\_init__ constructor of your assessor class has required parameters.
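
For reference, a customized assessor might be sketched as below. This assumes an `Assessor` base class with an `assess_trial` hook and an `AssessResult` return value, similar to the built-in Medianstop assessor; the exact import path and method signature may differ in your NNI version, so check the built-in assessors under `src/sdk/pynni/nni` before copying this.
```python
# Sketch of a customized assessor; the base-class interface (assess_trial,
# AssessResult) is assumed from the built-in assessors and may differ.
from nni.assessor import Assessor, AssessResult

class CustomizedAssessor(Assessor):
    def __init__(self, arg1=None):
        # `arg1` would be supplied through the classArgs field above.
        self.arg1 = arg1

    def assess_trial(self, trial_job_id, trial_history):
        # Stop obviously bad trials early: if the latest intermediate result
        # falls below a fixed threshold, end the trial; otherwise let it run.
        if trial_history and trial_history[-1] < 0.1:
            return AssessResult.Bad
        return AssessResult.Good
```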