diff --git a/.travis.yml b/.travis.yml index 7cc4d8c0ba..a9f4a1d734 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,9 +4,9 @@ language: python python: - "3.6" before_install: - - wget https://nodejs.org/dist/v10.9.0/node-v10.9.0-linux-x64.tar.xz - - tar xf node-v10.9.0-linux-x64.tar.xz - - sudo mv node-v10.9.0-linux-x64 /usr/local/node + - wget https://nodejs.org/dist/v10.10.0/node-v10.10.0-linux-x64.tar.xz + - tar xf node-v10.10.0-linux-x64.tar.xz + - sudo mv node-v10.10.0-linux-x64 /usr/local/node - export PATH=/usr/local/node/bin:$PATH - sudo sh -c 'PATH=/usr/local/node/bin:$PATH yarn global add serve' install: diff --git a/Makefile b/Makefile index 6e8513e06c..38bdd2c6d4 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,14 @@ # Setting variables SHELL := /bin/bash +PIP_INSTALL := python3 -m pip install +PIP_UNINSTALL := python3 -m pip uninstall + +## Colorful output +_INFO := $(shell echo -e '\e[1;36m') +_WARNING := $(shell echo -e '\e[1;33m') +_END := $(shell echo -e '\e[0m') + ## Install directories ifeq ($(shell id -u), 0) # is root @@ -20,7 +28,7 @@ else # is normal user endif ## Dependency information -NODE_VERSION ?= v10.9.0 +NODE_VERSION ?= v10.10.0 NODE_TARBALL ?= node-$(NODE_VERSION)-linux-x64.tar.xz NODE_PATH ?= $(INSTALL_PREFIX)/nni/node @@ -35,41 +43,37 @@ SERVE_PATH ?= $(INSTALL_PREFIX)/nni/serve ## Check if dependencies have been installed globally ifeq (, $(shell command -v node 2>/dev/null)) - $(info Node.js not found) + $(info $(_INFO) Node.js not found $(_END)) _MISS_DEPS := 1 # node not found else _VER := $(shell node --version) _NEWER := $(shell echo -e "$(NODE_VERSION)\n$(_VER)" | sort -Vr | head -n 1) ifneq ($(_VER), $(_NEWER)) - $(info Node.js version not match) + $(info $(_INFO) Node.js version not match $(_END)) _MISS_DEPS := 1 # node outdated endif endif ifeq (, $(shell command -v yarnpkg 2>/dev/null)) - $(info Yarn not found) + $(info $(_INFO) Yarn not found $(_END)) _MISS_DEPS := 1 # yarn not found endif ifeq (, $(shell command -v 
serve 2>/dev/null)) - $(info Serve not found) + $(info $(_INFO) Serve not found $(_END)) _MISS_DEPS := 1 # serve not found endif ifdef _MISS_DEPS - $(info Missing dependencies, use local toolchain) + $(info $(_INFO) Missing dependencies, use local toolchain $(_END)) NODE := $(NODE_PATH)/bin/node YARN := PATH=$${PATH}:$(NODE_PATH)/bin $(YARN_PATH)/bin/yarn SERVE := $(SERVE_PATH)/serve else - $(info All dependencies found, use global toolchain) + $(info $(_INFO) All dependencies found, use global toolchain $(_END)) NODE := node YARN := yarnpkg SERVE := serve endif -## Colorful output -_INFO := $(shell echo -e '\e[1;36m') -_WARNING := $(shell echo -e '\e[1;33m') -_END := $(shell echo -e '\e[0m') # Setting variables end @@ -90,7 +94,6 @@ build: #$(_INFO) Building nnictl $(_END) cd tools && python3 setup.py build - # Standard installation target # Must be invoked after building .PHONY: install @@ -109,17 +112,15 @@ remote-machine-install: cd src/sdk/pynni && python3 setup.py install $(PIP_MODE) -# All-in-one target +# All-in-one target for non-expert users # Installs NNI as well as its dependencies, and update bashrc to set PATH .PHONY: easy-install easy-install: check-perm easy-install: install-dependencies easy-install: build -easy-install: install-python-modules -easy-install: install-node-modules -easy-install: install-scripts -easy-install: install-examples -easy-install: update-bashrc +easy-install: install +easy-install: update-bash-config + easy-install: #$(_INFO) Complete! 
#(_END) @@ -132,6 +133,7 @@ pip-install: build pip-install: install-node-modules pip-install: install-scripts pip-install: install-examples +pip-install: update-bash-config # Target for NNI developers @@ -146,8 +148,8 @@ dev-install: .PHONY: uninstall uninstall: - -pip3 uninstall -y nni - -pip3 uninstall -y nnictl + -$(PIP_UNINSTALL) -y nni + -$(PIP_UNINSTALL) -y nnictl -rm -rf $(INSTALL_PREFIX)/nni -rm -f $(BIN_PATH)/nnimanager -rm -f $(BIN_PATH)/nnictl @@ -206,7 +208,6 @@ install-python-modules: #$(_INFO) Installing nnictl $(_END) cd tools && python3 setup.py install $(PIP_MODE) - .PHONY: install-node-modules install-node-modules: mkdir -p $(INSTALL_PREFIX)/nni @@ -222,11 +223,11 @@ install-node-modules: .PHONY: install-dev-modules install-dev-modules: #$(_INFO) Installing Python SDK $(_END) - cd src/sdk/pynni && pip3 install $(PIP_MODE) -e . + cd src/sdk/pynni && $(PIP_INSTALL) $(PIP_MODE) -e . #$(_INFO) Installing nnictl $(_END) - cd tools && pip3 install $(PIP_MODE) -e . - + cd tools && $(PIP_INSTALL) $(PIP_MODE) -e . 
+ mkdir -p $(INSTALL_PREFIX)/nni #$(_INFO) Installing NNI Manager $(_END) @@ -254,9 +255,6 @@ install-scripts: chmod +x $(BIN_PATH)/nnictl install -Dm644 tools/bash-completion $(BASH_COMP_SCRIPT) -ifndef _ROOT - echo '[[ -f $(BASH_COMP_SCRIPT) ]] && source $(BASH_COMP_SCRIPT)' >> ~/.bash_completion -endif .PHONY: install-examples @@ -265,16 +263,20 @@ install-examples: [ $(EXAMPLES_PATH) = ${PWD}/examples ] || cp -rT examples $(EXAMPLES_PATH) -.PHONY: update-bashrc -ifeq (, $(shell echo $$PATH | tr ':' '\n' | grep -x '$(BIN_PATH)')) # $(BIN_PATH) not in PATH - ifdef _ROOT - $(error $(BIN_PATH) not in PATH as root, which should never happen) - endif -update-bashrc: +.PHONY: update-bash-config +ifndef _ROOT +update-bash-config: + #$(_INFO) Updating bash configurations $(_END) + ifeq (, $(shell echo $$PATH | tr ':' '\n' | grep -x '$(BIN_PATH)')) # $(BIN_PATH) not in PATH #$(_WARNING) NOTE: adding $(BIN_PATH) to PATH in bashrc $(_END) echo 'export PATH="$$PATH:$(BIN_PATH)"' >> ~/.bashrc -else # $(BIN_PATH) already in PATH -update-bashrc: ; + endif + ifeq (, $(shell (source ~/.bash_completion ; command -v _nnictl) 2>/dev/null)) # completion not installed + #$(_WARNING) NOTE: adding $(BASH_COMP_SCRIPT) to ~/.bash_completion $(_END) + echo '[[ -f $(BASH_COMP_SCRIPT) ]] && source $(BASH_COMP_SCRIPT)' >> ~/.bash_completion + endif +else +update-bash-config: ; endif @@ -297,7 +299,7 @@ ifdef _ROOT $(error You should not develop NNI as root) endif ifdef _MISS_DEPS - $(error Please install Node.js, Yarn, and Serve to develop NNI) +# $(error Please install Node.js, Yarn, and Serve to develop NNI) endif #$(_INFO) Pass! $(_END) diff --git a/README.Makefile.md b/README.Makefile.md new file mode 100644 index 0000000000..3a815af9d9 --- /dev/null +++ b/README.Makefile.md @@ -0,0 +1,91 @@ +# Makefile and Installation Setup + +NNI uses GNU make for building and installing. 
+
+The `Makefile` offers standard targets `build`, `install`, and `uninstall`, as well as alternative installation targets for different setups:
+
+* `easy-install`: target for non-expert users, which handles everything automatically;
+* `pip-install`: target in favor of `setup.py`;
+* `dev-install`: target for NNI contributors, which installs NNI as symlinks instead of copying files;
+* `remote-machine-install`: target that only installs core Python library for remote machine workers.
+
+The targets will be detailed later.
+
+## Dependencies
+
+NNI requires at least Node.js, Yarn, and setuptools to build, while PIP and TypeScript are also recommended.
+
+NNI requires Node.js, serve, and all dependency libraries to run.
+Required Node.js libraries (including TypeScript) can be installed by Yarn, and required Python libraries can be installed by setuptools or PIP.
+
+For NNI *users*, `make install-dependencies` can be used to install Node.js, Yarn, and serve.
+This will install Node.js and serve to NNI's installation directory, and install Yarn to `/tmp/nni-yarn`.
+This target requires wget to work.
+
+For NNI *developers*, it is recommended to install Node.js, Yarn, and serve manually.
+See their official sites for installation guide.
+
+## Building NNI
+
+Simply run `make` when dependencies are ready.
+
+## Installation
+
+### Directory Hierarchy
+
+The main parts of the NNI project consist of two Node.js modules (`nni_manager`, `webui`) and two Python packages (`nni`, `nnictl`).
+
+By default the Node.js modules are installed to `/usr/share/nni` for all users or installed to `~/.local/nni` for the current user.
+
+The Python packages are installed with setuptools and therefore the location depends on Python configuration.
+When installing as a non-privileged user and virtualenv is not detected, `--user` flag will be used.
+
+In addition, `nnictl` offers a bash completion script, which will be installed to `/usr/share/bash-completion/completions` or `~/.bash_completion.d`. 
+ +In some configuration, NNI will also install Node.js and the serve module to `/usr/share/nni`. + +All directories mentioned above are configurable. See next section for details. + +### Configuration + +The `Makefile` uses environment variables to override default settings. + +Available variables are listed below: + +| Name | Description | Default for normal user | Default for root | +|--------------------|---------------------------------------------------------|-----------------------------------|-------------------------------------------------| +| `BIN_PATH` | Path for executables | `~/.local/bin` | `/usr/bin` | +| `INSTALL_PREFIX` | Path for Node.js modules (a suffix `nni` will be added) | `~/.local` | `/usr/share` | +| `EXAMPLES_PATH` | Path for NNI examples | `~/nni/examples` | `$INSTALL_PREFIX/nni/examples` | +| `BASH_COMP_SCRIPT` | Path of bash completion script | `~/.bash_completion.d/nnictl` | `/usr/share/bash-completion/completions/nnictl` | +| `PIP_MODE` | Arguments for `python3 setup.py install` | `--user` if `VIRTUAL_ENV` not set | (empty) | +| `NODE_PATH` | Path to install Node.js runtime | `$INSTALL_PREFIX/nni/node` | `$INSTALL_PREFIX/nni/node` | +| `SERVE_PATH` | Path to install serve package | `$INSTALL_PREFIX/nni/serve` | `$INSTALL_PREFIX/nni/serve` | +| `YARN_PATH` | Path to install Yarn | `/tmp/nni-yarn` | `/tmp/nni-yarn` | +| `NODE` | Node.js command | see source file | see source file | +| `SERVE` | serve command | see source file | see source file | +| `YARN` | Yarn command | see source file | see source file | + +Note that these variables will influence installation destination as well as generated `nnictl` and `nnimanager` scripts. +If the path to copy files is different from where they will run (e.g. when creating a distro package), please generate `nnictl` and `nnimanager` manually. 
+ +### Targets + +The workflow of each installation targets is listed below: + +| Target | Workflow | +|--------------------------|----------------------------------------------------------------------| +| `install` | Install Python packages, Node.js modules, NNI scripts, and examples | +| `easy-install` | Install dependencies, build, install NNI, and edit `~/.bashrc` | +| `pip-install` | Install dependencies, build, install NNI excluding Python packages | +| `dev-install` | Install Python and Node.js modules as symlinks, then install scripts | +| `remote-machine-install` | Install `nni` Python package | + +## TODO + +* `clean` target +* `test` target +* `lint` target +* Exclude tuners and their dependencies from `remote-machine-install` +* Test cases for each target +* Review variables diff --git a/README.md b/README.md index 51678149a1..d99f2c8217 100644 --- a/README.md +++ b/README.md @@ -26,35 +26,45 @@ The tool dispatches and runs trial jobs that generated by tuning algorithms to s * As a researcher and data scientist, you want to implement your own AutoML algorithms and compare with other algorithms * As a ML platform owner, you want to support AutoML in your platform -# Getting Started with NNI +# Get Started with NNI ## **Installation** -Install through python pip. (the current version only supports linux, nni on ubuntu 16.04 or newer has been well tested) -* requirements: python >= 3.5, git, wget +pip Installation Prerequisites +* linux (ubuntu 16.04 or newer version has been well tested) +* python >= 3.5 +* git, wget + ``` -pip3 install -v --user git+https://github.com/Microsoft/nni.git@v0.1 +python3 -m pip install -v --user git+https://github.com/Microsoft/nni.git@v0.2 source ~/.bashrc ``` +## **Quick start: run your first experiment at local** +It only requires 3 steps to start an experiment on NNI: +![](./docs/3_steps.jpg) + + +NNI provides a set of examples in the package to get you familiar with the above process. 
In the following example [/examples/trials/mnist], we had already set up the configuration and updated the training codes for you. You can directly run the following command to start an experiment. -## **Quick start: run an experiment at local** -Requirements: -* NNI installed on your local machine -* tensorflow installed +**NOTE**: The following example is an experiment built on TensorFlow, make sure you have **TensorFlow installed** before running the following command. -Run the following command to create an experiment for [mnist] +Try it out: ```bash - nnictl create --config ~/nni/examples/trials/mnist-annotation/config.yml + nnictl create --config ~/nni/examples/trials/mnist/config.yml ``` -This command will start an experiment and a WebUI. The WebUI endpoint will be shown in the output of this command (for example, `http://localhost:8080`). Open this URL in your browser. You can analyze your experiment through WebUI, or browse trials' tensorboard. + +In the command output, find out the **Web UI url** and open it in your browser. You can analyze your experiment through WebUI, or browse trials' tensorboard. + +To learn more about how this example was constructed and how to analyze the experiment results in NNI Web UI, please refer to [How to write a trial run on NNI (MNIST as an example)?](docs/WriteYourTrial.md) ## **Please refer to [Get Started Tutorial](docs/GetStarted.md) for more detailed information.** ## More tutorials -* [How to write a trial running on NNI (Mnist as an example)?](docs/WriteYourTrial.md) + * [Tutorial of NNI python annotation.](tools/nni_annotation/README.md) * [Tuners supported by NNI.](src/sdk/pynni/nni/README.md) * [How to enable early stop (i.e. 
assessor) in an experiment?](docs/EnableAssessor.md) * [How to run an experiment on multiple machines?](docs/RemoteMachineMode.md) +* [How to run an experiment on OpenPAI?](docs/PAIMode.md) * [How to write a customized tuner?](docs/CustomizedTuner.md) * [How to write a customized assessor?](examples/assessors/README.md) * [How to resume an experiment?](docs/NNICTLDOC.md) diff --git a/deployment/Dockerfile b/deployment/Dockerfile index 9527b87225..d0ddf99587 100644 --- a/deployment/Dockerfile +++ b/deployment/Dockerfile @@ -10,4 +10,6 @@ RUN pip3 --no-cache-dir install tensorflow-gpu==1.10.0 # #Keras 2.1.6 # -RUN pip3 --no-cache-dir install Keras==2.1.6 \ No newline at end of file +RUN pip3 --no-cache-dir install Keras==2.1.6 + +WORKDIR /root \ No newline at end of file diff --git a/deployment/Dockerfile.build.base b/deployment/Dockerfile.build.base index 8fd7bf69aa..56315a3b5f 100644 --- a/deployment/Dockerfile.build.base +++ b/deployment/Dockerfile.build.base @@ -22,27 +22,62 @@ FROM nvidia/cuda:9.0-cudnn7-devel-ubuntu16.04 LABEL maintainer='Microsoft NNI Team' -RUN apt-get update && apt-get install -y --no-install-recommends \ - sudo apt-utils git curl vim unzip openssh-client wget \ - build-essential cmake \ - libopenblas-dev +ENV HADOOP_VERSION=2.7.2 +LABEL HADOOP_VERSION=2.7.2 -# -# Python 3.5 -# -RUN apt-get install -y --no-install-recommends python3.5 python3.5-dev python3-pip python3-tk && \ - pip3 install --no-cache-dir --upgrade pip setuptools && \ - echo "alias python='python3'" >> /root/.bash_aliases && \ - echo "alias pip='pip3'" >> /root/.bash_aliases +RUN DEBIAN_FRONTEND=noninteractive && \ + apt-get -y update && \ + apt-get -y install sudo \ + apt-utils \ + git \ + curl \ + vim \ + unzip \ + wget \ + build-essential \ + cmake \ + libopenblas-dev \ + automake \ + openjdk-8-jdk \ + openssh-client \ + openssh-server \ + lsof \ + python3.5 \ + python3-dev \ + python3-pip \ + python3-tk \ + libcupti-dev && \ + apt-get clean && \ + rm -rf 
/var/lib/apt/lists/* # numpy 1.14.3 scipy 1.1.0 RUN pip3 --no-cache-dir install \ numpy==1.14.3 scipy==1.1.0 # -#Install node 10.9.0, yarn 1.9.4, NNI v0.1 +#Install hadoop +# +RUN wget -qO- http://archive.apache.org/dist/hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz | \ + tar xz -C /usr/local && \ + mv /usr/local/hadoop-${HADOOP_VERSION} /usr/local/hadoop + +# +#Install NNI # -RUN git clone -b v0.1 https://github.com/Microsoft/nni.git -RUN cd nni && sh install.sh -RUN echo 'PATH=~/.local/node/bin:~/.local/yarn/bin:~/.local/bin:$PATH' >> ~/.bashrc -RUN cd .. && rm -rf nni +RUN pip3 install -v --user git+https://github.com/Microsoft/nni.git@v0.2 + +ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64 \ + HADOOP_INSTALL=/usr/local/hadoop \ + NVIDIA_VISIBLE_DEVICES=all + +ENV HADOOP_PREFIX=${HADOOP_INSTALL} \ + HADOOP_BIN_DIR=${HADOOP_INSTALL}/bin \ + HADOOP_SBIN_DIR=${HADOOP_INSTALL}/sbin \ + HADOOP_HDFS_HOME=${HADOOP_INSTALL} \ + HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_INSTALL}/lib/native \ + HADOOP_OPTS="-Djava.library.path=${HADOOP_INSTALL}/lib/native" + +ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/root/.local/bin:/usr/bin:/sbin:/bin:${HADOOP_BIN_DIR}:${HADOOP_SBIN_DIR} \ + LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64:/usr/local/cuda/lib64:/usr/local/cuda/targets/x86_64-linux/lib/stubs:${JAVA_HOME}/jre/lib/amd64/server + +WORKDIR /root diff --git a/docs/3_steps.jpg b/docs/3_steps.jpg new file mode 100644 index 0000000000..e5e18540ea Binary files /dev/null and b/docs/3_steps.jpg differ diff --git a/docs/GetStarted.md b/docs/GetStarted.md index 5239c1aa5e..42366eb1e8 100644 --- a/docs/GetStarted.md +++ b/docs/GetStarted.md @@ -1,28 +1,29 @@ -**Getting Started with NNI** +**Get Started with NNI** === ## **Installation** * __Dependencies__ python >= 3.5 + git + wget - python pip should also be correctly installed. You could use "which pip" or "pip -V" to check in Linux. 
+ python pip should also be correctly installed. You could use "python3 -m pip -V" to check in Linux. - * Note: For now, we don't support virtual environment. + * Note: we don't support virtual environment in current releases. * __Install NNI through pip__ - pip3 install -v --user git+https://github.com/Microsoft/nni.git@v0.1 + python3 -m pip install -v --user git+https://github.com/Microsoft/nni.git@v0.2 source ~/.bashrc * __Install NNI through source code__ - git clone -b v0.1 https://github.com/Microsoft/nni.git + git clone -b v0.2 https://github.com/Microsoft/nni.git cd nni chmod +x install.sh source install.sh - ## **Quick start: run a customized experiment** An experiment is to run multiple trial jobs, each trial job tries a configuration which includes a specific neural architecture (or model) and hyper-parameter values. To run an experiment through NNI, you should: diff --git a/docs/HowToContribute.md b/docs/HowToContribute.md new file mode 100644 index 0000000000..87a570e182 --- /dev/null +++ b/docs/HowToContribute.md @@ -0,0 +1,53 @@ +**How to contribute** +=== +## Best practice for debug NNI source code + +For debugging NNI source code, your development environment should be under Ubuntu 16.04 (or above) system with python 3 and pip 3 installed, then follow the below steps. + +**1. Clone the source code** + +Run the command +``` +git clone https://github.com/Microsoft/nni.git +``` +to clone the source code + +**2. Prepare the debug environment and install dependencies** + +Change directory to the source code folder, then run the command +``` +make install-dependencies +``` +to install the dependent tools for the environment + +**3. Build source code** + +Run the command +``` +make build +``` +to build the source code + +**4. Install NNI to development environment** + +Run the command +``` +make dev-install +``` +to install the distribution content to development environment, and create cli scripts + +**5. 
Check if the environment is ready** + +Now, you can try to start an experiment to check if your environment is ready +For example, run the command +``` +nnictl create --config ~/nni/examples/trials/mnist/config.yml +``` +And open web ui to check if everything is OK + +**6. Redeploy** + +After you change some code, just use **step 4** to rebuild your code, then the change will take effect immediately + +--- +At last, wish you have a wonderful day. \ No newline at end of file diff --git a/docs/NNICTLDOC.md b/docs/NNICTLDOC.md index 62e3dffe34..dadb3e5fb1 100644 --- a/docs/NNICTLDOC.md +++ b/docs/NNICTLDOC.md @@ -234,4 +234,19 @@ nnictl log | --head, -h| False| |show head lines of stderr| | --tail, -t| False| |show tail lines of stderr| | --path, -p| False| |show the path of stderr file| + +* __nnictl log trial__ + * Description + + Show trial log path. + + * Usage + + nnictl log trial [options] + + Options: + + | Name, shorthand | Required|Default | Description | + | ------ | ------ | ------ |------ | + | --id, -I| False| |the id of trial| \ No newline at end of file diff --git a/docs/PAIMode.md b/docs/PAIMode.md new file mode 100644 index 0000000000..05da292afa --- /dev/null +++ b/docs/PAIMode.md @@ -0,0 +1,80 @@ +**Run an Experiment on OpenPAI** +=== +NNI supports running an experiment on [OpenPAI](https://github.com/Microsoft/pai) (aka pai), called pai mode. Before starting to use NNI pai mode, you should have an account to access an [OpenPAI](https://github.com/Microsoft/pai) cluster. See [here](https://github.com/Microsoft/pai#how-to-deploy) if you don't have any OpenPAI account and want to deploy an OpenPAI cluster. In pai mode, your trial program will run in pai's container created by Docker. + +## Setup environment +Install NNI, follow the install guide [here](GetStarted.md). + +## Run an experiment +Use `examples/trials/mnist-annotation` as an example. 
The nni config yaml file's content is like: +``` +authorName: your_name +experimentName: auto_mnist +# how many trials could be concurrently running +trialConcurrency: 2 +# maximum experiment running duration +maxExecDuration: 3h +# empty means never stop +maxTrialNum: 100 +# choice: local, remote, pai +trainingServicePlatform: pai +# choice: true, false +useAnnotation: true +tuner: + builtinTunerName: TPE + classArgs: + optimize_mode: maximize +trial: + command: python3 mnist.py + codeDir: ~/nni/examples/trials/mnist-annotation + gpuNum: 0 + cpuNum: 1 + memoryMB: 8196 + image: openpai/pai.example.tensorflow + dataDir: hdfs://10.1.1.1:9000/nni + outputDir: hdfs://10.1.1.1:9000/nni +# Configuration to access OpenPAI Cluster +paiConfig: + userName: your_pai_nni_user + passWord: your_pai_password + host: 10.1.1.1 +``` +Note: You should set `trainingServicePlatform: pai` in nni config yaml file if you want to start experiment in pai mode. + +Compared with LocalMode and [RemoteMachineMode](RemoteMachineMode.md), trial configuration in pai mode have five additional keys: +* cpuNum + * Required key. Should be positive number based on your trial program's CPU requirement +* memoryMB + * Required key. Should be positive number based on your trial program's memory requirement +* image + * Required key. In pai mode, your trial program will be scheduled by OpenPAI to run in [Docker container](https://www.docker.com/). This key is used to specify the Docker image used to create the container in which your traill will run. + * We already build a docker image [nnimsra/nni](https://hub.docker.com/r/msranni/nni/) on [Docker Hub](https://hub.docker.com/). It contains NNI python packages, Node modules and javascript artifact files required to start experiment, and all of NNI dependencies. The docker file used to build this image can be found at [here](../deployment/Dockerfile.build.base). You can either use this image directly in your config file, or build your own image based on it. 
+* dataDir
+    * Optional key. It specifies the HDFS data directory for trial to download data. The format should be something like hdfs://{your HDFS host}:9000/{your data directory}
+* outputDir
+    * Optional key. It specifies the HDFS output directory for trial. Once the trial is completed (either succeed or fail), trial's stdout, stderr will be copied to this directory by NNI sdk automatically. The format should be something like hdfs://{your HDFS host}:9000/{your output directory}
+
+Once you complete filling the nni experiment config file and save it (for example, save as exp_pai.yaml), then run the following command
+```
+nnictl create --config exp_pai.yaml
+```
+to start the experiment in pai mode. NNI will create an OpenPAI job for each trial, and the job name format is something like `nni_exp_{experiment_id}_trial_{trial_id}`.
+You can see the pai jobs created by NNI in your OpenPAI cluster's web portal, like:
+![](./nni_pai_joblist.jpg)
+
+Notice: In pai mode, NNIManager will start a rest server and listen on `51189` port, to receive metrics from trial job running in PAI container. So you should `enable 51189` TCP port in your firewall rule to allow incoming traffic.
+
+Once a trial job is completed, you can go to NNI WebUI's overview page (like http://localhost:8080/oview) to check trial's information.
+
+Expand a trial information in trial list view, click the logPath link like:
+![](./nni_webui_joblist.jpg)
+
+And you will be redirected to HDFS web portal to browse the output files of that trial in HDFS:
+![](./nni_trial_hdfs_output.jpg)
+
+You can see there are three files in the output folder: stderr, stdout, and trial.log
+
+If you also want to save trial's other output into HDFS, like model files, you can use environment variable `NNI_OUTPUT_DIR` in your trial code to save your own output files, and NNI SDK will copy all the files in `NNI_OUTPUT_DIR` from trial's container to HDFS. 
+ +Any problems when using NNI in pai mode, plesae create issues on [NNI github repo](https://github.com/Microsoft/nni), or send mail to nni@microsoft.com + diff --git a/docs/RELEASE.md b/docs/RELEASE.md index 745566a96c..61b4c78c72 100644 --- a/docs/RELEASE.md +++ b/docs/RELEASE.md @@ -1,3 +1,18 @@ +# Release 0.2.0 - 9/29/2018 +## Major Features + * Support [OpenPAI](https://github.com/Microsoft/pai) (aka pai) Training Service (See [here](./PAIMode.md) for instructions about how to submit NNI job in pai mode) + * Support training services on pai mode. NNI trials will be scheduled to run on OpenPAI cluster + * NNI trial's output (including logs and model file) will be copied to OpenPAI HDFS for further debugging and checking + * Support [SMAC](https://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf) tuner (See [here](../src/sdk/pynni/nni/README.md) for instructions about how to use SMAC tuner) + * [SMAC](https://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf) is based on Sequential Model-Based Optimization (SMBO). It adapts the most prominent previously used model class (Gaussian stochastic process models) and introduces the model class of random forests to SMBO to handle categorical parameters. The SMAC supported by NNI is a wrapper on [SMAC3](https://github.com/automl/SMAC3) + * Support NNI installation on [conda](https://conda.io/docs/index.html) and python virtual environment + * Others + * Update ga squad example and related documentation + * WebUI UX small enhancement and bug fix + +## Known Issues +[Known Issues in release 0.2.0](https://github.com/Microsoft/nni/labels/nni020knownissues). + # Release 0.1.0 - 9/10/2018 (initial release) Initial release of Neural Network Intelligence (NNI). 
diff --git a/docs/RemoteMachineMode.md b/docs/RemoteMachineMode.md index 1cf847151a..94f393324d 100644 --- a/docs/RemoteMachineMode.md +++ b/docs/RemoteMachineMode.md @@ -1,9 +1,10 @@ **Run an Experiment on Multiple Machines** === -NNI supports running an experiment on multiple machines, called remote machine mode. Let's say you have multiple machines with the account `bob` (Note: the account is not necessarily the same on multiple machines): -| IP | Username | Password | -| --------|---------|-------| -| 10.1.1.1 | bob | bob123 | +NNI supports running an experiment on multiple machines, called remote machine mode. Let's say you have multiple machines with the account `bob` (Note: the account is not necessarily the same on multiple machines): + +| IP | Username| Password | +| -------- |---------|-------| +| 10.1.1.1 | bob | bob123 | | 10.1.1.2 | bob | bob123 | | 10.1.1.3 | bob | bob123 | @@ -14,7 +15,7 @@ For remote machines that are used only to run trials but not the nnictl, you can * __Install python SDK through pip__ - pip3 install --user git+https://github.com/Microsoft/NeuralNetworkIntelligence.git#subdirectory=src/sdk/pynni + python3 -m pip install --user git+https://github.com/Microsoft/NeuralNetworkIntelligence.git#subdirectory=src/sdk/pynni * __Install python SDK through source code__ @@ -61,4 +62,4 @@ Simply filling the `machineList` section. This yaml file is named `exp_remote.ya ``` nnictl create --config exp_remote.yaml ``` -to start the experiment. This command can be executed on one of those three machines above, and can also be executed on another machine which has NNI installed and has network accessibility to those three machines. \ No newline at end of file +to start the experiment. This command can be executed on one of those three machines above, and can also be executed on another machine which has NNI installed and has network accessibility to those three machines. 
diff --git a/docs/SearchSpaceSpec.md b/docs/SearchSpaceSpec.md index c26d79b376..ad59e36f16 100644 --- a/docs/SearchSpaceSpec.md +++ b/docs/SearchSpaceSpec.md @@ -66,3 +66,5 @@ The candidate type and value for variable is here: * Which means the variable value is a value like round(exp(normal(mu, sigma)) / q) * q * Suitable for a discrete variable with respect to which the objective is smooth and gets smoother with the size of the variable, which is bounded from one side.
+
+Note that SMAC only supports a subset of the types above, including `choice`, `randint`, `uniform`, `loguniform`, `quniform(q=1)`. In the current version, SMAC does not support cascaded search space (i.e., conditional variable in SMAC).
\ No newline at end of file
diff --git a/docs/StartExperiment.md b/docs/StartExperiment.md
new file mode 100644
index 0000000000..30f736ff15
--- /dev/null
+++ b/docs/StartExperiment.md
@@ -0,0 +1,33 @@
+How to start an experiment
+===
+## 1.Introduce
+There are a few steps to start a new experiment of nni, here is the process.
+
+## 2.Details
+### 2.1 Check environment
+The first step to start an experiment is to check whether the environment is ready, nnictl will check if there is an old experiment running or the port of restful server is occupied.
+NNICTL will also validate the content of config yaml file, to ensure the experiment config is in the correct format.
+
+### 2.2 Start restful server
+After checking the environment, nnictl will start a restful server process to manage nni experiment, the default port is 51188.
+
+### 2.3 Check restful server
+Before next steps, nnictl will check whether restful server is successfully started, or the starting process will stop and show an error message.
+
+### 2.4 Set experiment config
+NNICTL needs to set experiment config before starting an experiment, experiment config includes the config values in config yaml file.
+
+### 2.5 Check experiment config
+NNICTL will ensure the request to set config is successfully executed.
+
+### 2.6 Start Web UI
+NNICTL will start a Web UI process to show Web UI information, the default port of Web UI is 8080.
+
+### 2.7 Check Web UI
+If Web UI is not successfully started, nnictl will give a warning information, and will continue to start experiment.
+
+### 2.8 Start Experiment
+This is the most important step of starting an nni experiment, nnictl will call restful server process to setup an experiment. 
+
+### 2.9 Check experiment
+After starting the experiment, nnictl will check whether the experiment is correctly created, and show more information of this experiment to users.
\ No newline at end of file
diff --git a/docs/ToContribute.md b/docs/ToContribute.md
deleted file mode 100644
index b19602ed7e..0000000000
--- a/docs/ToContribute.md
+++ /dev/null
@@ -1,3 +0,0 @@
-## How to contribute
-
-TBD
\ No newline at end of file
diff --git a/docs/WebUI.md b/docs/WebUI.md
new file mode 100644
index 0000000000..2e77158df8
--- /dev/null
+++ b/docs/WebUI.md
@@ -0,0 +1,54 @@
+# WebUI
+
+## View summary page
+
+Click the tab "Overview".
+
+* See the experiment parameters.
+* See search_space json.
+* See good performance trial.
+
+![](./img/overview.jpg)
+
+## View job accuracy
+
+Click the tab "Optimization Progress" to see the point graph of all trials. Hover every point to see its specific accuracy.
+
+![](./img/accuracy.jpg)
+
+## View hyper parameter
+
+Click the tab "Hyper Parameter" to see the parallel graph.
+
+* You can select the percentage to see top trials.
+* Choose two axes to swap their positions
+
+![](./img/searchspace.jpg)
+
+## View trial status
+
+Click the tab "Trial Status" to see the status of all trials. Specifically:
+
+* Trial duration: trial's duration in the bar graph.
+* Trial detail: trial's id, trial's duration, start time, end time, status, accuracy and search space file.
+
+![](./img/openRow.jpg)
+
+* Kill: you can kill a job whose status is running.
+* Tensor: you can see a job in the tensorflow graph, it will link to the Tensorboard page.
+
+![](./img/trialStatus.jpg)
+
+* Intermediate Result Graph.
+
+![](./img/intermediate.jpg)
+
+## Control
+
+Click the tab "Control" to add a new trial or update the search_space file and some experiment parameters.
+
+![](./img/control.jpg)
+
+## Feedback
+
+[Known Issues](https://github.com/Microsoft/nni). 
\ No newline at end of file diff --git a/docs/WriteYourTrial.md b/docs/WriteYourTrial.md index 82dfe3b1d1..18388aa9fd 100644 --- a/docs/WriteYourTrial.md +++ b/docs/WriteYourTrial.md @@ -1,9 +1,14 @@ -**Write a Trial which can Run on NNI** +**Write a Trial Run on NNI** === -There would be only a few changes on your existing trial(model) code to make the code runnable on NNI. We provide two approaches for you to modify your code: `Python annotation` and `NNI APIs for trial` -## NNI APIs -We also support NNI APIs for trial code. By using this approach, you should first prepare a search space file. An example is shown below: +A **Trial** in NNI is an individual attempt at applying a set of parameters on a model. + +To define a NNI trial, you need to firstly define the set of parameters and then update the model. NNI provide two approaches for you to define a trial: `NNI API` and `NNI Python annotation`. + +## NNI API +>Step 1 - Prepare a SearchSpace parameters file. + +An example is shown below: ``` { "dropout_rate":{"_type":"uniform","_value":[0.1,0.5]}, @@ -12,32 +17,71 @@ We also support NNI APIs for trial code. By using this approach, you should firs "learning_rate":{"_type":"uniform","_value":[0.0001, 0.1]} } ``` -You can refer to [here](SearchSpaceSpec.md) for the tutorial of search space. +Refer to [SearchSpaceSpec.md](SearchSpaceSpec.md) to learn more about search space. -Then, include `import nni` in your trial code to use NNI APIs. Using the line: -``` -RECEIVED_PARAMS = nni.get_parameters() -``` -to get hyper-parameters' values assigned by tuner. `RECEIVED_PARAMS` is an object, for example: -``` -{"conv_size": 2, "hidden_size": 124, "learning_rate": 0.0307, "dropout_rate": 0.2029} -``` +>Step 2 - Update model codes +~~~~ +2.1 Declare NNI API + Include `import nni` in your trial code to use NNI APIs. 
+ +2.2 Get predefined parameters + Use the following code snippet: + + RECEIVED_PARAMS = nni.get_parameters() + + to get hyper-parameters' values assigned by tuner. `RECEIVED_PARAMS` is an object, for example: + + {"conv_size": 2, "hidden_size": 124, "learning_rate": 0.0307, "dropout_rate": 0.2029} + +2.3 Report NNI results + Use the API: -On the other hand, you can use the API: `nni.report_intermediate_result(accuracy)` to send `accuracy` to assessor. And use `nni.report_final_result(accuracy)` to send `accuracy` to tuner. Here `accuracy` could be any python data type, but **NOTE that if you use built-in tuner/assessor, `accuracy` should be a numerical variable(e.g. float, int)**. + `nni.report_intermediate_result(accuracy)` + + to send `accuracy` to assessor. + + Use the API: -The assessor will decide which trial should early stop based on the history performance of trial(intermediate result of one trial). -The tuner will generate next parameters/architecture based on the explore history(final result of all trials). + `nni.report_final_result(accuracy)` + + to send `accuracy` to tuner. +~~~~ + +**NOTE**: +~~~~ +accuracy - The `accuracy` could be any python object, but if you use NNI built-in tuner/assessor, `accuracy` should be a numerical variable (e.g. float, int). +assessor - The assessor will decide which trial should early stop based on the history performance of trial (intermediate result of one trial). +tuner - The tuner will generate next parameters/architecture based on the explore history (final result of all trials). +~~~~ + +>Step 3 - Enable NNI API + +To enable NNI API mode, you need to set useAnnotation to *false* and provide the path of SearchSpace file (you just defined in step 1): -In the yaml configure file, you need two lines to enable NNI APIs: ``` useAnnotation: false searchSpacePath: /path/to/your/search_space.json ``` -You can refer to [here](../examples/trials/README.md) for more information about how to write trial code using NNI APIs. 
+You can refer to [here](ExperimentConfig.md) for more information about how to set up experiment configurations. + +You can also refer to [here](../examples/trials/README.md) for more information about how to write trial code using NNI APIs. + +## NNI Python Annotation +An alternative way to write a trial is to use NNI's syntax for python. Like any annotation, NNI annotation works like comments in your code. You don't have to make structural or any other big changes to your existing code. With a few lines of NNI annotation, you will be able to: +* annotate the variables you want to tune +* specify in which range you want to tune the variables +* annotate which variable you want to report as intermediate result to `assessor` +* annotate which variable you want to report as the final result (e.g. model accuracy) to `tuner`. + +Again, take MNIST as an example, it only requires 2 steps to write a trial with NNI Annotation. + +>Step 1 - Update codes with annotations + +Please refer to the following tensorflow code snippet for NNI Annotation, the highlighted 4 lines are annotations that help you to: (1) tune batch\_size and (2) dropout\_rate, (3) report test\_acc every 100 steps, and (4) at last report test\_acc as the final result. + +>What is noteworthy is: as these newly added codes are annotations, they do not actually change your previous code logic, so you can still run your code as usual in environments without NNI installed. -## NNI Annotation -We designed a new syntax for users to annotate the variables they want to tune and in what range they want to tune the variables. Also, they can annotate which variable they want to report as intermediate result to `assessor`, and which variable to report as the final result (e.g. model accuracy) to `tuner`. A really appealing feature of our NNI annotation is that it exists as comments in your code, which means you can run your code as before without NNI. 
Let's look at an example, below is a piece of tensorflow code: ```diff with tf.Session() as sess: sess.run(tf.global_variables_initializer()) @@ -64,14 +108,16 @@ with tf.Session() as sess: + """@nni.report_final_result(test_acc)""" ``` -Let's say you want to tune batch\_size and dropout\_rate, and report test\_acc every 100 steps, at last report test\_acc as final result. With our NNI annotation, your code would look like below: +>NOTE +>>`@nni.variable` will take effect on its following line +>> +>>`@nni.report_intermediate_result`/`@nni.report_final_result` will send the data to assessor/tuner at that line. +>> +>>Please refer to [Annotation README](../tools/annotation/README.md) for more information about annotation syntax and its usage. -Simply adding four lines would make your code runnable on NNI. You can still run your code independently. `@nni.variable` works on its next line assignment, and `@nni.report_intermediate_result`/`@nni.report_final_result` would send the data to assessor/tuner at that line. Please refer to [here](../tools/annotation/README.md) for more annotation syntax and more powerful usage. In the yaml configure file, you need one line to enable NNI annotation: +>Step 2 - Enable NNI Annotation +In the yaml configure file, you need to set *useAnnotation* to true to enable NNI annotation: ``` useAnnotation: true ``` - -For users to correctly leverage NNI annotation, we briefly introduce how NNI annotation works here: NNI precompiles users' trial code to find all the annotations each of which is one line with `"""@nni` at the head of the line. Then NNI replaces each annotation with a corresponding NNI API at the location where the annotation is. 
- -**Note that: in your trial code, you can use either one of NNI APIs and NNI annotation, but not both of them simultaneously.** \ No newline at end of file diff --git a/docs/img/accuracy.jpg b/docs/img/accuracy.jpg new file mode 100644 index 0000000000..d71586d243 Binary files /dev/null and b/docs/img/accuracy.jpg differ diff --git a/docs/img/control.jpg b/docs/img/control.jpg new file mode 100644 index 0000000000..66fe5b85a4 Binary files /dev/null and b/docs/img/control.jpg differ diff --git a/docs/img/experiment_process.jpg b/docs/img/experiment_process.jpg new file mode 100644 index 0000000000..141e41cad9 Binary files /dev/null and b/docs/img/experiment_process.jpg differ diff --git a/docs/img/intermediate.jpg b/docs/img/intermediate.jpg new file mode 100644 index 0000000000..ae56fc63c6 Binary files /dev/null and b/docs/img/intermediate.jpg differ diff --git a/docs/img/openRow.jpg b/docs/img/openRow.jpg new file mode 100644 index 0000000000..86ff6f6441 Binary files /dev/null and b/docs/img/openRow.jpg differ diff --git a/docs/img/overview.jpg b/docs/img/overview.jpg new file mode 100644 index 0000000000..04085892d8 Binary files /dev/null and b/docs/img/overview.jpg differ diff --git a/docs/img/searchspace.jpg b/docs/img/searchspace.jpg new file mode 100644 index 0000000000..f0e728cbed Binary files /dev/null and b/docs/img/searchspace.jpg differ diff --git a/docs/img/trialStatus.jpg b/docs/img/trialStatus.jpg new file mode 100644 index 0000000000..1d77cb7655 Binary files /dev/null and b/docs/img/trialStatus.jpg differ diff --git a/docs/nni_pai_joblist.jpg b/docs/nni_pai_joblist.jpg new file mode 100644 index 0000000000..fa7564cb2a Binary files /dev/null and b/docs/nni_pai_joblist.jpg differ diff --git a/docs/nni_trial_hdfs_output.jpg b/docs/nni_trial_hdfs_output.jpg new file mode 100644 index 0000000000..decbf3bde2 Binary files /dev/null and b/docs/nni_trial_hdfs_output.jpg differ diff --git a/docs/nni_webui_joblist.jpg b/docs/nni_webui_joblist.jpg new file 
mode 100644 index 0000000000..7789d7f8d4 Binary files /dev/null and b/docs/nni_webui_joblist.jpg differ diff --git a/examples/trials/auto-gbdt/config.yml b/examples/trials/auto-gbdt/config.yml index 205b6f3a76..e6f3b963ac 100644 --- a/examples/trials/auto-gbdt/config.yml +++ b/examples/trials/auto-gbdt/config.yml @@ -3,18 +3,19 @@ experimentName: example_auto-gbdt trialConcurrency: 1 maxExecDuration: 10h maxTrialNum: 10 -#choice: local, remote +#choice: local, remote, pai trainingServicePlatform: local -searchSpacePath: ~/nni/examples/trials/auto-gbdt/search_space.json +searchSpacePath: search_space.json #choice: true, false useAnnotation: false tuner: - #choice: TPE, Random, Anneal, Evolution + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed through nnictl) builtinTunerName: TPE classArgs: #choice: maximize, minimize optimize_mode: minimize trial: command: python3 main.py - codeDir: ~/nni/examples/trials/auto-gbdt/ - gpuNum: 0 \ No newline at end of file + codeDir: . + gpuNum: 0 diff --git a/examples/trials/auto-gbdt/config_pai.yml b/examples/trials/auto-gbdt/config_pai.yml new file mode 100644 index 0000000000..26577cf83a --- /dev/null +++ b/examples/trials/auto-gbdt/config_pai.yml @@ -0,0 +1,36 @@ +authorName: default +experimentName: example_auto-gbdt +trialConcurrency: 1 +maxExecDuration: 10h +maxTrialNum: 10 +#choice: local, remote, pai +trainingServicePlatform: pai +searchSpacePath: search_space.json +#choice: true, false +useAnnotation: false +tuner: + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed through nnictl) + builtinTunerName: TPE + classArgs: + #choice: maximize, minimize + optimize_mode: minimize +trial: + command: python3 main.py + codeDir: . 
+ gpuNum: 0 + cpuNum: 1 + memoryMB: 8196 + #The docker image to run nni job on pai + image: openpai/pai.example.tensorflow + #The hdfs directory to store data on pai, format 'hdfs://host:port/directory' + hdfsDataDir: hdfs://10.10.10.10:9000/username/nni + #The hdfs directory to store output data generated by nni, format 'hdfs://host:port/directory' + hdfsOutputDir: hdfs://10.10.10.10:9000/username/nni +paiConfig: + #The username to login pai + userName: username + #The password to login pai + passWord: password + #The host of restful server of pai + host: 10.10.10.10 \ No newline at end of file diff --git a/examples/trials/ga_squad/README.md b/examples/trials/ga_squad/README.md new file mode 100644 index 0000000000..ab8ba853f7 --- /dev/null +++ b/examples/trials/ga_squad/README.md @@ -0,0 +1,254 @@ +# Automatic Model Architecture Search for Reading Comprehension +This example shows us how to use Genetic Algorithm to find good model architectures for Reading Comprehension task. + +## Search Space +Since attention and recurrent neural network (RNN) module have been proven effective in Reading Comprehension. +We conclude the search space as follow: + +1. IDENTITY (Effectively means keep training). +2. INSERT-RNN-LAYER (Inserts a LSTM. Comparing the performance of GRU and LSTM in our experiment, we decided to use LSTM here.) +3. REMOVE-RNN-LAYER +4. INSERT-ATTENTION-LAYER(Inserts a attention layer.) +5. REMOVE-ATTENTION-LAYER +6. ADD-SKIP (Identity between random layers). +7. REMOVE-SKIP (Removes random skip). + +![ga-squad-logo](./ga_squad.png) + +## New version +Also we have another version which time cost is less and performance is better. We will release soon. + +# How to run this example? + +## Download data + +### Use downloading script to download data + +Execute the following command to download needed files +using the downloading script: + +``` +chmod +x ./download.sh +./download.sh +``` + +### Download manually + +1. 
download "dev-v1.1.json" and "train-v1.1.json" in https://rajpurkar.github.io/SQuAD-explorer/ + +``` +wget https://rajpurkar.github.io/SQuAD-explorer/dataset/train-v1.1.json +wget https://rajpurkar.github.io/SQuAD-explorer/dataset/dev-v1.1.json +``` + +2. download "glove.840B.300d.txt" in https://nlp.stanford.edu/projects/glove/ + +``` +wget http://nlp.stanford.edu/data/glove.840B.300d.zip +unzip glove.840B.300d.zip +``` + +## Update configuration +Modify `nni/examples/trials/ga_squad/config.yaml`, here is the default configuration: + +``` +authorName: default +experimentName: example_ga_squad +trialConcurrency: 1 +maxExecDuration: 1h +maxTrialNum: 1 +#choice: local, remote +trainingServicePlatform: local +#choice: true, false +useAnnotation: false +tuner: + codeDir: ~/nni/examples/tuners/ga_customer_tuner + classFileName: customer_tuner.py + className: CustomerTuner + classArgs: + optimize_mode: maximize +trial: + command: python3 trial.py + codeDir: ~/nni/examples/trials/ga_squad + gpuNum: 0 +``` + +In the "trial" part, if you want to use GPU to perform the architecture search, change `gpuNum` from `0` to `1`. You need to increase the `maxTrialNum` and `maxExecDuration`, according to how long you want to wait for the search result. + +`trialConcurrency` is the number of trials running concurrently, which is the number of GPUs you want to use, if you are setting `gpuNum` to 1. + +## Submit this job + +``` +nnictl create --config ~/nni/examples/trials/ga_squad/config.yaml +``` + +# Technical details about the trial + +## How does it work +The evolution-algorithm based architecture for question answering has two different parts just like any other examples: the trial and the tuner. + +### The trial + +The trial has a lot of different files, functions and classes. Here we will only give most of those files a brief introduction: + +* `attention.py` contains an implementation of the attention mechanism in Tensorflow. +* `data.py` contains functions for data preprocessing. 
+* `evaluate.py` contains the evaluation script. +* `graph.py` contains the definition of the computation graph. +* `rnn.py` contains an implementation of GRU in Tensorflow. +* `train_model.py` is a wrapper for the whole question answering model. + +Among those files, `trial.py` and `graph_to_tf.py` are special. + +`graph_to_tf.py` has a function named `graph_to_network`, here is its skeleton code: + +``` +def graph_to_network(input1, + input2, + input1_lengths, + input2_lengths, + graph, + dropout_rate, + is_training, + num_heads=1, + rnn_units=256): + topology = graph.is_topology() + layers = dict() + layers_sequence_lengths = dict() + num_units = input1.get_shape().as_list()[-1] + layers[0] = input1*tf.sqrt(tf.cast(num_units, tf.float32)) + \ + positional_encoding(input1, scale=False, zero_pad=False) + layers[1] = input2*tf.sqrt(tf.cast(num_units, tf.float32)) + layers[0] = dropout(layers[0], dropout_rate, is_training) + layers[1] = dropout(layers[1], dropout_rate, is_training) + layers_sequence_lengths[0] = input1_lengths + layers_sequence_lengths[1] = input2_lengths + for _, topo_i in enumerate(topology): + if topo_i == '|': + continue + if graph.layers[topo_i].graph_type == LayerType.input.value: + # ...... + elif graph.layers[topo_i].graph_type == LayerType.attention.value: + # ...... + # More layers to handle +``` + +As we can see, this function is actually a compiler that converts the internal model DAG configuration (which will be introduced in the `Model configuration format` section) `graph`, to a Tensorflow computation graph. + +``` +topology = graph.is_topology() +``` + +performs topological sorting on the internal graph representation, and the code inside the loop: + +``` +for _, topo_i in enumerate(topology): +``` + +performs the actual conversion that maps each layer to a part of the Tensorflow computation graph. + +### The tuner + +The tuner is much simpler than the trial. They actually share the same `graph.py`. 
Besides, the tuner has a `customer_tuner.py`, the most important class in which is `CustomerTuner`: + +``` +class CustomerTuner(Tuner): + # ...... + + def generate_parameters(self, parameter_id): + """Returns a set of trial graph config, as a serializable object. + parameter_id : int + """ + if len(self.population) <= 0: + logger.debug("the len of poplution lower than zero.") + raise Exception('The population is empty') + pos = -1 + for i in range(len(self.population)): + if self.population[i].result == None: + pos = i + break + if pos != -1: + indiv = copy.deepcopy(self.population[pos]) + self.population.pop(pos) + temp = json.loads(graph_dumps(indiv.config)) + else: + random.shuffle(self.population) + if self.population[0].result > self.population[1].result: + self.population[0] = self.population[1] + indiv = copy.deepcopy(self.population[0]) + self.population.pop(1) + indiv.mutation() + graph = indiv.config + temp = json.loads(graph_dumps(graph)) + + # ...... +``` + +As we can see, the overloaded method `generate_parameters` implements a pretty naive mutation algorithm. The code lines: + +``` + if self.population[0].result > self.population[1].result: + self.population[0] = self.population[1] + indiv = copy.deepcopy(self.population[0]) +``` + +controls the mutation process. It will always take two random individuals in the population, only keeping and mutating the one with better result. + +## Model configuration format + +Here is an example of the model configuration, which is passed from the tuner to the trial in the architecture search procedure. 
+ +``` +{ + "max_layer_num": 50, + "layers": [ + { + "input_size": 0, + "type": 3, + "output_size": 1, + "input": [], + "size": "x", + "output": [4, 5], + "is_delete": false + }, + { + "input_size": 0, + "type": 3, + "output_size": 1, + "input": [], + "size": "y", + "output": [4, 5], + "is_delete": false + }, + { + "input_size": 1, + "type": 4, + "output_size": 0, + "input": [6], + "size": "x", + "output": [], + "is_delete": false + }, + { + "input_size": 1, + "type": 4, + "output_size": 0, + "input": [5], + "size": "y", + "output": [], + "is_delete": false + }, + {"Comment": "More layers will be here for actual graphs."} + ] +} +``` + +Every model configuration has a "layers" section, which is a JSON list of layer definitions. The definition of each layer is also a JSON object, where: + + * `type` is the type of the layer. 0, 1, 2, 3, 4 correspond to attention, self-attention, RNN, input and output layer respectively. + * `size` is the length of the output. "x", "y" correspond to document length / question length, respectively. + * `input_size` is the number of inputs the layer has. + * `input` is the indices of layers taken as input of this layer. + * `output` is the indices of layers that use this layer's output as their input. + * `is_delete` means whether the layer is still available. 
\ No newline at end of file diff --git a/examples/trials/ga_squad/attention.py b/examples/trials/ga_squad/attention.py index 381873b4f9..7a7e02d74a 100644 --- a/examples/trials/ga_squad/attention.py +++ b/examples/trials/ga_squad/attention.py @@ -31,37 +31,6 @@ def _get_variable(variable_dict, name, shape, initializer=None, dtype=tf.float32 name=name, shape=shape, initializer=initializer, dtype=dtype) return variable_dict[name] - -def batch_linear_layer(matrix_a, matrix_b): - ''' - shape of matrix_a is [*, batch, dima] - shape of matrix_b is [batch, dima, dimb] - result is [*, batch, dimb] - for each batch, do matrix_a linear op to last dim - ''' - matrix_a = tf.expand_dims(matrix_a, -1) - while len(list(matrix_b.shape)) < len(list(matrix_a.shape)): - matrix_b = tf.expand_dims(matrix_b, 0) - return tf.reduce_sum(matrix_a * matrix_b, -2) - - -def split_last_dim(x, factor): - shape = tf.shape(x) - last_dim = int(x.shape[-1]) - assert last_dim % factor == 0, \ - "last dim isn't divisible by factor {%d} {%d}" % (last_dim, factor) - new_shape = tf.concat( - [shape[:-1], tf.constant([factor, last_dim // factor])], axis=0) - return tf.reshape(x, new_shape) - - -def merge_last2_dim(x): - shape = tf.shape(x) - last_dim = int(x.shape[-1]) * int(x.shape[-2]) - new_shape = tf.concat([shape[:-2], tf.constant([last_dim])], axis=0) - return tf.reshape(x, new_shape) - - class DotAttention: ''' DotAttention @@ -197,192 +166,4 @@ def get_att(self, s, prob): ''' buf = s * tf.expand_dims(prob, axis=-1) att = tf.reduce_sum(buf, axis=-3) - return att - - -class MultiHeadAttention: - ''' - MultiHeadAttention. 
- ''' - def __init__(self, name, hidden_dim, head, add=True, dot=True, divide=True): - self._name = '/'.join([name, 'dot_att']) - self._head = head - self._head_dim = hidden_dim // head - self._hidden_dim = self._head_dim * head - self._add = add - self._dot = dot - assert add or dot, "you must at least choose one between add and dot" - self._div = 1.0 - if divide: - self._div = math.sqrt(self._head_dim) - self._var = {} - - @property - def hidden_dim(self): - return self._head_dim * self._head - - @property - def name(self): - return self._name - - @property - def var(self): - return self._var - - def _get_var(self, name, shape, initializer=None): - with tf.variable_scope(self.name): - return _get_variable(self.var, name, shape, initializer) - - def _define_params(self, tgt_dim): - self._get_var('tgt_project', [tgt_dim, self._hidden_dim]) - self._get_var('tgt_bias', [1, self._hidden_dim]) - self._get_var('v', [self._head, self._head_dim, 1]) - - def get_pre_compute(self, src): - s_shape = src.get_shape().as_list() - src_dim = s_shape[-1] - src_project = self._get_var('src_project', [src_dim, self._hidden_dim]) - src_bias = self._get_var('src_bias', [1, self._hidden_dim]) - src = split_last_dim(tf.tensordot(src, src_project, - [[2], [0]]) + src_bias, self._head) - return src - - def get_prob(self, src, tgt, mask, pre_compute): - ''' - :param s: [src_sequence_length, batch_size, src_dim] - :param h: [batch_size, tgt_dim] or [tgt_sequence_length, batch_size, tgt_dim] - :param mask: [src_sequence_length, batch_size]\ - or [tgt_sequence_length, src_sequence_length, batch_sizse] - :param pre_compute: [src_sequence_length, batch_size, hidden_dim] - :return: [src_sequence_length, batch_size]\ - or [tgt_sequence_length, src_sequence_length, batch_size] - ''' - s_shape = src.get_shape().as_list() - h_shape = tgt.get_shape().as_list() - src_dim = s_shape[-1] - tgt_dim = h_shape[-1] - print('src tgt dim: ', src_dim, tgt_dim) - assert src_dim is not None, 'src dimension must 
be defined' - assert tgt_dim is not None, 'tgt dimension must be defined' - - self._define_params(tgt_dim) - - if len(h_shape) == 2: - tgt = tf.expand_dims(tgt, 0) - - tgt_project = self._var['tgt_project'] - tgt_bias = self._var['tgt_bias'] - - if pre_compute is None: - pre_compute = self.get_pre_compute(src) - - src = pre_compute - tgt = split_last_dim(tf.tensordot(tgt, tgt_project, - [[2], [0]]) + tgt_bias, self._head) - - add_attention = 0 - dot_attention = 0 - if self._add: - buf = tf.tanh(tf.expand_dims(src, 0) + tf.expand_dims(tgt, 1)) - v = self.var['v'] - add_attention = tf.squeeze(batch_linear_layer(buf, v), -1) - if self._dot: - dot_attention = tf.reduce_sum(tf.expand_dims( - src, 0) * tf.expand_dims(tgt, 1), -1) - dot_attention /= self._div - - attention = add_attention + dot_attention - mask = tf.expand_dims(mask, -1) - logits = attention + (mask - 1) * 10000.0 - prob = tf.nn.softmax(logits, 1) - if len(h_shape) == 2: - prob = tf.squeeze(prob, axis=[0]) - return prob - - def map_target(self, tgt): - tgt_project = self._var['tgt_project'] - tgt_bias = self._var['tgt_bias'] - tgt = tf.tensordot(tgt, tgt_project, [[1], [0]]) + tgt_bias - return tgt - - def get_att(self, src, prob): - ''' - :param s: [src_sequence_length, batch_size, head, head_dim] - :param prob: [src_sequence_length, batch_size, head]\ - or [tgt_sequence_length, src_sequence_length, batch_size, head] - :return: [batch_size, src_dim] or [tgt_sequence_length, batch_size, src_dim] - ''' - buf = merge_last2_dim(tf.reduce_sum( - src * tf.expand_dims(prob, axis=-1), axis=-4)) - return buf - - -class DotAttentionWrapper(RNNCell): - ''' - A wrapper for DotAttention or MultiHeadAttention. 
- ''' - - def __init__(self, cell, attention, - src, mask, is_gated, - reuse=None, dropout=None, - keep_input=True, map_target=False): - super().__init__(self, _reuse=reuse) - assert isinstance(attention, (DotAttention, MultiHeadAttention)), \ - 'type of attention is not supported' - assert isinstance(cell, RNNCell), 'type of cell must be RNNCell' - self._attention = attention - self._src = src - self._mask = mask - self._pre_computed = None - self._is_gated = is_gated - self._cell = cell - self._dropout = dropout - self._keep_input = keep_input - self._map_target = map_target - - @property - def state_size(self): - return self._cell.state_size - - @property - def output_size(self): - return self._cell.output_size - - def call(self, inputs, state): - if self._pre_computed is None: - self._pre_computed = self._attention.get_pre_compute(self._src) - att_prob = self._attention.get_prob( - src=self._src, - tgt=tf.concat([inputs, state], axis=1), - mask=self._mask, - pre_compute=self._pre_computed) - if isinstance(self._attention, DotAttention): - att = self._attention.get_att(self._src, att_prob) - else: - att = self._attention.get_att(self._pre_computed, att_prob) - x_list = [att] - if self._keep_input: - x_list.append(inputs) - if inputs.shape[1] == att.shape[1]: - x_list.append(inputs - att) - x_list.append(inputs * att) - if self._map_target and isinstance(self._attention, MultiHeadAttention): - tgt = self._attention.map_target( - tf.concat([inputs, state], axis=1)) - x_list += [tgt, att-tgt, att*tgt] - - x = tf.concat(x_list, axis=1) - dim = x.get_shape().as_list()[1] - assert dim is not None, 'dim must be defined' - if self._is_gated: - g = tf.get_variable('att_gate', - shape=[dim, dim], - dtype=tf.float32, - initializer=None) - bias_g = tf.get_variable( - 'bias_gate', shape=[1, dim], dtype=tf.float32) - gate = tf.sigmoid(tf.matmul(x, g) + bias_g) - x = x * gate - if self._dropout is not None: - x = self._dropout(x) - return self._cell.call(x, state) + return att 
\ No newline at end of file diff --git a/examples/trials/ga_squad/config.yml b/examples/trials/ga_squad/config.yml index a094a71f79..c6fec5bcbc 100644 --- a/examples/trials/ga_squad/config.yml +++ b/examples/trials/ga_squad/config.yml @@ -2,18 +2,18 @@ authorName: default experimentName: example_ga_squad trialConcurrency: 1 maxExecDuration: 1h -maxTrialNum: 1 -#choice: local, remote +maxTrialNum: 10 +#choice: local, remote, pai trainingServicePlatform: local #choice: true, false useAnnotation: false tuner: - codeDir: ~/nni/examples/tuners/ga_customer_tuner + codeDir: ../tuners/ga_customer_tuner classFileName: customer_tuner.py className: CustomerTuner classArgs: optimize_mode: maximize trial: command: python3 trial.py - codeDir: ~/nni/examples/trials/ga_squad + codeDir: . gpuNum: 0 \ No newline at end of file diff --git a/examples/trials/ga_squad/config_pai.yml b/examples/trials/ga_squad/config_pai.yml new file mode 100644 index 0000000000..56c2d33069 --- /dev/null +++ b/examples/trials/ga_squad/config_pai.yml @@ -0,0 +1,34 @@ +authorName: default +experimentName: example_ga_squad +trialConcurrency: 1 +maxExecDuration: 1h +maxTrialNum: 10 +#choice: local, remote, pai +trainingServicePlatform: pai +#choice: true, false +useAnnotation: false +tuner: + codeDir: ../tuners/ga_customer_tuner + classFileName: customer_tuner.py + className: CustomerTuner + classArgs: + optimize_mode: maximize +trial: + command: python3 trial.py + codeDir: . 
+ gpuNum: 0 + cpuNum: 1 + memoryMB: 8196 + #The docker image to run nni job on pai + image: openpai/pai.example.tensorflow + #The hdfs directory to store data on pai, format 'hdfs://host:port/directory' + hdfsDataDir: hdfs://10.10.10.10:9000/username/nni + #The hdfs directory to store output data generated by nni, format 'hdfs://host:port/directory' + hdfsOutputDir: hdfs://10.10.10.10:9000/username/nni +paiConfig: + #The username to login pai + userName: username + #The password to login pai + passWord: password + #The host of restful server of pai + host: 10.10.10.10 \ No newline at end of file diff --git a/examples/trials/ga_squad/ga_squad.png b/examples/trials/ga_squad/ga_squad.png new file mode 100644 index 0000000000..4c82cd4654 Binary files /dev/null and b/examples/trials/ga_squad/ga_squad.png differ diff --git a/examples/trials/ga_squad/readme.md b/examples/trials/ga_squad/readme.md deleted file mode 100644 index 99eaf12fd5..0000000000 --- a/examples/trials/ga_squad/readme.md +++ /dev/null @@ -1,33 +0,0 @@ -# Download data - -## Use downloading script - -Execute the following command to download needed files -using the downloading script: - -``` -chmod +x ./download.sh -./download.sh -``` - -## Download manually - -1. download "dev-v1.1.json" and "train-v1.1.json" in https://rajpurkar.github.io/SQuAD-explorer/ - -``` -wget https://rajpurkar.github.io/SQuAD-explorer/dataset/train-v1.1.json -wget https://rajpurkar.github.io/SQuAD-explorer/dataset/dev-v1.1.json -``` - -2. download "glove.840B.300d.txt" in https://nlp.stanford.edu/projects/glove/ - -``` -wget http://nlp.stanford.edu/data/glove.840B.300d.zip -unzip glove.840B.300d.zip -``` - -# How to submit this job - -1. run "$NNI_ROOT_DIR/auto_run.py" as "$NNI_ROOT_DIR/README-AUTO.md" said. -2. use the dockerImage openpai.azurecr.io/nni_v0.0.1, which means it use a tensorflow cpu-version. -3. this model don't need search_space.json. 
\ No newline at end of file diff --git a/examples/trials/ga_squad/trial.py b/examples/trials/ga_squad/trial.py index 7373711e36..b96805c9c7 100644 --- a/examples/trials/ga_squad/trial.py +++ b/examples/trials/ga_squad/trial.py @@ -51,7 +51,7 @@ def get_config(): parser = argparse.ArgumentParser( description='This program is using genetic algorithm to search architecture for SQuAD.') parser.add_argument('--input_file', type=str, - default='./dev-v1.1.json', help='input file') + default='./train-v1.1.json', help='input file') parser.add_argument('--dev_file', type=str, default='./dev-v1.1.json', help='dev file') parser.add_argument('--embedding_file', type=str, diff --git a/examples/trials/mnist-annotation/config.yml b/examples/trials/mnist-annotation/config.yml index f7a0b06da8..b0555ad3a2 100644 --- a/examples/trials/mnist-annotation/config.yml +++ b/examples/trials/mnist-annotation/config.yml @@ -2,18 +2,19 @@ authorName: default experimentName: example_mnist trialConcurrency: 1 maxExecDuration: 1h -maxTrialNum: 1 -#choice: local, remote +maxTrialNum: 10 +#choice: local, remote, pai trainingServicePlatform: local #choice: true, false useAnnotation: true tuner: - #choice: TPE, Random, Anneal, Evolution + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed through nnictl) builtinTunerName: TPE classArgs: #choice: maximize, minimize optimize_mode: maximize trial: command: python3 mnist.py - codeDir: ~/nni/examples/trials/mnist-annotation - gpuNum: 0 \ No newline at end of file + codeDir: . 
+ gpuNum: 0 diff --git a/examples/trials/mnist-annotation/config_pai.yml b/examples/trials/mnist-annotation/config_pai.yml new file mode 100644 index 0000000000..edb9e62384 --- /dev/null +++ b/examples/trials/mnist-annotation/config_pai.yml @@ -0,0 +1,35 @@ +authorName: default +experimentName: example_mnist +trialConcurrency: 1 +maxExecDuration: 1h +maxTrialNum: 10 +#choice: local, remote, pai +trainingServicePlatform: pai +#choice: true, false +useAnnotation: true +tuner: + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed through nnictl) + builtinTunerName: TPE + classArgs: + #choice: maximize, minimize + optimize_mode: maximize +trial: + command: python3 mnist.py + codeDir: . + gpuNum: 0 + cpuNum: 1 + memoryMB: 8196 + #The docker image to run nni job on pai + image: openpai/pai.example.tensorflow + #The hdfs directory to store data on pai, format 'hdfs://host:port/directory' + hdfsDataDir: hdfs://10.10.10.10:9000/username/nni + #The hdfs directory to store output data generated by nni, format 'hdfs://host:port/directory' + hdfsOutputDir: hdfs://10.10.10.10:9000/username/nni +paiConfig: + #The username to login pai + userName: username + #The password to login pai + passWord: password + #The host of restful server of pai + host: 10.10.10.10 \ No newline at end of file diff --git a/examples/trials/mnist-batch-tune-keras/config.yml b/examples/trials/mnist-batch-tune-keras/config.yml index 4b750a2394..e0722f9117 100644 --- a/examples/trials/mnist-batch-tune-keras/config.yml +++ b/examples/trials/mnist-batch-tune-keras/config.yml @@ -2,19 +2,20 @@ authorName: default experimentName: example_mnist-keras trialConcurrency: 1 maxExecDuration: 1h -maxTrialNum: 6 -#choice: local, remote +maxTrialNum: 10 +#choice: local, remote, pai trainingServicePlatform: local -searchSpacePath: ~/nni/examples/trials/mnist-batch-tune-keras/search_space.json +searchSpacePath: search_space.json #choice: true, false useAnnotation: false tuner: #choice: TPE, Random, 
Anneal, Evolution, BatchTuner + #SMAC (SMAC should be installed through nnictl) builtinTunerName: BatchTuner classArgs: #choice: maximize, minimize optimize_mode: maximize trial: command: python3 mnist-keras.py - codeDir: ~/nni/examples/trials/mnist-batch-tune-keras + codeDir: . gpuNum: 0 diff --git a/examples/trials/mnist-batch-tune-keras/config_pai.yml b/examples/trials/mnist-batch-tune-keras/config_pai.yml new file mode 100644 index 0000000000..183c220e2d --- /dev/null +++ b/examples/trials/mnist-batch-tune-keras/config_pai.yml @@ -0,0 +1,36 @@ +authorName: default +experimentName: example_mnist-keras +trialConcurrency: 1 +maxExecDuration: 1h +maxTrialNum: 10 +#choice: local, remote, pai +trainingServicePlatform: pai +searchSpacePath: search_space.json +#choice: true, false +useAnnotation: false +tuner: + #choice: TPE, Random, Anneal, Evolution, BatchTuner + #SMAC (SMAC should be installed through nnictl) + builtinTunerName: BatchTuner + classArgs: + #choice: maximize, minimize + optimize_mode: maximize +trial: + command: python3 mnist-keras.py + codeDir: . 
+ gpuNum: 0 + cpuNum: 1 + memoryMB: 8196 + #The docker image to run nni job on pai + image: openpai/pai.example.tensorflow + #The hdfs directory to store data on pai, format 'hdfs://host:port/directory' + hdfsDataDir: hdfs://10.10.10.10:9000/username/nni + #The hdfs directory to store output data generated by nni, format 'hdfs://host:port/directory' + hdfsOutputDir: hdfs://10.10.10.10:9000/username/nni +paiConfig: + #The username to login pai + userName: username + #The password to login pai + passWord: password + #The host of restful server of pai + host: 10.10.10.10 \ No newline at end of file diff --git a/examples/trials/mnist-keras/config.yml b/examples/trials/mnist-keras/config.yml index 83baf364ad..6ea1c2a367 100644 --- a/examples/trials/mnist-keras/config.yml +++ b/examples/trials/mnist-keras/config.yml @@ -2,19 +2,20 @@ authorName: default experimentName: example_mnist-keras trialConcurrency: 1 maxExecDuration: 1h -maxTrialNum: 1 -#choice: local, remote +maxTrialNum: 10 +#choice: local, remote, pai trainingServicePlatform: local -searchSpacePath: ~/nni/examples/trials/mnist-keras/search_space.json +searchSpacePath: search_space.json #choice: true, false useAnnotation: false tuner: - #choice: TPE, Random, Anneal, Evolution + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed through nnictl) builtinTunerName: TPE classArgs: #choice: maximize, minimize optimize_mode: maximize trial: command: python3 mnist-keras.py - codeDir: ~/nni/examples/trials/mnist-keras - gpuNum: 0 \ No newline at end of file + codeDir: . 
+ gpuNum: 0 diff --git a/examples/trials/mnist-keras/config_pai.yml b/examples/trials/mnist-keras/config_pai.yml new file mode 100644 index 0000000000..bbf8136144 --- /dev/null +++ b/examples/trials/mnist-keras/config_pai.yml @@ -0,0 +1,36 @@ +authorName: default +experimentName: example_mnist-keras +trialConcurrency: 1 +maxExecDuration: 1h +maxTrialNum: 10 +#choice: local, remote, pai +trainingServicePlatform: pai +searchSpacePath: search_space.json +#choice: true, false +useAnnotation: false +tuner: + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed through nnictl) + builtinTunerName: TPE + classArgs: + #choice: maximize, minimize + optimize_mode: maximize +trial: + command: python3 mnist-keras.py + codeDir: . + gpuNum: 0 + cpuNum: 1 + memoryMB: 8196 + #The docker image to run nni job on pai + image: openpai/pai.example.tensorflow + #The hdfs directory to store data on pai, format 'hdfs://host:port/directory' + hdfsDataDir: hdfs://10.10.10.10:9000/username/nni + #The hdfs directory to store output data generated by nni, format 'hdfs://host:port/directory' + hdfsOutputDir: hdfs://10.10.10.10:9000/username/nni +paiConfig: + #The username to login pai + userName: username + #The password to login pai + passWord: password + #The host of restful server of pai + host: 10.10.10.10 \ No newline at end of file diff --git a/examples/trials/mnist-smartparam/config.yml b/examples/trials/mnist-smartparam/config.yml index 1343937742..912eabb24e 100644 --- a/examples/trials/mnist-smartparam/config.yml +++ b/examples/trials/mnist-smartparam/config.yml @@ -2,18 +2,19 @@ authorName: default experimentName: example_mnist-smartparam trialConcurrency: 1 maxExecDuration: 1h -maxTrialNum: 1 -#choice: local, remote +maxTrialNum: 10 +#choice: local, remote, pai trainingServicePlatform: local #choice: true, false useAnnotation: true tuner: - #choice: TPE, Random, Anneal, Evolution + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed 
through nnictl) builtinTunerName: TPE classArgs: #choice: maximize, minimize optimize_mode: maximize trial: command: python3 mnist.py - codeDir: ~/nni/examples/trials/mnist-smartparam - gpuNum: 0 \ No newline at end of file + codeDir: . + gpuNum: 0 diff --git a/examples/trials/mnist-smartparam/config_pai.yml b/examples/trials/mnist-smartparam/config_pai.yml new file mode 100644 index 0000000000..4b5a088d11 --- /dev/null +++ b/examples/trials/mnist-smartparam/config_pai.yml @@ -0,0 +1,35 @@ +authorName: default +experimentName: example_mnist-smartparam +trialConcurrency: 1 +maxExecDuration: 1h +maxTrialNum: 10 +#choice: local, remote, pai +trainingServicePlatform: pai +#choice: true, false +useAnnotation: true +tuner: + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed through nnictl) + builtinTunerName: TPE + classArgs: + #choice: maximize, minimize + optimize_mode: maximize +trial: + command: python3 mnist.py + codeDir: . + gpuNum: 0 + cpuNum: 1 + memoryMB: 8196 + #The docker image to run nni job on pai + image: openpai/pai.example.tensorflow + #The hdfs directory to store data on pai, format 'hdfs://host:port/directory' + hdfsDataDir: hdfs://10.10.10.10:9000/username/nni + #The hdfs directory to store output data generated by nni, format 'hdfs://host:port/directory' + hdfsOutputDir: hdfs://10.10.10.10:9000/username/nni +paiConfig: + #The username to login pai + userName: username + #The password to login pai + passWord: password + #The host of restful server of pai + host: 10.10.10.10 \ No newline at end of file diff --git a/examples/trials/mnist/config.yml b/examples/trials/mnist/config.yml index 331afab2b9..2f6141fa45 100644 --- a/examples/trials/mnist/config.yml +++ b/examples/trials/mnist/config.yml @@ -2,19 +2,20 @@ authorName: default experimentName: example_mnist trialConcurrency: 1 maxExecDuration: 1h -maxTrialNum: 1 -#choice: local, remote +maxTrialNum: 10 +#choice: local, remote, pai trainingServicePlatform: local 
-searchSpacePath: ~/nni/examples/trials/mnist/search_space.json +searchSpacePath: search_space.json #choice: true, false useAnnotation: false tuner: - #choice: TPE, Random, Anneal, Evolution + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed through nnictl) builtinTunerName: TPE classArgs: #choice: maximize, minimize optimize_mode: maximize trial: command: python3 mnist.py - codeDir: ~/nni/examples/trials/mnist - gpuNum: 0 \ No newline at end of file + codeDir: . + gpuNum: 0 diff --git a/examples/trials/mnist/config_assessor.yml b/examples/trials/mnist/config_assessor.yml index e2776f2b99..2a919ad626 100644 --- a/examples/trials/mnist/config_assessor.yml +++ b/examples/trials/mnist/config_assessor.yml @@ -9,7 +9,8 @@ searchSpacePath: ~/nni/examples/trials/mnist/search_space.json #choice: true, false useAnnotation: false tuner: - #choice: TPE, Random, Anneal, Evolution + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed through nnictl) builtinTunerName: TPE classArgs: #choice: maximize, minimize @@ -23,4 +24,4 @@ assessor: trial: command: python3 mnist.py codeDir: ~/nni/examples/trials/mnist - gpuNum: 0 \ No newline at end of file + gpuNum: 0 diff --git a/examples/trials/mnist/config_pai.yml b/examples/trials/mnist/config_pai.yml new file mode 100644 index 0000000000..a20fdce40b --- /dev/null +++ b/examples/trials/mnist/config_pai.yml @@ -0,0 +1,36 @@ +authorName: default +experimentName: example_mnist +trialConcurrency: 1 +maxExecDuration: 1h +maxTrialNum: 10 +#choice: local, remote, pai +trainingServicePlatform: pai +searchSpacePath: search_space.json +#choice: true, false +useAnnotation: false +tuner: + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed through nnictl) + builtinTunerName: TPE + classArgs: + #choice: maximize, minimize + optimize_mode: maximize +trial: + command: python3 mnist.py + codeDir: . 
+ gpuNum: 0 + cpuNum: 1 + memoryMB: 8196 + #The docker image to run nni job on pai + image: openpai/pai.example.tensorflow + #The hdfs directory to store data on pai, format 'hdfs://host:port/directory' + hdfsDataDir: hdfs://10.10.10.10:9000/username/nni + #The hdfs directory to store output data generated by nni, format 'hdfs://host:port/directory' + hdfsOutputDir: hdfs://10.10.10.10:9000/username/nni +paiConfig: + #The username to login pai + userName: username + #The password to login pai + passWord: password + #The host of restful server of pai + host: 10.10.10.10 \ No newline at end of file diff --git a/examples/trials/pytorch_cifar10/config.yml b/examples/trials/pytorch_cifar10/config.yml index 102191315c..1d6f847805 100644 --- a/examples/trials/pytorch_cifar10/config.yml +++ b/examples/trials/pytorch_cifar10/config.yml @@ -2,19 +2,20 @@ authorName: default experimentName: example_pytorch_cifar10 trialConcurrency: 1 maxExecDuration: 100h -maxTrialNum: 1 -#choice: local, remote +maxTrialNum: 10 +#choice: local, remote, pai trainingServicePlatform: local -searchSpacePath: ~/nni/examples/trials/pytorch_cifar10/search_space.json +searchSpacePath: search_space.json #choice: true, false useAnnotation: false tuner: - #choice: TPE, Random, Anneal, Evolution + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed through nnictl) builtinTunerName: TPE classArgs: #choice: maximize, minimize optimize_mode: maximize trial: command: python3 main.py - codeDir: ~/nni/examples/trials/pytorch_cifar10 + codeDir: . 
gpuNum: 1 diff --git a/examples/trials/pytorch_cifar10/config_pai.yml b/examples/trials/pytorch_cifar10/config_pai.yml new file mode 100644 index 0000000000..783285f815 --- /dev/null +++ b/examples/trials/pytorch_cifar10/config_pai.yml @@ -0,0 +1,36 @@ +authorName: default +experimentName: example_pytorch_cifar10 +trialConcurrency: 1 +maxExecDuration: 100h +maxTrialNum: 10 +#choice: local, remote, pai +trainingServicePlatform: pai +searchSpacePath: search_space.json +#choice: true, false +useAnnotation: false +tuner: + #choice: TPE, Random, Anneal, Evolution, + #SMAC (SMAC should be installed through nnictl) + builtinTunerName: TPE + classArgs: + #choice: maximize, minimize + optimize_mode: maximize +trial: + command: python3 main.py + codeDir: . + gpuNum: 1 + cpuNum: 1 + memoryMB: 8196 + #The docker image to run nni job on pai + image: openpai/pai.example.tensorflow + #The hdfs directory to store data on pai, format 'hdfs://host:port/directory' + hdfsDataDir: hdfs://10.10.10.10:9000/username/nni + #The hdfs directory to store output data generated by nni, format 'hdfs://host:port/directory' + hdfsOutputDir: hdfs://10.10.10.10:9000/username/nni +paiConfig: + #The username to login pai + userName: username + #The password to login pai + passWord: password + #The host of restful server of pai + host: 10.10.10.10 diff --git a/install.sh b/install.sh index 3d5199e187..0c3d39bcf1 100644 --- a/install.sh +++ b/install.sh @@ -1,3 +1,7 @@ #!/bin/bash -make easy-install +make install-dependencies +make build +make dev-install +make install-examples +make update-bash-config source ~/.bashrc diff --git a/setup.py b/setup.py index eeee54d075..ea38f80667 100644 --- a/setup.py +++ b/setup.py @@ -22,42 +22,20 @@ import os from setuptools import setup, find_packages from setuptools.command.install import install -from subprocess import Popen +import subprocess def read(fname): return open(os.path.join(os.path.dirname(__file__), fname), encoding='utf-8').read() class 
CustomInstallCommand(install): '''a customized install class in pip module''' - def makeInstall(self): - '''execute make pip-install command''' - cmds = ['make', 'pip-install'] - process = Popen(cmds) - if process.wait() != 0: - print('Error: Make Install Failed') - exit(-1) - - def writeEnvironmentVariables(self, variable_name): - '''write an environment variable into ~/.bashrc''' - paths = os.getenv("PATH").split(':') - bin_path = os.path.join(os.getenv('HOME'),'.local/'+variable_name+'/bin') - - if bin_path not in paths: - bashrc_path = os.path.join(os.getenv('HOME'), '.bashrc') - process = Popen('echo export PATH=' + bin_path + ':\$PATH >> ' + bashrc_path, shell=True) - if process.wait() != 0: - print('Error: Write Environment Variables Failed') - exit(-1) - def run(self): - install.run(self) - self.makeInstall() - self.writeEnvironmentVariables('node') - self.writeEnvironmentVariables('yarn') + super().run() + subprocess.run(['make', 'pip-install'], check=True) setup( name = 'NNI', - version = '0.1.0', + version = '0.2.0', author = 'Microsoft NNI Team', author_email = 'nni@microsoft.com', description = 'Neural Network Intelligence project', @@ -69,8 +47,10 @@ def run(self): package_dir = { 'nni_annotation': 'tools/nni_annotation', 'nni': 'src/sdk/pynni/nni', - 'nnicmd': 'tools/nnicmd' + 'nnicmd': 'tools/nnicmd', + 'trial_tool':'tools/trial_tool' }, + package_data = {'nni': ['**/requirements.txt']}, python_requires = '>=3.5', install_requires = [ 'astor', @@ -81,16 +61,11 @@ def run(self): 'pyyaml', 'requests', 'scipy', - 'schema' - ], - dependency_links = [ - 'git+https://github.com/hyperopt/hyperopt.git' + 'schema', + 'pyhdfs' ], cmdclass={ 'install': CustomInstallCommand - }, - entry_points={ - 'console_scripts': ['nnictl = nnicmd.nnictl:parse_args'] } ) diff --git a/src/nni_manager/common/log.ts b/src/nni_manager/common/log.ts index da1c4e5c0b..0b3945746b 100644 --- a/src/nni_manager/common/log.ts +++ b/src/nni_manager/common/log.ts @@ -40,7 +40,7 @@ 
class BufferSerialEmitter { private writable: Writable; constructor(writable: Writable) { - this.buffer = new Buffer(0); + this.buffer = Buffer.alloc(0); this.emitting = false; this.writable = writable; } @@ -61,7 +61,7 @@ class BufferSerialEmitter { this.emit(); } }); - this.buffer = new Buffer(0); + this.buffer = Buffer.alloc(0); } } diff --git a/src/nni_manager/common/manager.ts b/src/nni_manager/common/manager.ts index 10fb9a4227..fd6b2561e3 100644 --- a/src/nni_manager/common/manager.ts +++ b/src/nni_manager/common/manager.ts @@ -22,7 +22,7 @@ import { MetricDataRecord, MetricType, TrialJobInfo } from './datastore'; import { TrialJobStatus } from './trainingService'; -type ProfileUpdateType = 'TRIAL_CONCURRENCY' | 'MAX_EXEC_DURATION' | 'SEARCH_SPACE'; +type ProfileUpdateType = 'TRIAL_CONCURRENCY' | 'MAX_EXEC_DURATION' | 'SEARCH_SPACE' | 'MAX_TRIAL_NUM'; interface ExperimentParams { authorName: string; @@ -70,7 +70,7 @@ interface TrialJobStatistics { } interface NNIManagerStatus { - status: 'INITIALIZED' | 'EXPERIMENT_RUNNING' | 'ERROR' | 'STOPPING' | 'STOPPED'; + status: 'INITIALIZED' | 'EXPERIMENT_RUNNING' | 'ERROR' | 'STOPPING' | 'STOPPED' | 'SUSPENDED'; errors: string[]; } diff --git a/src/nni_manager/common/utils.ts b/src/nni_manager/common/utils.ts index ba0650ef28..e83e40e919 100644 --- a/src/nni_manager/common/utils.ts +++ b/src/nni_manager/common/utils.ts @@ -161,10 +161,6 @@ function parseArg(names: string[]): string { function getMsgDispatcherCommand(tuner: any, assessor: any): string { let command: string = `python3 -m nni --tuner_class_name ${tuner.className}`; - if (process.env.VIRTUAL_ENV) { - command = path.join(process.env.VIRTUAL_ENV, 'bin/') +command; - } - if (tuner.classArgs !== undefined) { command += ` --tuner_args ${JSON.stringify(JSON.stringify(tuner.classArgs))}`; } @@ -229,5 +225,19 @@ function cleanupUnitTest(): void { Container.restore(ExperimentStartupInfo); } -export { getMsgDispatcherCommand, getLogDir, getExperimentRootDir, 
getDefaultDatabaseDir, mkDirP, delay, prepareUnitTest, - parseArg, cleanupUnitTest, uniqueString, randomSelect }; +/** + * Get IPv4 address of current machine + */ +function getIPV4Address(): string { + let ipv4Address : string = ''; + + for(const item of os.networkInterfaces().eth0) { + if(item.family === 'IPv4') { + ipv4Address = item.address; + } + } + return ipv4Address; +} + +export { getMsgDispatcherCommand, getLogDir, getExperimentRootDir, getDefaultDatabaseDir, getIPV4Address, + mkDirP, delay, prepareUnitTest, parseArg, cleanupUnitTest, uniqueString, randomSelect }; diff --git a/src/nni_manager/core/nniDataStore.ts b/src/nni_manager/core/nniDataStore.ts index 47c2f01dc3..790a5680c0 100644 --- a/src/nni_manager/core/nniDataStore.ts +++ b/src/nni_manager/core/nniDataStore.ts @@ -160,12 +160,11 @@ class NNIDataStore implements DataStore { private async getFinalMetricData(trialJobId: string): Promise { const metrics: MetricDataRecord[] = await this.getMetricData(trialJobId, 'FINAL'); - assert(metrics.length <= 1); - if (metrics.length === 1) { - return metrics[0]; - } else { - return undefined; + if (metrics.length > 1) { + this.log.error(`Found multiple final results for trial job: ${trialJobId}`); } + + return metrics[0]; } private getJobStatusByLatestEvent(event: TrialJobEvent): TrialJobStatus { diff --git a/src/nni_manager/core/nnimanager.ts b/src/nni_manager/core/nnimanager.ts index 48d9fa3c83..7061efd29f 100644 --- a/src/nni_manager/core/nnimanager.ts +++ b/src/nni_manager/core/nnimanager.ts @@ -40,7 +40,6 @@ import { REQUEST_TRIAL_JOBS, TERMINATE, TRIAL_END, UPDATE_SEARCH_SPACE } from './commands'; import { createDispatcherInterface, IpcInterface } from './ipcInterface'; -import { TrialJobMaintainerEvent, TrialJobs } from './trialJobs'; /** * NNIManager @@ -48,23 +47,28 @@ import { TrialJobMaintainerEvent, TrialJobs } from './trialJobs'; class NNIManager implements Manager { private trainingService: TrainingService; private dispatcher: IpcInterface | 
undefined; - private trialJobsMaintainer: TrialJobs | undefined; private currSubmittedTrialNum: number; // need to be recovered - private trialConcurrencyReduction: number; + private trialConcurrencyChange: number; // >0: increase, <0: decrease private customizedTrials: string[]; // need to be recovered private log: Logger; private dataStore: DataStore; private experimentProfile: ExperimentProfile; private dispatcherPid: number; private status: NNIManagerStatus; + private waitingTrials: string[]; + private trialJobs: Map; + private suspendDuration: number; constructor() { this.currSubmittedTrialNum = 0; - this.trialConcurrencyReduction = 0; + this.trialConcurrencyChange = 0; this.customizedTrials = []; this.trainingService = component.get(TrainingService); assert(this.trainingService); this.dispatcherPid = 0; + this.waitingTrials = []; + this.trialJobs = new Map(); + this.suspendDuration = 0; this.log = getLogger(); this.dataStore = component.get(DataStore); @@ -86,6 +90,9 @@ class NNIManager implements Manager { case 'SEARCH_SPACE': this.updateSearchSpace(experimentProfile.params.searchSpace); break; + case 'MAX_TRIAL_NUM': + this.updateMaxTrialNum(experimentProfile.params.maxTrialNum); + break; default: throw new Error('Error: unrecognized updateType'); } @@ -195,13 +202,8 @@ class NNIManager implements Manager { public stopExperiment(): Promise { this.status.status = 'STOPPING'; - if (this.trialJobsMaintainer !== undefined) { - this.trialJobsMaintainer.setStopLoop(); - return Promise.resolve(); - } else { - return Promise.reject(new Error('Error: undefined trialJobsMaintainer')); - } + return Promise.resolve(); } public async getMetricData(trialJobId: string, metricType: MetricType): Promise { @@ -236,14 +238,16 @@ class NNIManager implements Manager { newCwd = cwd; } // TO DO: add CUDA_VISIBLE_DEVICES + let nniEnv = { + NNI_MODE: mode, + NNI_CHECKPOINT_DIRECTORY: dataDirectory, + NNI_LOG_DIRECTORY: getLogDir() + }; + let newEnv = Object.assign({}, process.env, 
nniEnv); const tunerProc: ChildProcess = spawn(command, [], { stdio, cwd: newCwd, - env: { - NNI_MODE: mode, - NNI_CHECKPOINT_DIRECTORY: dataDirectory, - NNI_LOG_DIRECTORY: getLogDir() - }, + env: newEnv, shell: true }); this.dispatcherPid = tunerProc.pid; @@ -253,28 +257,14 @@ class NNIManager implements Manager { } private updateTrialConcurrency(trialConcurrency: number): void { - // TO DO: this method can only be called after startExperiment/resumeExperiment - if (trialConcurrency > this.experimentProfile.params.trialConcurrency) { - if (this.dispatcher === undefined) { - throw new Error('Error: tuner has to be initialized'); - } - this.dispatcher.sendCommand( - REQUEST_TRIAL_JOBS, - String(trialConcurrency - this.experimentProfile.params.trialConcurrency) - ); - } else { - // we assume trialConcurrency >= 0, which is checked by restserver - this.trialConcurrencyReduction += (this.experimentProfile.params.trialConcurrency - trialConcurrency); - } + // we assume trialConcurrency >= 0, which is checked by restserver + this.trialConcurrencyChange += (trialConcurrency - this.experimentProfile.params.trialConcurrency); this.experimentProfile.params.trialConcurrency = trialConcurrency; return; } private updateMaxExecDuration(duration: number): void { - if (this.trialJobsMaintainer !== undefined) { - this.trialJobsMaintainer.updateMaxExecDuration(duration); - } this.experimentProfile.params.maxExecDuration = duration; return; @@ -290,6 +280,12 @@ class NNIManager implements Manager { return; } + private updateMaxTrialNum(maxTrialNum: number): void { + this.experimentProfile.params.maxTrialNum = maxTrialNum; + + return; + } + private async experimentDoneCleanUp(): Promise { if (this.dispatcher === undefined) { throw new Error('Error: tuner has not been setup'); @@ -332,11 +328,117 @@ class NNIManager implements Manager { const execDuration: number = this.experimentProfile.execDuration; for (; ;) { await delay(1000 * 60 * 10); // 10 minutes - 
this.experimentProfile.execDuration = execDuration + (Date.now() - startTime) / 1000; + this.experimentProfile.execDuration = execDuration + (Date.now() - startTime) / 1000 - this.suspendDuration; await this.storeExperimentProfile(); } } + private async requestTrialJobsStatus(): Promise { + const deferred: Deferred = new Deferred(); + let finishedTrialJobNum: number = 0; + for (const trialJobId of Array.from(this.trialJobs.keys())) { + const trialJobDetail: TrialJobDetail = await this.trainingService.getTrialJob(trialJobId); + const oldTrialJobDetail: TrialJobDetail | undefined = this.trialJobs.get(trialJobId); + assert(oldTrialJobDetail); + if (oldTrialJobDetail !== undefined && oldTrialJobDetail.status !== trialJobDetail.status) { + this.trialJobs.set(trialJobId, Object.assign({}, trialJobDetail)); + await this.dataStore.storeTrialJobEvent(trialJobDetail.status, trialJobDetail.id, undefined, trialJobDetail.url); + } + switch (trialJobDetail.status) { + case 'SUCCEEDED': + case 'USER_CANCELED': + this.trialJobs.delete(trialJobId); + finishedTrialJobNum++; + break; + case 'FAILED': + case 'SYS_CANCELED': + // In the current version, we do not retry + // TO DO: push this job to queue for retry + this.trialJobs.delete(trialJobId); + finishedTrialJobNum++; + break; + case 'WAITING': + case 'RUNNING': + case 'UNKNOWN': + // Do nothing + break; + default: + // TO DO: add warning in log + } + } + deferred.resolve(finishedTrialJobNum); + + return deferred.promise; + } + + private async manageTrials(): Promise { + if (this.dispatcher === undefined) { + throw new Error('Error: tuner has not been setup'); + } + const startTime: number = Date.now(); + let suspendStartTime: number = 0; + for (; ;) { + if (this.status.status === 'STOPPING') { + break; + } + const finishedTrialJobNum: number = await this.requestTrialJobsStatus(); + const requestTrialNum: number = this.trialConcurrencyChange + finishedTrialJobNum; + if (requestTrialNum >= 0) { + this.trialConcurrencyChange = 0; + 
} else { + this.trialConcurrencyChange = requestTrialNum; + } + for (let i: number = 0; i < requestTrialNum; i++) { + // ask tuner for more trials + if (this.customizedTrials.length > 0) { + const hyperParams: string | undefined = this.customizedTrials.shift(); + this.dispatcher.sendCommand(ADD_CUSTOMIZED_TRIAL_JOB, hyperParams); + } else { + this.dispatcher.sendCommand(REQUEST_TRIAL_JOBS, '1'); + } + } + // check maxtrialnum and maxduration here + if ((Date.now() - startTime) / 1000 + this.experimentProfile.execDuration - this.suspendDuration + > this.experimentProfile.params.maxExecDuration || + this.currSubmittedTrialNum >= this.experimentProfile.params.maxTrialNum) { + assert(this.status.status === 'EXPERIMENT_RUNNING' || this.status.status === 'SUSPENDED'); + if (this.status.status === 'EXPERIMENT_RUNNING') { + suspendStartTime = Date.now(); + } + this.status.status = 'SUSPENDED'; + } else { + if (this.status.status === 'SUSPENDED') { + assert(suspendStartTime !== 0); + this.suspendDuration += (Date.now() - suspendStartTime) / 1000; + } + this.status.status = 'EXPERIMENT_RUNNING'; + for (let i: number = this.trialJobs.size; i < this.experimentProfile.params.trialConcurrency; i++) { + if (this.waitingTrials.length === 0 || + this.currSubmittedTrialNum >= this.experimentProfile.params.maxTrialNum) { + break; + } + const hyperParams: string | undefined = this.waitingTrials.shift(); + if (hyperParams === undefined) { + throw new Error(`Error: invalid hyper-parameters for job submission: ${hyperParams}`); + } + this.currSubmittedTrialNum++; + const trialJobAppForm: TrialJobApplicationForm = { + jobType: 'TRIAL', + hyperParameters: hyperParams + }; + const trialJobDetail: TrialJobDetail = await this.trainingService.submitTrialJob(trialJobAppForm); + this.trialJobs.set(trialJobDetail.id, Object.assign({}, trialJobDetail)); + assert(trialJobDetail.status === 'WAITING'); + await this.dataStore.storeTrialJobEvent(trialJobDetail.status, trialJobDetail.id, hyperParams, 
trialJobDetail.url); + } + } + await delay(1000 * 5); // 5 seconds + } + this.log.info('Experiment done, cleaning up...'); + await this.experimentDoneCleanUp(); + this.log.info('Experiment done.'); + } + private storeExperimentProfile(): Promise { this.experimentProfile.revision += 1; @@ -344,12 +446,7 @@ class NNIManager implements Manager { } private async run(): Promise { - this.trialJobsMaintainer = new TrialJobs( - this.trainingService, - this.experimentProfile.execDuration, - this.experimentProfile.params.maxExecDuration); - - assert(this.dispatcher !== undefined && this.trialJobsMaintainer !== undefined); + assert(this.dispatcher !== undefined); this.addEventListeners(); @@ -358,12 +455,12 @@ class NNIManager implements Manager { await Promise.all([ this.periodicallyUpdateExecDuration(), this.trainingService.run(), - this.trialJobsMaintainer.run()]); + this.manageTrials()]); } - private addEventListeners(): void { + private addEventListeners(): void { // TO DO: cannot run this method more than once in one NNIManager instance - if (this.dispatcher === undefined || this.trialJobsMaintainer === undefined) { + if (this.dispatcher === undefined) { throw new Error('Error: tuner or job maintainer have not been setup'); } this.trainingService.addTrialJobMetricListener((metric: TrialJobMetric) => { @@ -372,12 +469,6 @@ class NNIManager implements Manager { }); }); - this.trialJobsMaintainer.on(async (event: TrialJobMaintainerEvent, trialJobDetail: TrialJobDetail) => { - this.onTrialJobEvent(event, trialJobDetail).catch((err: Error) => { - this.criticalError(err); - }); - }); - this.dispatcher.onCommand((commandType: string, content: string) => { this.onTunerCommand(commandType, content).catch((err: Error) => { this.criticalError(err); @@ -392,9 +483,6 @@ class NNIManager implements Manager { // TO DO: we should send INITIALIZE command to tuner if user's tuner needs to run init method in tuner this.log.debug(`Send tuner command: update search space: 
${this.experimentProfile.params.searchSpace}`); this.dispatcher.sendCommand(UPDATE_SEARCH_SPACE, this.experimentProfile.params.searchSpace); - if (this.trialConcurrencyReduction !== 0) { - throw new Error('Error: cannot modify trialConcurrency before startExperiment'); - } this.log.debug(`Send tuner command: ${this.experimentProfile.params.trialConcurrency}`); this.dispatcher.sendCommand(REQUEST_TRIAL_JOBS, String(this.experimentProfile.params.trialConcurrency)); } @@ -407,73 +495,15 @@ class NNIManager implements Manager { this.dispatcher.sendCommand(REPORT_METRIC_DATA, metric.data); } - private async onTrialJobEvent(event: TrialJobMaintainerEvent, trialJobDetail: TrialJobDetail): Promise { - if (trialJobDetail !== undefined) { - this.log.debug(`Job event: ${event}, id: ${trialJobDetail.id}`); - } else { - this.log.debug(`Job event: ${event}`); - } - if (this.dispatcher === undefined) { - throw new Error('Error: tuner has not been setup'); - } - switch (event) { - case 'SUCCEEDED': - case 'FAILED': - case 'USER_CANCELED': - case 'SYS_CANCELED': - if (this.trialConcurrencyReduction > 0) { - this.trialConcurrencyReduction--; - } else { - if (this.currSubmittedTrialNum < this.experimentProfile.params.maxTrialNum) { - if (this.customizedTrials.length > 0) { - const hyperParams: string | undefined = this.customizedTrials.shift(); - this.dispatcher.sendCommand(ADD_CUSTOMIZED_TRIAL_JOB, hyperParams); - } else { - this.dispatcher.sendCommand(REQUEST_TRIAL_JOBS, '1'); - } - } - } - this.dispatcher.sendCommand(TRIAL_END, JSON.stringify({trial_job_id: trialJobDetail.id, event: event})); - await this.dataStore.storeTrialJobEvent(event, trialJobDetail.id, undefined, trialJobDetail.url); - break; - case 'RUNNING': - await this.dataStore.storeTrialJobEvent(event, trialJobDetail.id, undefined, trialJobDetail.url); - break; - case 'EXPERIMENT_DONE': - this.log.info('Experiment done, cleaning up...'); - await this.experimentDoneCleanUp(); - this.log.info('Experiment done.'); - 
break; - default: - throw new Error('Error: unrecognized event from trialJobsMaintainer'); - } - } - private async onTunerCommand(commandType: string, content: string): Promise { this.log.info(`Command from tuner: ${commandType}, ${content}`); - if (this.trialJobsMaintainer === undefined) { - throw new Error('Error: trialJobsMaintainer not initialized'); - } switch (commandType) { case NEW_TRIAL_JOB: - if (this.currSubmittedTrialNum < this.experimentProfile.params.maxTrialNum) { - this.currSubmittedTrialNum++; - const trialJobAppForm: TrialJobApplicationForm = { - jobType: 'TRIAL', - hyperParameters: content - }; - const trialJobDetail: TrialJobDetail = await this.trainingService.submitTrialJob(trialJobAppForm); - this.trialJobsMaintainer.setTrialJob(trialJobDetail.id, Object.assign({}, trialJobDetail)); - // TO DO: to uncomment - assert(trialJobDetail.status === 'WAITING'); - await this.dataStore.storeTrialJobEvent(trialJobDetail.status, trialJobDetail.id, content, trialJobDetail.url); - if (this.currSubmittedTrialNum === this.experimentProfile.params.maxTrialNum) { - this.trialJobsMaintainer.setNoMoreTrials(); - } - } + this.waitingTrials.push(content); break; case NO_MORE_TRIAL_JOBS: - this.trialJobsMaintainer.setNoMoreTrials(); + //this.trialJobsMaintainer.setNoMoreTrials(); + // ignore this event for now break; case KILL_TRIAL_JOB: await this.trainingService.cancelTrialJob(JSON.parse(content)); diff --git a/src/nni_manager/core/trialJobs.ts b/src/nni_manager/core/trialJobs.ts deleted file mode 100644 index 0d36855563..0000000000 --- a/src/nni_manager/core/trialJobs.ts +++ /dev/null @@ -1,131 +0,0 @@ -/** - * Copyright (c) Microsoft Corporation - * All rights reserved. 
- * - * MIT License - * - * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated - * documentation files (the "Software"), to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and - * to permit persons to whom the Software is furnished to do so, subject to the following conditions: - * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING - * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, - * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- */ - -'use strict'; - -import * as assert from 'assert'; -import { EventEmitter } from 'events'; -import { TrainingService, TrialJobDetail, TrialJobStatus } from '../common/trainingService'; -import { delay } from '../common/utils'; - -type TrialJobMaintainerEvent = TrialJobStatus | 'EXPERIMENT_DONE'; - -/** - * TrialJobs - */ -class TrialJobs { - private eventEmitter: EventEmitter; - private trialJobs: Map; - private noMoreTrials: boolean; - private stopLoop: boolean; - private trainingService: TrainingService; - private pastExecDuration: number; // second - private maxExecDuration: number; // second - - constructor( - trainingService: TrainingService, - pastExecDuration: number, // second - maxExecDuration: number // second - ) { - this.eventEmitter = new EventEmitter(); - this.trialJobs = new Map(); - this.noMoreTrials = false; - this.stopLoop = false; - this.trainingService = trainingService; - this.pastExecDuration = pastExecDuration; - this.maxExecDuration = maxExecDuration; - } - - public setTrialJob(key: string, value: TrialJobDetail): void { - this.trialJobs.set(key, value); - } - - public getTrialJob(key: string): TrialJobDetail | undefined { - return this.trialJobs.get(key); - } - - public setNoMoreTrials(): void { - this.noMoreTrials = true; - } - - public setStopLoop(): void { - this.stopLoop = true; - } - - public updateMaxExecDuration(duration: number): void { - this.maxExecDuration = duration; - } - - public on(listener: (event: TrialJobMaintainerEvent, trialJobDetail: TrialJobDetail) => void): void { - this.eventEmitter.addListener('all', listener); - } - - public async requestTrialJobsStatus(): Promise { - for (const trialJobId of Array.from(this.trialJobs.keys())) { - const trialJobDetail: TrialJobDetail = await this.trainingService.getTrialJob(trialJobId); - switch (trialJobDetail.status) { - case 'SUCCEEDED': - case 'USER_CANCELED': - this.eventEmitter.emit('all', trialJobDetail.status, trialJobDetail); - this.trialJobs.delete(trialJobId); - 
break; - case 'FAILED': - case 'SYS_CANCELED': - // In the current version, we do not retry - // TO DO: push this job to queue for retry - this.eventEmitter.emit('all', trialJobDetail.status, trialJobDetail); - this.trialJobs.delete(trialJobId); - break; - case 'WAITING': - // Do nothing - break; - case 'RUNNING': - const oldTrialJobDetail: TrialJobDetail | undefined = this.trialJobs.get(trialJobId); - assert(oldTrialJobDetail); - if (oldTrialJobDetail !== undefined && oldTrialJobDetail.status === "WAITING") { - this.trialJobs.set(trialJobId, trialJobDetail); - this.eventEmitter.emit('all', trialJobDetail.status, trialJobDetail); - } - break; - case 'UNKNOWN': - // Do nothing - break; - default: - // TO DO: add warning in log - } - } - - return Promise.resolve(); - } - - public async run(): Promise { - const startTime: number = Date.now(); - while ((Date.now() - startTime) / 1000 + this.pastExecDuration < this.maxExecDuration) { - if (this.stopLoop || - (this.noMoreTrials && this.trialJobs.size === 0)) { - break; - } - await this.requestTrialJobsStatus(); - await delay(5000); - } - this.eventEmitter.emit('all', 'EXPERIMENT_DONE'); - } -} - -export { TrialJobs, TrialJobMaintainerEvent }; diff --git a/src/nni_manager/main.ts b/src/nni_manager/main.ts index 6d9c9fa64b..f3d386eccd 100644 --- a/src/nni_manager/main.ts +++ b/src/nni_manager/main.ts @@ -36,6 +36,7 @@ import { LocalTrainingServiceForGPU } from './training_service/local/localTraini import { RemoteMachineTrainingService } from './training_service/remote_machine/remoteMachineTrainingService'; +import { PAITrainingService } from './training_service/pai/paiTrainingService' function initStartupInfo(startExpMode: string, resumeExperimentId: string) { @@ -49,6 +50,8 @@ async function initContainer(platformMode: string): Promise { Container.bind(TrainingService).to(LocalTrainingServiceForGPU).scope(Scope.Singleton); } else if (platformMode === 'remote') { 
Container.bind(TrainingService).to(RemoteMachineTrainingService).scope(Scope.Singleton); + } else if (platformMode === 'pai'){ + Container.bind(TrainingService).to(PAITrainingService).scope(Scope.Singleton); } else { throw new Error(`Error: unsupported mode: ${mode}`); } @@ -61,7 +64,7 @@ async function initContainer(platformMode: string): Promise { } function usage(): void { - console.info('usage: node main.js --port --mode --start_mode --experiment_id '); + console.info('usage: node main.js --port --mode --start_mode --experiment_id '); } let port: number = NNIRestServer.DEFAULT_PORT; @@ -71,7 +74,7 @@ if (strPort && strPort.length > 0) { } const mode: string = parseArg(['--mode', '-m']); -if (!['local', 'remote'].includes(mode)) { +if (!['local', 'remote', 'pai'].includes(mode)) { usage(); process.exit(1); } diff --git a/src/nni_manager/package.json b/src/nni_manager/package.json index 46522044fd..04ee4df3c2 100644 --- a/src/nni_manager/package.json +++ b/src/nni_manager/package.json @@ -23,7 +23,8 @@ "tree-kill": "^1.2.0", "ts-deferred": "^1.0.4", "typescript-ioc": "^1.2.4", - "typescript-string-operations": "^1.3.1" + "typescript-string-operations": "^1.3.1", + "webhdfs":"^1.2.0" }, "devDependencies": { "@types/chai": "^4.1.4", @@ -40,6 +41,7 @@ "chai": "^4.1.2", "mocha": "^5.2.0", "request": "^2.87.0", + "rmdir": "^1.2.0", "tmp": "^0.0.33", "ts-node": "^7.0.0", "tslint": "^5.11.0", diff --git a/src/nni_manager/rest_server/restValidationSchemas.ts b/src/nni_manager/rest_server/restValidationSchemas.ts index 218a8c22c4..b981463434 100644 --- a/src/nni_manager/rest_server/restValidationSchemas.ts +++ b/src/nni_manager/rest_server/restValidationSchemas.ts @@ -33,9 +33,19 @@ export namespace ValidationSchemas { passphrase: joi.string() })), trial_config: joi.object({ - gpuNum: joi.number().min(0).required(), + image: joi.string().min(1), codeDir: joi.string().min(1).required(), - command: joi.string().min(1).required() + dataDir: joi.string(), + outputDir: 
joi.string(), + cpuNum: joi.number().min(1), + memoryMB: joi.number().min(100), + gpuNum: joi.number().min(0).required(), + command: joi.string().min(1).required() + }), + pai_config: joi.object({ + userName: joi.string().min(1).required(), + passWord: joi.string().min(1).required(), + host: joi.string().min(1).required() }) } }; @@ -48,7 +58,7 @@ export namespace ValidationSchemas { searchSpace: joi.string().required(), maxExecDuration: joi.number().min(0).required(), tuner: joi.object({ - builtinTunerName: joi.string().valid('TPE', 'Random', 'Anneal', 'Evolution'), + builtinTunerName: joi.string().valid('TPE', 'Random', 'Anneal', 'Evolution', 'SMAC'), codeDir: joi.string(), classFileName: joi.string(), className: joi.string(), @@ -73,7 +83,7 @@ export namespace ValidationSchemas { }; export const UPDATEEXPERIMENT = { query: { - update_type: joi.string().required().valid('TRIAL_CONCURRENCY', 'MAX_EXEC_DURATION', 'SEARCH_SPACE') + update_type: joi.string().required().valid('TRIAL_CONCURRENCY', 'MAX_EXEC_DURATION', 'SEARCH_SPACE', 'MAX_TRIAL_NUM') }, body: { id: joi.string().required(), diff --git a/src/nni_manager/training_service/common/jobMetrics.ts b/src/nni_manager/training_service/common/jobMetrics.ts new file mode 100644 index 0000000000..a1abe64574 --- /dev/null +++ b/src/nni_manager/training_service/common/jobMetrics.ts @@ -0,0 +1,37 @@ +/** + * Copyright (c) Microsoft Corporation + * All rights reserved. 
+ * + * MIT License + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated + * documentation files (the "Software"), to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and + * to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +'use strict'; + +import { TrialJobStatus } from '../../common/trainingService'; + +// tslint:disable-next-line:max-classes-per-file +export class JobMetrics { + public readonly jobId: string; + public readonly metrics: string[]; + public readonly jobStatus: TrialJobStatus; + public readonly endTimestamp: number; + + constructor(jobId : string, metrics : string[], jobStatus : TrialJobStatus, endTimestamp : number) { + this.jobId = jobId; + this.metrics = metrics; + this.jobStatus = jobStatus; + this.endTimestamp = endTimestamp; + } +} diff --git a/src/nni_manager/training_service/common/trialConfigMetadataKey.ts b/src/nni_manager/training_service/common/trialConfigMetadataKey.ts index e9749e562e..12df449ee1 100644 --- a/src/nni_manager/training_service/common/trialConfigMetadataKey.ts +++ b/src/nni_manager/training_service/common/trialConfigMetadataKey.ts @@ -26,5 +26,6 @@ export enum TrialConfigMetadataKey { MACHINE_LIST = 'machine_list', TRIAL_CONFIG = 'trial_config', EXPERIMENT_ID = 'experimentId', - RANDOM_SCHEDULER = 'random_scheduler' + RANDOM_SCHEDULER = 'random_scheduler', + PAI_CLUSTER_CONFIG = 'pai_config' } diff --git a/src/nni_manager/training_service/pai/hdfsClientUtility.ts b/src/nni_manager/training_service/pai/hdfsClientUtility.ts new file mode 100644 index 0000000000..69fc383e6d --- /dev/null +++ b/src/nni_manager/training_service/pai/hdfsClientUtility.ts @@ -0,0 +1,203 @@ +/** + * Copyright (c) Microsoft Corporation + * All rights reserved. 
+ * + * MIT License + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated + * documentation files (the "Software"), to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and + * to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +import * as path from 'path'; +import * as fs from 'fs'; +import { Deferred } from 'ts-deferred'; +import { getLogger } from '../../common/log'; + +/** + * HDFS client utility, including copy file/directory + */ +export namespace HDFSClientUtility { + /** + * Copy a local file to hdfs directory + * + * @param localFilePath local file path(source) + * @param hdfsFilePath hdfs file path(target) + * @param hdfsClient hdfs client + */ + export async function copyFileToHdfs(localFilePath : string, hdfsFilePath : string, hdfsClient : any) : Promise { + const deferred: Deferred = new Deferred(); + fs.exists(localFilePath, (exists : boolean) => { + // Detect if local file exist + if (exists) { + var localFileStream = fs.createReadStream(localFilePath); + var hdfsFileStream = hdfsClient.createWriteStream(hdfsFilePath); + localFileStream.pipe(hdfsFileStream); + hdfsFileStream.on('finish', function onFinish () { + deferred.resolve(); + }); + hdfsFileStream.on('error', (err : any) => { + getLogger().error(`HDFSCientUtility:copyFileToHdfs, copy file failed, err is ${err.message}`); + deferred.reject(err); + }); + } else { + getLogger().error(`HDFSCientUtility:copyFileToHdfs, ${localFilePath} doesn't exist locally`); + deferred.reject('file not exist!'); + } + }); + return deferred.promise; + } + + /** + * Recursively copy local directory to hdfs directory + * + * @param localDirectory local directory + * @param hdfsDirectory HDFS directory + * @param hdfsClient HDFS client + */ + export async function copyDirectoryToHdfs(localDirectory : string, hdfsDirectory : string, hdfsClient : any) : Promise{ + const deferred: Deferred = new Deferred(); + // TODO: fs.readdirSync doesn't support ~($HOME) + const fileNameArray: string[] = fs.readdirSync(localDirectory); + + for(var fileName of fileNameArray){ + const fullFilePath: string = path.join(localDirectory, fileName); + try { + if (fs.lstatSync(fullFilePath).isFile()) { + await copyFileToHdfs(fullFilePath, 
path.join(hdfsDirectory, fileName), hdfsClient); + } else { + // If filePath is a directory, recuisively copy it to remote directory + await copyDirectoryToHdfs(fullFilePath, path.join(hdfsDirectory, fileName), hdfsClient); + } + } catch(error) { + deferred.reject(error); + } + } + // All files/directories are copied successfully, resolve + deferred.resolve(); + + return deferred.promise; + } + + /** + * Read content from HDFS file + * + * @param hdfsPath HDFS file path + * @param hdfsClient HDFS client + */ + export async function readFileFromHDFS(hdfsPath : string, hdfsClient :any) : Promise { + const deferred: Deferred = new Deferred(); + let buffer : Buffer = Buffer.alloc(0); + + const exist : boolean = await pathExists(hdfsPath, hdfsClient); + if(!exist) { + deferred.reject(`${hdfsPath} doesn't exists`); + } + + const remoteFileStream = hdfsClient.createReadStream(hdfsPath); + remoteFileStream.on('error', (err : any) => { + // Reject with the error + deferred.reject(err); + }); + + remoteFileStream.on('data', (chunk : any) => { + // Concat the data chunk to buffer + buffer = Buffer.concat([buffer, chunk]); + }); + + remoteFileStream.on('finish', function onFinish () { + // Upload is done, resolve + deferred.resolve(buffer); + }); + + return deferred.promise; + } + + /** + * Check if an HDFS path already exists + * + * @param hdfsPath target path need to check in HDFS + * @param hdfsClient HDFS client + */ + export async function pathExists(hdfsPath : string, hdfsClient : any) : Promise { + const deferred : Deferred = new Deferred(); + hdfsClient.exists(hdfsPath, (exist : boolean ) => { + deferred.resolve(exist); + }); + + // Set timeout and reject the promise once reach timeout (5 seconds) + setTimeout(() => deferred.reject(`Check HDFS path ${hdfsPath} exists timeout`), 5000); + + return deferred.promise; + } + + /** + * Mkdir in HDFS, use default permission 755 + * + * @param hdfsPath the path in HDFS. 
It could be either file or directory + * @param hdfsClient + */ + export function mkdir(hdfsPath : string, hdfsClient : any) : Promise { + const deferred : Deferred = new Deferred(); + + hdfsClient.mkdir(hdfsPath, (err : any)=> { + if(!err) { + deferred.resolve(true); + } else { + deferred.reject(err.message); + } + }); + + return deferred.promise; + } + + /** + * Read directory contents + * + * @param hdfsPath the path in HDFS. It could be either file or directory + * @param hdfsClient + */ + export async function readdir(hdfsPath : string, hdfsClient : any) : Promise { + const deferred : Deferred = new Deferred(); + const exist : boolean = await pathExists(hdfsPath, hdfsClient); + if(!exist) { + deferred.reject(`${hdfsPath} doesn't exists`); + } + + hdfsClient.readdir(hdfsPath, (err : any, files : any[] ) => { + if(err) { + deferred.reject(err); + } + + deferred.resolve(files); + }); + + return deferred.promise; + } + + /** + * Delete HDFS path + * @param hdfsPath the path in HDFS. It could be either file or directory + * @param hdfsClient + * @param recursive Mark if need to delete recursively + */ + export function deletePath(hdfsPath : string, hdfsClient : any, recursive : boolean = true) : Promise { + const deferred : Deferred = new Deferred(); + hdfsClient.unlink(hdfsPath, recursive, (err : any)=> { + if(!err) { + deferred.resolve(true); + } else { + deferred.reject(err.message); + } + }); + return deferred.promise; + } +} diff --git a/src/nni_manager/training_service/pai/paiConfig.ts b/src/nni_manager/training_service/pai/paiConfig.ts new file mode 100644 index 0000000000..aa84021ec4 --- /dev/null +++ b/src/nni_manager/training_service/pai/paiConfig.ts @@ -0,0 +1,123 @@ +/** + * Copyright (c) Microsoft Corporation + * All rights reserved. 
+ * + * MIT License + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated + * documentation files (the "Software"), to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and + * to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +'use strict'; + +import {TrialConfig} from '../common/trialConfig' + +export class PAITaskRole { + // Name for the task role + public readonly name: string; + // Number of tasks for the task role, no less than 1 + public readonly taskNumber: number; + // CPU number for one task in the task role, no less than 1 + public readonly cpuNumber: number; + // Memory for one task in the task role, no less than 100 + public readonly memoryMB: number; + // GPU number for one task in the task role, no less than 0 + public readonly gpuNumber: number; + // Executable command for tasks in the task role, can not be empty + public readonly command: string; + + /** + * Constructor + * @param name Name for the task role + * @param taskNumber Number of tasks for the task role, no less than 1 + * @param cpuNumber CPU number for one task in the task role, no less than 1 + * @param memoryMB Memory for one task in the task role, no less than 100 + * @param gpuNumber GPU number for one task in the task role, no less than 0 + * @param command Executable command for tasks in the task role, can not be empty + */ + constructor(name : string, taskNumber : number, cpuNumber : number, memoryMB : number, gpuNumber : number, command : string) { + this.name = name; + this.taskNumber = taskNumber; + this.cpuNumber = cpuNumber; + this.memoryMB = memoryMB; + this.gpuNumber = gpuNumber; + this.command = command; + } +} + +export class PAIJobConfig{ + // Name for the job, need to be unique + public readonly jobName: string; + // URL pointing to the Docker image for all tasks in the job + public readonly image: string; + // Data directory existing on HDFS + public readonly dataDir: string; + // Output directory on HDFS + public readonly outputDir: string; + // Code directory on HDFS + public readonly codeDir: string; + + // List of taskRole, one task role at least + public taskRoles: PAITaskRole[]; + + /** + * Constructor + * @param jobName Name for the job, need to be unique + * @param image URL 
pointing to the Docker image for all tasks in the job + * @param dataDir Data directory existing on HDFS + * @param outputDir Output directory on HDFS + * @param taskRoles List of taskRole, one task role at least + */ + constructor(jobName: string, image : string, dataDir : string, outputDir : string, codeDir : string, taskRoles : PAITaskRole[]){ + this.jobName = jobName; + this.image = image; + this.dataDir = dataDir; + this.outputDir = outputDir; + this.codeDir = codeDir; + this.taskRoles = taskRoles; + } +} + +export class PAIClusterConfig { + public readonly userName: string; + public readonly passWord: string; + public readonly host: string; + + /** + * Constructor + * @param userName User name of PAI Cluster + * @param passWord password of PAI Cluster + * @param host Host IP of PAI Cluster + */ + constructor(userName: string, passWord : string, host : string){ + this.userName = userName; + this.passWord = passWord; + this.host = host; + } +} + +export class NNIPAITrialConfig extends TrialConfig{ + public readonly cpuNum: number; + public readonly memoryMB: number; + public readonly image: string; + public readonly dataDir: string; + public outputDir: string; + + constructor(command : string, codeDir : string, gpuNum : number, cpuNum: number, memoryMB: number, image: string, dataDir: string, outputDir: string) { + super(command, codeDir, gpuNum); + this.cpuNum = cpuNum; + this.memoryMB = memoryMB; + this.image = image; + this.dataDir = dataDir; + this.outputDir = outputDir; + } +} \ No newline at end of file diff --git a/src/nni_manager/training_service/pai/paiData.ts b/src/nni_manager/training_service/pai/paiData.ts new file mode 100644 index 0000000000..e18b5904f6 --- /dev/null +++ b/src/nni_manager/training_service/pai/paiData.ts @@ -0,0 +1,70 @@ +/** + * Copyright (c) Microsoft Corporation + * All rights reserved. 
+ * + * MIT License + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated + * documentation files (the "Software"), to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and + * to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +'use strict'; + +import { JobApplicationForm, TrialJobDetail, TrialJobStatus } from 'common/trainingService'; + +export class PAITrialJobDetail implements TrialJobDetail { + public id: string; + public status: TrialJobStatus; + public paiJobName: string; + public submitTime: number; + public startTime?: number; + public endTime?: number; + public tags?: string[]; + public url?: string; + public workingDirectory: string; + public form: JobApplicationForm; + public hdfsLogPath: string; + + constructor(id: string, status: TrialJobStatus, paiJobName : string, + submitTime: number, workingDirectory: string, form: JobApplicationForm, hdfsLogPath: string) { + this.id = id; + this.status = status; + this.paiJobName = paiJobName; + this.submitTime = submitTime; + this.workingDirectory = workingDirectory; + this.form = form; + this.tags = []; + this.hdfsLogPath = hdfsLogPath; + } +} + +export const PAI_INSTALL_NNI_SHELL_FORMAT: string = +`#!/bin/bash +if python3 -c 'import nni' > /dev/null 2>&1; then + # nni module is already installed, skip + return +else + # Install nni + pip3 install -v --user git+https://github.com/Microsoft/nni.git@v0.2 +fi`; + +export const PAI_TRIAL_COMMAND_FORMAT: string = +`export NNI_PLATFORM=pai NNI_SYS_DIR={0} NNI_OUTPUT_DIR={1} NNI_TRIAL_JOB_ID={2} NNI_EXP_ID={3} +&& cd $NNI_SYS_DIR && sh install_nni.sh +&& python3 -m trial_tool.trial_keeper --trial_command '{4}' --nnimanager_ip '{5}' --pai_hdfs_output_dir '{6}' +--pai_hdfs_host '{7}' --pai_user_name {8}`; + +export const PAI_OUTPUT_DIR_FORMAT: string = +`hdfs://{0}:9000/`; + +export const PAI_LOG_PATH_FORMAT: string = +`http://{0}:50070/explorer.html#{1}` diff --git a/src/nni_manager/training_service/pai/paiJobInfoCollector.ts b/src/nni_manager/training_service/pai/paiJobInfoCollector.ts new file mode 100644 index 0000000000..041151c47d --- /dev/null +++ b/src/nni_manager/training_service/pai/paiJobInfoCollector.ts @@ -0,0 +1,136 @@ +/** + * Copyright (c) Microsoft Corporation + * 
All rights reserved. + * + * MIT License + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated + * documentation files (the "Software"), to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and + * to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +'use strict'; + +import * as request from 'request'; +import { EventEmitter } from 'events'; +import { Deferred } from 'ts-deferred'; +import { getLogger, Logger } from '../../common/log'; +import { NNIError, NNIErrorNames } from '../../common/errors'; +import { PAITrialJobDetail } from './paiData'; +import { PAIClusterConfig } from './paiConfig'; +import { TrialJobStatus } from '../../common/trainingService'; + +/** + * Collector PAI jobs info from PAI cluster, and update pai job status locally + */ +export class PAIJobInfoCollector { + private readonly trialJobsMap : Map; + private readonly log: Logger = getLogger(); + private readonly statusesNeedToCheck : TrialJobStatus[]; + private readonly finalStatuses : TrialJobStatus[]; + + constructor(jobMap: Map) { + this.trialJobsMap = jobMap; + this.statusesNeedToCheck = ['RUNNING', 'UNKNOWN', 'WAITING']; + this.finalStatuses = ['SUCCEEDED', 'FAILED', 'USER_CANCELED', 'SYS_CANCELED']; + } + + public async updateTrialStatusFromPAI(paiToken? 
: string, paiClusterConfig?: PAIClusterConfig) : Promise { + if (!paiClusterConfig || !paiToken) { + return Promise.resolve(); + } + + const updatePaiTrialJobs : Promise[] = []; + for(let [trialJobId, paiTrialJob] of this.trialJobsMap) { + if (!paiTrialJob) { + throw new NNIError(NNIErrorNames.NOT_FOUND, `trial job id ${trialJobId} not found`); + } + updatePaiTrialJobs.push(this.getSinglePAITrialJobInfo(paiTrialJob, paiToken, paiClusterConfig)) + } + + await Promise.all(updatePaiTrialJobs); + } + + private getSinglePAITrialJobInfo(paiTrialJob : PAITrialJobDetail, paiToken : string, paiClusterConfig: PAIClusterConfig) : Promise { + const deferred : Deferred = new Deferred(); + if (!this.statusesNeedToCheck.includes(paiTrialJob.status)) { + deferred.resolve(); + return deferred.promise; + } + + // Rest call to get PAI job info and update status + // Refer https://github.com/Microsoft/pai/blob/master/docs/rest-server/API.md for more detail about PAI Rest API + const getJobInfoRequest: request.Options = { + uri: `http://${paiClusterConfig.host}:9186/api/v1/jobs/${paiTrialJob.paiJobName}`, + method: 'GET', + json: true, + headers: { + "Content-Type": "application/json", + "Authorization": 'Bearer ' + paiToken + } + }; + //TODO : pass in request timeout param? 
+ request(getJobInfoRequest, (error: Error, response: request.Response, body: any) => { + if (error || response.statusCode >= 500) { + this.log.error(`PAI Training service: get job info for trial ${paiTrialJob.id} from PAI Cluster failed!`); + // Queried PAI job info failed, set job status to UNKNOWN + if(paiTrialJob.status === 'WAITING' || paiTrialJob.status === 'RUNNING') { + paiTrialJob.status = 'UNKNOWN'; + } + } else { + if(response.body.jobStatus && response.body.jobStatus.state) { + switch(response.body.jobStatus.state) { + case 'WAITING': + paiTrialJob.status = 'WAITING'; + break; + case 'RUNNING': + paiTrialJob.status = 'RUNNING'; + if(!paiTrialJob.startTime) { + paiTrialJob.startTime = response.body.jobStatus.appLaunchedTime; + } + if(!paiTrialJob.url) { + paiTrialJob.url = response.body.jobStatus.appTrackingUrl; + } + break; + case 'SUCCEEDED': + paiTrialJob.status = 'SUCCEEDED'; + break; + case 'STOPPED': + paiTrialJob.status = 'USER_CANCELED'; + break; + case 'FAILED': + paiTrialJob.status = 'FAILED'; + break; + default: + paiTrialJob.status = 'UNKNOWN'; + break; + } + // For final job statues, update startTime, endTime and url + if(this.finalStatuses.includes(paiTrialJob.status)) { + if(!paiTrialJob.startTime) { + paiTrialJob.startTime = response.body.jobStatus.appLaunchedTime; + } + if(!paiTrialJob.endTime) { + paiTrialJob.endTime = response.body.jobStatus.completedTime; + } + // Set pai trial job's url to WebHDFS output path + if(paiTrialJob.hdfsLogPath) { + paiTrialJob.url = paiTrialJob.hdfsLogPath; + } + } + } + } + deferred.resolve(); + }); + + return deferred.promise; + } +} \ No newline at end of file diff --git a/src/nni_manager/training_service/pai/paiJobRestServer.ts b/src/nni_manager/training_service/pai/paiJobRestServer.ts index 6375eee1c5..098ea74333 100644 --- a/src/nni_manager/training_service/pai/paiJobRestServer.ts +++ b/src/nni_manager/training_service/pai/paiJobRestServer.ts @@ -17,4 +17,82 @@ * OUT OF OR IN CONNECTION WITH THE 
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ - \ No newline at end of file +'use strict'; + +import { Request, Response, Router } from 'express'; +import * as bodyParser from 'body-parser'; +import * as component from '../../common/component'; +import { getExperimentId } from '../../common/experimentStartupInfo'; +import { Inject } from 'typescript-ioc'; +import { PAITrainingService } from './paiTrainingService'; +import { RestServer } from '../../common/restServer' + +/** + * PAI Training service Rest server, provides rest API to support pai job metrics update + * + */ +@component.Singleton +export class PAIJobRestServer extends RestServer{ + /** NNI main rest service default port */ + private static readonly DEFAULT_PORT: number = 51189; + + private readonly API_ROOT_URL: string = '/api/v1/nni-pai'; + + private readonly expId: string = getExperimentId(); + + @Inject + private readonly paiTrainingService : PAITrainingService; + + /** + * constructor to provide NNIRestServer's own rest property, e.g. 
port + */ + constructor() { + super(); + this.port = PAIJobRestServer.DEFAULT_PORT; + this.paiTrainingService = component.get(PAITrainingService); + } + + /** + * NNIRestServer's own router registration + */ + protected registerRestHandler(): void { + this.app.use(bodyParser.json()); + this.app.use(this.API_ROOT_URL, this.createRestHandler()); + } + + private createRestHandler() : Router { + const router: Router = Router(); + + // tslint:disable-next-line:typedef + router.use((req: Request, res: Response, next) => { + this.log.info(`${req.method}: ${req.url}: body:\n${JSON.stringify(req.body, undefined, 4)}`); + res.setHeader('Content-Type', 'application/json'); + next(); + }); + + router.post(`/update-metrics/${this.expId}/:trialId`, (req: Request, res: Response) => { + try { + this.log.info(`Get update-metrics request, trial job id is ${req.params.trialId}`); + this.log.info(`update-metrics body is ${JSON.stringify(req.body)}`); + + // Split metrics array into single metric, then emit + // Warning: If not split metrics into single ones, the behavior will be UNKNOWN + for (const singleMetric of req.body.metrics) { + this.paiTrainingService.MetricsEmitter.emit('metric', { + id : req.body.jobId, + data : singleMetric + }); + } + + res.send(); + } + catch(err) { + this.log.error(`json parse metrics error: ${err}`); + res.status(500); + res.send(err.message); + } + }); + + return router; + } +} \ No newline at end of file diff --git a/src/nni_manager/training_service/pai/paiTrainingService.ts b/src/nni_manager/training_service/pai/paiTrainingService.ts new file mode 100644 index 0000000000..f7f8b3c4e7 --- /dev/null +++ b/src/nni_manager/training_service/pai/paiTrainingService.ts @@ -0,0 +1,432 @@ + +/** + * Copyright (c) Microsoft Corporation + * All rights reserved. 
+ * + * MIT License + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated + * documentation files (the "Software"), to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and + * to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +'use strict' + +import * as component from '../../common/component'; +import * as cpp from 'child-process-promise'; +import * as fs from 'fs'; +import * as path from 'path'; +import * as request from 'request'; + +import { Deferred } from 'ts-deferred'; +import { EventEmitter } from 'events'; +import { getExperimentId } from '../../common/experimentStartupInfo'; +import { HDFSClientUtility } from './hdfsClientUtility' +import { MethodNotImplementedError } from '../../common/errors'; +import { getLogger, Logger } from '../../common/log'; +import { TrialConfigMetadataKey } from '../common/trialConfigMetadataKey'; +import { + JobApplicationForm, TrainingService, TrialJobApplicationForm, + TrialJobDetail, TrialJobMetric +} from '../../common/trainingService'; +import { delay, getExperimentRootDir, getIPV4Address, uniqueString } from '../../common/utils'; +import { PAIJobRestServer } from './paiJobRestServer' +import { PAITrialJobDetail, PAI_INSTALL_NNI_SHELL_FORMAT, PAI_TRIAL_COMMAND_FORMAT, PAI_OUTPUT_DIR_FORMAT, PAI_LOG_PATH_FORMAT } from './paiData'; +import { PAIJobInfoCollector } from './paiJobInfoCollector'; +import { String } from 'typescript-string-operations'; +import { NNIPAITrialConfig, PAIClusterConfig, PAIJobConfig, PAITaskRole } from './paiConfig'; + +var WebHDFS = require('webhdfs'); + +/** + * Training Service implementation for OpenPAI (Open Platform for AI) + * Refer https://github.com/Microsoft/pai for more info about OpenPAI + */ +@component.Singleton +class PAITrainingService implements TrainingService { + private readonly log!: Logger; + private readonly metricsEmitter: EventEmitter; + private readonly trialJobsMap: Map; + private readonly expRootDir: string; + private paiTrialConfig: NNIPAITrialConfig | undefined; + private paiClusterConfig?: PAIClusterConfig; + private stopping: boolean = false; + private hdfsClient: any; + private paiToken? : string; + private experimentId! 
: string; + private readonly paiJobCollector : PAIJobInfoCollector; + private readonly hdfsDirPattern: string; + private hdfsBaseDir: string | undefined; + private hdfsOutputHost: string | undefined; + + constructor() { + this.log = getLogger(); + this.metricsEmitter = new EventEmitter(); + this.trialJobsMap = new Map(); + // Root dir on HDFS + this.expRootDir = path.join('/nni', 'experiments', getExperimentId()); + this.experimentId = getExperimentId(); + this.paiJobCollector = new PAIJobInfoCollector(this.trialJobsMap); + this.hdfsDirPattern = 'hdfs://(?([0-9]{1,3}.){3}[0-9]{1,3})(:[0-9]{2,5})?(?/.*)?'; + } + + public async run(): Promise { + const restServer: PAIJobRestServer = component.get(PAIJobRestServer); + await restServer.start(); + this.log.info(`PAI Training service rest server listening on: ${restServer.endPoint}`); + while (!this.stopping) { + await this.paiJobCollector.updateTrialStatusFromPAI(this.paiToken, this.paiClusterConfig); + await delay(3000); + } + } + + public async listTrialJobs(): Promise { + const jobs: TrialJobDetail[] = []; + + this.trialJobsMap.forEach(async (value: PAITrialJobDetail, key: string) => { + if (value.form.jobType === 'TRIAL') { + jobs.push(await this.getTrialJob(key)); + } + }); + + return Promise.resolve(jobs); + } + + public getTrialJob(trialJobId: string): Promise { + if(!this.paiClusterConfig) { + throw new Error('PAI Cluster config is not initialized'); + } + + const paiTrialJob: PAITrialJobDetail | undefined = this.trialJobsMap.get(trialJobId); + + if (!paiTrialJob) { + return Promise.reject(`trial job ${trialJobId} not found`) + } + + return Promise.resolve(paiTrialJob); + } + + public addTrialJobMetricListener(listener: (metric: TrialJobMetric) => void) { + this.metricsEmitter.on('metric', listener); + } + + public removeTrialJobMetricListener(listener: (metric: TrialJobMetric) => void) { + this.metricsEmitter.off('metric', listener); + } + + public async submitTrialJob(form: JobApplicationForm): Promise { + 
const deferred : Deferred = new Deferred(); + if(!this.paiClusterConfig) { + throw new Error('PAI Cluster config is not initialized'); + } + if (!this.paiTrialConfig) { + throw new Error('trial config is not initialized'); + } + if (!this.paiToken) { + throw new Error('PAI token is not initialized'); + } + + if(!this.hdfsBaseDir){ + throw new Error('hdfsBaseDir is not initialized'); + } + + if(!this.hdfsOutputHost){ + throw new Error('hdfsOutputHost is not initialized'); + } + + this.log.info(`submitTrialJob: form: ${JSON.stringify(form)}`); + + const trialJobId: string = uniqueString(5); + //TODO: use HDFS working folder instead + const trialWorkingFolder: string = path.join(this.expRootDir, 'trials', trialJobId); + + const trialLocalTempFolder: string = path.join(getExperimentRootDir(), 'trials-local', trialJobId); + //create tmp trial working folder locally. + await cpp.exec(`mkdir -p ${path.dirname(trialLocalTempFolder)}`); + await cpp.exec(`cp -r ${this.paiTrialConfig.codeDir} ${trialLocalTempFolder}`); + + const runScriptContent : string = PAI_INSTALL_NNI_SHELL_FORMAT; + // Write NNI installation file to local tmp files + await fs.promises.writeFile(path.join(trialLocalTempFolder, 'install_nni.sh'), runScriptContent, { encoding: 'utf8' }); + + // Write file content ( parameter.cfg ) to local tmp folders + const trialForm : TrialJobApplicationForm = (form) + if(trialForm) { + await fs.promises.writeFile(path.join(trialLocalTempFolder, 'parameter.cfg'), trialForm.hyperParameters, { encoding: 'utf8' }); + } + + // Step 1. 
Prepare PAI job configuration + const paiJobName : string = `nni_exp_${this.experimentId}_trial_${trialJobId}`; + const hdfsCodeDir : string = path.join(this.expRootDir, trialJobId); + + const hdfsOutputDir : string = path.join(this.hdfsBaseDir, this.experimentId, trialJobId); + const hdfsLogPath : string = String.Format( + PAI_LOG_PATH_FORMAT, + this.hdfsOutputHost, + hdfsOutputDir); + + const trialJobDetail: PAITrialJobDetail = new PAITrialJobDetail( + trialJobId, + 'WAITING', + paiJobName, + Date.now(), + trialWorkingFolder, + form, + hdfsLogPath); + this.trialJobsMap.set(trialJobId, trialJobDetail); + + const nniPaiTrialCommand : string = String.Format( + PAI_TRIAL_COMMAND_FORMAT, + // PAI will copy job's codeDir into /root directory + `/root/${trialJobId}`, + `/root/${trialJobId}/nnioutput`, + trialJobId, + this.experimentId, + this.paiTrialConfig.command, + getIPV4Address(), + hdfsOutputDir, + this.hdfsOutputHost, + this.paiClusterConfig.userName + ).replace(/\r\n|\n|\r/gm, ''); + + console.log(`nniPAItrial command is ${nniPaiTrialCommand.trim()}`); + const paiTaskRoles : PAITaskRole[] = [new PAITaskRole('nni_trail_' + trialJobId, + // Task role number + 1, + // Task CPU number + this.paiTrialConfig.cpuNum, + // Task memory + this.paiTrialConfig.memoryMB, + // Task GPU number + this.paiTrialConfig.gpuNum, + // Task command + nniPaiTrialCommand)]; + + const paiJobConfig : PAIJobConfig = new PAIJobConfig( + // Job name + paiJobName, + // Docker image + this.paiTrialConfig.image, + // dataDir + this.paiTrialConfig.dataDir, + // outputDir + this.paiTrialConfig.outputDir, + // codeDir + `$PAI_DEFAULT_FS_URI${hdfsCodeDir}`, + // TODO: Add Virutal Cluster + // PAI Task roles + paiTaskRoles); + + // Step 2. 
Upload code files in codeDir onto HDFS + try { + await HDFSClientUtility.copyDirectoryToHdfs(trialLocalTempFolder, hdfsCodeDir, this.hdfsClient); + } catch (error) { + this.log.error(`PAI Training service: copy ${this.paiTrialConfig.codeDir} to HDFS ${hdfsCodeDir} failed, error is ${error}`); + throw new Error(error.message); + } + + // Step 3. Submit PAI job via Rest call + // Refer https://github.com/Microsoft/pai/blob/master/docs/rest-server/API.md for more detail about PAI Rest API + const submitJobRequest: request.Options = { + uri: `http://${this.paiClusterConfig.host}:9186/api/v1/jobs`, + method: 'POST', + json: true, + body: paiJobConfig, + headers: { + "Content-Type": "application/json", + "Authorization": 'Bearer ' + this.paiToken + } + }; + request(submitJobRequest, (error: Error, response: request.Response, body: any) => { + if (error || response.statusCode >= 400) { + this.log.error(`PAI Training service: Submit trial ${trialJobId} to PAI Cluster failed!`); + trialJobDetail.status = 'FAILED'; + deferred.reject(error ? 
error.message : 'Submit trial failed, http code: ' + response.statusCode); + } else { + trialJobDetail.submitTime = Date.now(); + deferred.resolve(trialJobDetail); + } + }); + + return deferred.promise; + } + + public updateTrialJob(trialJobId: string, form: JobApplicationForm): Promise { + throw new MethodNotImplementedError(); + } + + public get isMultiPhaseJobSupported(): boolean { + return false; + } + + public cancelTrialJob(trialJobId: string): Promise { + const trialJobDetail : PAITrialJobDetail | undefined = this.trialJobsMap.get(trialJobId); + const deferred : Deferred = new Deferred(); + if(!trialJobDetail) { + this.log.error(`cancelTrialJob: trial job id ${trialJobId} not found`); + return Promise.reject(); + } + + if(!this.paiClusterConfig) { + throw new Error('PAI Cluster config is not initialized'); + } + if (!this.paiToken) { + throw new Error('PAI token is not initialized'); + } + + const stopJobRequest: request.Options = { + uri: `http://${this.paiClusterConfig.host}:9186/api/v1/jobs/${trialJobDetail.paiJobName}/executionType`, + method: 'PUT', + json: true, + body: {'value' : 'STOP'}, + headers: { + "Content-Type": "application/json", + "Authorization": 'Bearer ' + this.paiToken + } + }; + request(stopJobRequest, (error: Error, response: request.Response, body: any) => { + if (error || response.statusCode >= 400) { + this.log.error(`PAI Training service: stop trial ${trialJobId} to PAI Cluster failed!`); + deferred.reject(error ? 
error.message : 'Stop trial failed, http code: ' + response.statusCode); + } else { + deferred.resolve(); + } + }); + + return deferred.promise; + } + + public async setClusterMetadata(key: string, value: string): Promise { + const deferred : Deferred = new Deferred(); + + switch (key) { + case TrialConfigMetadataKey.PAI_CLUSTER_CONFIG: + //TODO: try catch exception when setting up HDFS client and get PAI token + this.paiClusterConfig = JSON.parse(value); + + this.hdfsClient = WebHDFS.createClient({ + user: this.paiClusterConfig.userName, + port: 50070, + host: this.paiClusterConfig.host + }); + + // Get PAI authentication token + const authentication_req: request.Options = { + uri: `http://${this.paiClusterConfig.host}:9186/api/v1/token`, + method: 'POST', + json: true, + body: { + username: this.paiClusterConfig.userName, + password: this.paiClusterConfig.passWord + } + }; + + request(authentication_req, (error: Error, response: request.Response, body: any) => { + if (error) { + this.log.error(`Get PAI token failed: ${error.message}`); + deferred.reject(new Error(`Get PAI token failed: ${error.message}`)); + } else { + if(response.statusCode !== 200){ + this.log.error(`Get PAI token failed: get PAI Rest return code ${response.statusCode}`); + deferred.reject(new Error(`Get PAI token failed, please check paiConfig username or password`)); + } + this.paiToken = body.token; + + deferred.resolve(); + } + }); + break; + case TrialConfigMetadataKey.TRIAL_CONFIG: + if (!this.paiClusterConfig){ + this.log.error('pai cluster config is not initialized'); + deferred.reject(new Error('pai cluster config is not initialized')); + break; + } + this.paiTrialConfig = JSON.parse(value); + //paiTrialConfig.outputDir could be null if it is not set in nnictl + if(this.paiTrialConfig.outputDir === undefined || this.paiTrialConfig.outputDir === null){ + this.paiTrialConfig.outputDir = String.Format( + PAI_OUTPUT_DIR_FORMAT, + this.paiClusterConfig.host + ).replace(/\r\n|\n|\r/gm, ''); 
+ } + + const hdfsDirContent = this.paiTrialConfig.outputDir.match(this.hdfsDirPattern); + + if(hdfsDirContent === null) { + throw new Error('Trial outputDir format Error'); + } + const groups = hdfsDirContent.groups; + if(groups === undefined) { + throw new Error('Trial outputDir format Error'); + } + + this.hdfsOutputHost = groups['host']; + this.hdfsBaseDir = groups['baseDir']; + if(this.hdfsBaseDir === undefined) { + this.hdfsBaseDir = "/"; + } + + const hdfsClient = WebHDFS.createClient({ + user: this.paiClusterConfig.userName, + port: 50070, + host: this.hdfsOutputHost + }); + + try { + const exist : boolean = await HDFSClientUtility.pathExists("/", hdfsClient); + if(!exist) { + deferred.reject(new Error(`Please check hdfsOutputDir host!`)); + } + } catch(error) { + deferred.reject(new Error(`HDFS encounters problem, error is ${error}. Please check hdfsOutputDir host!`)); + } + + deferred.resolve(); + break; + default: + //Reject for unknown keys + throw new Error(`Uknown key: ${key}`); + } + + return deferred.promise; + } + + public getClusterMetadata(key: string): Promise { + const deferred : Deferred = new Deferred(); + + deferred.resolve(); + return deferred.promise; + } + + public async cleanUp(): Promise { + this.stopping = true; + + const deferred : Deferred = new Deferred(); + const restServer: PAIJobRestServer = component.get(PAIJobRestServer); + try { + await restServer.stop(); + deferred.resolve(); + this.log.info('PAI Training service rest server stopped successfully.'); + } catch (error) { + this.log.error(`PAI Training service rest server stopped failed, error: ${error.message}`); + deferred.reject(error); + } + + return deferred.promise; + } + + public get MetricsEmitter() : EventEmitter { + return this.metricsEmitter; + } +} + +export { PAITrainingService } \ No newline at end of file diff --git a/src/nni_manager/training_service/pai/paiTrialConfig.ts b/src/nni_manager/training_service/pai/paiTrialConfig.ts new file mode 100644 index 
0000000000..583db9e725 --- /dev/null +++ b/src/nni_manager/training_service/pai/paiTrialConfig.ts @@ -0,0 +1,39 @@ +/** + * Copyright (c) Microsoft Corporation + * All rights reserved. + * + * MIT License + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated + * documentation files (the "Software"), to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and + * to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +'use strict'; + +import {TrialConfig} from '../common/trialConfig' + +export class PAITrialConfig extends TrialConfig{ + public readonly cpuNum: number; + public readonly memoryMB: number; + public readonly image: string; + public readonly dataDir: string; + public readonly outputDir: string; + + constructor(command : string, codeDir : string, gpuNum : number, cpuNum: number, memoryMB: number, image: string, dataDir: string, outputDir: string) { + super(command, codeDir, gpuNum); + this.cpuNum = cpuNum; + this.memoryMB = memoryMB; + this.image = image; + this.dataDir = dataDir; + this.outputDir = outputDir; + } +} \ No newline at end of file diff --git a/src/nni_manager/training_service/remote_machine/metricsCollector.ts b/src/nni_manager/training_service/remote_machine/metricsCollector.ts index 3e346e7000..eb59a51d99 100644 --- a/src/nni_manager/training_service/remote_machine/metricsCollector.ts +++ b/src/nni_manager/training_service/remote_machine/metricsCollector.ts @@ -25,7 +25,8 @@ import * as path from 'path'; import { Client } from 'ssh2'; import { getLogger, Logger } from '../../common/log'; import { TrialJobStatus, TrialJobDetail } from '../../common/trainingService'; -import { JobMetrics, RemoteCommandResult, RemoteMachineMeta, RemoteMachineTrialJobDetail } from './remoteMachineData'; +import { JobMetrics } from '../common/jobMetrics'; +import { RemoteCommandResult, RemoteMachineMeta, RemoteMachineTrialJobDetail } from './remoteMachineData'; import { SSHClientUtility } from './sshClientUtility'; export class MetricsCollector { diff --git a/src/nni_manager/training_service/remote_machine/remoteMachineData.ts b/src/nni_manager/training_service/remote_machine/remoteMachineData.ts index 1e52458790..0cd3a028dc 100644 --- a/src/nni_manager/training_service/remote_machine/remoteMachineData.ts +++ b/src/nni_manager/training_service/remote_machine/remoteMachineData.ts @@ -65,21 +65,6 @@ export class RemoteCommandResult { } } -// 
tslint:disable-next-line:max-classes-per-file -export class JobMetrics { - public readonly jobId: string; - public readonly metrics: string[]; - public readonly jobStatus: TrialJobStatus; - public readonly endTimestamp: number; - - constructor(jobId : string, metrics : string[], jobStatus : TrialJobStatus, endTimestamp : number) { - this.jobId = jobId; - this.metrics = metrics; - this.jobStatus = jobStatus; - this.endTimestamp = endTimestamp; - } -} - /** * RemoteMachineTrialJobDetail */ @@ -121,7 +106,7 @@ export enum ScheduleResultType { REQUIRE_EXCEED_TOTAL } -export const REMOTEMACHINERUNSHELLFORMAT: string = +export const REMOTEMACHINE_RUN_SHELL_FORMAT: string = `#!/bin/bash export NNI_PLATFORM=remote NNI_SYS_DIR={0} NNI_TRIAL_JOB_ID={1} NNI_OUTPUT_DIR={0} cd $NNI_SYS_DIR @@ -129,7 +114,7 @@ echo $$ >{2} eval {3}{4} 2>{5} echo $? \`date +%s%3N\` >{6}`; -export const HOSTJOBSHELLFORMAT: string = +export const HOST_JOB_SHELL_FORMAT: string = `#!/bin/bash cd {0} echo $$ >{1} diff --git a/src/nni_manager/training_service/remote_machine/remoteMachineTrainingService.ts b/src/nni_manager/training_service/remote_machine/remoteMachineTrainingService.ts index 772b93ff5d..e1cff16f22 100644 --- a/src/nni_manager/training_service/remote_machine/remoteMachineTrainingService.ts +++ b/src/nni_manager/training_service/remote_machine/remoteMachineTrainingService.ts @@ -43,8 +43,8 @@ import { TrialConfigMetadataKey } from '../common/trialConfigMetadataKey'; import { GPUScheduler } from './gpuScheduler'; import { MetricsCollector } from './metricsCollector'; import { - HOSTJOBSHELLFORMAT, RemoteCommandResult, RemoteMachineMeta, - REMOTEMACHINERUNSHELLFORMAT, RemoteMachineScheduleInfo, RemoteMachineScheduleResult, + HOST_JOB_SHELL_FORMAT, RemoteCommandResult, RemoteMachineMeta, + REMOTEMACHINE_RUN_SHELL_FORMAT, RemoteMachineScheduleInfo, RemoteMachineScheduleResult, RemoteMachineTrialJobDetail, ScheduleResultType } from './remoteMachineData'; import { SSHClientUtility } from 
'./sshClientUtility'; @@ -427,7 +427,7 @@ class RemoteMachineTrainingService implements TrainingService { // RemoteMachineRunShellFormat is the run shell format string, // See definition in remoteMachineData.ts const runScriptContent: string = String.Format( - REMOTEMACHINERUNSHELLFORMAT, + REMOTEMACHINE_RUN_SHELL_FORMAT, trialWorkingFolder, trialJobId, path.join(trialWorkingFolder, '.nni', 'jobpid'), @@ -470,7 +470,7 @@ class RemoteMachineTrainingService implements TrainingService { await cpp.exec(`mkdir -p ${localDir}`); await SSHClientUtility.remoteExeCommand(`mkdir -p ${remoteDir}`, sshClient); const runScriptContent: string = String.Format( - HOSTJOBSHELLFORMAT, remoteDir, path.join(remoteDir, 'jobpid'), form.cmd, path.join(remoteDir, 'code') + HOST_JOB_SHELL_FORMAT, remoteDir, path.join(remoteDir, 'jobpid'), form.cmd, path.join(remoteDir, 'code') ); await fs.promises.writeFile(path.join(localDir, 'run.sh'), runScriptContent, { encoding: 'utf8' }); await SSHClientUtility.copyFileToRemote( diff --git a/src/nni_manager/training_service/test/hdfsClientUtility.test.ts b/src/nni_manager/training_service/test/hdfsClientUtility.test.ts new file mode 100644 index 0000000000..b8cf30e83a --- /dev/null +++ b/src/nni_manager/training_service/test/hdfsClientUtility.test.ts @@ -0,0 +1,143 @@ +/** + * Copyright (c) Microsoft Corporation + * All rights reserved. + * + * MIT License + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated + * documentation files (the "Software"), to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and + * to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +'use strict'; +import * as chai from 'chai'; +import * as chaiAsPromised from 'chai-as-promised'; +import * as fs from 'fs'; +import * as os from 'os'; +import * as path from 'path'; +import * as tmp from 'tmp'; +import { cleanupUnitTest, prepareUnitTest, uniqueString } from '../../common/utils'; +import { HDFSClientUtility } from '../pai/hdfsClientUtility'; + +var WebHDFS = require('webhdfs'); +var rmdir = require('rmdir'); + +describe('WebHDFS', function () { + /* + To enable web HDFS client unit test, HDFS information needs to be configured in: + Default/.vscode/hdfsInfo.json, whose content looks like: + { + "user": "user1", + "port": 50070, + "host": "10.0.0.0" + } + */ + let skip: boolean = false; + let testHDFSInfo: any; + let hdfsClient: any; + try { + testHDFSInfo = JSON.parse(fs.readFileSync('../../.vscode/hdfsInfo.json', 'utf8')); + console.log(testHDFSInfo); + hdfsClient = WebHDFS.createClient({ + user: testHDFSInfo.user, + port: testHDFSInfo.port, + host: testHDFSInfo.host + }); + } catch (err) { + console.log('Please configure rminfo.json to enable remote machine unit test.'); + skip = true; + } + + before(() => { + chai.should(); + chai.use(chaiAsPromised); + tmp.setGracefulCleanup(); + prepareUnitTest(); + }); + + after(() => { + cleanupUnitTest(); + }); + + it('Test HDFS utility path functions', async () => { + if (skip) { + return; + } + const testPath : string = '/nni_unittest_' + uniqueString(6); + let exists : boolean = await 
HDFSClientUtility.pathExists(testPath, hdfsClient); + // The new random named path is expected to not exist + chai.expect(exists).to.be.equals(false); + + const mkdirResult : boolean = await HDFSClientUtility.mkdir(testPath, hdfsClient); + // Mkdir is expected to be successful + chai.expect(mkdirResult).to.be.equals(true); + + exists = await HDFSClientUtility.pathExists(testPath, hdfsClient); + // The newly created path is expected to exist + chai.expect(exists).to.be.equals(true); + + const deleteResult : boolean = await HDFSClientUtility.deletePath(testPath, hdfsClient); + // Delete path is expected to be successful + chai.expect(deleteResult).to.be.equals(true); + + exists = await HDFSClientUtility.pathExists(testPath, hdfsClient); + // The deleted path is not expected to exist + chai.expect(exists).to.be.equals(false); + }); + + it('Test HDFS utility copyFileToHdfs', async() => { + if (skip) { + return; + } + // Prepare local directory and files + const tmpLocalDirectoryPath : string = path.join(os.tmpdir(), 'nni_unittest_dir_' + uniqueString(6)); + const tmpDataFilePath : string = path.join(tmpLocalDirectoryPath, 'file_' + uniqueString(6)); + const testFileData : string = 'TestContent123'; + fs.mkdirSync(tmpLocalDirectoryPath); + fs.writeFileSync(tmpDataFilePath, testFileData); + + const testHDFSFilePath : string = '/nni_unittest_' + uniqueString(6); + let exists : boolean = await HDFSClientUtility.pathExists(testHDFSFilePath, hdfsClient); + // The new random named path is expected to not exist + chai.expect(exists).to.be.equals(false); + + await HDFSClientUtility.copyFileToHdfs(tmpDataFilePath, testHDFSFilePath, hdfsClient); + exists = await HDFSClientUtility.pathExists(testHDFSFilePath, hdfsClient); + // After copy local file to HDFS, the target file path in HDFS is expected to exist + chai.expect(exists).to.be.equals(true); + + const buffer : Buffer = await HDFSClientUtility.readFileFromHDFS(testHDFSFilePath, hdfsClient); + const actualFileData : string = 
buffer.toString('utf8'); + // The file content read from HDFS is expected to equal to the content of local file + chai.expect(actualFileData).to.be.equals(testFileData); + + const testHDFSDirPath : string = path.join('/nni_unittest_' + uniqueString(6) + '_dir'); + + await HDFSClientUtility.copyDirectoryToHdfs(tmpLocalDirectoryPath, testHDFSDirPath, hdfsClient); + + const files : any[] = await HDFSClientUtility.readdir(testHDFSDirPath, hdfsClient); + + // Expected file count under HDFS target directory is 1 + chai.expect(files.length).to.be.equals(1); + + // Expected file name under HDFS target directory is equal to local file name + chai.expect(files[0].pathSuffix).to.be.equals(path.parse(tmpDataFilePath).base); + + // Cleanup + rmdir(tmpLocalDirectoryPath); + + let deleteRestult : boolean = await HDFSClientUtility.deletePath(testHDFSFilePath, hdfsClient); + chai.expect(deleteRestult).to.be.equals(true); + + deleteRestult = await HDFSClientUtility.deletePath(testHDFSDirPath, hdfsClient); + chai.expect(deleteRestult).to.be.equals(true); + }); +}); \ No newline at end of file diff --git a/src/nni_manager/training_service/test/paiTrainingService.test.ts b/src/nni_manager/training_service/test/paiTrainingService.test.ts new file mode 100644 index 0000000000..4294e4ddc1 --- /dev/null +++ b/src/nni_manager/training_service/test/paiTrainingService.test.ts @@ -0,0 +1,95 @@ +/** + * Copyright (c) Microsoft Corporation + * All rights reserved. 
+ * + * MIT License + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated + * documentation files (the "Software"), to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and + * to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING + * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +'use strict'; + +import * as chai from 'chai'; +import * as chaiAsPromised from 'chai-as-promised'; +import * as fs from 'fs'; +import * as tmp from 'tmp'; +import * as component from '../../common/component'; +import { cleanupUnitTest, prepareUnitTest } from '../../common/utils'; +import { TrialConfigMetadataKey } from '../common/trialConfigMetadataKey'; +import { PAITrainingService } from '../pai/paiTrainingService'; + +// TODO: copy mockedTrail.py to local folder +const localCodeDir: string = tmp.dirSync().name +const mockedTrialPath: string = './training_service/test/mockedTrial.py' +fs.copyFileSync(mockedTrialPath, localCodeDir + '/mockedTrial.py') + +describe('Unit Test for PAITrainingService', () => { + let skip: boolean = false; + let testPaiClusterInfo: any; + let paiCluster: any; + let paiTrialConfig : any; + try { + testPaiClusterInfo = JSON.parse(fs.readFileSync('../../.vscode/paiCluster.json', 'utf8')); + paiCluster = `{\"userName\":\"${testPaiClusterInfo.userName}\",\"passWord\":\"${testPaiClusterInfo.passWord}\",\"host\":\"${testPaiClusterInfo.host}\"}`; + paiTrialConfig = `{\"command\":\"echo hello && ls\",\"codeDir\":\"/home/desy/nni/examples/trials/mnist",\"gpuNum\":\"1\", +\"cpuNum\":\"1\",\"memoryMB\":\"8196\",\"image\":\"openpai/pai.example.tensorflow\",\"dataDir\":\"\",\"outputDir\":\"\"}`; + } catch (err) { + console.log('Please configure rminfo.json to enable remote machine unit test.'); + skip = true; + } + + let paiTrainingService: PAITrainingService; + + console.log(tmp.dirSync().name); + + before(() => { + chai.should(); + chai.use(chaiAsPromised); + prepareUnitTest(); + }); + + after(() => { + cleanupUnitTest(); + }); + + beforeEach(() => { + if (skip) { + return; + } + paiTrainingService = component.get(PAITrainingService); + paiTrainingService.run(); + }); + + afterEach(() => { + if (skip) { + return; + } + paiTrainingService.cleanUp(); + }); + + it('Get PAI token', async () => { + if (skip) { + return; + } + 
console.log(`paiCluster is ${paiCluster}`) + await paiTrainingService.setClusterMetadata(TrialConfigMetadataKey.PAI_CLUSTER_CONFIG, paiCluster); + await paiTrainingService.setClusterMetadata(TrialConfigMetadataKey.TRIAL_CONFIG, paiTrialConfig); + try { + const trialDetail = await paiTrainingService.submitTrialJob({jobType : 'TRIAL'}); + chai.expect(trialDetail.status).to.be.equals('WAITING'); + } catch(error) { + console.log('Submit job failed:' + error); + chai.assert(error) + } + }); +}); \ No newline at end of file diff --git a/src/nni_manager/yarn.lock b/src/nni_manager/yarn.lock index 8611053414..b568f7d179 100644 --- a/src/nni_manager/yarn.lock +++ b/src/nni_manager/yarn.lock @@ -5,6 +5,7 @@ "@types/body-parser@*": version "1.17.0" resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.17.0.tgz#9f5c9d9bd04bb54be32d5eb9fc0d8c974e6cf58c" + integrity sha512-a2+YeUjPkztKJu5aIF2yArYFQQp8d51wZ7DavSHjFuY1mqVgidGyzEQ41JIVNy82fXj8yPgy2vJmfIywgESW6w== dependencies: "@types/connect" "*" "@types/node" "*" @@ -12,30 +13,36 @@ "@types/caseless@*": version "0.12.1" resolved "https://registry.yarnpkg.com/@types/caseless/-/caseless-0.12.1.tgz#9794c69c8385d0192acc471a540d1f8e0d16218a" + integrity sha512-FhlMa34NHp9K5MY1Uz8yb+ZvuX0pnvn3jScRSNAb75KHGB8d3rEU6hqMs3Z2vjuytcMfRg6c5CHMc3wtYyD2/A== "@types/chai-as-promised@^7.1.0": version "7.1.0" resolved "https://registry.yarnpkg.com/@types/chai-as-promised/-/chai-as-promised-7.1.0.tgz#010b04cde78eacfb6e72bfddb3e58fe23c2e78b9" + integrity sha512-MFiW54UOSt+f2bRw8J7LgQeIvE/9b4oGvwU7XW30S9QGAiHGnU/fmiOprsyMkdmH2rl8xSPc0/yrQw8juXU6bQ== dependencies: "@types/chai" "*" "@types/chai@*", "@types/chai@^4.1.4": version "4.1.4" resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.1.4.tgz#5ca073b330d90b4066d6ce18f60d57f2084ce8ca" + integrity sha512-h6+VEw2Vr3ORiFCyyJmcho2zALnUq9cvdB/IO8Xs9itrJVCenC7o26A6+m7D0ihTTr65eS259H5/Ghl/VjYs6g== "@types/connect@*": version "3.4.32" resolved 
"https://registry.yarnpkg.com/@types/connect/-/connect-3.4.32.tgz#aa0e9616b9435ccad02bc52b5b454ffc2c70ba28" + integrity sha512-4r8qa0quOvh7lGD0pre62CAb1oni1OO6ecJLGCezTmhQ8Fz50Arx9RUszryR8KlgK6avuSXvviL6yWyViQABOg== dependencies: "@types/node" "*" "@types/events@*": version "1.2.0" resolved "https://registry.yarnpkg.com/@types/events/-/events-1.2.0.tgz#81a6731ce4df43619e5c8c945383b3e62a89ea86" + integrity sha512-KEIlhXnIutzKwRbQkGWb/I4HFqBuUykAdHgDED6xqwXJfONCjF5VoE0cXEiurh3XauygxzeDzgtXUqvLkxFzzA== "@types/express-serve-static-core@*": version "4.16.0" resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.16.0.tgz#fdfe777594ddc1fe8eb8eccce52e261b496e43e7" + integrity sha512-lTeoCu5NxJU4OD9moCgm0ESZzweAx0YqsAcab6OB0EB3+As1OaHtKnaGJvcngQxYsi9UNv0abn4/DRavrRxt4w== dependencies: "@types/events" "*" "@types/node" "*" @@ -44,6 +51,7 @@ "@types/express@^4.16.0": version "4.16.0" resolved "https://registry.yarnpkg.com/@types/express/-/express-4.16.0.tgz#6d8bc42ccaa6f35cf29a2b7c3333cb47b5a32a19" + integrity sha512-TtPEYumsmSTtTetAPXlJVf3kEqb6wZK0bZojpJQrnD/djV4q1oB6QQ8aKvKqwNPACoe02GNiy5zDzcYivR5Z2w== dependencies: "@types/body-parser" "*" "@types/express-serve-static-core" "*" @@ -52,32 +60,39 @@ "@types/form-data@*": version "2.2.1" resolved "https://registry.yarnpkg.com/@types/form-data/-/form-data-2.2.1.tgz#ee2b3b8eaa11c0938289953606b745b738c54b1e" + integrity sha512-JAMFhOaHIciYVh8fb5/83nmuO/AHwmto+Hq7a9y8FzLDcC1KCU344XDOMEmahnrTFlHjgh4L0WJFczNIX2GxnQ== dependencies: "@types/node" "*" "@types/mime@*": version "2.0.0" resolved "https://registry.yarnpkg.com/@types/mime/-/mime-2.0.0.tgz#5a7306e367c539b9f6543499de8dd519fac37a8b" + integrity sha512-A2TAGbTFdBw9azHbpVd+/FkdW2T6msN1uct1O9bH3vTerEHKZhTXJUQXy+hNq1B0RagfU8U+KBdqiZpxjhOUQA== "@types/mocha@^5.2.5": version "5.2.5" resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-5.2.5.tgz#8a4accfc403c124a0bafe8a9fc61a05ec1032073" + integrity 
sha512-lAVp+Kj54ui/vLUFxsJTMtWvZraZxum3w3Nwkble2dNuV5VnPA+Mi2oGX9XYJAaIvZi3tn3cbjS/qcJXRb6Bww== "@types/node@*": version "10.5.2" resolved "https://registry.yarnpkg.com/@types/node/-/node-10.5.2.tgz#f19f05314d5421fe37e74153254201a7bf00a707" + integrity sha512-m9zXmifkZsMHZBOyxZWilMwmTlpC8x5Ty360JKTiXvlXZfBWYpsg9ZZvP/Ye+iZUh+Q+MxDLjItVTWIsfwz+8Q== "@types/node@^10.5.5": version "10.5.5" resolved "https://registry.yarnpkg.com/@types/node/-/node-10.5.5.tgz#8e84d24e896cd77b0d4f73df274027e3149ec2ba" + integrity sha512-6Qnb1gXbp3g1JX9QVJj3A6ORzc9XCyhokxUKaoonHgNXcQhmk8adhotxfkeK8El9TnFeUuH72yI6jQ5nDJKS6w== "@types/range-parser@*": version "1.2.2" resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.2.tgz#fa8e1ad1d474688a757140c91de6dace6f4abc8d" + integrity sha512-HtKGu+qG1NPvYe1z7ezLsyIaXYyi8SoAVqWDZgDQ8dLrsZvSzUNCwZyfX33uhWxL/SU0ZDQZ3nwZ0nimt507Kw== "@types/request@^2.47.1": version "2.47.1" resolved "https://registry.yarnpkg.com/@types/request/-/request-2.47.1.tgz#25410d3afbdac04c91a94ad9efc9824100735824" + integrity sha512-TV3XLvDjQbIeVxJ1Z3oCTDk/KuYwwcNKVwz2YaT0F5u86Prgc4syDAp6P96rkTQQ4bIdh+VswQIC9zS6NjY7/g== dependencies: "@types/caseless" "*" "@types/form-data" "*" @@ -87,70 +102,82 @@ "@types/rx-core-binding@*": version "4.0.4" resolved "https://registry.yarnpkg.com/@types/rx-core-binding/-/rx-core-binding-4.0.4.tgz#d969d32f15a62b89e2862c17b3ee78fe329818d3" + integrity sha512-5pkfxnC4w810LqBPUwP5bg7SFR/USwhMSaAeZQQbEHeBp57pjKXRlXmqpMrLJB4y1oglR/c2502853uN0I+DAQ== dependencies: "@types/rx-core" "*" "@types/rx-core@*": version "4.0.3" resolved "https://registry.yarnpkg.com/@types/rx-core/-/rx-core-4.0.3.tgz#0b3354b1238cedbe2b74f6326f139dbc7a591d60" + integrity sha1-CzNUsSOM7b4rdPYybxOdvHpZHWA= "@types/rx-lite-aggregates@*": version "4.0.3" resolved "https://registry.yarnpkg.com/@types/rx-lite-aggregates/-/rx-lite-aggregates-4.0.3.tgz#6efb2b7f3d5f07183a1cb2bd4b1371d7073384c2" + integrity 
sha512-MAGDAHy8cRatm94FDduhJF+iNS5//jrZ/PIfm+QYw9OCeDgbymFHChM8YVIvN2zArwsRftKgE33QfRWvQk4DPg== dependencies: "@types/rx-lite" "*" "@types/rx-lite-async@*": version "4.0.2" resolved "https://registry.yarnpkg.com/@types/rx-lite-async/-/rx-lite-async-4.0.2.tgz#27fbf0caeff029f41e2d2aae638b05e91ceb600c" + integrity sha512-vTEv5o8l6702ZwfAM5aOeVDfUwBSDOs+ARoGmWAKQ6LOInQ8J4/zjM7ov12fuTpktUKdMQjkeCp07Vd73mPkxw== dependencies: "@types/rx-lite" "*" "@types/rx-lite-backpressure@*": version "4.0.3" resolved "https://registry.yarnpkg.com/@types/rx-lite-backpressure/-/rx-lite-backpressure-4.0.3.tgz#05abb19bdf87cc740196c355e5d0b37bb50b5d56" + integrity sha512-Y6aIeQCtNban5XSAF4B8dffhIKu6aAy/TXFlScHzSxh6ivfQBQw6UjxyEJxIOt3IT49YkS+siuayM2H/Q0cmgA== dependencies: "@types/rx-lite" "*" "@types/rx-lite-coincidence@*": version "4.0.3" resolved "https://registry.yarnpkg.com/@types/rx-lite-coincidence/-/rx-lite-coincidence-4.0.3.tgz#80bd69acc4054a15cdc1638e2dc8843498cd85c0" + integrity sha512-1VNJqzE9gALUyMGypDXZZXzR0Tt7LC9DdAZQ3Ou/Q0MubNU35agVUNXKGHKpNTba+fr8GdIdkC26bRDqtCQBeQ== dependencies: "@types/rx-lite" "*" "@types/rx-lite-experimental@*": version "4.0.1" resolved "https://registry.yarnpkg.com/@types/rx-lite-experimental/-/rx-lite-experimental-4.0.1.tgz#c532f5cbdf3f2c15da16ded8930d1b2984023cbd" + integrity sha1-xTL1y98/LBXaFt7Ykw0bKYQCPL0= dependencies: "@types/rx-lite" "*" "@types/rx-lite-joinpatterns@*": version "4.0.1" resolved "https://registry.yarnpkg.com/@types/rx-lite-joinpatterns/-/rx-lite-joinpatterns-4.0.1.tgz#f70fe370518a8432f29158cc92ffb56b4e4afc3e" + integrity sha1-9w/jcFGKhDLykVjMkv+1a05K/D4= dependencies: "@types/rx-lite" "*" "@types/rx-lite-testing@*": version "4.0.1" resolved "https://registry.yarnpkg.com/@types/rx-lite-testing/-/rx-lite-testing-4.0.1.tgz#21b19d11f4dfd6ffef5a9d1648e9c8879bfe21e9" + integrity sha1-IbGdEfTf1v/vWp0WSOnIh5v+Iek= dependencies: "@types/rx-lite-virtualtime" "*" "@types/rx-lite-time@*": version "4.0.3" resolved 
"https://registry.yarnpkg.com/@types/rx-lite-time/-/rx-lite-time-4.0.3.tgz#0eda65474570237598f3448b845d2696f2dbb1c4" + integrity sha512-ukO5sPKDRwCGWRZRqPlaAU0SKVxmWwSjiOrLhoQDoWxZWg6vyB9XLEZViKOzIO6LnTIQBlk4UylYV0rnhJLxQw== dependencies: "@types/rx-lite" "*" "@types/rx-lite-virtualtime@*": version "4.0.3" resolved "https://registry.yarnpkg.com/@types/rx-lite-virtualtime/-/rx-lite-virtualtime-4.0.3.tgz#4b30cacd0fe2e53af29f04f7438584c7d3959537" + integrity sha512-3uC6sGmjpOKatZSVHI2xB1+dedgml669ZRvqxy+WqmGJDVusOdyxcKfyzjW0P3/GrCiN4nmRkLVMhPwHCc5QLg== dependencies: "@types/rx-lite" "*" "@types/rx-lite@*": version "4.0.5" resolved "https://registry.yarnpkg.com/@types/rx-lite/-/rx-lite-4.0.5.tgz#b3581525dff69423798daa9a0d33c1e66a5e8c4c" + integrity sha512-KZk5XTR1dm/kNgBx8iVpjno6fRYtAUQWBOmj+O8j724+nk097sz4fOoHJNpCkOJUtHUurZlJC7QvSFCZHbkC+w== dependencies: "@types/rx-core" "*" "@types/rx-core-binding" "*" @@ -158,6 +185,7 @@ "@types/rx@^4.1.1": version "4.1.1" resolved "https://registry.yarnpkg.com/@types/rx/-/rx-4.1.1.tgz#598fc94a56baed975f194574e0f572fd8e627a48" + integrity sha1-WY/JSla67ZdfGUV04PVy/Y5iekg= dependencies: "@types/rx-core" "*" "@types/rx-core-binding" "*" @@ -175,6 +203,7 @@ "@types/serve-static@*": version "1.13.2" resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.13.2.tgz#f5ac4d7a6420a99a6a45af4719f4dcd8cd907a48" + integrity sha512-/BZ4QRLpH/bNYgZgwhKEh+5AsboDBcUdlBYgzoLX0fpj3Y2gp6EApyOlM3bK53wQS/OE1SrdSYBAbux2D1528Q== dependencies: "@types/express-serve-static-core" "*" "@types/mime" "*" @@ -182,6 +211,7 @@ "@types/sqlite3@^3.1.3": version "3.1.3" resolved "https://registry.yarnpkg.com/@types/sqlite3/-/sqlite3-3.1.3.tgz#580d547203b8ad6e11aa6a6769c8ca5d7e197d13" + integrity sha512-BgGToABnI/8/HnZtZz2Qac6DieU2Dm/j3rtbMmUlDVo4T/uLu8cuVfU/n2UkHowiiwXb6/7h/CmSqBIVKgcTMA== dependencies: "@types/events" "*" "@types/node" "*" @@ -189,12 +219,14 @@ "@types/ssh2-streams@*": version "0.1.2" resolved 
"https://registry.yarnpkg.com/@types/ssh2-streams/-/ssh2-streams-0.1.2.tgz#7aa18b8c2450f17699e9ea18a76efc838188d58d" + integrity sha1-eqGLjCRQ8XaZ6eoYp278g4GI1Y0= dependencies: "@types/node" "*" "@types/ssh2@^0.5.35": version "0.5.35" resolved "https://registry.yarnpkg.com/@types/ssh2/-/ssh2-0.5.35.tgz#d6e60d59b7fc22db10abf4730aa7448babde7e3b" + integrity sha1-1uYNWbf8ItsQq/RzCqdEi6vefjs= dependencies: "@types/node" "*" "@types/ssh2-streams" "*" @@ -202,31 +234,37 @@ "@types/stream-buffers@^3.0.2": version "3.0.2" resolved "https://registry.yarnpkg.com/@types/stream-buffers/-/stream-buffers-3.0.2.tgz#b73bfcceae39ecb259750b44ef38a36cfc20e370" + integrity sha1-tzv8zq457LJZdQtE7zijbPwg43A= dependencies: "@types/node" "*" "@types/tmp@^0.0.33": version "0.0.33" resolved "https://registry.yarnpkg.com/@types/tmp/-/tmp-0.0.33.tgz#1073c4bc824754ae3d10cfab88ab0237ba964e4d" + integrity sha1-EHPEvIJHVK49EM+riKsCN7qWTk0= "@types/tough-cookie@*": version "2.3.3" resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-2.3.3.tgz#7f226d67d654ec9070e755f46daebf014628e9d9" + integrity sha512-MDQLxNFRLasqS4UlkWMSACMKeSm1x4Q3TxzUC7KQUsh6RK1ZrQ0VEyE3yzXcBu+K8ejVj4wuX32eUG02yNp+YQ== abbrev@1: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" + integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== accepts@~1.3.5: version "1.3.5" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.5.tgz#eb777df6011723a3b14e8a72c0805c8e86746bd2" + integrity sha1-63d99gEXI6OxTopywIBcjoZ0a9I= dependencies: mime-types "~2.1.18" negotiator "0.6.1" -ajv@^5.1.0: +ajv@^5.1.0, ajv@^5.3.0: version "5.5.2" resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" + integrity sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU= dependencies: co "^4.6.0" fast-deep-equal "^1.0.0" @@ -236,28 +274,34 @@ ajv@^5.1.0: ansi-regex@^2.0.0: version "2.1.1" 
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" + integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" + integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" aproba@^1.0.3: version "1.2.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" + integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== are-we-there-yet@~1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" + integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== dependencies: delegates "^1.0.0" readable-stream "^2.0.6" @@ -265,54 +309,71 @@ are-we-there-yet@~1.1.2: argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" array-flatten@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= array-union@^1.0.1: 
version "1.0.2" resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" + integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= dependencies: array-uniq "^1.0.1" array-uniq@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" + integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= arrify@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" + integrity sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0= asn1@~0.2.0, asn1@~0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.3.tgz#dac8787713c9966849fc8180777ebe9c1ddf3b86" + integrity sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y= assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= assertion-error@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b" + integrity sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw== asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= aws4@^1.6.0: version "1.7.0" resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.7.0.tgz#d4d0e9b9dbfca77bf08eeb0a8a471550fe39e289" + integrity sha512-32NDda82rhwD9/JBCCkB+MRYDp0oSvlo2IL6rQWA10PQi7tDUM3eqMSltXmY+Oyl/7N3P3qNtAlv7X0d9bI28w== + +aws4@^1.8.0: + version "1.8.0" + resolved 
"https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" + integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== babel-code-frame@^6.22.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" + integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= dependencies: chalk "^1.1.3" esutils "^2.0.2" @@ -321,16 +382,19 @@ babel-code-frame@^6.22.0: balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" + integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" body-parser@1.18.2: version "1.18.2" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.18.2.tgz#87678a19d84b47d859b83199bd59bce222b10454" + integrity sha1-h2eKGdhLR9hZuDGZvVm84iKxBFQ= dependencies: bytes "3.0.0" content-type "~1.0.4" @@ -346,12 +410,14 @@ body-parser@1.18.2: boom@2.6.x: version "2.6.1" resolved "https://registry.yarnpkg.com/boom/-/boom-2.6.1.tgz#4dc8ef9b6dfad9c43bbbfbe71fa4c21419f22753" + integrity sha1-Tcjvm2362cQ7u/vnH6TCFBnyJ1M= dependencies: hoek "2.x.x" brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" @@ -359,36 +425,49 @@ brace-expansion@^1.1.7: browser-stdout@1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60" + 
integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw== buffer-from@^1.0.0, buffer-from@^1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" + integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== + +buffer-stream-reader@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/buffer-stream-reader/-/buffer-stream-reader-0.1.1.tgz#ca8bf93631deedd8b8f8c3bb44991cc30951e259" + integrity sha1-yov5NjHe7di4+MO7RJkcwwlR4lk= builtin-modules@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" + integrity sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8= bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= callsites@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/callsites/-/callsites-1.0.1.tgz#c14c24188ce8e1d6a030b4c3c942e6ba895b6a1a" + integrity sha1-wUwkGIzo4dagMLTDyULmuolbaho= caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= chai-as-promised@^7.1.1: version "7.1.1" resolved "https://registry.yarnpkg.com/chai-as-promised/-/chai-as-promised-7.1.1.tgz#08645d825deb8696ee61725dbf590c012eb00ca0" + integrity sha512-azL6xMoi+uxu6z4rhWQ1jbdUhOMhis2PvscD/xjLqNMkv3BPPp2JyyuTHOrf9BOosGpNQ11v6BKv/g57RXbiaA== dependencies: check-error "^1.0.2" chai@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/chai/-/chai-4.1.2.tgz#0f64584ba642f0f2ace2806279f4f06ca23ad73c" + integrity sha1-D2RYS6ZC8PKs4oBiefTwbKI61zw= dependencies: assertion-error "^1.0.1" check-error "^1.0.1" @@ -400,6 +479,7 @@ chai@^4.1.2: chalk@^1.1.3: 
version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" + integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" @@ -410,6 +490,7 @@ chalk@^1.1.3: chalk@^2.3.0: version "2.4.1" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.1.tgz#18c49ab16a037b6eb0152cc83e3471338215b66e" + integrity sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" @@ -418,10 +499,12 @@ chalk@^2.3.0: check-error@^1.0.1, check-error@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82" + integrity sha1-V00xLt2Iu13YkS6Sht1sCu1KrII= child-process-promise@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/child-process-promise/-/child-process-promise-2.2.1.tgz#4730a11ef610fad450b8f223c79d31d7bdad8074" + integrity sha1-RzChHvYQ+tRQuPIjx50x172tgHQ= dependencies: cross-spawn "^4.0.2" node-version "^1.0.0" @@ -430,70 +513,93 @@ child-process-promise@^2.2.1: chownr@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.0.1.tgz#e2a75042a9551908bebd25b8523d5f9769d79181" + integrity sha1-4qdQQqlVGQi+vSW4Uj1fl2nXkYE= co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= color-convert@^1.9.0: version "1.9.2" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.2.tgz#49881b8fba67df12a96bdf3f56c0aab9e7913147" + integrity sha512-3NUJZdhMhcdPn8vJ9v2UQJoH0qqoGUkYTgFEPZaPjEtwmmKUfNV46zZmgB2M5M4DCEQHMaCfWHCxiBflLm04Tg== 
dependencies: color-name "1.1.1" color-name@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.1.tgz#4b1415304cf50028ea81643643bd82ea05803689" + integrity sha1-SxQVMEz1ACjqgWQ2Q72C6gWANok= combined-stream@1.0.6, combined-stream@~1.0.5: version "1.0.6" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.6.tgz#723e7df6e801ac5613113a7e445a9b69cb632818" + integrity sha1-cj599ugBrFYTETp+RFqbactjKBg= + dependencies: + delayed-stream "~1.0.0" + +combined-stream@~1.0.6: + version "1.0.7" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.7.tgz#2d1d24317afb8abe95d6d2c0b07b57813539d828" + integrity sha512-brWl9y6vOB1xYPZcpZde3N9zDByXTosAeMDo4p1wzo6UMOX4vumB+TP1RZ76sfE6Md68Q0NJSrE/gbezd4Ul+w== dependencies: delayed-stream "~1.0.0" commander@2.15.1: version "2.15.1" resolved "https://registry.yarnpkg.com/commander/-/commander-2.15.1.tgz#df46e867d0fc2aec66a34662b406a9ccafff5b0f" + integrity sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag== commander@^2.12.1: version "2.16.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.16.0.tgz#f16390593996ceb4f3eeb020b31d78528f7f8a50" + integrity sha512-sVXqklSaotK9at437sFlFpyOcJonxe0yST/AG9DkQKUdIE6IqGIMv4SfAQSKaJbSdVEJYItASCrBiVQHq1HQew== concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" + integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= content-disposition@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.2.tgz#0cf68bb9ddf5f2be7961c3a85178cb85dba78cb4" + integrity 
sha1-DPaLud318r55YcOoUXjLhdunjLQ= content-type@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= cookie@0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" + integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s= core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= cross-spawn@^4.0.2: version "4.0.2" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-4.0.2.tgz#7b9247621c23adfdd3856004a823cbe397424d41" + integrity sha1-e5JHYhwjrf3ThWAEqCPL45dCTUE= dependencies: lru-cache "^4.0.1" which "^1.2.9" @@ -501,96 +607,117 @@ cross-spawn@^4.0.2: dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" debug@2.6.9, debug@^2.1.2: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" + integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== dependencies: ms "2.0.0" deep-eql@^3.0.0: 
version "3.0.1" resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-3.0.1.tgz#dfc9404400ad1c8fe023e7da1df1c147c4b444df" + integrity sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw== dependencies: type-detect "^4.0.0" deep-extend@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" + integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= delegates@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" + integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= depd@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.1.tgz#5783b4e1c459f06fa5ca27f991f3d06e7a310359" + integrity sha1-V4O04cRZ8G+lyif5kfPQbnoxA1k= depd@~1.1.1, depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" + integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= destroy@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" + integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= detect-libc@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" + integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= diff@3.5.0, diff@^3.1.0, diff@^3.2.0: version "3.5.0" resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" + integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA== ecc-jsbn@~0.1.1: version "0.1.1" resolved 
"https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz#0fc73a9ed5f0d53c38193398523ef7e543777505" + integrity sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU= dependencies: jsbn "~0.1.0" ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= escape-string-regexp@1.0.5, escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esutils@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" + integrity sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs= etag@~1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= express-joi-validator@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/express-joi-validator/-/express-joi-validator-2.0.0.tgz#24e26e6a8327f69985ed72588f00e295dc3e3234" + integrity sha1-JOJuaoMn9pmF7XJYjwDildw+MjQ= dependencies: boom "2.6.x" extend "2.0.x" @@ -599,6 +726,7 @@ express-joi-validator@^2.0.0: express@^4.16.3: version 
"4.16.3" resolved "https://registry.yarnpkg.com/express/-/express-4.16.3.tgz#6af8a502350db3246ecc4becf6b5a34d22f7ed53" + integrity sha1-avilAjUNsyRuzEvs9rWjTSL37VM= dependencies: accepts "~1.3.5" array-flatten "1.1.1" @@ -634,30 +762,37 @@ express@^4.16.3: extend@2.0.x: version "2.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-2.0.2.tgz#1b74985400171b85554894459c978de6ef453ab7" + integrity sha512-AgFD4VU+lVLP6vjnlNfF7OeInLTyeyckCNPEsuxz1vi786UuK/nk6ynPuhn/h+Ju9++TQyr5EpLRI14fc1QtTQ== -extend@~3.0.1: +extend@^3.0.0, extend@~3.0.1, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= extsprintf@^1.2.0: version "1.4.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" + integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= fast-deep-equal@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz#c053477817c86b51daa853c81e059b733d023614" + integrity sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ= fast-json-stable-stringify@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" + integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= finalhandler@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.1.tgz#eebf4ed840079c83f4249038c9d703008301b105" + integrity sha512-Y1GUDo39ez4aHAw7MysnUD5JzYX+WaIj8I57kO3aEPT1fFRL4sr7mjei97FgnwhAyyzRYmQZaTHb2+9uZ1dPtg== dependencies: debug "2.6.9" encodeurl "~1.0.2" @@ -670,10 +805,12 @@ finalhandler@1.1.1: 
forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= -form-data@~2.3.1: +form-data@~2.3.1, form-data@~2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.2.tgz#4970498be604c20c005d4f5c23aecd21d6b49099" + integrity sha1-SXBJi+YEwgwAXU9cI67NIda0kJk= dependencies: asynckit "^0.4.0" combined-stream "1.0.6" @@ -682,24 +819,29 @@ form-data@~2.3.1: forwarded@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" + integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= fresh@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= fs-minipass@^1.2.5: version "1.2.5" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.5.tgz#06c277218454ec288df77ada54a03b8702aacb9d" + integrity sha512-JhBl0skXjUPCFH7x6x61gQxrKyXsxB5gcgePLZCwfyCGGsTISMoIeObbrvVeP6Xmyaudw4TT43qV2Gz+iyd2oQ== dependencies: minipass "^2.2.1" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" + integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= dependencies: aproba "^1.0.3" console-control-strings "^1.0.0" @@ -713,16 +855,19 @@ gauge@~2.7.3: get-func-name@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41" + integrity sha1-6td0q+5y4gQJQzoGY2YCPdaIekE= getpass@^0.1.1: version "0.1.7" resolved 
"https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob-parent@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" + integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= dependencies: is-glob "^3.1.0" path-dirname "^1.0.0" @@ -730,6 +875,7 @@ glob-parent@^3.0.0: glob@7.1.2, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1: version "7.1.2" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15" + integrity sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" @@ -741,6 +887,7 @@ glob@7.1.2, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1: globby@^6.0.0: version "6.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" + integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= dependencies: array-union "^1.0.1" glob "^7.0.3" @@ -751,43 +898,60 @@ globby@^6.0.0: growl@1.10.5: version "1.10.5" resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.5.tgz#f2735dc2283674fa67478b10181059355c369e5e" + integrity sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA== har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= har-validator@~5.0.3: version "5.0.3" resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.0.3.tgz#ba402c266194f15956ef15e0fcf242993f6a7dfd" + integrity sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0= dependencies: ajv "^5.1.0" har-schema "^2.0.0" +har-validator@~5.1.0: + version "5.1.0" + resolved 
"https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.0.tgz#44657f5688a22cfd4b72486e81b3a3fb11742c29" + integrity sha512-+qnmNjI4OfH2ipQ9VQOw23bBd/ibtfbVdK2fYbY4acTDqKTW/YDp9McimZdDbG8iV9fZizUqQMD5xvriB146TA== + dependencies: + ajv "^5.3.0" + har-schema "^2.0.0" + has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" + integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= he@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd" + integrity sha1-k0EP0hsAlzUVH4howvJx80J+I/0= hoek@2.x.x: version "2.16.3" resolved "https://registry.yarnpkg.com/hoek/-/hoek-2.16.3.tgz#20bb7403d3cea398e91dc4710a8ff1b8274a25ed" + integrity sha1-ILt0A9POo5jpHcRxCo/xuCdKJe0= http-errors@1.6.2: version "1.6.2" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.2.tgz#0a002cc85707192a7e7946ceedc11155f60ec736" + integrity sha1-CgAsyFcHGSp+eUbO7cERVfYOxzY= dependencies: depd "1.1.1" inherits "2.0.3" @@ -797,6 +961,7 @@ http-errors@1.6.2: http-errors@~1.6.2: version "1.6.3" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" + integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= dependencies: depd "~1.1.2" inherits "2.0.3" @@ -806,6 +971,7 @@ http-errors@~1.6.2: http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + integrity 
sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: assert-plus "^1.0.0" jsprim "^1.2.2" @@ -814,22 +980,26 @@ http-signature@~1.2.0: iconv-lite@0.4.19: version "0.4.19" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.19.tgz#f7468f60135f5e5dad3399c0a81be9a1603a082b" + integrity sha512-oTZqweIP51xaGPI4uPa56/Pri/480R+mo7SeU+YETByQNhDG55ycFyNLIgta9vXhILrxXDmF7ZGhqZIcuN0gJQ== iconv-lite@^0.4.4: version "0.4.23" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.23.tgz#297871f63be507adcfbfca715d0cd0eed84e9a63" + integrity sha512-neyTUVFtahjf0mB3dZT77u+8O0QB89jFdnBkd5P1JgYPbPaia3gXXOVL2fq8VyU2gMMD7SaN7QukTB/pmXYvDA== dependencies: safer-buffer ">= 2.1.2 < 3" ignore-walk@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.1.tgz#a83e62e7d272ac0e3b551aaa82831a19b69f82f8" + integrity sha512-DTVlMx3IYPe0/JJcYP7Gxg7ttZZu3IInhuEhbchuqneY9wWe5Ojy2mXLBaQFUQmo0AW2r3qG7m1mg86js+gnlQ== dependencies: minimatch "^3.0.4" inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" @@ -837,58 +1007,76 @@ inflight@^1.0.4: inherits@2, inherits@2.0.3, inherits@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= ini@~1.3.0: version "1.3.5" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" + integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== ipaddr.js@1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.6.0.tgz#e3fa357b773da619f26e95f049d055c72796f86b" + integrity sha1-4/o1e3c9phnybpXwSdBVxyeW+Gs= is-extglob@^2.1.0: version "2.1.1" resolved 
"https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= is-glob@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" + integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= dependencies: is-extglob "^2.1.0" is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= + +is@~0.2.6: + version "0.2.7" + resolved "http://registry.npmjs.org/is/-/is-0.2.7.tgz#3b34a2c48f359972f35042849193ae7264b63562" + integrity sha1-OzSixI81mXLzUEKEkZOucmS2NWI= isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isemail@1.x.x: version "1.2.0" resolved "https://registry.yarnpkg.com/isemail/-/isemail-1.2.0.tgz#be03df8cc3e29de4d2c5df6501263f1fa4595e9a" + integrity sha1-vgPfjMPineTSxd9lASY/H6RZXpo= isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + integrity 
sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= joi@6.x.x: version "6.10.1" resolved "https://registry.yarnpkg.com/joi/-/joi-6.10.1.tgz#4d50c318079122000fe5f16af1ff8e1917b77e06" + integrity sha1-TVDDGAeRIgAP5fFq8f+OGRe3fgY= dependencies: hoek "2.x.x" isemail "1.x.x" @@ -898,10 +1086,12 @@ joi@6.x.x: js-tokens@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" + integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= js-yaml@^3.7.0: version "3.12.0" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.12.0.tgz#eaed656ec8344f10f527c6bfa1b6e2244de167d1" + integrity sha512-PIt2cnwmPfL4hKNwqeiuz4bKfnzHTBv6HyVgjahA6mPLwPDzjDWrplJBMjHUFxku/N3FlmrbyPclad+I+4mJ3A== dependencies: argparse "^1.0.7" esprima "^4.0.0" @@ -909,22 +1099,27 @@ js-yaml@^3.7.0: jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= json-schema-traverse@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" + integrity sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A= json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" + integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= json-stringify-safe@~5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" + integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" extsprintf "1.3.0" @@ -934,6 +1129,7 @@ jsprim@^1.2.2: lru-cache@^4.0.1: version "4.1.3" resolved 
"https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.3.tgz#a1175cf3496dfc8436c156c334b4955992bce69c" + integrity sha512-fFEhvcgzuIoJVUF8fYr5KR0YqxD238zgObTps31YdADwPPAp82a4M8TrckkWyx7ekNlf9aBcVn81cFwwXngrJA== dependencies: pseudomap "^1.0.2" yallist "^2.1.2" @@ -941,50 +1137,73 @@ lru-cache@^4.0.1: make-error@^1.1.1: version "1.3.4" resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.4.tgz#19978ed575f9e9545d2ff8c13e33b5d18a67d535" + integrity sha512-0Dab5btKVPhibSalc9QGXb559ED7G7iLjFXBaj9Wq8O3vorueR5K5jaE3hkG6ZQINyhA/JgG6Qk4qdFQjsYV6g== media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= merge-descriptors@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= methods@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= mime-db@~1.35.0: version "1.35.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.35.0.tgz#0569d657466491283709663ad379a99b90d9ab47" + integrity sha512-JWT/IcCTsB0Io3AhWUMjRqucrHSPsSf2xKLaRldJVULioggvkJvggZ3VXNNSRkCddE6D+BUI4HEIZIA2OjwIvg== + +mime-db@~1.36.0: + version "1.36.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.36.0.tgz#5020478db3c7fe93aad7bbcc4dcf869c43363397" + integrity sha512-L+xvyD9MkoYMXb1jAmzI/lWYAxAMCPvIBSWur0PZ5nOf5euahRLVqH//FKW9mWp2lkqUgYiXPgkzfMUFi4zVDw== mime-types@^2.1.12, mime-types@~2.1.17, mime-types@~2.1.18: version "2.1.19" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.19.tgz#71e464537a7ef81c15f2db9d97e913fc0ff606f0" + integrity sha512-P1tKYHVSZ6uFo26mtnve4HQFE3koh1UWVkp8YUC+ESBHe945xWSoXuHHiGarDqcEZ+whpCDnlNw5LON0kLo+sw== dependencies: mime-db 
"~1.35.0" +mime-types@~2.1.19: + version "2.1.20" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.20.tgz#930cb719d571e903738520f8470911548ca2cc19" + integrity sha512-HrkrPaP9vGuWbLK1B1FfgAkbqNjIuy4eHlIYnFi7kamZyLLrGlo2mpcx0bBmNpKqBtYtAfGbodDddIgddSJC2A== + dependencies: + mime-db "~1.36.0" + mime@1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/mime/-/mime-1.4.1.tgz#121f9ebc49e3766f311a76e1fa1c8003c4b03aa6" + integrity sha512-KI1+qOZu5DcW6wayYHSzR/tXKCDC5Om4s1z2QJjDULzLcmf3DvzS7oluY4HCTrc+9FiKmWUgeNLg7W3uIQvxtQ== minimatch@3.0.4, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimist@0.0.8: version "0.0.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" + integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0= minimist@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" + integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ= minipass@^2.2.1, minipass@^2.3.3: version "2.3.3" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.3.3.tgz#a7dcc8b7b833f5d368759cce544dccb55f50f233" + integrity sha512-/jAn9/tEX4gnpyRATxgHEOV6xbcyxgT7iUnxo9Y3+OB0zX00TgKIv/2FZCf5brBbICcwbLqVv2ImjvWWrQMSYw== dependencies: safe-buffer "^5.1.2" yallist "^3.0.0" @@ -992,18 +1211,21 @@ minipass@^2.2.1, minipass@^2.3.3: minizlib@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.1.0.tgz#11e13658ce46bc3a70a267aac58359d1e0c29ceb" + integrity sha512-4T6Ur/GctZ27nHfpt9THOdRZNgyJ9FZchYO1ceg5S8Q3DNLCKYy44nCZzgCJgcvx2UM8czmqak5BCxJMrq37lA== dependencies: minipass "^2.2.1" mkdirp@0.5.1, mkdirp@^0.5.0, mkdirp@^0.5.1: version "0.5.1" resolved 
"https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" + integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM= dependencies: minimist "0.0.8" mocha@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/mocha/-/mocha-5.2.0.tgz#6d8ae508f59167f940f2b5b3c4a612ae50c90ae6" + integrity sha512-2IUgKDhc3J7Uug+FxMXuqIyYzH7gJjXECKe/w43IGgQHTSj3InJi+yAA7T24L9bQMRKiUEHxEX37G5JpVUGLcQ== dependencies: browser-stdout "1.3.1" commander "2.15.1" @@ -1020,18 +1242,22 @@ mocha@^5.2.0: moment@2.x.x: version "2.22.2" resolved "https://registry.yarnpkg.com/moment/-/moment-2.22.2.tgz#3c257f9839fc0e93ff53149632239eb90783ff66" + integrity sha1-PCV/mDn8DpP/UxSWMiOeuQeD/2Y= ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= nan@~2.10.0: version "2.10.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.10.0.tgz#96d0cd610ebd58d4b4de9cc0c6828cda99c7548f" + integrity sha512-bAdJv7fBLhWC+/Bls0Oza+mvTaNQtP+1RyhhhvD95pgUJz6XM5IzgmxOkItJ9tkoCiplvAnXI1tNmmUD/eScyA== needle@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/needle/-/needle-2.2.1.tgz#b5e325bd3aae8c2678902fa296f729455d1d3a7d" + integrity sha512-t/ZswCM9JTWjAdXS9VpvqhI2Ct2sL2MdY4fUXqGJaGBk13ge99ObqRksRTbBE56K+wxUXwwfZYOuZHifFW9q+Q== dependencies: debug "^2.1.2" iconv-lite "^0.4.4" @@ -1040,16 +1266,19 @@ needle@^2.2.1: negotiator@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9" + integrity sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk= node-nvidia-smi@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/node-nvidia-smi/-/node-nvidia-smi-1.0.0.tgz#6aa57574540b2bed91c9a80218516ffa686e5ac7" + integrity sha1-aqV1dFQLK+2RyagCGFFv+mhuWsc= dependencies: xml2js "^0.4.17" node-pre-gyp@^0.10.3: version "0.10.3" resolved 
"https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.10.3.tgz#3070040716afdc778747b61b6887bf78880b80fc" + integrity sha512-d1xFs+C/IPS8Id0qPTZ4bUT8wWryfR/OzzAFxweG+uLN85oPzyo2Iw6bVlLQ/JOdgNonXLCoRyqDzDWq4iw72A== dependencies: detect-libc "^1.0.2" mkdirp "^0.5.1" @@ -1065,10 +1294,27 @@ node-pre-gyp@^0.10.3: node-version@^1.0.0: version "1.2.0" resolved "https://registry.yarnpkg.com/node-version/-/node-version-1.2.0.tgz#34fde3ffa8e1149bd323983479dda620e1b5060d" + integrity sha512-ma6oU4Sk0qOoKEAymVoTvk8EdXEobdS7m/mAGhDJ8Rouugho48crHBORAmy5BoOcv8wraPM6xumapQp5hl4iIQ== + +node.extend@1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/node.extend/-/node.extend-1.0.8.tgz#bab04379f7383f4587990c9df07b6a7f65db772b" + integrity sha1-urBDefc4P0WHmQyd8Htqf2Xbdys= + dependencies: + is "~0.2.6" + object-keys "~0.4.0" + +node.flow@1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/node.flow/-/node.flow-1.2.3.tgz#e1c44a82aeca8d78b458a77fb3dc642f2eba2649" + integrity sha1-4cRKgq7KjXi0WKd/s9xkLy66Jkk= + dependencies: + node.extend "1.0.8" nopt@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" + integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= dependencies: abbrev "1" osenv "^0.1.4" @@ -1076,10 +1322,12 @@ nopt@^4.0.1: npm-bundled@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.3.tgz#7e71703d973af3370a9591bafe3a63aca0be2308" + integrity sha512-ByQ3oJ/5ETLyglU2+8dBObvhfWXX8dtPZDMePCahptliFX2iIuhyEszyFk401PZUNQH20vvdW5MLjJxkwU80Ow== npm-packlist@^1.1.6: version "1.1.11" resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.1.11.tgz#84e8c683cbe7867d34b1d357d893ce29e28a02de" + integrity sha512-CxKlZ24urLkJk+9kCm48RTQ7L4hsmgSVzEk0TLGPzzyuFxD7VNgy5Sl24tOLMzQv773a/NeJ1ce1DKeacqffEA== dependencies: ignore-walk "^3.0.1" npm-bundled "^1.0.1" @@ -1087,6 +1335,7 @@ npm-packlist@^1.1.6: npmlog@^4.0.2: version "4.1.2" resolved 
"https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" + integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== dependencies: are-we-there-yet "~1.1.2" console-control-strings "~1.1.0" @@ -1096,38 +1345,56 @@ npmlog@^4.0.2: number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= oauth-sign@~0.8.2: version "0.8.2" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43" + integrity sha1-Rqarfwrq2N6unsBWV4C31O/rnUM= + +oauth-sign@~0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" + integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@^4.0.1, object-assign@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= + +object-keys@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-0.4.0.tgz#28a6aae7428dd2c3a92f3d95f21335dd204e0336" + integrity sha1-KKaq50KN0sOpLz2V8hM13SBOAzY= on-finished@~2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" + integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= dependencies: ee-first "1.1.1" once@^1.3.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" 
+ integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= osenv@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" + integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.0" @@ -1135,62 +1402,76 @@ osenv@^0.1.4: parent-module@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-0.1.0.tgz#b5292863a1e8c476ecf857e7d75c98920b24b8a6" + integrity sha1-tSkoY6HoxHbs+Ffn11yYkgskuKY= dependencies: callsites "^1.0.0" parseurl@~1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.2.tgz#fc289d4ed8993119460c156253262cdc8de65bf3" + integrity sha1-/CidTtiZMRlGDBViUyYs3I3mW/M= path-dirname@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" + integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-parse@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1" + integrity sha1-PBrfhx6pzWyUMbbqK9dKD/BVxME= path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= pathval@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.0.tgz#b942e6d4bde653005ef6b71361def8727d0645e0" + integrity 
sha1-uULm1L3mUwBe9rcTYd74cn0GReA= performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" + integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= process-nextick-args@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa" + integrity sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw== promise-polyfill@^6.0.1: version "6.1.0" resolved "https://registry.yarnpkg.com/promise-polyfill/-/promise-polyfill-6.1.0.tgz#dfa96943ea9c121fca4de9b5868cb39d3472e057" + integrity sha1-36lpQ+qcEh/KTem1hoyznTRy4Fc= proxy-addr@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.3.tgz#355f262505a621646b3130a728eb647e22055341" + integrity sha512-jQTChiCJteusULxjBp8+jftSQE5Obdl3k4cnmLA6WXtK6XFuWRnvVL7aCiBqaLPM8c4ph0S4tKna8XvmIwEnXQ== dependencies: forwarded "~0.1.2" ipaddr.js "1.6.0" @@ -1198,26 +1479,37 @@ proxy-addr@~2.0.3: pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" + integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= + +psl@^1.1.24: + version "1.1.29" + resolved 
"https://registry.yarnpkg.com/psl/-/psl-1.1.29.tgz#60f580d360170bb722a797cc704411e6da850c67" + integrity sha512-AeUmQ0oLN02flVHXWh9sSJF7mcdFq0ppid/JkErufc3hGIV/AMa8Fo9VgDo/cT2jFdOWoFvHp90qqBH54W+gjQ== punycode@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= qs@6.5.1: version "6.5.1" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.1.tgz#349cdf6eef89ec45c12d7d5eb3fc0c870343a6d8" + integrity sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A== -qs@~6.5.1: +qs@~6.5.1, qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" + integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== range-parser@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.0.tgz#f49be6b487894ddc40dcc94a322f611092e00d5e" + integrity sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4= raw-body@2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.3.2.tgz#bcd60c77d3eb93cde0050295c3f379389bc88f89" + integrity sha1-vNYMd9Prk83gBQKVw/N5OJvIj4k= dependencies: bytes "3.0.0" http-errors "1.6.2" @@ -1227,6 +1519,7 @@ raw-body@2.3.2: rc@^1.2.7: version "1.2.8" resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" + integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: deep-extend "^0.6.0" ini "~1.3.0" @@ -1236,6 +1529,7 @@ rc@^1.2.7: readable-stream@^2.0.6: version "2.3.6" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" + integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== dependencies: core-util-is "~1.0.0" inherits 
"~2.0.3" @@ -1248,10 +1542,38 @@ readable-stream@^2.0.6: reflect-metadata@^0.1.10: version "0.1.12" resolved "https://registry.yarnpkg.com/reflect-metadata/-/reflect-metadata-0.1.12.tgz#311bf0c6b63cd782f228a81abe146a2bfa9c56f2" + integrity sha512-n+IyV+nGz3+0q3/Yf1ra12KpCyi001bi4XFxSjbiWWjfqb52iTTtpGXmCCAOWWIAn9KEuFZKGqBERHmrtScZ3A== + +request@^2.74.0: + version "2.88.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" + integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== + dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.8.0" + caseless "~0.12.0" + combined-stream "~1.0.6" + extend "~3.0.2" + forever-agent "~0.6.1" + form-data "~2.3.2" + har-validator "~5.1.0" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.19" + oauth-sign "~0.9.0" + performance-now "^2.1.0" + qs "~6.5.2" + safe-buffer "^5.1.2" + tough-cookie "~2.4.3" + tunnel-agent "^0.6.0" + uuid "^3.3.2" request@^2.87.0: version "2.87.0" resolved "https://registry.yarnpkg.com/request/-/request-2.87.0.tgz#32f00235cd08d482b4d0d68db93a829c0ed5756e" + integrity sha512-fcogkm7Az5bsS6Sl0sibkbhcKsnyon/jV1kF3ajGmF0c8HrttdKTPRT9hieOaQHA5HEq6r8OyWOo/o781C1tNw== dependencies: aws-sign2 "~0.7.0" aws4 "^1.6.0" @@ -1277,6 +1599,7 @@ request@^2.87.0: require-glob@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/require-glob/-/require-glob-3.2.0.tgz#90bfe2c8efb4b9f972eb9a3f5e580832e04f64d3" + integrity sha1-kL/iyO+0ufly65o/XlgIMuBPZNM= dependencies: glob-parent "^3.0.0" globby "^6.0.0" @@ -1285,42 +1608,58 @@ require-glob@^3.2.0: resolve@^1.3.2: version "1.8.1" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.8.1.tgz#82f1ec19a423ac1fbd080b0bab06ba36e84a7a26" + integrity sha512-AicPrAC7Qu1JxPCZ9ZgCZlY35QgFnNqc+0LtbRNxnVw4TXvjQ72wnuL9JQcEBgXkI9JM8MsT9kaQoHcpCRJOYA== dependencies: path-parse "^1.0.5" rimraf@^2.6.1: 
version "2.6.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36" + integrity sha512-lreewLK/BlghmxtfH36YYVg1i8IAce4TI7oao75I1g245+6BctqTVQiBP3YUJ9C6DQOXJmkYR9X9fCLtCOJc5w== dependencies: glob "^7.0.5" +rmdir@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/rmdir/-/rmdir-1.2.0.tgz#4fe0357cb06168c258e73e968093dc4e8a0f3253" + integrity sha1-T+A1fLBhaMJY5z6WgJPcTooPMlM= + dependencies: + node.flow "1.2.3" + rx@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/rx/-/rx-4.1.0.tgz#a5f13ff79ef3b740fe30aa803fb09f98805d4782" + integrity sha1-pfE/957zt0D+MKqAP7CfmIBdR4I= safe-buffer@5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" + integrity sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg== safe-buffer@^5.0.1, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== "safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@>=0.6.0, sax@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== semver@^5.1.0, semver@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/semver/-/semver-5.5.0.tgz#dc4bbc7a6ca9d916dee5d43516f0092b58f7b8ab" + integrity 
sha512-4SJ3dm0WAwWy/NVeioZh5AntkdJoWKxHxcmyP622fOkgHa4z3R0TdBJICINyaSDE6uNwVc8gZr+ZinwZAH4xIA== send@0.16.2: version "0.16.2" resolved "https://registry.yarnpkg.com/send/-/send-0.16.2.tgz#6ecca1e0f8c156d141597559848df64730a6bbc1" + integrity sha512-E64YFPUssFHEFBvpbbjr44NCLtI1AohxQ8ZSiJjQLskAdKuriYEP6VyGEsRDH8ScozGpkaX1BGvhanqCwkcEZw== dependencies: debug "2.6.9" depd "~1.1.2" @@ -1339,6 +1678,7 @@ send@0.16.2: serve-static@1.13.2: version "1.13.2" resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.13.2.tgz#095e8472fd5b46237db50ce486a43f4b86c6cec1" + integrity sha512-p/tdJrO4U387R9oMjb1oj7qSMaMfmOyd4j9hOFoxZe2baQszgHcSWjuya/CiT5kgZZKRudHNOA0pYXOl8rQ5nw== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" @@ -1348,22 +1688,27 @@ serve-static@1.13.2: set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= setprototypeof@1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.0.3.tgz#66567e37043eeb4f04d91bd658c0cbefb55b8e04" + integrity sha1-ZlZ+NwQ+608E2RvWWMDL77VbjgQ= setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" + integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" + integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= source-map-support@^0.5.6: version "0.5.6" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.6.tgz#4435cee46b1aab62b8e8610ce60f788091c51c13" + integrity sha512-N4KXEz7jcKqPf2b2vZF11lQIz9W5ZMuUcIOGj243lduidkf2fjkVKJS9vNxVWn3u/uxX38AcE8U9nnH9FPcq+g== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" @@ 
-1371,14 +1716,17 @@ source-map-support@^0.5.6: source-map@^0.6.0: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sqlite3@^4.0.2: version "4.0.2" resolved "https://registry.yarnpkg.com/sqlite3/-/sqlite3-4.0.2.tgz#1bbeb68b03ead5d499e42a3a1b140064791c5a64" + integrity sha512-51ferIRwYOhzUEtogqOa/y9supADlAht98bF/gbIi6WkzRJX6Yioldxbzj1MV4yV+LgdKD/kkHwFTeFXOG4htA== dependencies: nan "~2.10.0" node-pre-gyp "^0.10.3" @@ -1387,6 +1735,7 @@ sqlite3@^4.0.2: ssh2-streams@~0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/ssh2-streams/-/ssh2-streams-0.2.1.tgz#9c9c9964be60e9644575af328677f64b1e5cbd79" + integrity sha512-3zCOsmunh1JWgPshfhKmBCL3lUtHPoh+a/cyQ49Ft0Q0aF7xgN06b76L+oKtFi0fgO57FLjFztb1GlJcEZ4a3Q== dependencies: asn1 "~0.2.0" semver "^5.1.0" @@ -1395,12 +1744,14 @@ ssh2-streams@~0.2.0: ssh2@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/ssh2/-/ssh2-0.6.1.tgz#5dde1a7394bb978b1f9c2f014affee2f5493bd40" + integrity sha512-fNvocq+xetsaAZtBG/9Vhh0GDjw1jQeW7Uq/DPh4fVrJd0XxSfXAqBjOGVk4o2jyWHvyC6HiaPFpfHlR12coDw== dependencies: ssh2-streams "~0.2.0" sshpk@^1.7.0: version "1.14.2" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.14.2.tgz#c6fc61648a3d9c4e764fd3fcdf4ea105e492ba98" + integrity sha1-xvxhZIo9nE52T9P8306hBeSSupg= dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" @@ -1416,22 +1767,27 @@ sshpk@^1.7.0: "statuses@>= 1.3.1 < 2", "statuses@>= 1.4.0 < 2": version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" + integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= statuses@~1.4.0: version "1.4.0" 
resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.4.0.tgz#bb73d446da2796106efcc1b601a253d6c46bd087" + integrity sha512-zhSCtt8v2NDrRlPQpCNtw/heZLtfUDqxBM1udqikb/Hbk52LK4nQSwr10u77iopCW5LsyHpuXS0GnEc48mLeew== stream-buffers@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/stream-buffers/-/stream-buffers-3.0.2.tgz#5249005a8d5c2d00b3a32e6e0a6ea209dc4f3521" + integrity sha512-DQi1h8VEBA/lURbSwFtEHnSTb9s2/pwLEaFuNhXwy1Dx3Sa0lOuYT2yNUr4/j2fs8oCAMANtrZ5OrPZtyVs3MQ== streamsearch@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-0.1.2.tgz#808b9d0e56fc273d809ba57338e929919a1a9f1a" + integrity sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo= string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" @@ -1440,6 +1796,7 @@ string-width@^1.0.1: "string-width@^1.0.2 || 2": version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" @@ -1447,42 +1804,50 @@ string-width@^1.0.1: string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= dependencies: ansi-regex "^3.0.0" strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= supports-color@5.4.0, supports-color@^5.3.0: version "5.4.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.4.0.tgz#1c6b337402c2137605efe19f10fec390f6faab54" + integrity sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w== dependencies: has-flag "^3.0.0" supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= tail-stream@^0.3.4: version "0.3.4" resolved "https://registry.yarnpkg.com/tail-stream/-/tail-stream-0.3.4.tgz#bc675a20e92732b1a6a7cc65d6be66f7817fd5c1" + integrity sha1-vGdaIOknMrGmp8xl1r5m94F/1cE= tar@^4: version "4.4.4" resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.4.tgz#ec8409fae9f665a4355cc3b4087d0820232bb8cd" + integrity sha512-mq9ixIYfNF9SK0IS/h2HKMu8Q2iaCuhDDsZhdEag/FHv8fOaYld4vN7ouMgcSSt5WKZzPs8atclTcJm36OTh4w== dependencies: chownr "^1.0.1" fs-minipass "^1.2.5" @@ -1495,32 +1860,46 @@ tar@^4: tmp@^0.0.33: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" + integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== dependencies: os-tmpdir "~1.0.2" topo@1.x.x: version "1.1.0" resolved "https://registry.yarnpkg.com/topo/-/topo-1.1.0.tgz#e9d751615d1bb87dc865db182fa1ca0a5ef536d5" + integrity sha1-6ddRYV0buH3IZdsYL6HKCl71NtU= dependencies: hoek "2.x.x" tough-cookie@~2.3.3: version "2.3.4" resolved 
"https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.4.tgz#ec60cee38ac675063ffc97a5c18970578ee83655" + integrity sha512-TZ6TTfI5NtZnuyy/Kecv+CnoROnyXn2DN97LontgQpCwsX2XyLYCC0ENhYkehSOwAp8rTQKc/NUIF7BkQ5rKLA== + dependencies: + punycode "^1.4.1" + +tough-cookie@~2.4.3: + version "2.4.3" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" + integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== dependencies: + psl "^1.1.24" punycode "^1.4.1" tree-kill@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.0.tgz#5846786237b4239014f05db156b643212d4c6f36" + integrity sha512-DlX6dR0lOIRDFxI0mjL9IYg6OTncLm/Zt+JiBhE5OlFcAR8yc9S7FFXU9so0oda47frdM/JFsk7UjNt9vscKcg== ts-deferred@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/ts-deferred/-/ts-deferred-1.0.4.tgz#58145ebaeef5b8f2a290b8cec3d060839f9489c7" + integrity sha1-WBReuu71uPKikLjOw9Bgg5+Uicc= ts-node@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-7.0.0.tgz#a94a13c75e5e1aa6b82814b84c68deb339ba7bff" + integrity sha512-klJsfswHP0FuOLsvBZ/zzCfUvakOSSxds78mVeK7I+qP76YWtxf16hEZsp3U+b0kIo82R5UatGFeblYMqabb2Q== dependencies: arrify "^1.0.0" buffer-from "^1.1.0" @@ -1534,16 +1913,19 @@ ts-node@^7.0.0: tslib@^1.8.0, tslib@^1.8.1: version "1.9.3" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286" + integrity sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ== tslint-microsoft-contrib@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/tslint-microsoft-contrib/-/tslint-microsoft-contrib-5.1.0.tgz#777c32d51aba16f4565e47aac749a1631176cd9f" + integrity sha512-p7xN6cN6y2REFT/11Xl4OAPdhPLHcsZk2IfA8rFS9wi3hhkY6Shz+yoJ61Z+GJ8L4TsRhIbG/09w3e1sdOHs9g== dependencies: tsutils "^2.12.1" tslint@^5.11.0: version "5.11.0" resolved 
"https://registry.yarnpkg.com/tslint/-/tslint-5.11.0.tgz#98f30c02eae3cde7006201e4c33cb08b48581eed" + integrity sha1-mPMMAurjzecAYgHkwzywi0hYHu0= dependencies: babel-code-frame "^6.22.0" builtin-modules "^1.1.1" @@ -1561,26 +1943,31 @@ tslint@^5.11.0: tsutils@^2.12.1, tsutils@^2.27.2: version "2.29.0" resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-2.29.0.tgz#32b488501467acbedd4b85498673a0812aca0b99" + integrity sha512-g5JVHCIJwzfISaXpXE1qvNalca5Jwob6FjI4AoPlqMusJ6ftFE7IkkFoMhVLRgK+4Kx3gkzb8UZK5t5yTTvEmA== dependencies: tslib "^1.8.1" tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= type-detect@^4.0.0: version "4.0.8" resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== type-is@~1.6.15, type-is@~1.6.16: version "1.6.16" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.16.tgz#f89ce341541c672b25ee7ae3c73dee3b2be50194" + integrity sha512-HRkVv/5qY2G6I8iab9cI7v1bOIdhm94dVjQCPFElW9W+3GeDOSHmy2EBYe4VTApuzolPcmgFTN3ftVJRKR2J9Q== dependencies: media-typer "0.3.0" mime-types "~2.1.18" @@ -1588,6 +1975,7 @@ type-is@~1.6.15, type-is@~1.6.16: typescript-ioc@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/typescript-ioc/-/typescript-ioc-1.2.4.tgz#21290097b163632de58a3abba7553daef8651f49" + integrity sha512-KO+isZO1tmhgKL5RWMU+AZvFGzyk0LnUMBcSLVm2Xo/iZlIyu/HD2o5vdg5kXJTJMs8otbDzOUsPt8/JFr96cw== dependencies: reflect-metadata "^0.1.10" require-glob "^3.2.0" @@ -1595,58 +1983,79 @@ 
typescript-ioc@^1.2.4: typescript-string-operations@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/typescript-string-operations/-/typescript-string-operations-1.3.1.tgz#461b886cc9ccd4dd16810b1f248b2e6f6580956b" + integrity sha512-DsT4kq8k3WT48EhdI/6DanReYGbX4Wg18z8vSeHH2wMfSFqdjiI40jrVABDH2WZ1RhCt7WoN/iY+LPhxrUHCqw== typescript@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.0.1.tgz#43738f29585d3a87575520a4b93ab6026ef11fdb" + integrity sha512-zQIMOmC+372pC/CCVLqnQ0zSBiY7HHodU7mpQdjiZddek4GMj31I3dUJ7gAs9o65X7mnRma6OokOkc6f9jjfBg== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= utils-merge@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= -uuid@^3.1.0: +uuid@^3.1.0, uuid@^3.3.2: version "3.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131" + integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA== vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= verror@1.10.0: version "1.10.0" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: assert-plus "^1.0.0" core-util-is "1.0.2" extsprintf "^1.2.0" +webhdfs@^1.2.0: + version "1.2.0" + resolved 
"https://registry.yarnpkg.com/webhdfs/-/webhdfs-1.2.0.tgz#c41b08ae33944a0220863bfd4b6719b9aaec1d37" + integrity sha512-h8D/NT7ruDMuGCdJNEJHJh8vDTEtZ5hBL+eRzXTq/INTd92LKOhsTCwlQI+8kTt79qPZq5O8ev7j/Y19VeYCHQ== + dependencies: + buffer-stream-reader "^0.1.1" + extend "^3.0.0" + request "^2.74.0" + which@^1.2.9: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" wide-align@^1.1.0: version "1.1.3" resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" + integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: string-width "^1.0.2 || 2" wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= xml2js@^0.4.17: version "0.4.19" resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7" + integrity sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q== dependencies: sax ">=0.6.0" xmlbuilder "~9.0.1" @@ -1654,15 +2063,19 @@ xml2js@^0.4.17: xmlbuilder@~9.0.1: version "9.0.7" resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d" + integrity sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0= yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" + integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= yallist@^3.0.0, yallist@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.0.2.tgz#8452b4bb7e83c7c188d8041c1a837c773d6d8bb9" + integrity sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k= yn@^2.0.0: version "2.0.0" 
resolved "https://registry.yarnpkg.com/yn/-/yn-2.0.0.tgz#e5adabc8acf408f6385fc76495684c88e6af689a" + integrity sha1-5a2ryKz0CPY4X8dklWhMiOavaJo= diff --git a/src/sdk/pynni/nni/README.md b/src/sdk/pynni/nni/README.md index 8c0c41278c..bfb752b7f1 100644 --- a/src/sdk/pynni/nni/README.md +++ b/src/sdk/pynni/nni/README.md @@ -1,20 +1,21 @@ -# How to use Tuner that NNI support? +# How to use Tuner that NNI supports? -For now, NNI could support tuner algorithm as following: +For now, NNI has supported the following tuner algorithms. Note that NNI installation only installs a subset of those algorithms, other algorithms should be installed through `nnictl package install` before you use them. For example, for SMAC the installation command is `nnictl package install --name=SMAC`. - TPE - Random Search - Anneal - Naive Evolution - - ENAS (on going) + - SMAC (to install through `nnictl`) + - ENAS (ongoing) + - Batch (ongoing) - - **1. Tuner algorithm introduction** + ## 1. Tuner algorithm introduction We will introduce some basic knowledge about tuner algorithm here. If you are an expert, you could skip this part and jump to how to use. -*1.1 TPE* +**TPE** The Tree-structured Parzen Estimator (TPE) is a sequential model-based optimization (SMBO) approach. SMBO methods sequentially construct models to approximate the performance of hyperparameters based on historical measurements, and then subsequently choose new hyperparameters to test based on this model. @@ -22,20 +23,31 @@ The TPE approach models P(x|y) and P(y) where x represents hyperparameters and y Comparing with other algorithm, TPE could be achieve better result when the number of trial experiment is small. Also TPE support continuous or discrete hyper-parameters. From a large amount of experiments, we could found that TPE is far better than Random Search. -*1.2 Random Search* +**Random Search** In [Random Search for Hyper-Parameter Optimization][2] show that Random Search might be surprsingly simple and effective. 
We suggests that we could use Random Search as basline when we have no knowledge about the prior distribution of hyper-parameters. -*1.3 Anneal* +**Anneal** -*1.4 Naive Evolution* +**Naive Evolution** + Naive Evolution comes from [Large-Scale Evolution of Image Classifiers][3]. Naive Evolution requir more experiments to works, but it's very simple and easily to expand new features. There are some tips for user: 1) large initial population could avoid to fall into local optimum 2) use some strategies to keep the deversity of population could be better. +**SMAC** + +[SMAC][4] is based on Sequential Model-Based Optimization (SMBO). It adapts the most prominent previously used model class (Gaussian stochastic process models) and introduces the model class of random forests to SMBO, in order to handle categorical parameters. The SMAC supported by nni is a wrapper on [the SMAC3 github repo][5]. + +Note that SMAC only supports a subset of the types in [search space spec](../../../../docs/SearchSpaceSpec.md), including `choice`, `randint`, `uniform`, `loguniform`, `quniform(q=1)`. + +**Batch** + +Batch allows users to simply provide several configurations (i.e., choices of hyper-parameters) for their trial code. After finishing all the configurations, the experiment is done. + - **2. How to use the tuner algorithm in NNI?** + ## 2. How to use the tuner algorithm in NNI? User only need to do one thing: choose a Tuner```config.yaml```. 
Here is an example: @@ -61,4 +73,6 @@ There are two filed you need to set: [1]: https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf [2]: http://www.jmlr.org/papers/volume13/bergstra12a/bergstra12a.pdf - [3]: https://arxiv.org/pdf/1703.01041.pdf \ No newline at end of file + [3]: https://arxiv.org/pdf/1703.01041.pdf + [4]: https://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf + [5]: https://github.com/automl/SMAC3 diff --git a/src/sdk/pynni/nni/__main__.py b/src/sdk/pynni/nni/__main__.py index e3a39bac96..206cd1f5c1 100644 --- a/src/sdk/pynni/nni/__main__.py +++ b/src/sdk/pynni/nni/__main__.py @@ -27,28 +27,39 @@ import json import importlib +from .constants import ModuleName, ClassName, ClassArgs from nni.msg_dispatcher import MsgDispatcher -from nni.hyperopt_tuner.hyperopt_tuner import HyperoptTuner -from nni.evolution_tuner.evolution_tuner import EvolutionTuner -from nni.batch_tuner.batch_tuner import BatchTuner -from nni.medianstop_assessor.medianstop_assessor import MedianstopAssessor logger = logging.getLogger('nni.main') logger.debug('START') -BUILT_IN_CLASS_NAMES = ['HyperoptTuner', 'EvolutionTuner', 'BatchTuner', 'MedianstopAssessor'] +def augment_classargs(input_class_args, classname): + if classname in ClassArgs: + for key, value in ClassArgs[classname].items(): + if key not in input_class_args: + input_class_args[key] = value + return input_class_args def create_builtin_class_instance(classname, jsonstr_args): + if classname not in ModuleName or \ + importlib.util.find_spec(ModuleName[classname]) is None: + raise RuntimeError('Tuner module is not found: {}'.format(classname)) + class_module = importlib.import_module(ModuleName[classname]) + class_constructor = getattr(class_module, ClassName[classname]) if jsonstr_args: - class_args = json.loads(jsonstr_args) - instance = eval(classname)(**class_args) + class_args = augment_classargs(json.loads(jsonstr_args), classname) + else: + class_args = augment_classargs({}, 
classname) + if class_args: + instance = class_constructor(**class_args) else: - instance = eval(classname)() + instance = class_constructor() return instance def create_customized_class_instance(class_dir, class_filename, classname, jsonstr_args): if not os.path.isfile(os.path.join(class_dir, class_filename)): - raise ValueError('Class file not found: {}'.format(os.path.join(class_dir, class_filename))) + raise ValueError('Class file not found: {}'.format( + os.path.join(class_dir, class_filename))) sys.path.append(class_dir) module_name = class_filename.split('.')[0] class_module = importlib.import_module(module_name) @@ -64,12 +75,12 @@ def parse_args(): parser = argparse.ArgumentParser(description='parse command line parameters.') parser.add_argument('--tuner_class_name', type=str, required=True, help='Tuner class name, the class must be a subclass of nni.Tuner') + parser.add_argument('--tuner_class_filename', type=str, required=False, + help='Tuner class file path') parser.add_argument('--tuner_args', type=str, required=False, help='Parameters pass to tuner __init__ constructor') parser.add_argument('--tuner_directory', type=str, required=False, help='Tuner directory') - parser.add_argument('--tuner_class_filename', type=str, required=False, - help='Tuner class file path') parser.add_argument('--assessor_class_name', type=str, required=False, help='Assessor class name, the class must be a subclass of nni.Assessor') @@ -93,23 +104,34 @@ def main(): tuner = None assessor = None - if args.tuner_class_name is None: - raise ValueError('Tuner must be specified') - if args.tuner_class_name in BUILT_IN_CLASS_NAMES: - tuner = create_builtin_class_instance(args.tuner_class_name, args.tuner_args) + if args.tuner_class_name in ModuleName: + tuner = create_builtin_class_instance( + args.tuner_class_name, + args.tuner_args) else: - tuner = create_customized_class_instance(args.tuner_directory, args.tuner_class_filename, args.tuner_class_name, args.tuner_args) - - if 
args.assessor_class_name: - if args.assessor_class_name in BUILT_IN_CLASS_NAMES: - assessor = create_builtin_class_instance(args.assessor_class_name, args.assessor_args) - else: - assessor = create_customized_class_instance(args.assessor_directory, \ - args.assessor_class_filename, args.assessor_class_name, args.assessor_args) + tuner = create_customized_class_instance( + args.tuner_directory, + args.tuner_class_filename, + args.tuner_class_name, + args.tuner_args) if tuner is None: raise AssertionError('Failed to create Tuner instance') + if args.assessor_class_name: + if args.assessor_class_name in ModuleName: + assessor = create_builtin_class_instance( + args.assessor_class_name, + args.assessor_args) + else: + assessor = create_customized_class_instance( + args.assessor_directory, + args.assessor_class_filename, + args.assessor_class_name, + args.assessor_args) + if assessor is None: + raise AssertionError('Failed to create Assessor instance') + dispatcher = MsgDispatcher(tuner, assessor) try: diff --git a/src/sdk/pynni/nni/common.py b/src/sdk/pynni/nni/common.py index 644f13d15b..79ee214aa2 100644 --- a/src/sdk/pynni/nni/common.py +++ b/src/sdk/pynni/nni/common.py @@ -20,6 +20,7 @@ from collections import namedtuple +from datetime import datetime from io import TextIOBase import logging import os @@ -39,13 +40,16 @@ def _load_env_args(): '''Arguments passed from environment''' -class _LoggerFile(TextIOBase): - def __init__(self, logger): - self.logger = logger +_time_format = '%Y-%m-%d %H:%M:%S' +class _LoggerFileWrapper(TextIOBase): + def __init__(self, logger_file): + self.file = logger_file def write(self, s): - if s != '\n': # ignore line break, since logger will add it - self.logger.info(s) + if s != '\n': + time = datetime.now().strftime(_time_format) + self.file.write('[{}] PRINT '.format(time) + s + '\n') + self.file.flush() return len(s) @@ -58,12 +62,12 @@ def init_logger(logger_file_path): logger_file_path = 'unittest.log' elif env_args.log_dir is 
not None: logger_file_path = os.path.join(env_args.log_dir, logger_file_path) + logger_file = open(logger_file_path, 'w') fmt = '[%(asctime)s] %(levelname)s (%(name)s) %(message)s' - datefmt = '%Y-%m-%d %H:%M:%S' - formatter = logging.Formatter(fmt, datefmt) + formatter = logging.Formatter(fmt, _time_format) - handler = logging.FileHandler(logger_file_path) + handler = logging.StreamHandler(logger_file) handler.setFormatter(formatter) root_logger = logging.getLogger() @@ -73,4 +77,4 @@ def init_logger(logger_file_path): # these modules are too verbose logging.getLogger('matplotlib').setLevel(logging.INFO) - sys.stdout = _LoggerFile(logging.getLogger('print')) + sys.stdout = _LoggerFileWrapper(logger_file) diff --git a/src/sdk/pynni/nni/constants.py b/src/sdk/pynni/nni/constants.py new file mode 100644 index 0000000000..cf611bebfd --- /dev/null +++ b/src/sdk/pynni/nni/constants.py @@ -0,0 +1,51 @@ +# Copyright (c) Microsoft Corporation +# All rights reserved. +# +# MIT License +# +# Permission is hereby granted, free of charge, +# to any person obtaining a copy of this software and associated +# documentation files (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and +# to permit persons to whom the Software is furnished to do so, subject to the following conditions: +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING +# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +ModuleName = { + 'TPE': 'nni.hyperopt_tuner.hyperopt_tuner', + 'Random': 'nni.hyperopt_tuner.hyperopt_tuner', + 'Anneal': 'nni.hyperopt_tuner.hyperopt_tuner', + 'Evolution': 'nni.evolution_tuner.evolution_tuner', + 'SMAC': 'nni.smac_tuner.smac_tuner', + + 'Medianstop': 'nni.medianstop_assessor.medianstop_assessor' +} + +ClassName = { + 'TPE': 'HyperoptTuner', + 'Random': 'HyperoptTuner', + 'Anneal': 'HyperoptTuner', + 'Evolution': 'EvolutionTuner', + 'SMAC': 'SMACTuner', + + 'Medianstop': 'MedianstopAssessor' +} + +ClassArgs = { + 'TPE': { + 'algorithm_name': 'tpe' + }, + 'Random': { + 'algorithm_name': 'random_search' + }, + 'Anneal': { + 'algorithm_name': 'anneal' + } +} diff --git a/src/sdk/pynni/nni/platform/__init__.py b/src/sdk/pynni/nni/platform/__init__.py index e0b44e49cb..fed452fc47 100644 --- a/src/sdk/pynni/nni/platform/__init__.py +++ b/src/sdk/pynni/nni/platform/__init__.py @@ -27,7 +27,7 @@ from .standalone import * elif env_args.platform == 'unittest': from .test import * -elif env_args.platform in ('local', 'remote'): +elif env_args.platform in ('local', 'remote', 'pai'): from .local import * else: raise RuntimeError('Unknown platform %s' % env_args.platform) diff --git a/src/sdk/pynni/nni/platform/local.py b/src/sdk/pynni/nni/platform/local.py index 3dda9c4c57..7a9df82971 100644 --- a/src/sdk/pynni/nni/platform/local.py +++ b/src/sdk/pynni/nni/platform/local.py @@ -24,16 +24,20 @@ from ..common import init_logger - -_dir = os.environ['NNI_SYS_DIR'] -_metric_file = open(os.path.join(_dir, '.nni', 'metrics'), 'wb') - -_log_file_path = os.path.join(_dir, 'trial.log') +_sysdir = os.environ['NNI_SYS_DIR'] +if not os.path.exists(os.path.join(_sysdir, '.nni')): + 
os.makedirs(os.path.join(_sysdir, '.nni')) +_metric_file = open(os.path.join(_sysdir, '.nni', 'metrics'), 'wb') + +_outputdir = os.environ['NNI_OUTPUT_DIR'] +if not os.path.exists(_outputdir): + os.makedirs(_outputdir) +_log_file_path = os.path.join(_outputdir, 'trial.log') init_logger(_log_file_path) def get_parameters(): - params_file = open(os.path.join(_dir, 'parameter.cfg'), 'r') + params_file = open(os.path.join(_sysdir, 'parameter.cfg'), 'r') return json.load(params_file) def send_metric(string): diff --git a/src/sdk/pynni/nni/smac_tuner/README.md b/src/sdk/pynni/nni/smac_tuner/README.md new file mode 100644 index 0000000000..a1a8b37190 --- /dev/null +++ b/src/sdk/pynni/nni/smac_tuner/README.md @@ -0,0 +1 @@ +# Integration doc: SMAC on nni \ No newline at end of file diff --git a/src/sdk/pynni/nni/smac_tuner/__init__.py b/src/sdk/pynni/nni/smac_tuner/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/sdk/pynni/nni/smac_tuner/convert_ss_to_scenario.py b/src/sdk/pynni/nni/smac_tuner/convert_ss_to_scenario.py new file mode 100644 index 0000000000..578f8faf43 --- /dev/null +++ b/src/sdk/pynni/nni/smac_tuner/convert_ss_to_scenario.py @@ -0,0 +1,122 @@ +# Copyright (c) Microsoft Corporation +# All rights reserved. +# +# MIT License +# +# Permission is hereby granted, free of charge, +# to any person obtaining a copy of this software and associated +# documentation files (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and +# to permit persons to whom the Software is furnished to do so, subject to the following conditions: +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING +# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +import os +import json + +def get_json_content(file_path): + '''Load json file content''' + try: + with open(file_path, 'r') as file: + return json.load(file) + except TypeError as err: + print('Error: ', err) + return None + +def generate_pcs(nni_search_space_content): + ''' + # parameter_name categorical {value_1, ..., value_N} [default value] + # parameter_name ordinal {value_1, ..., value_N} [default value] + # parameter_name integer [min_value, max_value] [default value] + # parameter_name integer [min_value, max_value] [default value] log + # parameter_name real [min_value, max_value] [default value] + # parameter_name real [min_value, max_value] [default value] log + # https://automl.github.io/SMAC3/stable/options.html + ''' + search_space = nni_search_space_content + with open('param_config_space.pcs', 'w') as pcs_fd: + if isinstance(search_space, dict): + for key in search_space.keys(): + if isinstance(search_space[key], dict): + try: + if search_space[key]['_type'] == 'choice': + pcs_fd.write('%s categorical {%s} [%s]\n' % ( + key, + json.dumps(search_space[key]['_value'])[1:-1], + json.dumps(search_space[key]['_value'][0]))) + elif search_space[key]['_type'] == 'randint': + # TODO: support lower bound in randint + pcs_fd.write('%s integer [0, %d] [%d]\n' % ( + key, + search_space[key]['_value'][0], + search_space[key]['_value'][0])) + elif search_space[key]['_type'] == 'uniform': + pcs_fd.write('%s real %s [%s]\n' % ( + key, + json.dumps(search_space[key]['_value']), + 
json.dumps(search_space[key]['_value'][0]))) + elif search_space[key]['_type'] == 'loguniform': + pcs_fd.write('%s real %s [%s] log\n' % ( + key, + json.dumps(search_space[key]['_value']), + json.dumps(search_space[key]['_value'][0]))) + elif search_space[key]['_type'] == 'quniform' \ + and search_space[key]['_value'][2] == 1: + pcs_fd.write('%s integer [%d, %d] [%d]\n' % ( + key, + search_space[key]['_value'][0], + search_space[key]['_value'][1], + search_space[key]['_value'][0])) + else: + raise RuntimeError('unsupported _type %s' % search_space[key]['_type']) + except: + raise RuntimeError('_type or _value error.') + else: + raise RuntimeError('incorrect search space.') + +def generate_scenario(ss_content): + ''' + # deterministic, 1/0 + # output_dir, + # paramfile, + # run_obj, 'quality' + + # the following keys use default value or empty + # algo, not required by tuner, but required by nni's training service for running trials + # abort_on_first_run_crash, because trials reported to nni tuner would always in success state + # always_race_default, + # cost_for_crash, trials reported to nni tuner would always in success state + # cutoff_time, + # execdir, trials are executed by nni's training service + # feature_file, no features specified or feature file is not supported + # initial_incumbent, use default value + # input_psmac_dirs, parallelism is supported by nni + # instance_file, not supported + # intensification_percentage, not supported, trials are controlled by nni's training service and kill be assessor + # maxR, use default, 2000 + # minR, use default, 1 + # overall_obj, timeout is not supported + # shared_model, parallelism is supported by nni + # test_instance_file, instance is not supported + # tuner-timeout, not supported + # runcount_limit, default: inf., use default because this is controlled by nni + # wallclock_limit,default: inf., use default because this is controlled by nni + # please refer to 
https://automl.github.io/SMAC3/stable/options.html + ''' + with open('scenario.txt', 'w') as sce_fd: + sce_fd.write('deterministic = 0\n') + #sce_fd.write('output_dir = \n') + sce_fd.write('paramfile = param_config_space.pcs\n') + sce_fd.write('run_obj = quality\n') + + generate_pcs(ss_content) + +if __name__ == '__main__': + generate_scenario('search_space.json') diff --git a/src/sdk/pynni/nni/smac_tuner/requirements.txt b/src/sdk/pynni/nni/smac_tuner/requirements.txt new file mode 100644 index 0000000000..a3027fb6fe --- /dev/null +++ b/src/sdk/pynni/nni/smac_tuner/requirements.txt @@ -0,0 +1,2 @@ +git+https://github.com/QuanluZhang/ConfigSpace.git +git+https://github.com/QuanluZhang/SMAC3.git diff --git a/src/sdk/pynni/nni/smac_tuner/smac_tuner.py b/src/sdk/pynni/nni/smac_tuner/smac_tuner.py new file mode 100644 index 0000000000..36c14b330a --- /dev/null +++ b/src/sdk/pynni/nni/smac_tuner/smac_tuner.py @@ -0,0 +1,190 @@ +# Copyright (c) Microsoft Corporation +# All rights reserved. +# +# MIT License +# +# Permission is hereby granted, free of charge, +# to any person obtaining a copy of this software and associated +# documentation files (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and +# to permit persons to whom the Software is furnished to do so, subject to the following conditions: +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING +# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +''' +smac_tuner.py +''' + +from nni.tuner import Tuner + +import sys +import logging +import numpy as np +import json_tricks +from enum import Enum, unique +from .convert_ss_to_scenario import generate_scenario + +from smac.utils.io.cmd_reader import CMDReader +from smac.scenario.scenario import Scenario +from smac.facade.smac_facade import SMAC +from smac.facade.roar_facade import ROAR +from smac.facade.epils_facade import EPILS + +@unique +class OptimizeMode(Enum): + ''' + Oprimize Mode class + ''' + Minimize = 'minimize' + Maximize = 'maximize' + +class SMACTuner(Tuner): + def __init__(self, optimize_mode): + ''' + Constructor + ''' + self.logger = logging.getLogger( + self.__module__ + "." + self.__class__.__name__) + self.optimize_mode = OptimizeMode(optimize_mode) + self.total_data = {} + self.optimizer = None + self.smbo_solver = None + self.first_one = True + self.update_ss_done = False + + def _main_cli(self): + ''' + Main function of SMAC for CLI interface + ''' + self.logger.info("SMAC call: %s" % (" ".join(sys.argv))) + + cmd_reader = CMDReader() + args, _ = cmd_reader.read_cmd() + + root_logger = logging.getLogger() + root_logger.setLevel(args.verbose_level) + logger_handler = logging.StreamHandler( + stream=sys.stdout) + if root_logger.level >= logging.INFO: + formatter = logging.Formatter( + "%(levelname)s:\t%(message)s") + else: + formatter = logging.Formatter( + "%(asctime)s:%(levelname)s:%(name)s:%(message)s", + "%Y-%m-%d %H:%M:%S") + logger_handler.setFormatter(formatter) + root_logger.addHandler(logger_handler) + # remove default handler + root_logger.removeHandler(root_logger.handlers[0]) + + # Create defaults + rh = None + initial_configs = None + stats = None + incumbent = None + + 
# Create scenario-object + scen = Scenario(args.scenario_file, []) + + if args.mode == "SMAC": + optimizer = SMAC( + scenario=scen, + rng=np.random.RandomState(args.seed), + runhistory=rh, + initial_configurations=initial_configs, + stats=stats, + restore_incumbent=incumbent, + run_id=args.seed) + elif args.mode == "ROAR": + optimizer = ROAR( + scenario=scen, + rng=np.random.RandomState(args.seed), + runhistory=rh, + initial_configurations=initial_configs, + run_id=args.seed) + elif args.mode == "EPILS": + optimizer = EPILS( + scenario=scen, + rng=np.random.RandomState(args.seed), + runhistory=rh, + initial_configurations=initial_configs, + run_id=args.seed) + else: + optimizer = None + + return optimizer + + def update_search_space(self, search_space): + ''' + TODO: this is urgly, we put all the initialization work in this method, + because initialization relies on search space, also because update_search_space is called at the beginning. + NOTE: updating search space is not supported. 
+ ''' + if not self.update_ss_done: + generate_scenario(search_space) + self.optimizer = self._main_cli() + self.smbo_solver = self.optimizer.solver + self.update_ss_done = True + else: + self.logger.warning('update search space is not supported.') + + def receive_trial_result(self, parameter_id, parameters, reward): + ''' + receive_trial_result + ''' + if self.optimize_mode is OptimizeMode.Maximize: + reward = -reward + + if parameter_id not in self.total_data: + raise RuntimeError('Received parameter_id not in total_data.') + if self.first_one: + self.smbo_solver.nni_smac_receive_first_run(self.total_data[parameter_id], reward) + self.first_one = False + else: + self.smbo_solver.nni_smac_receive_runs(self.total_data[parameter_id], reward) + + def generate_parameters(self, parameter_id): + ''' + generate one instance of hyperparameters + ''' + if self.first_one: + init_challenger = self.smbo_solver.nni_smac_start() + self.total_data[parameter_id] = init_challenger + json_tricks.dumps(init_challenger.get_dictionary()) + return init_challenger.get_dictionary() + else: + challengers = self.smbo_solver.nni_smac_request_challengers() + for challenger in challengers: + self.total_data[parameter_id] = challenger + json_tricks.dumps(challenger.get_dictionary()) + return challenger.get_dictionary() + + def generate_multiple_parameters(self, parameter_id_list): + ''' + generate mutiple instances of hyperparameters + ''' + if self.first_one: + params = [] + for one_id in parameter_id_list: + init_challenger = self.smbo_solver.nni_smac_start() + self.total_data[one_id] = init_challenger + json_tricks.dumps(init_challenger.get_dictionary()) + params.append(init_challenger.get_dictionary()) + else: + challengers = self.smbo_solver.nni_smac_request_challengers() + cnt = 0 + params = [] + for challenger in challengers: + if cnt >= len(parameter_id_list): + break + self.total_data[parameter_id_list[cnt]] = challenger + json_tricks.dumps(challenger.get_dictionary()) + 
params.append(challenger.get_dictionary()) + cnt += 1 + return params diff --git a/src/sdk/pynni/nni/smartparam.py b/src/sdk/pynni/nni/smartparam.py index 10eef92487..ca035be575 100644 --- a/src/sdk/pynni/nni/smartparam.py +++ b/src/sdk/pynni/nni/smartparam.py @@ -124,6 +124,6 @@ def _get_param(func, name): del frame # see official doc module = inspect.getmodulename(filename) if name is None: - name = '#{:d}'.format(lineno) + name = '__line{:d}'.format(lineno) key = '{}/{}/{}'.format(module, name, func) return trial.get_parameter(key) diff --git a/src/sdk/pynni/setup.py b/src/sdk/pynni/setup.py index a24758e9db..7962c225b8 100644 --- a/src/sdk/pynni/setup.py +++ b/src/sdk/pynni/setup.py @@ -27,7 +27,7 @@ def read(fname): setuptools.setup( name = 'nni', - version = '0.0.1', + version = '0.2.0', packages = setuptools.find_packages(exclude=['tests']), python_requires = '>=3.5', @@ -35,16 +35,17 @@ def read(fname): 'hyperopt', 'json_tricks', 'numpy', - 'scipy', + 'scipy' ], + package_data = {'nni': ['**/requirements.txt']}, test_suite = 'tests', author = 'Microsoft NNI Team', author_email = 'nni@microsoft.com', description = 'Python SDK for Neural Network Intelligence project', - license = 'MIT', - url = 'https://msrasrg.visualstudio.com/NeuralNetworkIntelligence', + license = 'MIT', + url = 'https://github.com/Microsoft/nni', long_description = read('README.md') ) diff --git a/src/sdk/pynni/tests/test_smartparam.py b/src/sdk/pynni/tests/test_smartparam.py index 4a971c842f..9625603213 100644 --- a/src/sdk/pynni/tests/test_smartparam.py +++ b/src/sdk/pynni/tests/test_smartparam.py @@ -33,9 +33,9 @@ class SmartParamTestCase(TestCase): def setUp(self): params = { 'test_smartparam/choice1/choice': 2, - 'test_smartparam/#{:d}/uniform'.format(lineno1): '5', + 'test_smartparam/__line{:d}/uniform'.format(lineno1): '5', 'test_smartparam/func/function_choice': 1, - 'test_smartparam/#{:d}/function_choice'.format(lineno2): 0 + 
'test_smartparam/__line{:d}/function_choice'.format(lineno2): 0 } nni.trial._params = { 'parameter_id': 'test_trial', 'parameters': params } diff --git a/src/webui/README.md b/src/webui/README.md index 3ebf6d1fb4..ee7d0121f8 100644 --- a/src/webui/README.md +++ b/src/webui/README.md @@ -1,48 +1,37 @@ -# webui +# WebUI -NNI is a research platform for metalearning. It provides easy-to-use interface so that you could perform neural architecture search, hyperparameter optimization and optimizer design for your own problems and models. -Web UI allows user to monitor the status of the NNI system using a graphical interface. - -## Deployment - -### To start the webui - -> $ yarn -> $ yarn start - -## Usage - -### View summary page +## View summary page Click the tab "Overview". +* See the experiment parameters. * See good performance trial. * See search_space json. -### View job accuracy +## View job accuracy Click the tab "Optimization Progress" to see the point graph of all trials. Hover every point to see its specific accuracy. -### View hyper parameter +## View hyper parameter Click the tab "Hyper Parameter" to see the parallel graph. -* You can select the percentage to cut down some lines. +* You can select the percentage to see top trials. * Choose two axis to swap its positions -### View trial status +## View trial status Click the tab "Trial Status" to see the status of the all trials. Specifically: -* Running trial: running trial's duration in the bar graph. -* Trial detail: trial's id, trial's duration, start time, end time, status and accuracy. +* Trial duration: trial's duration in the bar graph. +* Trial detail: trial's id, trial's duration, start time, end time, status, accuracy and search space file. * Kill: you can kill a job that status is running. * Tensor: you can see a job in the tensorflow graph, it will link to the Tensorboard page. 
-### Control +## Control Click the tab "Control" to add a new trial or update the search_space file and some experiment parameters. -### View Tensorboard Graph - -Click the tab "Tensorboard" to see a job in the tensorflow graph. \ No newline at end of file +## Feedback + +[Known Issues](https://github.com/Microsoft/nni/issues). \ No newline at end of file diff --git a/src/webui/src/components/Para.tsx b/src/webui/src/components/Para.tsx index 32d099ad8e..2706db1d60 100644 --- a/src/webui/src/components/Para.tsx +++ b/src/webui/src/components/Para.tsx @@ -10,13 +10,6 @@ require('echarts/lib/component/title'); require('echarts/lib/component/visualMap'); require('../style/para.css'); -const chartMulineStyle = { - width: '100%', - height: 600, - margin: '0 auto', - padding: 15 -}; - interface Dimobj { dim: number; name: string; @@ -227,6 +220,22 @@ class Para extends React.Component<{}, ParaState> { const { visualValue } = this.state; let parallelAxis = dataObj.parallelAxis; let paralleData = dataObj.data; + const maxAccuracy = visualValue.maxAccuracy; + const minAccuracy = visualValue.minAccuracy; + let visualMapObj = {}; + if (maxAccuracy === minAccuracy) { + visualMapObj = { + type: 'continuous', + color: ['#fb7c7c', 'yellow', 'lightblue'] + }; + } else { + visualMapObj = { + type: 'continuous', + min: visualValue.minAccuracy, + max: visualValue.maxAccuracy, + color: ['#fb7c7c', 'yellow', 'lightblue'] + }; + } let optionown = { parallelAxis, tooltip: { @@ -252,13 +261,7 @@ class Para extends React.Component<{}, ParaState> { } } }, - visualMap: { - type: 'continuous', - min: visualValue.minAccuracy, - max: visualValue.maxAccuracy, - // gradient color - color: ['#fb7c7c', 'yellow', 'lightblue'] - }, + visualMap: visualMapObj, highlight: { type: 'highlight' }, @@ -375,6 +378,12 @@ class Para extends React.Component<{}, ParaState> { render() { const { option, paraNodata, dimName } = this.state; + const chartMulineStyle = { + width: '100%', + height: 600, + margin: '0 
auto', + padding: 15 + }; return (
@@ -384,7 +393,7 @@ class Para extends React.Component<{}, ParaState> { top - + + + @@ -475,7 +431,6 @@ class Sessionpro extends React.Component<{}, SessionState> { dataSource={tableData} className="tables" bordered={true} - scroll={{ x: '100%', y: 540 }} />
diff --git a/src/webui/src/components/SlideBar.tsx b/src/webui/src/components/SlideBar.tsx index 0eb5538d50..2219491971 100644 --- a/src/webui/src/components/SlideBar.tsx +++ b/src/webui/src/components/SlideBar.tsx @@ -11,13 +11,13 @@ class SlideBar extends React.Component<{}, {}> {
  • - Overview + Overview
  • - Optimization Progress + Optimization Progress
  • @@ -39,6 +39,11 @@ class SlideBar extends React.Component<{}, {}> { +
  • + + Feedback + +
); diff --git a/src/webui/src/components/TrialStatus.tsx b/src/webui/src/components/TrialStatus.tsx index f6a889ebbe..d0e0980ee2 100644 --- a/src/webui/src/components/TrialStatus.tsx +++ b/src/webui/src/components/TrialStatus.tsx @@ -12,6 +12,7 @@ require('echarts/lib/chart/scatter'); require('echarts/lib/component/tooltip'); require('echarts/lib/component/title'); require('../style/trialStatus.css'); +require('../style/logPath.css'); echarts.registerTheme('my_theme', { color: '#3c8dbc' }); @@ -19,6 +20,7 @@ echarts.registerTheme('my_theme', { interface DescObj { parameters: Object; logPath?: string; + isLink?: boolean; } interface TableObj { @@ -84,22 +86,17 @@ class TrialStatus extends React.Component<{}, TabState> { showIntermediateModal = (id: string) => { axios(`${MANAGER_IP}/metric-data/${id}`, { - method: 'GET', - headers: { - 'Content-Type': 'application/json;charset=utf-8' - } + method: 'GET' }) .then(res => { if (res.status === 200) { const intermediateArr: number[] = []; - const xinter: number[] = []; Object.keys(res.data).map(item => { intermediateArr.push(parseFloat(res.data[item].data)); - xinter.push(res.data[item].sequence); }); if (this._isMounted) { this.setState({ - intermediateOption: this.intermediateGraphOption(intermediateArr, id, xinter) + intermediateOption: this.intermediateGraphOption(intermediateArr, id) }); } } @@ -131,7 +128,7 @@ class TrialStatus extends React.Component<{}, TabState> { }, title: { left: 'center', - text: 'Running Trial', + text: 'Trial Duration', textStyle: { fontSize: 18, color: '#333' @@ -221,6 +218,11 @@ class TrialStatus extends React.Component<{}, TabState> { const trialTable: Array = []; Object.keys(trialJobs).map(item => { // only succeeded trials have finalMetricData + let desc: DescObj = { + parameters: {} + }; + let acc = 0; + let duration = 0; const id = trialJobs[item].id !== undefined ? trialJobs[item].id : ''; @@ -228,25 +230,24 @@ class TrialStatus extends React.Component<{}, TabState> { ? 
trialJobs[item].status : ''; const startTime = trialJobs[item].startTime !== undefined - ? new Date(trialJobs[item].startTime).toLocaleString() + ? new Date(trialJobs[item].startTime).toLocaleString('en-US') : ''; const endTime = trialJobs[item].endTime !== undefined - ? new Date(trialJobs[item].endTime).toLocaleString() + ? new Date(trialJobs[item].endTime).toLocaleString('en-US') : ''; - let desc: DescObj = { - parameters: {} - }; if (trialJobs[item].hyperParameters !== undefined) { desc.parameters = JSON.parse(trialJobs[item].hyperParameters).parameters; } if (trialJobs[item].logPath !== undefined) { desc.logPath = trialJobs[item].logPath; + const isHyperLink = /^http/gi.test(trialJobs[item].logPath); + if (isHyperLink) { + desc.isLink = true; + } } - let acc = 0; if (trialJobs[item].finalMetricData !== undefined) { acc = parseFloat(trialJobs[item].finalMetricData.data); } - let duration = 0; if (startTime !== '' && endTime !== '') { duration = (trialJobs[item].endTime - trialJobs[item].startTime) / 1000; } else if (startTime !== '' && endTime === '') { @@ -254,7 +255,6 @@ class TrialStatus extends React.Component<{}, TabState> { } else { duration = 0; } - trialTable.push({ key: trialTable.length, id: id, @@ -267,9 +267,9 @@ class TrialStatus extends React.Component<{}, TabState> { }); }); if (this._isMounted) { - this.setState({ + this.setState(() => ({ tableData: trialTable - }); + })); } } }); @@ -292,6 +292,11 @@ class TrialStatus extends React.Component<{}, TabState> { } else { message.error('fail to cancel the job'); } + }) + .catch(error => { + if (error.response.status === 500) { + message.error('500 error, fail to cancel the job'); + } }); } @@ -306,14 +311,27 @@ class TrialStatus extends React.Component<{}, TabState> { browserHistory.push(path); } - intermediateGraphOption = (intermediateArr: number[], id: string, xinter: number[]) => { + intermediateGraphOption = (intermediateArr: number[], id: string) => { + const sequence: number[] = []; + const 
lengthInter = intermediateArr.length; + for (let i = 1; i <= lengthInter; i++) { + sequence.push(i); + } return { + title: { + text: id, + left: 'center', + textStyle: { + fontSize: 16, + color: '#333', + } + }, tooltip: { trigger: 'item' }, xAxis: { name: 'Trial', - data: xinter + data: sequence }, yAxis: { name: 'Accuracy', @@ -376,7 +394,7 @@ class TrialStatus extends React.Component<{}, TabState> { dataIndex: 'start', key: 'start', width: '15%', - sorter: (a: TableObj, b: TableObj): number => a.start.localeCompare(b.start) + sorter: (a: TableObj, b: TableObj): number => (Date.parse(a.start) - Date.parse(b.start)) }, { title: 'End', dataIndex: 'end', @@ -460,14 +478,35 @@ class TrialStatus extends React.Component<{}, TabState> { ]; const openRow = (record: TableObj) => { + const parametersRow = { + parameters: record.description.parameters + }; + let isLogLink: boolean = false; + const logPathRow = record.description.logPath; + if (record.description.isLink !== undefined) { + isLogLink = true; + } return (
                      true}  // default expandNode
                         getItemString={() => ()}  // remove the {} items
-                        data={record.description}
+                        data={parametersRow}
                     />
+                    {
+                        isLogLink
+                            ?
+                            
+ logPath: + {logPathRow} +
+ : +
+ logPath: + {logPathRow} +
+ }