From c0f90b0cf5e46ecaddd4035c83b17ed90f46e372 Mon Sep 17 00:00:00 2001
From: quzha
Date: Tue, 6 Nov 2018 09:41:30 +0800
Subject: [PATCH 01/12] update mnist-annotation

---
 examples/trials/mnist-annotation/mnist.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/examples/trials/mnist-annotation/mnist.py b/examples/trials/mnist-annotation/mnist.py
index 69ef283336..83657eb339 100644
--- a/examples/trials/mnist-annotation/mnist.py
+++ b/examples/trials/mnist-annotation/mnist.py
@@ -180,10 +180,10 @@ def main(params):
     test_acc = 0.0
     with tf.Session() as sess:
         sess.run(tf.global_variables_initializer())
-        """@nni.variable(nni.choice(50, 250, 500), name=batch_num)"""
-        batch_num = params['batch_num']
-        for i in range(batch_num):
-            batch = mnist.train.next_batch(batch_num)
+        """@nni.variable(nni.choice(1, 4, 8, 16, 32), name=batch_size)"""
+        batch_size = params['batch_size']
+        for i in range(params['batch_num']):
+            batch = mnist.train.next_batch(batch_size)
             """@nni.variable(nni.choice(1, 5), name=dropout_rate)"""
             dropout_rate = params['dropout_rate']
             mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
                                                     mnist_network.labels: batch[1],
@@ -224,7 +224,8 @@ def generate_defualt_params():
         'pool_size': 2,
         'hidden_size': 1024,
         'learning_rate': 1e-4,
-        'batch_num': 200}
+        'batch_num': 2000,
+        'batch_size': 32}
     return params

From 18e184d4077dc52f164fadadad000243420e8b73 Mon Sep 17 00:00:00 2001
From: quzha
Date: Tue, 6 Nov 2018 09:45:54 +0800
Subject: [PATCH 02/12] fix mnist-annotation typo

---
 examples/trials/mnist-annotation/mnist.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/trials/mnist-annotation/mnist.py b/examples/trials/mnist-annotation/mnist.py
index 83657eb339..f2e161ea00 100644
--- a/examples/trials/mnist-annotation/mnist.py
+++ b/examples/trials/mnist-annotation/mnist.py
@@ -157,8 +157,8 @@ def main(params):
     '''
     # Import data
     mnist = input_data.read_data_sets(params['data_dir'], one_hot=True)
-    print('Mnist download data down.')
-    logger.debug('Mnist download data down.')
+    print('Mnist download data done.')
+    logger.debug('Mnist download data done.')

     # Create the model
     # Build the graph for the deep net

From d7bd43c29e37c4c8262e5f3b5fc6c941e901db84 Mon Sep 17 00:00:00 2001
From: quzha
Date: Tue, 6 Nov 2018 09:58:54 +0800
Subject: [PATCH 03/12] update mnist example

---
 examples/trials/mnist/mnist.py          | 5 +++--
 examples/trials/mnist/search_space.json | 1 +
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/examples/trials/mnist/mnist.py b/examples/trials/mnist/mnist.py
index d5c6347b5a..6bca6eb793 100644
--- a/examples/trials/mnist/mnist.py
+++ b/examples/trials/mnist/mnist.py
@@ -173,7 +173,7 @@ def main(params):
     with tf.Session() as sess:
         sess.run(tf.global_variables_initializer())
         for i in range(params['batch_num']):
-            batch = mnist.train.next_batch(params['batch_num'])
+            batch = mnist.train.next_batch(params['batch_size'])
             mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
                                                     mnist_network.labels: batch[1],
                                                     mnist_network.keep_prob: params['dropout_rate']}
                                                     )

             if i % 100 == 0:
@@ -212,7 +212,8 @@ def generate_default_params():
         'pool_size': 2,
         'hidden_size': 1024,
         'learning_rate': 1e-4,
-        'batch_num': 200}
+        'batch_num': 2000,
+        'batch_size': 32}
     return params

diff --git a/examples/trials/mnist/search_space.json b/examples/trials/mnist/search_space.json
index 1d416cbfa6..3ddeb0ba82 100644
--- a/examples/trials/mnist/search_space.json
+++ b/examples/trials/mnist/search_space.json
@@ -2,5 +2,6 @@
     "dropout_rate":{"_type":"uniform","_value":[0.1,0.5]},
     "conv_size":{"_type":"choice","_value":[2,3,5,7]},
     "hidden_size":{"_type":"choice","_value":[124, 512, 1024]},
+    "batch_size": {"_type":"choice", "_value": [1, 4, 8, 16, 32]},
     "learning_rate":{"_type":"choice","_value":[0.0001, 0.001, 0.01, 0.1]}
 }

From c889813373968c6d48b55c5e221142cbd623d6d0 Mon Sep 17 00:00:00 2001
From: quzha
Date: Tue, 6 Nov 2018 10:16:17 +0800
Subject: [PATCH 04/12] update mnist-smartparam

---
 examples/trials/mnist-smartparam/mnist.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/examples/trials/mnist-smartparam/mnist.py b/examples/trials/mnist-smartparam/mnist.py
index a3d8a98578..23f6679c75 100644
--- a/examples/trials/mnist-smartparam/mnist.py
+++ b/examples/trials/mnist-smartparam/mnist.py
@@ -180,9 +180,9 @@ def main(params):
     test_acc = 0.0
     with tf.Session() as sess:
         sess.run(tf.global_variables_initializer())
-        batch_num = nni.choice(50, 250, 500, name='batch_num')
-        for i in range(batch_num):
-            batch = mnist.train.next_batch(batch_num)
+        batch_size = nni.choice(50, 250, 500, name='batch_size')
+        for i in range(2000):
+            batch = mnist.train.next_batch(batch_size)
             dropout_rate = nni.choice(1, 5, name='dropout_rate')
             mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
                                                     mnist_network.labels: batch[1],

From 125a9544c6e8444a76a7f477997900cf9afd4c45 Mon Sep 17 00:00:00 2001
From: quzha
Date: Tue, 6 Nov 2018 10:23:51 +0800
Subject: [PATCH 05/12] update mnist-annotation

---
 examples/trials/mnist-annotation/mnist.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/trials/mnist-annotation/mnist.py b/examples/trials/mnist-annotation/mnist.py
index f2e161ea00..3fe72b1f13 100644
--- a/examples/trials/mnist-annotation/mnist.py
+++ b/examples/trials/mnist-annotation/mnist.py
@@ -32,7 +32,7 @@ def __init__(self,
         """@nni.variable(nni.choice(124, 512, 1024), name=self.hidden_size)"""
         self.hidden_size = hidden_size
         self.pool_size = pool_size
-        """@nni.variable(nni.uniform(0.0001, 0.1), name=self.learning_rate)"""
+        """@nni.variable(nni.loguniform(0.0001, 0.1), name=self.learning_rate)"""
         self.learning_rate = learning_rate
         self.x_dim = x_dim
         self.y_dim = y_dim
@@ -184,7 +184,7 @@ def main(params):
         batch_size = params['batch_size']
         for i in range(params['batch_num']):
             batch = mnist.train.next_batch(batch_size)
-            """@nni.variable(nni.choice(1, 5), name=dropout_rate)"""
+            """@nni.variable(nni.choice(0.1, 0.5, 1), name=dropout_rate)"""
             dropout_rate = params['dropout_rate']
             mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
                                                     mnist_network.labels: batch[1],

From b3ac62aeb49b7bd0fdd8e3736c044d39ee2fe3b2 Mon Sep 17 00:00:00 2001
From: quzha
Date: Tue, 6 Nov 2018 10:28:08 +0800
Subject: [PATCH 06/12] update mnist-smartparam

---
 examples/trials/mnist-smartparam/mnist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/trials/mnist-smartparam/mnist.py b/examples/trials/mnist-smartparam/mnist.py
index 23f6679c75..ba33c3d9fc 100644
--- a/examples/trials/mnist-smartparam/mnist.py
+++ b/examples/trials/mnist-smartparam/mnist.py
@@ -180,7 +180,7 @@ def main(params):
     test_acc = 0.0
     with tf.Session() as sess:
         sess.run(tf.global_variables_initializer())
-        batch_size = nni.choice(50, 250, 500, name='batch_size')
+        batch_size = nni.choice(1, 4, 8, 16, 32, name='batch_size')
         for i in range(2000):
             batch = mnist.train.next_batch(batch_size)
             dropout_rate = nni.choice(1, 5, name='dropout_rate')
From c3e539da9f9c7425124300c41e9ff872284245b5 Mon Sep 17 00:00:00 2001
From: quzha
Date: Tue, 6 Nov 2018 18:05:45 +0800
Subject: [PATCH 07/12] change learning rate

---
 examples/trials/mnist-annotation/mnist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/trials/mnist-annotation/mnist.py b/examples/trials/mnist-annotation/mnist.py
index 3fe72b1f13..33442be1ef 100644
--- a/examples/trials/mnist-annotation/mnist.py
+++ b/examples/trials/mnist-annotation/mnist.py
@@ -32,7 +32,7 @@ def __init__(self,
         """@nni.variable(nni.choice(124, 512, 1024), name=self.hidden_size)"""
         self.hidden_size = hidden_size
         self.pool_size = pool_size
-        """@nni.variable(nni.loguniform(0.0001, 0.1), name=self.learning_rate)"""
+        """@nni.variable(nni.uniform(0.0001, 0.1), name=self.learning_rate)"""
         self.learning_rate = learning_rate
         self.x_dim = x_dim
         self.y_dim = y_dim

From a39d79402ea6e89c8345e05abea022bab5d719f3 Mon Sep 17 00:00:00 2001
From: quzha
Date: Wed, 7 Nov 2018 14:18:50 +0800
Subject: [PATCH 08/12] update mnist assessor maxTrialNum

---
 examples/trials/mnist/config_assessor.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/trials/mnist/config_assessor.yml b/examples/trials/mnist/config_assessor.yml
index 862a262a28..4decacb804 100644
--- a/examples/trials/mnist/config_assessor.yml
+++ b/examples/trials/mnist/config_assessor.yml
@@ -2,7 +2,7 @@ authorName: default
 experimentName: example_mnist
 trialConcurrency: 1
 maxExecDuration: 1h
-maxTrialNum: 1
+maxTrialNum: 100
 #choice: local, remote
 trainingServicePlatform: local
 searchSpacePath: ~/nni/examples/trials/mnist/search_space.json
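Note: patch 08 matters because maxTrialNum: 1 ended the assessor example after a single trial, leaving the tuner nothing to explore and the assessor nothing to act on; 100 (later trimmed to 20 in patch 11) gives the experiment room to work. The search space this config points at is the one assembled in patch 03; reconstructed from that diff's context and added lines (indentation approximate, and patches 09 and 10 below narrow dropout_rate again), it reads:

    {
        "dropout_rate":{"_type":"uniform","_value":[0.1,0.5]},
        "conv_size":{"_type":"choice","_value":[2,3,5,7]},
        "hidden_size":{"_type":"choice","_value":[124, 512, 1024]},
        "batch_size": {"_type":"choice", "_value": [1, 4, 8, 16, 32]},
        "learning_rate":{"_type":"choice","_value":[0.0001, 0.001, 0.01, 0.1]}
    }

The tuner samples one value per "_type"/"_value" spec and hands the result to the trial, which overrides the defaults from generate_default_params() with whatever it received.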
From 0cd3d5a45ffabcc71eee9d1a56f845fa44613daa Mon Sep 17 00:00:00 2001
From: quzha
Date: Wed, 7 Nov 2018 16:38:05 +0800
Subject: [PATCH 09/12] update examples

---
 examples/trials/mnist-annotation/mnist.py | 4 ++--
 examples/trials/mnist-smartparam/mnist.py | 4 ++--
 examples/trials/mnist/mnist.py            | 2 +-
 examples/trials/mnist/search_space.json   | 2 +-
 4 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/examples/trials/mnist-annotation/mnist.py b/examples/trials/mnist-annotation/mnist.py
index 33442be1ef..6844da34a0 100644
--- a/examples/trials/mnist-annotation/mnist.py
+++ b/examples/trials/mnist-annotation/mnist.py
@@ -184,11 +184,11 @@ def main(params):
         batch_size = params['batch_size']
         for i in range(params['batch_num']):
             batch = mnist.train.next_batch(batch_size)
-            """@nni.variable(nni.choice(0.1, 0.5, 1), name=dropout_rate)"""
+            """@nni.variable(nni.choice(0.1, 0.2, 0.5, 0.8), name=dropout_rate)"""
             dropout_rate = params['dropout_rate']
             mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
                                                     mnist_network.labels: batch[1],
-                                                    mnist_network.keep_prob: dropout_rate}
+                                                    mnist_network.keep_prob: 1 - dropout_rate}
                                                     )

             if i % 100 == 0:
diff --git a/examples/trials/mnist-smartparam/mnist.py b/examples/trials/mnist-smartparam/mnist.py
index ba33c3d9fc..29f1a5a862 100644
--- a/examples/trials/mnist-smartparam/mnist.py
+++ b/examples/trials/mnist-smartparam/mnist.py
@@ -183,10 +183,10 @@ def main(params):
         batch_size = nni.choice(1, 4, 8, 16, 32, name='batch_size')
         for i in range(2000):
             batch = mnist.train.next_batch(batch_size)
-            dropout_rate = nni.choice(1, 5, name='dropout_rate')
+            dropout_rate = nni.choice(0.1, 0.2, 0.5, 0.8, name='dropout_rate')
             mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
                                                     mnist_network.labels: batch[1],
-                                                    mnist_network.keep_prob: dropout_rate}
+                                                    mnist_network.keep_prob: 1 - dropout_rate}
                                                     )

             if i % 100 == 0:
diff --git a/examples/trials/mnist/mnist.py b/examples/trials/mnist/mnist.py
index 6bca6eb793..d6384ecdef 100644
--- a/examples/trials/mnist/mnist.py
+++ b/examples/trials/mnist/mnist.py
@@ -176,7 +176,7 @@ def main(params):
             batch = mnist.train.next_batch(params['batch_size'])
             mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
                                                     mnist_network.labels: batch[1],
-                                                    mnist_network.keep_prob: params['dropout_rate']}
+                                                    mnist_network.keep_prob: 1 - params['dropout_rate']}
                                                     )

             if i % 100 == 0:
diff --git a/examples/trials/mnist/search_space.json b/examples/trials/mnist/search_space.json
index 3ddeb0ba82..acaf02e809 100644
--- a/examples/trials/mnist/search_space.json
+++ b/examples/trials/mnist/search_space.json
@@ -1,5 +1,5 @@
 {
-    "dropout_rate":{"_type":"uniform","_value":[0.1,0.5]},
+    "dropout_rate":{"_type":"uniform","_value":[0.1, 0.2, 0.5, 0.8]},
     "conv_size":{"_type":"choice","_value":[2,3,5,7]},
     "hidden_size":{"_type":"choice","_value":[124, 512, 1024]},
     "batch_size": {"_type":"choice", "_value": [1, 4, 8, 16, 32]},

From 46fb85f2277b315cf761a26220021042fc57764f Mon Sep 17 00:00:00 2001
From: quzha
Date: Wed, 7 Nov 2018 17:24:23 +0800
Subject: [PATCH 10/12] update examples

---
 examples/trials/mnist-annotation/mnist.py | 2 +-
 examples/trials/mnist-smartparam/mnist.py | 2 +-
 examples/trials/mnist/search_space.json   | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/examples/trials/mnist-annotation/mnist.py b/examples/trials/mnist-annotation/mnist.py
index 6844da34a0..829c2fe65d 100644
--- a/examples/trials/mnist-annotation/mnist.py
+++ b/examples/trials/mnist-annotation/mnist.py
@@ -184,7 +184,7 @@ def main(params):
         batch_size = params['batch_size']
         for i in range(params['batch_num']):
             batch = mnist.train.next_batch(batch_size)
-            """@nni.variable(nni.choice(0.1, 0.2, 0.5, 0.8), name=dropout_rate)"""
+            """@nni.variable(nni.choice(0.5, 0.9), name=dropout_rate)"""
             dropout_rate = params['dropout_rate']
             mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
                                                     mnist_network.labels: batch[1],
diff --git a/examples/trials/mnist-smartparam/mnist.py b/examples/trials/mnist-smartparam/mnist.py
index 29f1a5a862..28b9fb8b31 100644
--- a/examples/trials/mnist-smartparam/mnist.py
+++ b/examples/trials/mnist-smartparam/mnist.py
@@ -183,7 +183,7 @@ def main(params):
         batch_size = nni.choice(1, 4, 8, 16, 32, name='batch_size')
         for i in range(2000):
             batch = mnist.train.next_batch(batch_size)
-            dropout_rate = nni.choice(0.1, 0.2, 0.5, 0.8, name='dropout_rate')
+            dropout_rate = nni.choice(0.5, 0.9, name='dropout_rate')
             mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
                                                     mnist_network.labels: batch[1],
                                                     mnist_network.keep_prob: 1 - dropout_rate}
diff --git a/examples/trials/mnist/search_space.json b/examples/trials/mnist/search_space.json
index acaf02e809..c9b68a7a4f 100644
--- a/examples/trials/mnist/search_space.json
+++ b/examples/trials/mnist/search_space.json
@@ -1,5 +1,5 @@
 {
-    "dropout_rate":{"_type":"uniform","_value":[0.1, 0.2, 0.5, 0.8]},
+    "dropout_rate":{"_type":"uniform","_value":[0.5, 0.9]},
     "conv_size":{"_type":"choice","_value":[2,3,5,7]},
     "hidden_size":{"_type":"choice","_value":[124, 512, 1024]},
     "batch_size": {"_type":"choice", "_value": [1, 4, 8, 16, 32]},
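Note: besides narrowing the value range, patches 09 and 10 change what the trial feeds into the graph. The placeholder being filled is keep_prob (in TF 1.x dropout, the probability that a unit is kept), while the tuned value is a dropout rate, so the examples now convert one into the other instead of passing the rate straight through. A minimal sketch of the resulting logic, with an illustrative sample from the final uniform [0.5, 0.9] range:

    dropout_rate = 0.9            # sampled by the tuner; 0.9 is just an example value
    keep_prob = 1 - dropout_rate  # 0.1, i.e. only 10% of activations survive dropout

Before this change the sampled rate was fed into keep_prob directly, which inverted the meaning of the parameter.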
From 29381ff9796d2aecf8c8f09d988807ded47b8ffc Mon Sep 17 00:00:00 2001
From: quzha
Date: Wed, 7 Nov 2018 17:43:22 +0800
Subject: [PATCH 11/12] update maxTrialNum

---
 examples/trials/mnist/config_assessor.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/trials/mnist/config_assessor.yml b/examples/trials/mnist/config_assessor.yml
index 4decacb804..a1ae0dcd94 100644
--- a/examples/trials/mnist/config_assessor.yml
+++ b/examples/trials/mnist/config_assessor.yml
@@ -2,7 +2,7 @@ authorName: default
 experimentName: example_mnist
 trialConcurrency: 1
 maxExecDuration: 1h
-maxTrialNum: 100
+maxTrialNum: 20
 #choice: local, remote
 trainingServicePlatform: local
 searchSpacePath: ~/nni/examples/trials/mnist/search_space.json

From dcbf0da4f17d7666c8099acd8763ce5f2eac97ee Mon Sep 17 00:00:00 2001
From: quzha
Date: Wed, 7 Nov 2018 17:57:31 +0800
Subject: [PATCH 12/12] fix breaking path in config_assessor.yml

---
 examples/trials/mnist/config_assessor.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/trials/mnist/config_assessor.yml b/examples/trials/mnist/config_assessor.yml
index a1ae0dcd94..d5a3c33d23 100644
--- a/examples/trials/mnist/config_assessor.yml
+++ b/examples/trials/mnist/config_assessor.yml
@@ -5,7 +5,7 @@ maxExecDuration: 1h
 maxTrialNum: 20
 #choice: local, remote
 trainingServicePlatform: local
-searchSpacePath: ~/nni/examples/trials/mnist/search_space.json
+searchSpacePath: search_space.json
 #choice: true, false
 useAnnotation: false
 tuner:
@@ -23,5 +23,5 @@ assessor:
     optimize_mode: maximize
 trial:
   command: python3 mnist.py
-  codeDir: ~/nni/examples/trials/mnist
+  codeDir: .
   gpuNum: 0
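Note: taken together, patches 08, 11 and 12 leave config_assessor.yml looking roughly like this, condensed to lines that appear in the diffs (the tuner and assessor class settings are unchanged and omitted):

    trialConcurrency: 1
    maxExecDuration: 1h
    maxTrialNum: 20
    trainingServicePlatform: local
    searchSpacePath: search_space.json
    useAnnotation: false
    trial:
      command: python3 mnist.py
      codeDir: .
      gpuNum: 0

Swapping the hard-coded ~/nni/... paths for relative ones means the example no longer assumes the repository is checked out at ~/nni, which appears to be the breaking path the patch 12 subject refers to.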