Commit
Add iterative search space example (#119)
* update readme in ga_squad
* update readme
* fix typo
* Update README.md
* Update README.md
* Update README.md
* update readme
* add iterative search space example
* update
* update readme
* change name
1 parent 1c22c76, commit 6ef6511
Showing 5 changed files with 250 additions and 0 deletions.
@@ -0,0 +1,20 @@
authorName: default
experimentName: mnist-cascading-search-space
trialConcurrency: 2
maxExecDuration: 1h
maxTrialNum: 100
#choice: local, remote
trainingServicePlatform: local
searchSpacePath: search_space.json
#choice: true, false
useAnnotation: false
tuner:
  #choice: TPE, Random, Anneal, Evolution
  builtinTunerName: TPE
  classArgs:
    #choice: maximize, minimize
    optimize_mode: maximize
trial:
  command: python3 mnist.py
  codeDir: .
  gpuNum: 0
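This is the NNI experiment config for the example. Assuming it is saved as config.yml alongside mnist.py and search_space.json (the layout the paths above suggest), the experiment can be launched with nnictl create --config config.yml; the TPE tuner then samples an architecture from search_space.json for each trial and runs python3 mnist.py on it.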
@@ -0,0 +1,164 @@
'''
mnist.py is an example showing how to use an iterative (cascading) search space to tune the network architecture for MNIST.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import argparse
import codecs
import json
import logging
import math
import sys
import tempfile
import tensorflow as tf

from tensorflow.examples.tutorials.mnist import input_data

import nni


logger = logging.getLogger('mnist_cascading_search_space')
FLAGS = None

class MnistNetwork(object):
    def __init__(self, params, feature_size=784):
        config = []

        for i in range(10):
            config.append(params['layer' + str(i)])
        self.config = config
        self.feature_size = feature_size
        self.label_size = 10


    def is_expand_dim(self, input):
        # input is a tensor
        shape = len(input.get_shape().as_list())
        if shape < 4:
            return True
        return False


    def is_flatten(self, input):
        # input is a tensor
        shape = len(input.get_shape().as_list())
        if shape > 2:
            return True
        return False


    def get_layer(self, layer_config, input, in_height, in_width, id):
        if layer_config[0] == 'Empty':
            return input

        if self.is_expand_dim(input):
            input = tf.reshape(input, [-1, in_height, in_width, 1])
        h, w = layer_config[1], layer_config[2]

        if layer_config[0] == 'Conv':
            conv_filter = tf.Variable(tf.random_uniform([h, w, 1, 1]), name='id_%d_conv_%d_%d' % (id, h, w))
            return tf.nn.conv2d(input, filter=conv_filter, strides=[1, 1, 1, 1], padding='SAME')
        if layer_config[0] == 'Max_pool':
            return tf.nn.max_pool(input, ksize=[1, h, w, 1], strides=[1, 1, 1, 1], padding='SAME')
        if layer_config[0] == 'Avg_pool':
            return tf.nn.avg_pool(input, ksize=[1, h, w, 1], strides=[1, 1, 1, 1], padding='SAME')

        print('error:', layer_config)
        raise Exception('%s layer is illegal' % layer_config[0])


    def build_network(self):
        layer_configs = self.config
        feature_size = 784

        # define placeholder
        self.x = tf.placeholder(tf.float32, [None, feature_size], name="input_x")
        self.y = tf.placeholder(tf.int32, [None, self.label_size], name="input_y")
        label_number = 10

        # define network
        input_layer = self.x
        in_height = in_width = int(math.sqrt(feature_size))
        for i, layer_config in enumerate(layer_configs):
            input_layer = tf.nn.relu(self.get_layer(layer_config, input_layer, in_height, in_width, i))

        output_layer = input_layer
        if self.is_flatten(output_layer):
            output_layer = tf.contrib.layers.flatten(output_layer)  # flatten
        output_layer = tf.layers.dense(output_layer, label_number)
        child_logit = tf.nn.softmax_cross_entropy_with_logits(logits=output_layer, labels=self.y)
        child_loss = tf.reduce_mean(child_logit)

        self.train_step = tf.train.AdamOptimizer(learning_rate=0.0001).minimize(child_loss)
        child_accuracy = tf.equal(tf.argmax(output_layer, 1), tf.argmax(self.y, 1))
        self.accuracy = tf.reduce_mean(tf.cast(child_accuracy, "float"))  # add a reduce_mean

def main(params):
    # Import data
    mnist = input_data.read_data_sets(params['data_dir'], one_hot=True)

    # Create the model
    # Build the graph for the deep net
    mnist_network = MnistNetwork(params)
    mnist_network.build_network()
    print('build network done.')

    # Write log
    graph_location = tempfile.mkdtemp()
    #print('Saving graph to: %s' % graph_location)
    train_writer = tf.summary.FileWriter(graph_location)
    train_writer.add_graph(tf.get_default_graph())

    test_acc = 0.0
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for i in range(params['batch_num']):
            batch = mnist.train.next_batch(params['batch_size'])
            mnist_network.train_step.run(feed_dict={mnist_network.x: batch[0], mnist_network.y: batch[1]})

            if i % 100 == 0:
                train_accuracy = mnist_network.accuracy.eval(feed_dict={
                    mnist_network.x: batch[0], mnist_network.y: batch[1]})
                print('step %d, training accuracy %g' % (i, train_accuracy))

        test_acc = mnist_network.accuracy.eval(feed_dict={
            mnist_network.x: mnist.test.images, mnist_network.y: mnist.test.labels})

    nni.report_final_result(test_acc)


def generate_default_params():
    params = {'data_dir': '/tmp/tensorflow/mnist/input_data',
              'batch_num': 1000,
              'batch_size': 200}
    return params


def parse_init_json(data):
    params = {}
    for key in data:
        value = data[key]
        if value == 'Empty':
            params[key] = ['Empty']
        else:
            # value is [op, {'_index': i, '_value': k}]; use the chosen kernel size k for both height and width
            params[key] = [value[0], value[1]['_value'], value[1]['_value']]
    return params


if __name__ == '__main__':
    try:
        # get parameters from tuner
        data = nni.get_parameters()
        logger.debug(data)

        RCV_PARAMS = parse_init_json(data)
        logger.debug(RCV_PARAMS)
        params = generate_default_params()
        params.update(RCV_PARAMS)
        print(RCV_PARAMS)

        main(params)
    except Exception as exception:
        logger.exception(exception)
        raise
@@ -0,0 +1,3 @@
tensorflow >= 1.3
six == 1.11.0
numpy == 1.13.3
@@ -0,0 +1 @@
{"layer2": "Empty", "layer8": ["Conv", {"_index": 0, "_value": 2}], "layer3": ["Avg_pool", {"_index": 2, "_value": 5}], "layer0": ["Max_pool", {"_index": 2, "_value": 5}], "layer1": ["Conv", {"_index": 0, "_value": 2}], "layer6": ["Max_pool", {"_index": 1, "_value": 3}], "layer7": ["Max_pool", {"_index": 2, "_value": 5}], "layer9": ["Conv", {"_index": 0, "_value": 2}], "layer4": ["Avg_pool", {"_index": 1, "_value": 3}], "layer5": ["Avg_pool", {"_index": 2, "_value": 5}]}
62 changes: 62 additions & 0 deletions examples/trials/mnist-cascading-search-space/search_space.json
@@ -0,0 +1,62 @@ | ||
{ | ||
"layer0":{"_type":"choice","_value":[ | ||
"Empty", | ||
["Conv", {"_type":"choice","_value":[2,3,5]}], | ||
["Max_pool", {"_type":"choice","_value":[2,3,5]}], | ||
["Avg_pool", {"_type":"choice","_value":[2,3,5]}] | ||
]}, | ||
"layer1":{"_type":"choice","_value":[ | ||
"Empty", | ||
["Conv", {"_type":"choice","_value":[2,3,5]}], | ||
["Max_pool", {"_type":"choice","_value":[2,3,5]}], | ||
["Avg_pool", {"_type":"choice","_value":[2,3,5]}] | ||
]}, | ||
"layer2":{"_type":"choice","_value":[ | ||
"Empty", | ||
["Conv", {"_type":"choice","_value":[2,3,5]}], | ||
["Max_pool", {"_type":"choice","_value":[2,3,5]}], | ||
["Avg_pool", {"_type":"choice","_value":[2,3,5]}] | ||
]}, | ||
"layer3":{"_type":"choice","_value":[ | ||
"Empty", | ||
["Conv", {"_type":"choice","_value":[2,3,5]}], | ||
["Max_pool", {"_type":"choice","_value":[2,3,5]}], | ||
["Avg_pool", {"_type":"choice","_value":[2,3,5]}] | ||
]}, | ||
"layer4":{"_type":"choice","_value":[ | ||
"Empty", | ||
["Conv", {"_type":"choice","_value":[2,3,5]}], | ||
["Max_pool", {"_type":"choice","_value":[2,3,5]}], | ||
["Avg_pool", {"_type":"choice","_value":[2,3,5]}] | ||
]}, | ||
"layer5":{"_type":"choice","_value":[ | ||
"Empty", | ||
["Conv", {"_type":"choice","_value":[2,3,5]}], | ||
["Max_pool", {"_type":"choice","_value":[2,3,5]}], | ||
["Avg_pool", {"_type":"choice","_value":[2,3,5]}] | ||
]}, | ||
"layer6":{"_type":"choice","_value":[ | ||
"Empty", | ||
["Conv", {"_type":"choice","_value":[2,3,5]}], | ||
["Max_pool", {"_type":"choice","_value":[2,3,5]}], | ||
["Avg_pool", {"_type":"choice","_value":[2,3,5]}] | ||
]}, | ||
"layer7":{"_type":"choice","_value":[ | ||
"Empty", | ||
["Conv", {"_type":"choice","_value":[2,3,5]}], | ||
["Max_pool", {"_type":"choice","_value":[2,3,5]}], | ||
["Avg_pool", {"_type":"choice","_value":[2,3,5]}] | ||
]}, | ||
"layer8":{"_type":"choice","_value":[ | ||
"Empty", | ||
["Conv", {"_type":"choice","_value":[2,3,5]}], | ||
["Max_pool", {"_type":"choice","_value":[2,3,5]}], | ||
["Avg_pool", {"_type":"choice","_value":[2,3,5]}] | ||
]}, | ||
"layer9":{"_type":"choice","_value":[ | ||
"Empty", | ||
["Conv", {"_type":"choice","_value":[2,3,5]}], | ||
["Max_pool", {"_type":"choice","_value":[2,3,5]}], | ||
["Avg_pool", {"_type":"choice","_value":[2,3,5]}] | ||
]} | ||
} |
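Every layerN entry above shares the same nested choice: an operator (Empty, Conv, Max_pool, Avg_pool) and, for the non-empty operators, a kernel-size choice among 2, 3 and 5. The nesting is what makes the search space cascading: the kernel-size choice only exists under the operator it belongs to. Since the ten entries are identical, the file could also be regenerated with a short script like the hypothetical sketch below (not part of the commit):

import json

# the inner choice of kernel size shared by Conv, Max_pool and Avg_pool
kernel_choice = {"_type": "choice", "_value": [2, 3, 5]}

# one layer slot: either Empty or an operator paired with a kernel-size choice
layer_space = {
    "_type": "choice",
    "_value": [
        "Empty",
        ["Conv", kernel_choice],
        ["Max_pool", kernel_choice],
        ["Avg_pool", kernel_choice],
    ],
}

# ten identical layer slots, layer0 .. layer9
search_space = {"layer%d" % i: layer_space for i in range(10)}

with open("search_space.json", "w") as f:
    json.dump(search_space, f, indent=4)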