This repository has been archived by the owner on Sep 18, 2024. It is now read-only.

update docstring and pylint #1662

Merged
merged 27 commits on Nov 11, 2019
3 changes: 3 additions & 0 deletions docs/en_US/sdk_reference.rst
@@ -36,6 +36,9 @@ Tuner
.. autoclass:: nni.metis_tuner.metis_tuner.MetisTuner
:members:

.. autoclass:: nni.batch_tuner.batch_tuner.BatchTuner
:members:

Assessor
------------------------
.. autoclass:: nni.assessor.Assessor
63 changes: 42 additions & 21 deletions src/sdk/pynni/nni/batch_tuner/batch_tuner.py
@@ -31,22 +31,27 @@ class BatchTuner
CHOICE = 'choice'
VALUE = '_value'

logger = logging.getLogger('batch_tuner_AutoML')
LOGGER = logging.getLogger('batch_tuner_AutoML')

class BatchTuner(Tuner):
"""
BatchTuner is a tuner that runs every configuration the user supplies as a batch, one trial per configuration.

Examples
--------
The only accepted search space format is:
```
{
    'combine_params': {
        '_type': 'choice',
        '_value': [{...}, {...}, {...}],
    }
}
```
"""

def __init__(self):
self.count = -1
self.values = []
self._count = -1
self._values = []

def is_valid(self, search_space):
"""
@@ -55,18 +60,26 @@ def is_valid(self, search_space):
Parameters
----------
search_space : dict

Returns
-------
None or list
If valid, return candidate values; else return None.
"""
if len(search_space) != 1:
raise RuntimeError('BatchTuner only supports one combined-parameters key.')

for param in search_space:
param_type = search_space[param][TYPE]
if param_type != CHOICE:
raise RuntimeError('BatchTuner only supports one combined-parameters type, which is choice.')
else:
if isinstance(search_space[param][VALUE], list):
return search_space[param][VALUE]
raise RuntimeError('The combined-parameters value in BatchTuner is not a list.')
raise RuntimeError('BatchTuner only supports one '
                   'combined-parameters type, which is choice.')

if isinstance(search_space[param][VALUE], list):
return search_space[param][VALUE]

raise RuntimeError('The combined-parameters value '
                   'in BatchTuner is not a list.')
return None
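For reference, a minimal sketch of a search space that `is_valid` accepts — the candidate configurations here are hypothetical:

```python
# Hypothetical BatchTuner search space: exactly one combined-parameters key,
# whose _type is 'choice' and whose _value lists the configurations to run.
search_space = {
    'combine_params': {
        '_type': 'choice',
        '_value': [
            {'lr': 0.1, 'batch_size': 32},
            {'lr': 0.01, 'batch_size': 64},
        ],
    }
}
# is_valid(search_space) returns the _value list above; a second top-level
# key, a non-choice _type, or a non-list _value raises RuntimeError.
```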

def update_search_space(self, search_space):
@@ -76,49 +89,57 @@ def update_search_space(self, search_space):
----------
search_space : dict
"""
self.values = self.is_valid(search_space)
self._values = self.is_valid(search_space)

def generate_parameters(self, parameter_id, **kwargs):
"""Returns a dict of trial (hyper-)parameters, as a serializable object.

Parameters
----------
parameter_id : int

Returns
-------
dict
A candidate parameter group.
"""
self.count += 1
if self.count > len(self.values) - 1:
self._count += 1
if self._count > len(self._values) - 1:
raise nni.NoMoreTrialError('no more parameters now.')
return self.values[self.count]
return self._values[self._count]
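A sketch of the resulting call sequence (hypothetical driver code, not part of this PR), using the two-candidate search space from the earlier sketch:

```python
tuner = BatchTuner()
tuner.update_search_space(search_space)  # stores the candidate list in _values

first = tuner.generate_parameters(parameter_id=0)   # {'lr': 0.1, 'batch_size': 32}
second = tuner.generate_parameters(parameter_id=1)  # {'lr': 0.01, 'batch_size': 64}
# A third call would raise nni.NoMoreTrialError: every candidate has been used.
```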

def receive_trial_result(self, parameter_id, parameters, value, **kwargs):
pass

def import_data(self, data):
"""Import additional data for tuning

Parameters
----------
data : list
A list of dictionaries, each of which has at least two keys, 'parameter' and 'value'.
"""
if not self.values:
logger.info("Search space has not been initialized, skipping this data import")
if not self._values:
LOGGER.info("Search space has not been initialized, skipping this data import")
return

self.values = self.values[(self.count+1):]
self.count = -1
self._values = self._values[(self._count+1):]
self._count = -1

_completed_num = 0
for trial_info in data:
logger.info("Importing data, current processing progress %s / %s", _completed_num, len(data))
LOGGER.info("Importing data, current processing progress %s / %s",
            _completed_num, len(data))
# simply validate data format
assert "parameter" in trial_info
_params = trial_info["parameter"]
assert "value" in trial_info
_value = trial_info['value']
if not _value:
logger.info("Useless trial data, value is %s, skipping this trial data.", _value)
LOGGER.info("Useless trial data, value is %s, skipping this trial data.", _value)
continue
_completed_num += 1
if _params in self.values:
self.values.remove(_params)
logger.info("Successfully import data to batch tuner, total data: %d, imported data: %d.", len(data), _completed_num)
if _params in self._values:
self._values.remove(_params)
LOGGER.info("Successfully imported data to batch tuner, "
            "total data: %d, imported data: %d.", len(data), _completed_num)
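To illustrate the `import_data` contract above, a sketch of the expected payload, continuing the earlier driver sketch (the records are made up):

```python
# Each record needs at least 'parameter' and 'value'; falsy values are skipped.
data = [
    {'parameter': {'lr': 0.1, 'batch_size': 32}, 'value': 0.92},
    {'parameter': {'lr': 0.01, 'batch_size': 64}, 'value': None},  # skipped
]
tuner.import_data(data)
# Parameters already covered by valid records are removed from _values,
# so the tuner will not schedule duplicate trials for them.
```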
52 changes: 44 additions & 8 deletions src/sdk/pynni/nni/evolution_tuner/evolution_tuner.py
@@ -32,7 +32,9 @@


def json2space(x, oldy=None, name=NodeType.ROOT):
"""Change search space from json format to hyperopt format
"""
Change search space from json format to hyperopt format

"""
y = list()
if isinstance(x, dict):
@@ -59,7 +61,9 @@ def json2space(x, oldy=None, name=NodeType.ROOT):
return y

def json2parameter(x, is_rand, random_state, oldy=None, Rand=False, name=NodeType.ROOT):
"""Json to pramaters.
"""
Json to pramaters.

"""
if isinstance(x, dict):
if NodeType.TYPE in x.keys():
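A rough sketch of how these two helpers fit together, mirroring how `update_search_space` uses them later in this diff; the search space and the shape of the entries `json2space` returns are assumptions:

```python
import numpy as np

# Hypothetical one-parameter search space in NNI's JSON format.
search_space = {'lr': {'_type': 'choice', '_value': [0.1, 0.01]}}

space = json2space(search_space)        # list of parameter keys/paths in the space
is_rand = {key: True for key in space}  # mark every parameter for fresh sampling
random_state = np.random.RandomState()

# One concrete configuration sampled in hyperopt-style format:
config = json2parameter(search_space, is_rand, random_state)
```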
@@ -117,13 +121,25 @@ def json2parameter(x, is_rand, random_state, oldy=None, Rand=False, name=NodeType.ROOT):
class Individual:
"""
Individual class to store the information of an individual.

Attributes
----------
config : str
A config to represent a group of parameters.
info : str
The string that stores information about the individual.
result : float
The final metric of an individual.
store_dir : str
save_dir : str
"""

def __init__(self, config=None, info=None, result=None, save_dir=None):
"""
Parameters
----------
config : str
A config to represent a group of parameters.
info : str
result : float
save_dir : str
@@ -140,6 +156,8 @@ def __str__(self):

def mutation(self, config=None, info=None, save_dir=None):
"""
Mutate by resetting the state information.

Parameters
----------
config : str
@@ -177,8 +195,11 @@ def __init__(self, optimize_mode, population_size=32):
self.population = None
self.space = None


def update_search_space(self, search_space):
"""Update search space.
"""
Update search space.

Search_space contains the information that the user pre-defined.

Parameters
@@ -191,31 +212,39 @@ def update_search_space(self, search_space):
self.random_state = np.random.RandomState()
self.population = []
is_rand = dict()

for item in self.space:
is_rand[item] = True

for _ in range(self.population_size):
config = json2parameter(
self.searchspace_json, is_rand, self.random_state)
self.population.append(Individual(config=config))


def generate_parameters(self, parameter_id, **kwargs):
"""Returns a dict of trial (hyper-)parameters, as a serializable object.
"""
Returns a dict of trial (hyper-)parameters, as a serializable object.

Parameters
----------
parameter_id : int

Returns
-------
config : dict
dict
A group of candidate parameters that the evolution tuner generated.
"""
if not self.population:
raise RuntimeError('The population is empty')

pos = -1

for i in range(len(self.population)):
if self.population[i].result is None:
pos = i
break

if pos != -1:
indiv = copy.deepcopy(self.population[pos])
self.population.pop(pos)
Expand All @@ -230,6 +259,7 @@ def generate_parameters(self, parameter_id, **kwargs):
self.population[0].config)
is_rand = dict()
mutation_pos = space[random.randint(0, len(space)-1)]

for i in range(len(self.space)):
is_rand[self.space[i]] = (self.space[i] == mutation_pos)
config = json2parameter(
@@ -238,21 +268,27 @@ def generate_parameters(self, parameter_id, **kwargs):
# remove "_index" from config and save the parameter id

total_config = config

self.total_data[parameter_id] = total_config
config = split_index(total_config)

return config
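The single-parameter mutation above is driven by the `is_rand` mask; a small illustration with hypothetical parameter names:

```python
import random

# Suppose the space lists three parameter keys (hypothetical names):
space = ['lr', 'batch_size', 'dropout']
mutation_pos = space[random.randint(0, len(space) - 1)]  # e.g. 'batch_size'

# Only the chosen position is marked for resampling; json2parameter keeps the
# inherited value for every key whose mask entry is False.
is_rand = {key: (key == mutation_pos) for key in space}
```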


def receive_trial_result(self, parameter_id, parameters, value, **kwargs):
'''Record the result from a trial
"""
Record the result from a trial

Parameters
----------
parameters: dict
parameter_id : int
parameters : dict
value : dict/float
If value is a dict, it should have a "default" key.
value is the final metric of the trial.
'''
"""
reward = extract_scalar_reward(value)

if parameter_id not in self.total_data:
raise RuntimeError('Received parameter_id not in total_data.')
# restore the parameters that contain "_index"
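Both accepted shapes of `value`, per the docstring above (the metrics are hypothetical):

```python
# Scalar form: the metric itself.
tuner.receive_trial_result(parameter_id=0, parameters={'lr': 0.1}, value=0.93)

# Dict form: must carry a "default" key, which extract_scalar_reward reads.
tuner.receive_trial_result(parameter_id=1, parameters={'lr': 0.01},
                           value={'default': 0.95, 'val_loss': 0.21})
```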
6 changes: 4 additions & 2 deletions src/sdk/pynni/nni/hyperopt_tuner/hyperopt_tuner.py
@@ -422,7 +422,8 @@ def miscs_update_idxs_vals(self,
misc_by_id[tid]['vals'][key] = [val]

def get_suggestion(self, random_search=False):
"""get suggestion from hyperopt
"""
get suggestion from hyperopt

Parameters
----------
@@ -473,7 +474,8 @@ def get_suggestion(self, random_search=False):
return total_params

def import_data(self, data):
"""Import additional data for tuning
"""
Import additional data for tuning

Parameters
----------
16 changes: 10 additions & 6 deletions src/sdk/pynni/nni/metis_tuner/Regression_GMM/CreateModel.py
@@ -16,7 +16,8 @@
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

import os
import sys
@@ -31,21 +32,25 @@ def create_model(samples_x, samples_y_aggregation, percentage_goodbatch=0.34):
'''
Create the Gaussian Mixture Model
'''
samples = [samples_x[i] + [samples_y_aggregation[i]] for i in range(0, len(samples_x))]
samples = [samples_x[i] + [samples_y_aggregation[i]]
for i in range(0, len(samples_x))]

# Sorts so that we can get the top samples
samples = sorted(samples, key=itemgetter(-1))
samples_goodbatch_size = int(len(samples) * percentage_goodbatch)
samples_goodbatch = samples[0:samples_goodbatch_size]
samples_badbatch = samples[samples_goodbatch_size:]

samples_x_goodbatch = [sample_goodbatch[0:-1] for sample_goodbatch in samples_goodbatch]
samples_x_goodbatch = [sample_goodbatch[0:-1]
for sample_goodbatch in samples_goodbatch]
#samples_y_goodbatch = [sample_goodbatch[-1] for sample_goodbatch in samples_goodbatch]
samples_x_badbatch = [sample_badbatch[0:-1] for sample_badbatch in samples_badbatch]
samples_x_badbatch = [sample_badbatch[0:-1]
for sample_badbatch in samples_badbatch]

# === Trains GMM clustering models === #
#sys.stderr.write("[%s] Train GMM's GMM model\n" % (os.path.basename(__file__)))
bgmm_goodbatch = mm.BayesianGaussianMixture(n_components=max(1, samples_goodbatch_size - 1))
bgmm_goodbatch = mm.BayesianGaussianMixture(
n_components=max(1, samples_goodbatch_size - 1))
bad_n_components = max(1, len(samples_x) - samples_goodbatch_size - 1)
bgmm_badbatch = mm.BayesianGaussianMixture(n_components=bad_n_components)
bgmm_goodbatch.fit(samples_x_goodbatch)
Expand All @@ -55,4 +60,3 @@ def create_model(samples_x, samples_y_aggregation, percentage_goodbatch=0.34):
model['clusteringmodel_good'] = bgmm_goodbatch
model['clusteringmodel_bad'] = bgmm_badbatch
return model
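A hedged usage sketch of `create_model`; the sample data is invented, and in practice the Metis tuner supplies `samples_x` and `samples_y_aggregation`:

```python
# Ten two-feature configurations with made-up aggregated metrics.
samples_x = [[i / 10.0, i % 3] for i in range(10)]
samples_y_aggregation = [0.1 * i for i in range(10)]

model = create_model(samples_x, samples_y_aggregation)  # default 34% "good" batch
# Samples are sorted ascending by metric; the best ~34% fit one
# BayesianGaussianMixture and the remainder fit the other.
good_gmm = model['clusteringmodel_good']
bad_gmm = model['clusteringmodel_bad']
```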
