[CMAKE] CMake changes, upgrade training test so it converges (#8343)
tqchen authored Oct 27, 2017
1 parent 9ca5caf commit d6f6416
Showing 5 changed files with 38 additions and 30 deletions.
CMakeLists.txt (10 changes: 8 additions & 2 deletions)

@@ -275,7 +275,14 @@ FILE(GLOB_RECURSE SOURCE "src/*.cc" "src/*.h" "include/*.h")
 FILE(GLOB_RECURSE CUDA "src/*.cu" "src/*.cuh")
 
 # add nnvm to source
-FILE(GLOB_RECURSE NNVMSOURCE "nnvm/src/*.cc" "nnvm/src/*.h" "nnvm/include/*.h")
+FILE(GLOB_RECURSE NNVMSOURCE
+  nnvm/src/c_api/*.cc
+  nnvm/src/core/*.cc
+  nnvm/src/pass/*.cc
+  nnvm/src/c_api/*.h
+  nnvm/src/core/*.h
+  nnvm/src/pass/*.h
+  nnvm/include/*.h)
 list(APPEND SOURCE ${NNVMSOURCE})
 
 # add mshadow file
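The narrowed glob is presumably defensive: the nnvm tree ships more than the c_api, core, and pass subsystems that libmxnet embeds, and a blanket recursive glob would sweep any other sources in the updated submodule into the build. A quick way to see what the two rules match, sketched with Python's glob module (illustrative only; the directory layout is assumed from the patterns above, and CMake's GLOB_RECURSE behaves analogously):

import glob

# Old rule: every .cc anywhere under nnvm/src.
old = set(glob.glob("nnvm/src/**/*.cc", recursive=True))

# New rule: only the three subtrees mxnet actually compiles in.
new = set()
for sub in ("c_api", "core", "pass"):
    new.update(glob.glob("nnvm/src/%s/**/*.cc" % sub, recursive=True))

# Anything printed here is source the old glob fed to the compiler
# even though mxnet never references it.
print(sorted(old - new))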
@@ -527,4 +534,3 @@ if(MSVC)
 endif()
 set(LINT_DIRS include src scripts python tests cpp-package)
 add_custom_target(mxnet_lint COMMAND ${CMAKE_COMMAND} -DMSVC=${MSVC} -DPYTHON_EXECUTABLE=${PYTHON_EXECUTABLE} -DLINT_DIRS=${LINT_DIRS} -DPROJECT_SOURCE_DIR=${CMAKE_CURRENT_SOURCE_DIR} -DPROJECT_NAME=mxnet -P ${CMAKE_CURRENT_SOURCE_DIR}/dmlc-core/cmake/lint.cmake)
-
cpp-package/scripts/OpWrapperGenerator.py (11 changes: 7 additions & 4 deletions)

@@ -124,12 +124,15 @@ def __init__(self, opName = '', argName = '', typeString = '', descString = ''):
                 self.defaultString = self.enum.GetDefaultValueString(self.defaultString)
             elif self.defaultString == 'None':
                 self.defaultString = self.type + '()'
-            elif self.defaultString == 'False':
-                self.defaultString = 'false'
-            elif self.defaultString == 'True':
-                self.defaultString = 'true'
+            elif self.type == "bool":
+                if self.defaultString == "1" or self.defaultString == "True":
+                    self.defaultString = "true"
+                else:
+                    self.defaultString = "false"
             elif self.defaultString[0] == '(':
                 self.defaultString = 'Shape' + self.defaultString
             elif self.defaultString[0] == '[':
                 self.defaultString = 'Shape(' + self.defaultString[1:-1] + ")"
+            elif self.type == 'dmlc::optional<int>':
+                self.defaultString = self.type + '(' + self.defaultString + ')'
             elif typeString.startswith('caffe-layer-parameter'):
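Two fixes are folded in here. The bool branch now keys on the declared type rather than the default's spelling, since the operator registry reports boolean defaults both as 'True'/'False' and as '1'/'0'; the old string match left the numeric spellings untouched, so the type check normalizes every boolean default to C++ true/false. The new dmlc::optional<int> branch likewise wraps the raw default in the optional's constructor so the generated default value type-checks. A standalone sketch of the bool mapping (cpp_bool_default is a hypothetical name; in the generator the logic sits inline in the elif chain):

def cpp_bool_default(type_string, default_string):
    # Mirror of the new branch: any bool-typed argument maps onto C++
    # 'true'/'false', whether the registry spells the default as
    # 'True'/'False' or as '1'/'0'.
    if type_string == "bool":
        return "true" if default_string in ("1", "True") else "false"
    return default_string

assert cpp_bool_default("bool", "1") == "true"
assert cpp_bool_default("bool", "False") == "false"
assert cpp_bool_default("int", "1") == "1"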
nnvm (2 changes: 1 addition & 1 deletion)
Submodule nnvm updated 176 files
python/mxnet/visualization.py (44 changes: 22 additions & 22 deletions)

@@ -134,20 +134,20 @@ def print_layer_summary(node, out_shape):
             pre_filter = pre_filter + int(shape[0])
         cur_param = 0
         if op == 'Convolution':
-            if ("no_bias" in node["attr"]) and (node["attr"]["no_bias"] == 'True'):
-                cur_param = pre_filter * int(node["attr"]["num_filter"])
-                for k in _str2tuple(node["attr"]["kernel"]):
+            if ("no_bias" in node["attrs"]) and int(node["attrs"]["no_bias"]):
+                cur_param = pre_filter * int(node["attrs"]["num_filter"])
+                for k in _str2tuple(node["attrs"]["kernel"]):
                     cur_param *= int(k)
             else:
-                cur_param = pre_filter * int(node["attr"]["num_filter"])
-                for k in _str2tuple(node["attr"]["kernel"]):
+                cur_param = pre_filter * int(node["attrs"]["num_filter"])
+                for k in _str2tuple(node["attrs"]["kernel"]):
                     cur_param *= int(k)
-                cur_param += int(node["attr"]["num_filter"])
+                cur_param += int(node["attrs"]["num_filter"])
         elif op == 'FullyConnected':
-            if ("no_bias" in node["attr"]) and (node["attr"]["no_bias"] == 'True'):
-                cur_param = pre_filter * (int(node["attr"]["num_hidden"]))
+            if ("no_bias" in node["attrs"]) and int(node["attrs"]["no_bias"]):
+                cur_param = pre_filter * (int(node["attrs"]["num_hidden"]))
             else:
-                cur_param = (pre_filter+1) * (int(node["attr"]["num_hidden"]))
+                cur_param = (pre_filter+1) * (int(node["attrs"]["num_hidden"]))
         elif op == 'BatchNorm':
             key = node["name"] + "_output"
             if show_shape:
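The counting itself is the textbook arithmetic: a convolution owns pre_filter x num_filter x prod(kernel) weights plus one bias per output filter unless no_bias is set, and a fully connected layer owns (pre_filter + 1) x num_hidden parameters, the +1 being the bias column. Restated as a standalone sketch (hypothetical helper names, same formulas as above):

def conv_params(pre_filter, num_filter, kernel, no_bias=False):
    # weights: one pre_filter x kH x kW block per output filter
    count = pre_filter * num_filter
    for k in kernel:
        count *= int(k)
    # plus one bias per output filter unless suppressed
    return count if no_bias else count + num_filter

def fc_params(pre_filter, num_hidden, no_bias=False):
    return (pre_filter if no_bias else pre_filter + 1) * num_hidden

# e.g. a 3x3 convolution taking 64 channels to 128 filters:
assert conv_params(64, 128, (3, 3)) == 64 * 128 * 9 + 128
assert fc_params(512, 10) == 513 * 10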
@@ -291,24 +291,24 @@ def looks_like_weight(name):
             label = node["name"]
             attr["fillcolor"] = cm[0]
         elif op == "Convolution":
-            label = r"Convolution\n%s/%s, %s" % ("x".join(_str2tuple(node["attr"]["kernel"])),
-                                                 "x".join(_str2tuple(node["attr"]["stride"]))
-                                                 if "stride" in node["attr"] else "1",
-                                                 node["attr"]["num_filter"])
+            label = r"Convolution\n%s/%s, %s" % ("x".join(_str2tuple(node["attrs"]["kernel"])),
+                                                 "x".join(_str2tuple(node["attrs"]["stride"]))
+                                                 if "stride" in node["attrs"] else "1",
+                                                 node["attrs"]["num_filter"])
             attr["fillcolor"] = cm[1]
         elif op == "FullyConnected":
-            label = r"FullyConnected\n%s" % node["attr"]["num_hidden"]
+            label = r"FullyConnected\n%s" % node["attrs"]["num_hidden"]
             attr["fillcolor"] = cm[1]
         elif op == "BatchNorm":
             attr["fillcolor"] = cm[3]
         elif op == "Activation" or op == "LeakyReLU":
-            label = r"%s\n%s" % (op, node["attr"]["act_type"])
+            label = r"%s\n%s" % (op, node["attrs"]["act_type"])
             attr["fillcolor"] = cm[2]
         elif op == "Pooling":
-            label = r"Pooling\n%s, %s/%s" % (node["attr"]["pool_type"],
-                                             "x".join(_str2tuple(node["attr"]["kernel"])),
-                                             "x".join(_str2tuple(node["attr"]["stride"]))
-                                             if "stride" in node["attr"] else "1")
+            label = r"Pooling\n%s, %s/%s" % (node["attrs"]["pool_type"],
+                                             "x".join(_str2tuple(node["attrs"]["kernel"])),
+                                             "x".join(_str2tuple(node["attrs"]["stride"]))
+                                             if "stride" in node["attrs"] else "1")
             attr["fillcolor"] = cm[4]
         elif op == "Concat" or op == "Flatten" or op == "Reshape":
             attr["fillcolor"] = cm[5]
@@ -317,7 +317,7 @@ def looks_like_weight(name):
         else:
             attr["fillcolor"] = cm[7]
         if op == "Custom":
-            label = node["attr"]["op_type"]
+            label = node["attrs"]["op_type"]
 
         dot.node(name=name, label=label, **attr)
 
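The hunks in this file all chase the same serialization change that arrived with the nnvm submodule bump: a symbol's JSON graph now keys per-node attributes as "attrs" rather than "attr", and boolean flags such as no_bias apparently arrive as numeric strings that int() parses directly. A sketch of the node dicts the plotting code walks (the field values are made up for illustration):

import json

graph = json.loads('''{"nodes": [
    {"op": "null", "name": "data", "inputs": []},
    {"op": "Convolution", "name": "conv0",
     "attrs": {"kernel": "(3, 3)", "num_filter": "64", "no_bias": "1"},
     "inputs": [[0, 0, 0]]}
]}''')

node = graph["nodes"][1]
# The old test, node["attr"]["no_bias"] == 'True', would raise a KeyError
# on this graph; the new int() check reads the numeric spelling cleanly.
assert "attr" not in node and bool(int(node["attrs"]["no_bias"]))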
@@ -338,8 +338,8 @@ def looks_like_weight(name):
             if draw_shape:
                 if input_node["op"] != "null":
                     key = input_name + "_output"
-                    if "attr" in input_node:
-                        params = input_node["attr"]
+                    if "attrs" in input_node:
+                        params = input_node["attrs"]
                         if "num_outputs" in params:
                             key += str(int(params["num_outputs"]) - 1)
                     shape = shape_dict[key][1:]
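The last hunk applies the same rename to the edge-labelling pass, whose key arithmetic is worth spelling out: shape_dict is keyed by output name, and a node that declares num_outputs is read through its final output, i.e. the base name plus the zero-based index of that output. A standalone restatement (output_shape_key is a hypothetical name):

def output_shape_key(input_name, params):
    # Mirror of the lookup above: default to "<name>_output", and for a
    # multi-output node append the index of its last output.
    key = input_name + "_output"
    if "num_outputs" in params:
        key += str(int(params["num_outputs"]) - 1)
    return key

assert output_shape_key("fc1", {}) == "fc1_output"
assert output_shape_key("slice0", {"num_outputs": "3"}) == "slice0_output2"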
tests/python/train/test_dtype.py (1 change: 0 additions & 1 deletion)

@@ -175,7 +175,6 @@ def test_cifar10():
     console = logging.StreamHandler()
     console.setLevel(logging.DEBUG)
     logging.getLogger('').addHandler(console)
-
     kv = mx.kvstore.create("local")
     # test float32 input
     (train, val) = get_iterator_float32(kv)
