This repository has been archived by the owner on Dec 1, 2021. It is now read-only.

Commit

Avoid adding keyword arguments in a meaningless way
ytfksw committed May 29, 2020
1 parent 6a31875 commit f172aa5
Showing 1 changed file with 7 additions and 7 deletions.
14 changes: 7 additions & 7 deletions blueoil/networks/classification/resnet.py
@@ -90,14 +90,14 @@ def _residual(self, inputs, in_filters, out_filters, strides, is_training):
         with tf.compat.v1.variable_scope('sub_add'):
             if in_filters != out_filters:
                 inputs = tf.nn.avg_pool2d(
-                    input=inputs,
+                    inputs,
                     ksize=[1, strides, strides, 1],
                     strides=[1, strides, strides, 1],
                     padding='VALID'
                 )
                 inputs = tf.pad(
-                    tensor=inputs,
-                    paddings=[[0, 0], [0, 0], [0, 0], [(out_filters - in_filters)//2, (out_filters - in_filters)//2]]
+                    inputs,
+                    [[0, 0], [0, 0], [0, 0], [(out_filters - in_filters)//2, (out_filters - in_filters)//2]]
                 )

         output = conv2 + inputs
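
For context, a minimal standalone sketch (not part of the commit) of what this shortcut computes: the identity branch is downsampled with average pooling, then zero-padded along the channel axis so it matches the residual branch's filter count before the addition. The shapes below are illustrative assumptions, not values from the repository.

    import tensorflow as tf

    # Hypothetical shapes for illustration only.
    strides, in_filters, out_filters = 2, 16, 32
    x = tf.random.uniform([1, 8, 8, in_filters])  # NHWC input

    # Downsample the identity branch spatially.
    shortcut = tf.nn.avg_pool2d(
        x,
        ksize=[1, strides, strides, 1],
        strides=[1, strides, strides, 1],
        padding='VALID'
    )
    # Zero-pad channels so the shortcut matches the residual branch's depth.
    pad = (out_filters - in_filters) // 2
    shortcut = tf.pad(shortcut, [[0, 0], [0, 0], [0, 0], [pad, pad]])
    print(shortcut.shape)  # (1, 4, 4, 32)
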
@@ -170,15 +170,15 @@ def loss(self, softmax, labels):
         labels = tf.cast(labels, tf.float32)

         if self.is_debug:
-            labels = tf.compat.v1.Print(labels, [tf.shape(input=labels), tf.argmax(input=labels, axis=1)], message="labels:", summarize=200)
-            softmax = tf.compat.v1.Print(softmax, [tf.shape(input=softmax), tf.argmax(input=softmax, axis=1)], message="softmax:", summarize=200)
+            labels = tf.compat.v1.Print(labels, [tf.shape(labels), tf.argmax(labels, axis=1)], message="labels:", summarize=200)
+            softmax = tf.compat.v1.Print(softmax, [tf.shape(softmax), tf.argmax(softmax, axis=1)], message="softmax:", summarize=200)

         cross_entropy = -tf.reduce_sum(
-            input_tensor=labels * tf.math.log(tf.clip_by_value(softmax, 1e-10, 1.0)),
+            labels * tf.math.log(tf.clip_by_value(softmax, 1e-10, 1.0)),
             axis=[1]
         )

-        cross_entropy_mean = tf.reduce_mean(input_tensor=cross_entropy, name="cross_entropy_mean")
+        cross_entropy_mean = tf.reduce_mean(cross_entropy, name="cross_entropy_mean")

         loss = cross_entropy_mean + self._decay()
         tf.compat.v1.summary.scalar("loss", loss)
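
As a side note, a minimal standalone sketch (not from the commit) of the clipped cross-entropy computed above, assuming one-hot labels and per-class softmax probabilities; the example tensors are made up:

    import tensorflow as tf

    # Hypothetical one-hot labels and softmax outputs: two examples, three classes.
    labels = tf.constant([[0., 1., 0.], [1., 0., 0.]])
    softmax = tf.constant([[0.1, 0.8, 0.1], [0.6, 0.3, 0.1]])

    # Clipping keeps log() away from log(0) = -inf for zero-probability classes.
    cross_entropy = -tf.reduce_sum(
        labels * tf.math.log(tf.clip_by_value(softmax, 1e-10, 1.0)),
        axis=[1]
    )
    cross_entropy_mean = tf.reduce_mean(cross_entropy, name="cross_entropy_mean")
    print(cross_entropy.numpy())       # per-example loss: [-log(0.8), -log(0.6)]
    print(cross_entropy_mean.numpy())  # scalar mean over the batch
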
