utils.py

""" Utility functions. """
import numpy as np
import os
import random
import tensorflow as tf
from tensorflow.contrib.layers.python import layers as tf_layers
from tensorflow.python.platform import flags

FLAGS = flags.FLAGS


## Network helpers
def conv_block(inp, cweight, bweight, reuse, scope, activation=tf.nn.relu, max_pool_pad='VALID', use_stride=True, residual=False):
    """ Perform conv, normalization, and nonlinearity. """
    # max_pool_pad and residual are accepted for interface compatibility but unused in this variant.
    stride, no_stride = [1, 2, 2, 1], [1, 1, 1, 1]
    if use_stride:
        conv_output = tf.nn.conv2d(inp, cweight, stride, 'SAME') + bweight
    else:
        conv_output = tf.nn.conv2d(inp, cweight, no_stride, 'SAME') + bweight
    normed = normalize(conv_output, activation, reuse, scope)
    return normed
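
# Hedged usage sketch (not part of the original file): shows how conv_block is
# typically wired up. The 3x3/32-channel weight shapes and the 'conv1' scope
# name are assumptions for illustration, not values taken from this repo.
def _conv_block_example(images, reuse=False):
    """ Hypothetical: strided conv -> normalization (per FLAGS.norm) -> nonlinearity. """
    cweight = tf.get_variable('conv1_w', [3, 3, 3, 32],
                              initializer=tf.truncated_normal_initializer(stddev=0.02))
    bweight = tf.get_variable('conv1_b', [32], initializer=tf.zeros_initializer())
    # use_stride=True applies the [1, 2, 2, 1] stride, halving the spatial dimensions.
    return conv_block(images, cweight, bweight, reuse=reuse, scope='conv1')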

def normalize(inp, activation, reuse, scope):
    if FLAGS.norm == 'batch_norm':
        return tf_layers.batch_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'layer_norm':
        return tf_layers.layer_norm(inp, activation_fn=activation, reuse=reuse, scope=scope)
    elif FLAGS.norm == 'None':
        if activation is not None:
            return activation(inp)
        else:
            return inp
    else:
        # Fail loudly instead of silently returning None on an unrecognized flag value.
        raise ValueError('Unrecognized FLAGS.norm: %s' % FLAGS.norm)
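
# Hedged sketch (not in the original file): normalize dispatches on FLAGS.norm,
# which this repo is assumed to define elsewhere (e.g. in its main script) as a
# string flag taking 'batch_norm', 'layer_norm', or 'None'.
def _normalize_example(conv_output, reuse=False):
    """ Hypothetical: flag-selected normalization followed by ReLU. """
    # With FLAGS.norm == 'None' this reduces to tf.nn.relu(conv_output); otherwise
    # the activation is fused into the tf_layers normalization op via activation_fn.
    return normalize(conv_output, tf.nn.relu, reuse=reuse, scope='norm_example')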

## Loss functions
def mse(pred, label):
    """ Mean squared error over flattened predictions and labels. """
    pred = tf.reshape(pred, [-1])
    label = tf.reshape(label, [-1])
    return tf.reduce_mean(tf.square(pred - label))

def xent(pred, label):
    # Note: with tf versions <= 0.12, this loss has incorrect second derivatives.
    # Returns a per-example loss vector, scaled by the inner-loop batch size.
    return tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=label) / FLAGS.update_batch_size
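
# Hedged sketch (not in the original file): how the xent loss is typically
# reduced to a scalar. task_logits/task_labels and the 5-way shapes are
# hypothetical placeholders, not values defined in this repo.
def _xent_example():
    """ Hypothetical: reduce the per-example xent vector to a scalar loss. """
    task_logits = tf.placeholder(tf.float32, [None, 5])   # assumed 5-way classification
    task_labels = tf.placeholder(tf.float32, [None, 5])   # assumed one-hot labels
    # xent returns one value per example (already divided by update_batch_size),
    # so a final reduce_sum yields the scalar training loss.
    return tf.reduce_sum(xent(task_logits, task_labels))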