Add python implementation of softshrink (tensorflow#1140)
* Add softshrink python op
* Add check that lower is not greater than upper.
gabrieldemarmiesse authored Feb 24, 2020
1 parent 5d07b9b commit 7e1c6a0
Showing 2 changed files with 41 additions and 0 deletions.
16 changes: 16 additions & 0 deletions tensorflow_addons/activations/softshrink.py
@@ -48,3 +48,19 @@ def _softshrink_grad(op, grad):
    return _activation_so.ops.addons_softshrink_grad(
        grad, op.inputs[0], op.get_attr("lower"), op.get_attr("upper")
    )


def _softshrink_py(x, lower, upper):
    if lower > upper:
        raise ValueError(
            "The value of lower is {} and should"
            " not be greater than the value of"
            " upper, which is {}.".format(lower, upper)
        )
    # Boolean masks for the three pieces of the piecewise function.
    mask_lower = x < lower
    mask_upper = upper < x
    mask_middle = tf.logical_not(tf.logical_or(mask_lower, mask_upper))
    mask_lower = tf.cast(mask_lower, x.dtype)
    mask_upper = tf.cast(mask_upper, x.dtype)
    mask_middle = tf.cast(mask_middle, x.dtype)
    # x - lower below the window, x - upper above it, and 0 inside it.
    return x * (1 - mask_middle) - mask_lower * lower - mask_upper * upper
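For reference (not part of the diff), the piecewise function that both the C++ kernel and this pure-Python fallback compute is

    softshrink(x) = x - lower   if x < lower
                    0           if lower <= x <= upper
                    x - upper   if x > upper

The three masks above encode exactly these cases: the middle mask zeroes out the pass-through term, and the two outer masks shift x by the corresponding threshold.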
25 changes: 25 additions & 0 deletions tensorflow_addons/activations/softshrink_test.py
@@ -18,6 +18,7 @@
import numpy as np
import tensorflow as tf
from tensorflow_addons.activations import softshrink
from tensorflow_addons.activations.softshrink import _softshrink_py
from tensorflow_addons.utils import test_utils


@@ -53,6 +54,30 @@ def test_theoretical_gradients(self, dtype):
        theoretical, numerical = tf.test.compute_gradient(softshrink, [x])
        self.assertAllCloseAccordingToType(theoretical, numerical, atol=1e-4)

    @parameterized.named_parameters(("float32", np.float32), ("float64", np.float64))
    def test_same_as_py_func(self, dtype):
        np.random.seed(1234)
        for _ in range(20):
            self.verify_funcs_are_equivalent(dtype)

    def verify_funcs_are_equivalent(self, dtype):
        x_np = np.random.uniform(-10, 10, size=(4, 4)).astype(dtype)
        x = tf.convert_to_tensor(x_np)
        # Draw a random valid threshold pair with lower <= upper.
        lower = np.random.uniform(-10, 10)
        upper = lower + np.random.uniform(0, 10)

        # A persistent tape so gradients of both outputs can be taken.
        with tf.GradientTape(persistent=True) as t:
            t.watch(x)
            y_native = softshrink(x, lower, upper)
            y_py = _softshrink_py(x, lower, upper)

        self.assertAllCloseAccordingToType(y_native, y_py, atol=1e-4)

        grad_native = t.gradient(y_native, x)
        grad_py = t.gradient(y_py, x)

        self.assertAllCloseAccordingToType(grad_native, grad_py, atol=1e-4)


if __name__ == "__main__":
    tf.test.main()
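As a quick sanity check (not part of the commit), the pure-Python op can be exercised directly in eager mode; the expected output follows from the piecewise definition above:

import tensorflow as tf
from tensorflow_addons.activations.softshrink import _softshrink_py

x = tf.constant([-2.0, -0.5, 0.0, 0.5, 2.0])
print(_softshrink_py(x, lower=-1.0, upper=1.0).numpy())
# expected: [-1.  0.  0.  0.  1.]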
