From 33f561463cd641b19ea7e9dfe139a4a94166f02a Mon Sep 17 00:00:00 2001
From: Dimitris Kavelidis
Date: Wed, 16 Nov 2022 00:21:19 +0200
Subject: [PATCH 1/4] Reformatting RELU function, added docstring examples

---
 ivy/array/activations.py                      |  7 +++-
 ivy/container/activations.py                  | 42 ++++++++++---------
 ivy/functional/backends/jax/activations.py    |  7 +++-
 ivy/functional/backends/numpy/activations.py  |  7 +++-
 .../backends/tensorflow/activations.py        |  7 +++-
 ivy/functional/backends/torch/activations.py  |  8 +++-
 ivy/functional/ivy/activations.py             | 17 +++++++-
 7 files changed, 67 insertions(+), 28 deletions(-)

diff --git a/ivy/array/activations.py b/ivy/array/activations.py
index 50d4e9058532a..7e485a9481439 100644
--- a/ivy/array/activations.py
+++ b/ivy/array/activations.py
@@ -10,7 +10,12 @@


 class ArrayWithActivations(abc.ABC):
-    def relu(self: ivy.Array, /, *, out: Optional[ivy.Array] = None) -> ivy.Array:
+    def relu(
+        self: ivy.Array,
+        /,
+        *,
+        out: Optional[ivy.Array] = None
+    ) -> ivy.Array:
         """
         ivy.Array instance method variant of ivy.relu. This method simply wraps
         the function, and so the docstring for ivy.relu also applies to this method
diff --git a/ivy/container/activations.py b/ivy/container/activations.py
index 948e83af1a248..7038e5defd350 100644
--- a/ivy/container/activations.py
+++ b/ivy/container/activations.py
@@ -11,14 +11,14 @@
 class ContainerWithActivations(ContainerBase):
     @staticmethod
     def static_relu(
-            x: Union[ivy.Array, ivy.NativeArray, ivy.Container],
-            /,
-            *,
-            key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
-            to_apply: bool = True,
-            prune_unapplied: bool = False,
-            map_sequences: bool = False,
-            out: Optional[ivy.Container] = None,
+        x: Union[ivy.Array, ivy.NativeArray, ivy.Container],
+        /,
+        *,
+        key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
+        to_apply: bool = True,
+        prune_unapplied: bool = False,
+        map_sequences: bool = False,
+        out: Optional[ivy.Container] = None,
     ) -> ivy.Container:
         """
         ivy.Container static method variant of ivy.relu.
@@ -52,11 +52,12 @@ def static_relu(

         Examples
         --------
-        >>> x = ivy.Container(a=ivy.array([1.0, 0, 1.0]))
+        >>> x = ivy.Container(a=ivy.array([1.0, -1.2]), b=ivy.array([0.4, -0.2]))
         >>> y = ivy.Container.static_relu(x)
         >>> print(y)
         {
-            a: ivy.array([1., 0., 1.])
+            a: ivy.array([1., 0.]),
+            b: ivy.array([0.40000001, 0.])
         }

         """
@@ -71,14 +72,14 @@ def static_relu(
         )

     def relu(
-            self: ivy.Container,
-            /,
-            *,
-            key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
-            to_apply: bool = True,
-            prune_unapplied: bool = False,
-            map_sequences: bool = False,
-            out: Optional[ivy.Container] = None,
+        self: ivy.Container,
+        /,
+        *,
+        key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
+        to_apply: bool = True,
+        prune_unapplied: bool = False,
+        map_sequences: bool = False,
+        out: Optional[ivy.Container] = None,
     ) -> ivy.Container:
         """
         ivy.Container instance method variant of ivy.relu.
@@ -112,11 +113,12 @@ def relu(

         Examples
         --------
-        >>> x = ivy.Container(a=ivy.array([1.0, 0, 1.0]))
+        >>> x = ivy.Container(a=ivy.array([1.0, -1.2]), b=ivy.array([0.4, -0.2]))
         >>> y = x.relu()
         >>> print(y)
         {
-            a: ivy.array([1., 0., 1.])
+            a: ivy.array([1., 0.]),
+            b: ivy.array([0.40000001, 0.])
         }

         """
diff --git a/ivy/functional/backends/jax/activations.py b/ivy/functional/backends/jax/activations.py
index c8f1a85d6d975..9a9a6713ca7ea 100644
--- a/ivy/functional/backends/jax/activations.py
+++ b/ivy/functional/backends/jax/activations.py
@@ -28,7 +28,12 @@ def leaky_relu(
     return jnp.asarray(jnp.where(x > 0, x, jnp.multiply(x, alpha)), x.dtype)


-def relu(x: JaxArray, /, *, out: Optional[JaxArray] = None) -> JaxArray:
+def relu(
+    x: JaxArray,
+    /,
+    *,
+    out: Optional[JaxArray] = None
+) -> JaxArray:
     return jnp.maximum(x, 0)
diff --git a/ivy/functional/backends/numpy/activations.py b/ivy/functional/backends/numpy/activations.py
index c5bc0c4ee06eb..460062e5c070b 100644
--- a/ivy/functional/backends/numpy/activations.py
+++ b/ivy/functional/backends/numpy/activations.py
@@ -15,7 +15,12 @@


 @_scalar_output_to_0d_array
-def relu(x: np.ndarray, /, *, out: Optional[np.ndarray] = None) -> np.ndarray:
+def relu(
+    x: np.ndarray,
+    /,
+    *,
+    out: Optional[np.ndarray] = None
+) -> np.ndarray:
     return np.maximum(x, 0, out=out, dtype=x.dtype)
diff --git a/ivy/functional/backends/tensorflow/activations.py b/ivy/functional/backends/tensorflow/activations.py
index af254a71a47e6..ed4e1178b5084 100644
--- a/ivy/functional/backends/tensorflow/activations.py
+++ b/ivy/functional/backends/tensorflow/activations.py
@@ -24,7 +24,12 @@ def leaky_relu(
     return tf.nn.leaky_relu(x, alpha)


-def relu(x: Tensor, /, *, out: Optional[Tensor] = None) -> Tensor:
+def relu(
+    x: Tensor,
+    /,
+    *,
+    out: Optional[Tensor] = None
+) -> Tensor:
     return tf.nn.relu(x)
diff --git a/ivy/functional/backends/torch/activations.py b/ivy/functional/backends/torch/activations.py
index 0ae460f737ca1..5eaca070815f4 100644
--- a/ivy/functional/backends/torch/activations.py
+++ b/ivy/functional/backends/torch/activations.py
@@ -15,10 +15,14 @@


 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, backend_version)
-def relu(x: torch.Tensor, /, *, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+def relu(
+    x: torch.Tensor,
+    /,
+    *,
+    out: Optional[torch.Tensor] = None
+) -> torch.Tensor:
     return torch.relu(x)
-
 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, backend_version)
 def leaky_relu(
     x: torch.Tensor,
diff --git a/ivy/functional/ivy/activations.py b/ivy/functional/ivy/activations.py
index 38c427b4e74c0..b6aa4be22d45f 100644
--- a/ivy/functional/ivy/activations.py
+++ b/ivy/functional/ivy/activations.py
@@ -24,7 +24,10 @@
 @handle_nestable
 @handle_exceptions
 def relu(
-    x: Union[ivy.Array, ivy.NativeArray], /, *, out: Optional[ivy.Array] = None
+    x: Union[ivy.Array, ivy.NativeArray],
+    /,
+    *,
+    out: Optional[ivy.Array] = None
 ) -> ivy.Array:
     """Applies the rectified linear unit function element-wise.

     Parameters
     ----------
     x
         input array.
     out
         optional output array, for writing the result to. It must have a shape
         that the inputs broadcast to.

     Returns
     -------
     ret
         an array containing the rectified linear unit activation of each element in
         ``x``.

-    Functional Examples
+    Examples
     -------------------

     With :class:`ivy.Array` input:

     >>> x = ivy.array([-1., 0., 1.])
     >>> y = ivy.relu(x)
     >>> print(y)
     ivy.array([0., 0., 1.])
@@ -63,6 +66,16 @@
     >>> y = ivy.relu(x)
     >>> print(y)
     ivy.array([0., 0., 2.])
+
+    With :class:`ivy.Container` input:
+
+    >>> x = ivy.Container(a=ivy.array([1.0, -1.2]), b=ivy.array([0.4, -0.2]))
+    >>> x = ivy.relu(x, out = x)
+    >>> print(x)
+    {
+        a: ivy.array([1., 0.]),
+        b: ivy.array([0.40000001, 0.])
+    }
     """
     return current_backend(x).relu(x, out=out)

From ac9b8d2ae657f626783cc5b2ac4dc98f971cb6a9 Mon Sep 17 00:00:00 2001
From: Dimitris Kavelidis
Date: Wed, 16 Nov 2022 00:27:30 +0200
Subject: [PATCH 2/4] Update activations.py

---
 ivy/functional/backends/torch/activations.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/ivy/functional/backends/torch/activations.py b/ivy/functional/backends/torch/activations.py
index 5eaca070815f4..567236138d7fd 100644
--- a/ivy/functional/backends/torch/activations.py
+++ b/ivy/functional/backends/torch/activations.py
@@ -23,6 +23,7 @@ def relu(
 ) -> torch.Tensor:
     return torch.relu(x)

+
 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, backend_version)
 def leaky_relu(
     x: torch.Tensor,

From 400e8ea5ddbdc65d06428b1a80e4863473cc486a Mon Sep 17 00:00:00 2001
From: Dimitris Kavelidis
Date: Wed, 16 Nov 2022 01:39:42 +0200
Subject: [PATCH 3/4] Reformatting RELU

---
 .idea/ivy.iml                     | 4 ++--
 .idea/misc.xml                    | 4 ++--
 ivy/functional/ivy/activations.py | 6 +++---
 3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/.idea/ivy.iml b/.idea/ivy.iml
index 20dc1eb3816b6..6b1d5bb031a2a 100644
--- a/.idea/ivy.iml
+++ b/.idea/ivy.iml
@@ -2,7 +2,7 @@
-
+
@@ -12,4 +12,4 @@
-
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
index 7fd019b943007..0f40e8a3142da 100644
--- a/.idea/misc.xml
+++ b/.idea/misc.xml
@@ -3,8 +3,8 @@
-
+
-
+
\ No newline at end of file
diff --git a/ivy/functional/ivy/activations.py b/ivy/functional/ivy/activations.py
index b6aa4be22d45f..f1a47b33abb71 100644
--- a/ivy/functional/ivy/activations.py
+++ b/ivy/functional/ivy/activations.py
@@ -46,7 +46,7 @@ def relu(
     ``x``.

     Examples
-    -------------------
+    --------
     With :class:`ivy.Array` input:

     >>> x = ivy.array([-1., 0., 1.])
     >>> y = ivy.relu(x)
     >>> print(y)
     ivy.array([0., 0., 1.])
@@ -108,8 +108,8 @@ def leaky_relu(
     ret
         The input array with leaky relu applied element-wise.

-    Functional Examples
-    -------------------
+    Examples
+    --------

     With :class:`ivy.Array` input:

     >>> x = ivy.array([0.39, -0.85])

From 708857541708f8629f70a163a7e5a7c5693c369f Mon Sep 17 00:00:00 2001
From: Dimitris Kavelidis
Date: Thu, 17 Nov 2022 16:22:01 +0200
Subject: [PATCH 4/4] Reformatting RELU update

---
 ivy/functional/ivy/activations.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/ivy/functional/ivy/activations.py b/ivy/functional/ivy/activations.py
index f1a47b33abb71..23491014431ca 100644
--- a/ivy/functional/ivy/activations.py
+++ b/ivy/functional/ivy/activations.py
@@ -110,7 +110,6 @@ def leaky_relu(

     Examples
     --------
-
     With :class:`ivy.Array` input:

     >>> x = ivy.array([0.39, -0.85])
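
A quick doctest-style sanity check of the reformatted relu, mirroring the docstring examples added in patch 1. This is a sketch, not part of the patches themselves: it assumes an installed ivy with the NumPy backend selected via ivy.set_backend, and the sample values are the same ones used in the docstrings above.

>>> import ivy
>>> ivy.set_backend("numpy")
>>> x = ivy.array([-1., 0., 1.])
>>> print(ivy.relu(x))
ivy.array([0., 0., 1.])
>>> c = ivy.Container(a=ivy.array([1.0, -1.2]), b=ivy.array([0.4, -0.2]))
>>> print(ivy.relu(c, out=c))
{
    a: ivy.array([1., 0.]),
    b: ivy.array([0.40000001, 0.])
}

The Container call exercises the handle_nestable wrapper (relu is mapped over every leaf array), and passing out=c writes the result back in place, as in the ivy.Container example added to ivy/functional/ivy/activations.py.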