diff --git a/ivy/array/activations.py b/ivy/array/activations.py
index 8c68e7480752e..838377194e3de 100644
--- a/ivy/array/activations.py
+++ b/ivy/array/activations.py
@@ -110,12 +110,33 @@ def gelu(
         """
         return ivy.gelu(self._data, approximate=approximate, out=out)
 
-    def sigmoid(self: ivy.Array, /, *, out: Optional[ivy.Array] = None) -> ivy.Array:
+    def sigmoid(
+        self: ivy.Array,
+        /,
+        *,
+        out: Optional[ivy.Array] = None
+    ) -> ivy.Array:
+
         """
-        ivy.Array instance method variant of ivy.sigmoid. This method simply wraps the
-        function, and so the docstring for ivy.sigmoid also applies to this method
-        with minimal changes.
+        ivy.Array instance method variant of ivy.sigmoid.
+
+        This method simply wraps the function, and so the docstring for
+        ivy.sigmoid also applies to this method with minimal changes.
 
+        Parameters
+        ----------
+        self
+            input array.
+        out
+            optional output array, for writing the result to. It must have a
+            shape that the inputs broadcast to. Default: ``None``.
+
+        Returns
+        -------
+        ret
+            an array with the sigmoid activation function applied element-wise.
+
+
         Examples
         --------
         >>> x = ivy.array([-1., 1., 2.])
diff --git a/ivy/functional/backends/jax/activations.py b/ivy/functional/backends/jax/activations.py
index 27b39aedff193..2c0b2b0461034 100644
--- a/ivy/functional/backends/jax/activations.py
+++ b/ivy/functional/backends/jax/activations.py
@@ -35,7 +35,12 @@ def relu(x: JaxArray, /, *, out: Optional[JaxArray] = None) -> JaxArray:
     return jnp.maximum(x, 0)
 
 
-def sigmoid(x: JaxArray, /, *, out: Optional[JaxArray] = None) -> JaxArray:
+def sigmoid(
+    x: JaxArray,
+    /,
+    *,
+    out: Optional[JaxArray] = None
+) -> JaxArray:
     return 1 / (1 + jnp.exp(-x))
 
 
diff --git a/ivy/functional/backends/numpy/activations.py b/ivy/functional/backends/numpy/activations.py
index 267315763f969..8ba332b224164 100644
--- a/ivy/functional/backends/numpy/activations.py
+++ b/ivy/functional/backends/numpy/activations.py
@@ -37,7 +37,12 @@ def gelu(
     return ivy.astype(ret, x.dtype, copy=False)
 
 
-def sigmoid(x: np.ndarray, /, *, out: Optional[np.ndarray] = None) -> np.ndarray:
+def sigmoid(
+    x: np.ndarray,
+    /,
+    *,
+    out: Optional[np.ndarray] = None
+) -> np.ndarray:
     if not ivy.is_array(x):
         return np.asarray(1 / (1 + np.exp(-x)))
     return np.asarray(1 / (1 + np.exp(-x))).astype(x.dtype)
diff --git a/ivy/functional/backends/tensorflow/activations.py b/ivy/functional/backends/tensorflow/activations.py
index f53f4ab4e0725..dc5244b3c898a 100644
--- a/ivy/functional/backends/tensorflow/activations.py
+++ b/ivy/functional/backends/tensorflow/activations.py
@@ -33,7 +33,12 @@ def relu(x: Tensor, /, *, out: Optional[Tensor] = None) -> Tensor:
     return tf.nn.relu(x)
 
 
-def sigmoid(x: Tensor, /, *, out: Optional[Tensor] = None) -> Tensor:
+def sigmoid(
+    x: Tensor,
+    /,
+    *,
+    out: Optional[Tensor] = None
+) -> Tensor:
     if not ivy.is_array(x):
         x = float(x)
     return tf.nn.sigmoid(x)
diff --git a/ivy/functional/backends/torch/activations.py b/ivy/functional/backends/torch/activations.py
index a32eddaede275..788fcf0016b09 100644
--- a/ivy/functional/backends/torch/activations.py
+++ b/ivy/functional/backends/torch/activations.py
@@ -46,7 +46,12 @@ def gelu(
 
 
 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, backend_version)
-def sigmoid(x: torch.Tensor, /, *, out: Optional[torch.Tensor] = None) -> torch.Tensor:
+def sigmoid(
+    x: torch.Tensor,
+    /,
+    *,
+    out: Optional[torch.Tensor] = None
+) -> torch.Tensor:
     if not ivy.is_array(x):
         x = torch.tensor(x)
     return torch.sigmoid(x, out=out)
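Note on the backend changes above: all five backends now share the same expanded keyword-only signature, and the computation itself is untouched. For reference, a minimal standalone sketch of the element-wise sigmoid that the jax and numpy backends compute; the function name and sample values here are illustrative only, not part of this diff:

    import numpy as np

    def reference_sigmoid(x: np.ndarray) -> np.ndarray:
        # Same formula as the jax/numpy backends above: 1 / (1 + e^(-x)).
        # (Illustrative helper; not a name defined anywhere in this diff.)
        return 1.0 / (1.0 + np.exp(-x))

    print(reference_sigmoid(np.array([-1.0, 1.0, 2.0])))
    # approximately [0.26894142 0.73105858 0.88079708]
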
diff --git a/ivy/functional/ivy/activations.py b/ivy/functional/ivy/activations.py
index 3f56b3c93a5a9..3217f54f1489f 100644
--- a/ivy/functional/ivy/activations.py
+++ b/ivy/functional/ivy/activations.py
@@ -401,9 +401,14 @@ def relu(
 @handle_array_like_without_promotion
 @handle_array_function
 def sigmoid(
-    x: Union[ivy.Array, ivy.NativeArray], /, *, out: Optional[ivy.Array] = None
+    x: Union[ivy.Array, ivy.NativeArray],
+    /,
+    *,
+    out: Optional[ivy.Array] = None
 ) -> ivy.Array:
-    """Applies the sigmoid function element-wise.
+
+    """
+    Applies the sigmoid function element-wise.
 
     Parameters
     ----------
@@ -411,7 +416,8 @@ def sigmoid(
         input array.
     out
         optional output array, for writing the result to. It must have a shape that the
         inputs broadcast to.
+        Default: ``None``.
 
     Returns
     -------
@@ -422,16 +428,23 @@ def sigmoid(
     --------
     With :class:`ivy.Array` input:
 
-    >>> x = ivy.array([-1., 1., 2.])
+    >>> x = ivy.array([-1.0, 1.0, 2.0])
     >>> y = ivy.sigmoid(x)
     >>> print(y)
     ivy.array([0.269, 0.731, 0.881])
+
+    Or, using the instance method:
+
+    >>> x = ivy.array([-1.0, 1.0, 2.0])
+    >>> y = x.sigmoid()
+    >>> print(y)
+    ivy.array([0.269, 0.731, 0.881])
 
-    >>> x = ivy.array([-1.3, 3.8, 2.1])
+    >>> x = ivy.array([[-1.3, 3.8, 2.1], [1.7, 4.2, -6.6]])
     >>> y = ivy.sigmoid(x)
     >>> print(y)
-    ivy.array([0.214, 0.978, 0.891])
+    ivy.array([[0.214, 0.978, 0.891], [0.846, 0.985, 0.00136]])
     """
     return current_backend(x).sigmoid(x, out=out)
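
For completeness, a short usage sketch of the public API as documented above. This is a hedged example: it assumes the NumPy backend is available via ivy.set_backend, and the values in the comments are rounded as in the docstring examples; any backend touched in this diff should give the same results.

    import ivy

    ivy.set_backend("numpy")  # assumption: any backend from this diff works

    x = ivy.array([-1.0, 1.0, 2.0])

    # functional form
    print(ivy.sigmoid(x))   # ivy.array([0.269, 0.731, 0.881])

    # instance-method form documented in ivy/array/activations.py
    print(x.sigmoid())      # ivy.array([0.269, 0.731, 0.881])

    # keyword-only `out` writes the result into an existing array
    buf = ivy.zeros_like(x)
    ivy.sigmoid(x, out=buf)
    print(buf)              # ivy.array([0.269, 0.731, 0.881])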