Diffstat (limited to 'candle-pyo3/py_src/candle/functional')
-rw-r--r--  candle-pyo3/py_src/candle/functional/__init__.py   |  8
-rw-r--r--  candle-pyo3/py_src/candle/functional/__init__.pyi  | 40
2 files changed, 48 insertions(+), 0 deletions(-)
diff --git a/candle-pyo3/py_src/candle/functional/__init__.py b/candle-pyo3/py_src/candle/functional/__init__.py
new file mode 100644
index 00000000..efb246f0
--- /dev/null
+++ b/candle-pyo3/py_src/candle/functional/__init__.py
@@ -0,0 +1,8 @@
+# Generated content DO NOT EDIT
+from .. import functional
+
+gelu = functional.gelu
+relu = functional.relu
+silu = functional.silu
+softmax = functional.softmax
+tanh = functional.tanh
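
For context (not part of the commit): the re-exports above make the ops reachable as attributes of candle.functional. A minimal usage sketch, assuming the candle PyO3 package is installed and that its Tensor constructor accepts nested Python lists, as the _ArrayLike annotation in the stubs below suggests:

    import candle
    import candle.functional as F

    # Hypothetical input values, chosen only to show the elementwise behavior.
    t = candle.Tensor([[1.0, -2.0], [3.0, -4.0]])
    print(F.relu(t))  # negative entries clamped to zero, positives unchanged
    print(F.gelu(t))  # smooth GELU activation of the same tensor
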
diff --git a/candle-pyo3/py_src/candle/functional/__init__.pyi b/candle-pyo3/py_src/candle/functional/__init__.pyi
new file mode 100644
index 00000000..a46b6137
--- /dev/null
+++ b/candle-pyo3/py_src/candle/functional/__init__.pyi
@@ -0,0 +1,40 @@
+# Generated content DO NOT EDIT
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union, Sequence
+from os import PathLike
+from candle.typing import _ArrayLike, Device
+from candle import Tensor, DType, QTensor
+
+@staticmethod
+def gelu(tensor: Tensor) -> Tensor:
+ """
+ Applies the Gaussian Error Linear Unit (GELU) function to a given tensor.
+ """
+ pass
+
+@staticmethod
+def relu(tensor: Tensor) -> Tensor:
+ """
+ Applies the Rectified Linear Unit (ReLU) function to a given tensor.
+ """
+ pass
+
+@staticmethod
+def silu(tensor: Tensor) -> Tensor:
+ """
+ Applies the Sigmoid Linear Unit (SiLU) function to a given tensor.
+ """
+ pass
+
+@staticmethod
+def softmax(tensor: Tensor, dim: int) -> Tensor:
+ """
+ Applies the Softmax function to a given tensor along the given dimension.
+ """
+ pass
+
+@staticmethod
+def tanh(tensor: Tensor) -> Tensor:
+ """
+ Applies the tanh function to a given tensor.
+ """
+ pass
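
For context (not part of the commit): softmax is the only op in this set that takes a dim argument. A sketch of the intended call shape, under the same assumptions as the example above; here dim 1 normalizes each row:

    import candle
    import candle.functional as F

    t = candle.Tensor([[1.0, 2.0], [3.0, 4.0]])
    probs = F.softmax(t, 1)  # softmax over dim 1: each row sums to 1
    print(probs)
    print(F.tanh(t))         # elementwise hyperbolic tangent
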