summaryrefslogtreecommitdiff
path: root/candle-pyo3/py_src/candle/functional/__init__.pyi
diff options
context:
space:
mode:
Diffstat (limited to 'candle-pyo3/py_src/candle/functional/__init__.pyi')
-rw-r--r--candle-pyo3/py_src/candle/functional/__init__.pyi40
1 files changed, 40 insertions, 0 deletions
diff --git a/candle-pyo3/py_src/candle/functional/__init__.pyi b/candle-pyo3/py_src/candle/functional/__init__.pyi
new file mode 100644
index 00000000..a46b6137
--- /dev/null
+++ b/candle-pyo3/py_src/candle/functional/__init__.pyi
@@ -0,0 +1,40 @@
+# Generated content DO NOT EDIT
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union, Sequence
+from os import PathLike
+from candle.typing import _ArrayLike, Device
+from candle import Tensor, DType, QTensor
+
+@staticmethod
+def gelu(tensor: Tensor) -> Tensor:
+    """
+    Applies the Gaussian Error Linear Unit (GELU) activation function element-wise to the given tensor.
+    """
+    pass
+
+@staticmethod
+def relu(tensor: Tensor) -> Tensor:
+    """
+    Applies the Rectified Linear Unit (ReLU) activation function element-wise to the given tensor.
+    """
+    pass
+
+@staticmethod
+def silu(tensor: Tensor) -> Tensor:
+    """
+    Applies the Sigmoid Linear Unit (SiLU, a.k.a. swish) function element-wise to the given tensor.
+    """
+    pass
+
+@staticmethod
+def softmax(tensor: Tensor, dim: int) -> Tensor:
+    """
+    Applies the Softmax function to a given tensor along the dimension `dim`.
+    """
+    pass
+
+@staticmethod
+def tanh(tensor: Tensor) -> Tensor:
+    """
+    Applies the hyperbolic tangent (tanh) function element-wise to the given tensor.
+    """
+    pass