diff options
author | Laurent Mazare <laurent.mazare@gmail.com> | 2023-10-27 21:51:16 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-10-27 20:51:16 +0100 |
commit | c8face3f95a9c57b4714cd95dc69237533558c25 (patch) | |
tree | fc2f6b0aa8c2f71793f71293fdbd14b10dcc9575 /candle-nn | |
parent | 85bea43e5b088b94612b0fd7ed8f09261dc79d52 (diff) | |
download | candle-c8face3f95a9c57b4714cd95dc69237533558c25.tar.gz candle-c8face3f95a9c57b4714cd95dc69237533558c25.tar.bz2 candle-c8face3f95a9c57b4714cd95dc69237533558c25.zip |
Add the relu2 and relu6 activations. (#1201)
Diffstat (limited to 'candle-nn')
-rw-r--r-- | candle-nn/src/activation.rs | 4 |
1 file changed, 4 insertions(+), 0 deletions(-)
diff --git a/candle-nn/src/activation.rs b/candle-nn/src/activation.rs
index ddc211a7..52ceba78 100644
--- a/candle-nn/src/activation.rs
+++ b/candle-nn/src/activation.rs
@@ -9,6 +9,8 @@ pub enum Activation {
     #[serde(rename = "gated-gelu")]
     NewGelu,
     Relu,
+    Relu2,
+    Relu6,
     Silu,
     Sigmoid,
     Elu(f64),
@@ -22,6 +24,8 @@ impl super::Module for Activation {
             // https://github.com/huggingface/transformers/blob/12f043eaeaabfef6f6efea411d98e6f6d3c094b7/src/transformers/activations.py#L49-L78
             Self::NewGelu => xs.gelu(),
             Self::Relu => xs.relu(),
+            Self::Relu2 => xs.relu()?.sqr(),
+            Self::Relu6 => xs.clamp(0f32, 6f32),
             Self::Silu => crate::ops::silu(xs),
             Self::Sigmoid => crate::ops::sigmoid(xs),
             &Self::Elu(alpha) => xs.elu(alpha),