author | Laurent Mazare <laurent.mazare@gmail.com> | 2023-12-03 17:06:09 +0100 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-12-03 16:06:09 +0000 |
commit | b5c283e86f18aa653e64d8c3894d8691d318bde7 (patch) | |
tree | 4569804ba8380518570ba50ca97925b23774c314 /candle-nn/src/activation.rs | |
parent | 8418154ee0cade8b9aebc22750c7717eb273b65d (diff) | |
Add the prelu layer. (#1402)
Diffstat (limited to 'candle-nn/src/activation.rs')
-rw-r--r-- | candle-nn/src/activation.rs | 50 |
1 file changed, 48 insertions, 2 deletions
diff --git a/candle-nn/src/activation.rs b/candle-nn/src/activation.rs
index a2650634..8b9a8785 100644
--- a/candle-nn/src/activation.rs
+++ b/candle-nn/src/activation.rs
@@ -1,4 +1,4 @@
-use candle::Tensor;
+use candle::{Result, Tensor};
 use serde::Deserialize;
 
 #[derive(Debug, Clone, Copy, PartialEq, Deserialize, Default)]
@@ -21,7 +21,7 @@ pub enum Activation {
 }
 
 impl super::Module for Activation {
-    fn forward(&self, xs: &Tensor) -> candle::Result<Tensor> {
+    fn forward(&self, xs: &Tensor) -> Result<Tensor> {
         match self {
             Self::Gelu => xs.gelu_erf(),
             // https://github.com/huggingface/transformers/blob/12f043eaeaabfef6f6efea411d98e6f6d3c094b7/src/transformers/activations.py#L49-L78
@@ -40,3 +40,49 @@ impl super::Module for Activation {
         }
     }
 }
+
+#[derive(Clone, Debug)]
+pub struct PReLU {
+    weight: Tensor,
+    is_scalar: bool,
+}
+
+impl PReLU {
+    pub fn new(weight: Tensor, is_scalar: bool) -> Self {
+        Self { weight, is_scalar }
+    }
+
+    pub fn weight(&self) -> &Tensor {
+        &self.weight
+    }
+
+    pub fn is_scalar(&self) -> bool {
+        self.is_scalar
+    }
+}
+
+impl candle::Module for PReLU {
+    fn forward(&self, xs: &Tensor) -> Result<Tensor> {
+        let weight = if self.is_scalar {
+            self.weight.reshape(())?
+        } else {
+            self.weight.clone()
+        };
+        let zeros = xs.zeros_like()?;
+        xs.maximum(&zeros)? + xs.minimum(&zeros)?.broadcast_mul(&weight)?
+    }
+}
+
+/// Create or initialize a new PReLU layer.
+///
+/// This uses some default name for weights, namely `"weight"`.
+/// # Arguments
+///
+/// * `num_parameters` - The number of parameters. Use `None` to have a single trainable value
+///   and `Some` for a 1D vector with the appropriate number of features.
+pub fn prelu(num_parameters: Option<usize>, vs: crate::VarBuilder) -> Result<PReLU> {
+    let init_ws = crate::init::Init::Const(0.25);
+    // When using a scalar weight, the PyTorch encoding is to use a 1d vector of length 1.
+    let ws = vs.get_with_hints((num_parameters.unwrap_or(1),), "weight", init_ws)?;
+    Ok(PReLU::new(ws, num_parameters.is_none()))
+}
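For context, a minimal usage sketch of the new layer, not part of the commit: it builds a scalar-slope `PReLU` by hand and applies it to a small tensor. The import path `candle_nn::activation::PReLU` and the CPU device choice are assumptions about the surrounding crate layout rather than something this diff establishes.

```rust
// Hypothetical usage sketch, not part of this commit. Assumes the layer is
// reachable at candle_nn::activation::PReLU and that the CPU device is fine.
use candle::{Device, Module, Result, Tensor};
use candle_nn::activation::PReLU;

fn main() -> Result<()> {
    let device = Device::Cpu;

    // Scalar variant: one slope shared by every element, using PyTorch's
    // customary initial value of 0.25.
    let weight = Tensor::new(&[0.25f32], &device)?;
    let prelu = PReLU::new(weight, true);

    let xs = Tensor::new(&[-2.0f32, -0.5, 0.0, 3.0], &device)?;
    let ys = prelu.forward(&xs)?;
    // Positive inputs pass through, negative ones are scaled by the slope:
    // expected output [-0.5, -0.125, 0.0, 3.0].
    println!("{ys}");
    Ok(())
}
```

When the slope is meant to be trained or loaded from a checkpoint, the `prelu(num_parameters, vs)` helper added in the same diff goes through a `VarBuilder` instead; per the comment in the diff, the scalar case is stored as a 1-element vector to match PyTorch's serialization.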