diff options
Diffstat (limited to 'candle-nn/src/layer_norm.rs')
-rw-r--r-- | candle-nn/src/layer_norm.rs | 9 |
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/candle-nn/src/layer_norm.rs b/candle-nn/src/layer_norm.rs
index 23d0c01b..b7dd61cb 100644
--- a/candle-nn/src/layer_norm.rs
+++ b/candle-nn/src/layer_norm.rs
@@ -11,8 +11,8 @@
 //! use candle_nn::{LayerNorm, Module};
 //! # fn main() -> candle::Result<()> {
 //!
-//! let w = Tensor::new(1f32, &Cpu)?;
-//! let b = Tensor::new(0f32, &Cpu)?;
+//! let w = Tensor::new(&[1f32, 1f32, 1f32], &Cpu)?;
+//! let b = Tensor::new(&[0f32, 0f32, 0f32], &Cpu)?;
 //! let layer = LayerNorm::new(w, b, 1e-5);
 //!
 //! let xs = Tensor::new(
@@ -107,6 +107,11 @@ impl LayerNorm {
 impl Module for LayerNorm {
     fn forward(&self, x: &Tensor) -> Result<Tensor> {
+        if x.is_contiguous() && self.remove_mean {
+            if let Some(bias) = self.bias.as_ref() {
+                return crate::ops::layer_norm(x, &self.weight, bias, self.eps as f32);
+            }
+        }
         let x_dtype = x.dtype();
         let internal_dtype = match x_dtype {
             DType::F16 | DType::BF16 => DType::F32,