author     Laurent Mazare <laurent.mazare@gmail.com>  2023-07-13 21:32:32 +0100
committer  GitHub <noreply@github.com>                2023-07-13 21:32:32 +0100
commit     2bfa791336b320b96d392aba83cbd4cee87173e3 (patch)
tree       a3127719a64cf5cfbf38f5f8be859afd2dc6118e /candle-nn/src
parent     57be3638d8c10304629f6859d183fb192858f3a3 (diff)
Use the same default as pytorch for sum. (#164)
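
In PyTorch, Tensor.sum(dim) drops the reduced dimension by default (keepdim=False); this commit brings candle's sum in line with that convention and moves call sites that relied on the old keep-dims behavior over to sum_keepdim. A minimal sketch of the resulting shape semantics, assuming the candle-core API as of this commit (imported as `candle` inside the workspace; external crates may use `candle_core`):

use candle::{DType, Device, Tensor};

fn main() -> candle::Result<()> {
    let x = Tensor::zeros((2, 3, 4), DType::F32, &Device::Cpu)?;
    // After this change, `sum` squeezes the reduced axis, as PyTorch does:
    // (2, 3, 4) summed over dim 2 -> (2, 3).
    assert_eq!(x.sum(&[2])?.dims(), &[2, 3]);
    // `sum_keepdim` keeps it as a size-1 axis: (2, 3, 4) -> (2, 3, 1),
    // which broadcasting ops such as `broadcast_sub` rely on.
    assert_eq!(x.sum_keepdim(&[2])?.dims(), &[2, 3, 1]);
    Ok(())
}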
Diffstat (limited to 'candle-nn/src')
 candle-nn/src/layer_norm.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/candle-nn/src/layer_norm.rs b/candle-nn/src/layer_norm.rs
index 188a02bf..06f984f2 100644
--- a/candle-nn/src/layer_norm.rs
+++ b/candle-nn/src/layer_norm.rs
@@ -51,9 +51,9 @@ impl LayerNorm {
         };
         let (_bsize, _seq_len, hidden_size) = x.shape().r3()?;
         let x = x.to_dtype(internal_dtype)?;
-        let mean_x = (x.sum(&[2])? / hidden_size as f64)?;
+        let mean_x = (x.sum_keepdim(&[2])? / hidden_size as f64)?;
         let x = x.broadcast_sub(&mean_x)?;
-        let norm_x = ((&x * &x)?.sum(&[2])? / hidden_size as f64)?;
+        let norm_x = (x.sqr()?.sum_keepdim(&[2])? / hidden_size as f64)?;
         let x_normed = x.broadcast_div(&(norm_x + self.eps)?.sqrt()?)?;
         let x = x_normed
             .to_dtype(x_dtype)?
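
The keepdim variant matters here because the per-position mean and variance, reduced over the hidden dimension, must stay (bsize, seq_len, 1) to broadcast against the (bsize, seq_len, hidden_size) input; a squeezed (bsize, seq_len) result would not line up under broadcast_sub. For reference, a self-contained sketch of what the patched lines compute, under the same API assumptions as above (the `layer_norm_core` helper is hypothetical and omits the crate's dtype handling and affine weight/bias):

use candle::{Device, Result, Tensor};

// Illustrative version of the patched normalization core: reductions over
// the hidden dimension use `sum_keepdim` so the (b, s, 1) statistics
// broadcast back against `x` of shape (b, s, h).
fn layer_norm_core(x: &Tensor, eps: f64) -> Result<Tensor> {
    let (_bsize, _seq_len, hidden_size) = x.shape().r3()?;
    let mean_x = (x.sum_keepdim(&[2])? / hidden_size as f64)?; // (b, s, 1)
    let x = x.broadcast_sub(&mean_x)?; // centered: (b, s, h)
    let norm_x = (x.sqr()?.sum_keepdim(&[2])? / hidden_size as f64)?; // (b, s, 1)
    x.broadcast_div(&(norm_x + eps)?.sqrt()?)
}

fn main() -> Result<()> {
    let x = Tensor::new(&[[[1f32, 2., 3., 4.]]], &Device::Cpu)?; // (1, 1, 4)
    let y = layer_norm_core(&x, 1e-5)?;
    assert_eq!(y.dims(), &[1, 1, 4]); // input shape is preserved
    Ok(())
}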