path: root/candle-examples/examples/llama/model.rs
author Laurent Mazare <laurent.mazare@gmail.com> 2023-07-13 21:32:32 +0100
committer GitHub <noreply@github.com> 2023-07-13 21:32:32 +0100
commit 2bfa791336b320b96d392aba83cbd4cee87173e3 (patch)
tree a3127719a64cf5cfbf38f5f8be859afd2dc6118e /candle-examples/examples/llama/model.rs
parent 57be3638d8c10304629f6859d183fb192858f3a3 (diff)
Use the same default as pytorch for sum. (#164)
Diffstat (limited to 'candle-examples/examples/llama/model.rs')
-rw-r--r-- candle-examples/examples/llama/model.rs | 2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/candle-examples/examples/llama/model.rs b/candle-examples/examples/llama/model.rs
index 04397d1e..57f339b0 100644
--- a/candle-examples/examples/llama/model.rs
+++ b/candle-examples/examples/llama/model.rs
@@ -95,7 +95,7 @@ impl RmsNorm {
// This is a no-op if x's dtype is already f32.
let x = x.to_dtype(DType::F32)?;
let (b_sz, seq_len, hidden_size) = x.shape().r3()?;
- let norm_x = ((&x * &x)?.sum(&[2])? / hidden_size as f64)?;
+ let norm_x = (x.sqr()?.sum_keepdim(&[2])? / hidden_size as f64)?;
let norm_x = norm_x.broadcast_as((b_sz, seq_len, hidden_size))?;
let x_normed = (x / (norm_x + 1e-5)?.sqrt()?)?;
let size = self.scale.shape().r1()?;
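The change above follows from the commit's point: `sum` now drops the reduced dimension by default, matching PyTorch, so the RmsNorm call site switches to `sum_keepdim` to keep a size-1 axis that can still be broadcast back over the hidden dimension. Below is a minimal sketch, not part of this commit, illustrating that shape difference; it assumes the present-day candle_core crate and API (Tensor::arange, dims3, sum/sum_keepdim taking a single dim), which may differ slightly from the API at the time of this change.

use candle_core::{Device, Result, Tensor};

fn main() -> Result<()> {
    let device = Device::Cpu;
    // Shape (b_sz=1, seq_len=2, hidden_size=3), mirroring the RmsNorm input.
    let x = Tensor::arange(0f32, 6., &device)?.reshape((1, 2, 3))?;
    let (b_sz, seq_len, hidden_size) = x.dims3()?;

    // With the PyTorch-like default, `sum` drops the reduced dimension:
    // (1, 2, 3) -> (1, 2).
    let dropped = x.sqr()?.sum(2)?;
    println!("sum:         {:?}", dropped.dims());

    // `sum_keepdim` keeps it as a size-1 axis: (1, 2, 3) -> (1, 2, 1),
    // which is the shape the subsequent broadcast needs.
    let kept = x.sqr()?.sum_keepdim(2)?;
    println!("sum_keepdim: {:?}", kept.dims());

    // Same normalization as in the patched RmsNorm body.
    let norm_x = (kept / hidden_size as f64)?;
    let norm_x = norm_x.broadcast_as((b_sz, seq_len, hidden_size))?;
    let x_normed = (x / (norm_x + 1e-5)?.sqrt()?)?;
    println!("normed: {:?}", x_normed.dims());
    Ok(())
}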