author     Laurent Mazare <laurent.mazare@gmail.com>  2023-07-28 13:13:01 +0100
committer  GitHub <noreply@github.com>                2023-07-28 13:13:01 +0100
commit     3eb2bc6d07f192a5ce73ab6964745275f2c15213 (patch)
tree       e5a682d0e40f3c258f668652082ff7fa45918e32 /candle-examples/examples/llama_multiprocess
parent     68eab38de6e5cabf17159a5dcf45ec703fbea441 (diff)
Softmax numerical stability. (#267)

* Softmax numerical stability.
* Fix the flash-attn test.
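The title refers to the standard max-subtraction trick: shifting the logits by their maximum before exponentiating leaves the result unchanged, but keeps every exponent non-positive so exp cannot overflow. As a LaTeX identity (the standard formulation, not taken from this commit):

\mathrm{softmax}(x)_i = \frac{e^{x_i - \max_j x_j}}{\sum_k e^{x_k - \max_j x_j}}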
Diffstat (limited to 'candle-examples/examples/llama_multiprocess')
-rw-r--r--  candle-examples/examples/llama_multiprocess/model.rs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/candle-examples/examples/llama_multiprocess/model.rs b/candle-examples/examples/llama_multiprocess/model.rs
index bcf6ed2b..ae2ef3e7 100644
--- a/candle-examples/examples/llama_multiprocess/model.rs
+++ b/candle-examples/examples/llama_multiprocess/model.rs
@@ -323,7 +323,7 @@ impl CausalSelfAttention {
let att = (q.matmul(&k.t()?)? / (self.head_dim as f64).sqrt())?;
let mask = self.cache.mask(seq_len)?.broadcast_as(att.shape())?;
let att = masked_fill(&att, &mask, f32::NEG_INFINITY)?;
- let att = att.softmax(D::Minus1)?;
+ let att = candle_nn::ops::softmax(&att, D::Minus1)?;
// Convert to contiguous as matmul doesn't support strided vs for now.
let y = att.matmul(&v.contiguous()?)?;
let y = y.transpose(1, 2)?.reshape(&[b_sz, seq_len, n_embd])?;
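The one-line change above routes the attention scores through candle_nn::ops::softmax instead of the Tensor softmax method. A minimal sketch of the max-subtraction trick on a plain f32 slice, assuming nothing about candle's internals (stable_softmax is a hypothetical helper for illustration, not candle's implementation):

/// Numerically stable softmax over a slice of logits.
/// Subtracting the row max keeps every exponent <= 0, so exp()
/// cannot overflow to +inf even for very large logits.
fn stable_softmax(logits: &[f32]) -> Vec<f32> {
    let max = logits.iter().copied().fold(f32::NEG_INFINITY, f32::max);
    let exps: Vec<f32> = logits.iter().map(|&x| (x - max).exp()).collect();
    let sum: f32 = exps.iter().sum();
    exps.iter().map(|&e| e / sum).collect()
}

fn main() {
    // With logits this large, a naive exp(x)/sum(exp(x)) overflows
    // to inf/inf = NaN in f32; the shifted version stays finite.
    let logits = [1000.0_f32, 1001.0, 1002.0];
    println!("{:?}", stable_softmax(&logits)); // ~[0.090, 0.245, 0.665]
}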