diff options
author | Laurent Mazare <laurent.mazare@gmail.com> | 2023-07-06 23:22:08 +0100 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-07-06 23:22:08 +0100 |
commit | a3f3b93d16e08b78d3416d9caf7607c19bc11646 (patch) | |
tree | 0784e3b58e4d40f986a2aa90c3b8f525c8577961 /candle-examples/examples/falcon | |
parent | 0a2c82e3017ce4c983e9016ff2740c6599558ea4 (diff) | |
download | candle-a3f3b93d16e08b78d3416d9caf7607c19bc11646.tar.gz candle-a3f3b93d16e08b78d3416d9caf7607c19bc11646.tar.bz2 candle-a3f3b93d16e08b78d3416d9caf7607c19bc11646.zip |
Add the call to dense in the attention layer. (#96)
Diffstat (limited to 'candle-examples/examples/falcon')
-rw-r--r-- | candle-examples/examples/falcon/model.rs | 1 |
1 file changed, 1 insertion, 0 deletions
diff --git a/candle-examples/examples/falcon/model.rs b/candle-examples/examples/falcon/model.rs index efab97ca..df89b75c 100644 --- a/candle-examples/examples/falcon/model.rs +++ b/candle-examples/examples/falcon/model.rs @@ -444,6 +444,7 @@ impl FalconAttention { .reshape((b_sz, self.num_heads, q_len, head_dim))? .transpose(1, 2)? .reshape((b_sz, q_len, self.num_heads * head_dim))?; + let attn_output = self.attn_output.forward(&attn_output)?; Ok(attn_output) } } |