summary | refs | log | tree | commit | diff
path: root/candle-transformers/src/models
diff options
context:
space:
mode:
author    Laurent Mazare <laurent.mazare@gmail.com>  2024-03-02 10:35:07 +0100
committer GitHub <noreply@github.com>  2024-03-02 10:35:07 +0100
commit    314630638d8f6886c07d73211d6c35f8cf05d56a (patch)
tree      4af002f7fe6b842f2889c44f41770b1d7c8eef21 /candle-transformers/src/models
parent    3e3def41346daae52a0e438513b282e8bba14e73 (diff)
download  candle-314630638d8f6886c07d73211d6c35f8cf05d56a.tar.gz
candle-314630638d8f6886c07d73211d6c35f8cf05d56a.tar.bz2
candle-314630638d8f6886c07d73211d6c35f8cf05d56a.zip
Rustfmt fix. (#1788)
Diffstat (limited to 'candle-transformers/src/models')
-rw-r--r--  candle-transformers/src/models/quantized_stable_lm.rs  |  6
-rw-r--r--  candle-transformers/src/models/stable_lm.rs            |  7
2 files changed, 10 insertions, 3 deletions
diff --git a/candle-transformers/src/models/quantized_stable_lm.rs b/candle-transformers/src/models/quantized_stable_lm.rs
index c79877b6..7d4385a7 100644
--- a/candle-transformers/src/models/quantized_stable_lm.rs
+++ b/candle-transformers/src/models/quantized_stable_lm.rs
@@ -186,7 +186,11 @@ impl DecoderLayer {
fn new(rotary_emb: Arc<RotaryEmbedding>, cfg: &Config, vb: VarBuilder) -> Result<Self> {
let self_attn = Attention::new(rotary_emb, cfg, vb.pp("self_attn"))?;
let mlp = MLP::new(cfg, vb.pp("mlp"))?;
- let input_layernorm = layer_norm(cfg.hidden_size, cfg.layer_norm_eps, vb.pp("input_layernorm"))?;
+ let input_layernorm = layer_norm(
+ cfg.hidden_size,
+ cfg.layer_norm_eps,
+ vb.pp("input_layernorm"),
+ )?;
let post_attention_layernorm = layer_norm(
cfg.hidden_size,
cfg.layer_norm_eps,
diff --git a/candle-transformers/src/models/stable_lm.rs b/candle-transformers/src/models/stable_lm.rs
index f46d3a2c..a1d58936 100644
--- a/candle-transformers/src/models/stable_lm.rs
+++ b/candle-transformers/src/models/stable_lm.rs
@@ -316,8 +316,11 @@ impl DecoderLayer {
fn new(rotary_emb: Arc<RotaryEmbedding>, cfg: &Config, vb: VarBuilder) -> Result<Self> {
let self_attn = Attention::new(rotary_emb, cfg, vb.pp("self_attn"))?;
let mlp = MLP::new(cfg, vb.pp("mlp"))?;
- let input_layernorm =
- candle_nn::layer_norm(cfg.hidden_size, cfg.layer_norm_eps, vb.pp("input_layernorm"))?;
+ let input_layernorm = candle_nn::layer_norm(
+ cfg.hidden_size,
+ cfg.layer_norm_eps,
+ vb.pp("input_layernorm"),
+ )?;
let post_attention_layernorm = candle_nn::layer_norm(
cfg.hidden_size,
cfg.layer_norm_eps,