summary | refs | log | tree | commit | diff
path: root/candle-transformers/src/models/bigcode.rs
diff options
context:
space:
mode:
Diffstat (limited to 'candle-transformers/src/models/bigcode.rs')
-rw-r--r--  candle-transformers/src/models/bigcode.rs  | 2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/candle-transformers/src/models/bigcode.rs b/candle-transformers/src/models/bigcode.rs
index 2e1bbd37..f6b4a4ef 100644
--- a/candle-transformers/src/models/bigcode.rs
+++ b/candle-transformers/src/models/bigcode.rs
@@ -298,7 +298,7 @@ impl GPTBigCode {
let wte = embedding(cfg.vocab_size, hidden_size, vb_t.pp("wte"))?;
let wpe = embedding(cfg.max_position_embeddings, hidden_size, vb_t.pp("wpe"))?;
let blocks = (0..cfg.num_hidden_layers)
- .map(|i| Block::load(vb_t.pp(&format!("h.{i}")), &cfg))
+ .map(|i| Block::load(vb_t.pp(format!("h.{i}")), &cfg))
.collect::<Result<Vec<_>>>()?;
let ln_f = layer_norm(hidden_size, cfg.layer_norm_epsilon, vb_t.pp("ln_f"))?;
let lm_head = linear(hidden_size, cfg.vocab_size, false, vb_t.pp("wte"))?;