summary refs log tree commit diff
path: root/candle-transformers/src/models/clip/text_model.rs
diff options
context:
space:
mode:
Diffstat (limited to 'candle-transformers/src/models/clip/text_model.rs')
-rw-r--r-- candle-transformers/src/models/clip/text_model.rs 4
1 file changed, 2 insertions, 2 deletions
diff --git a/candle-transformers/src/models/clip/text_model.rs b/candle-transformers/src/models/clip/text_model.rs
index 51db14ee..4662f65f 100644
--- a/candle-transformers/src/models/clip/text_model.rs
+++ b/candle-transformers/src/models/clip/text_model.rs
@@ -77,7 +77,7 @@ impl ClipTextEmbeddings {
)?;
let position_ids =
Tensor::arange(0u32, c.max_position_embeddings as u32, vs.device())?.unsqueeze(0)?;
- Ok(ClipTextEmbeddings {
+ Ok(Self {
token_embedding,
position_embedding,
position_ids,
@@ -298,7 +298,7 @@ impl ClipTextTransformer {
})
}
- // TODO: rewrrite to newer version
+ // TODO: rewrite to newer version
fn build_causal_attention_mask(
bsz: usize,
seq_len: usize,