summaryrefslogtreecommitdiff
path: root/candle-core/src/tensor.rs
diff options
context:
space:
mode:
authorlaurent <laurent.mazare@gmail.com>2023-06-29 19:07:52 +0100
committerlaurent <laurent.mazare@gmail.com>2023-06-29 19:07:52 +0100
commitb50bd880ce472d7c20d09d6e5c7f49fcdf95f8db (patch)
tree3dc788d3a1f1c7dda9dbb3a931a54f5a48df4cf6 /candle-core/src/tensor.rs
parent3232df9458e41c7414d51459b23e493b75a3949c (diff)
downloadcandle-b50bd880ce472d7c20d09d6e5c7f49fcdf95f8db.tar.gz
candle-b50bd880ce472d7c20d09d6e5c7f49fcdf95f8db.tar.bz2
candle-b50bd880ce472d7c20d09d6e5c7f49fcdf95f8db.zip
Only narrow when needed + deactivate the kv cache.
Diffstat (limited to 'candle-core/src/tensor.rs')
-rw-r--r--candle-core/src/tensor.rs41
1 file changed, 27 insertions(+), 14 deletions(-)
diff --git a/candle-core/src/tensor.rs b/candle-core/src/tensor.rs
index 6586834c..2f05094b 100644
--- a/candle-core/src/tensor.rs
+++ b/candle-core/src/tensor.rs
@@ -349,21 +349,34 @@ impl Tensor {
}
/// Returns a new tensor that is a narrowed version of the input, the dimension `dim`
- /// ranges from `start` to `start + length`.
- pub fn narrow(&self, dim: usize, start: usize, length: usize) -> Result<Self> {
- let op = if self.track_op() {
- Some(Op::Narrow(self.clone(), dim, start, length))
+ /// ranges from `start` to `start + len`.
+ pub fn narrow(&self, dim: usize, start: usize, len: usize) -> Result<Self> {
+ let dims = self.dims();
+ if dim >= dims.len() || start + len > dims[dim] {
+ Err(Error::NarrowInvalidArgs {
+ shape: self.shape().clone(),
+ dim,
+ start,
+ len,
+ })?
+ }
+ if start == 0 && dims[dim] == len {
+ Ok(self.clone())
} else {
- None
- };
- let tensor_ = Tensor_ {
- id: TensorId::new(),
- storage: self.storage.clone(),
- layout: self.layout().narrow(dim, start, length)?,
- op,
- is_variable: false,
- };
- Ok(Tensor(Arc::new(tensor_)))
+ let op = if self.track_op() {
+ Some(Op::Narrow(self.clone(), dim, start, len))
+ } else {
+ None
+ };
+ let tensor_ = Tensor_ {
+ id: TensorId::new(),
+ storage: self.storage.clone(),
+ layout: self.layout().narrow(dim, start, len)?,
+ op,
+ is_variable: false,
+ };
+ Ok(Tensor(Arc::new(tensor_)))
+ }
}
pub fn softmax(&self, dim: usize) -> Result<Self> {