author | laurent <laurent.mazare@gmail.com> | 2023-06-21 14:29:48 +0100 |
---|---|---|
committer | laurent <laurent.mazare@gmail.com> | 2023-06-21 14:29:48 +0100 |
commit | 7adffafedab0db2e842b50b5f9fd734cc704544e (patch) | |
tree | 01821311411998ba8ccda5cfbb2b1606d9340114 /tests/grad_tests.rs | |
parent | 68f525f3219640750fcc4d3b84686bbfc0a0b8fa (diff) | |
download | candle-7adffafedab0db2e842b50b5f9fd734cc704544e.tar.gz candle-7adffafedab0db2e842b50b5f9fd734cc704544e.tar.bz2 candle-7adffafedab0db2e842b50b5f9fd734cc704544e.zip |
Abstract the gradient storage.
Diffstat (limited to 'tests/grad_tests.rs')
-rw-r--r-- | tests/grad_tests.rs | 2 |
1 file changed, 1 insertion(+), 1 deletion(-)
```diff
diff --git a/tests/grad_tests.rs b/tests/grad_tests.rs
index e5ba68e8..432b1520 100644
--- a/tests/grad_tests.rs
+++ b/tests/grad_tests.rs
@@ -6,7 +6,7 @@ fn simple_grad() -> Result<()> {
     let x = Tensor::var(&[3f32, 1., 4.], Device::Cpu)?;
     let y = (((&x * &x)? + &x * 5f64)? + 4f64)?;
     let grads = y.backward()?;
-    let grad_x = grads.get(&x.id()).context("no grad for x")?;
+    let grad_x = grads.get(&x).context("no grad for x")?;
     assert_eq!(x.to_vec1::<f32>()?, [3., 1., 4.]);
     // y = x^2 + 5.x + 4
     assert_eq!(y.to_vec1::<f32>()?, [28., 10., 40.]);
```
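For context, a minimal sketch of what "abstracting the gradient storage" can look like on the caller side: a store keyed internally by a tensor id, whose `get` accepts the tensor itself so callers no longer need `x.id()`. The `Tensor`, `TensorId`, and `GradStore` types below are simplified stand-ins for illustration, not candle's actual implementation.

```rust
use std::collections::HashMap;

// Simplified stand-ins for illustration; not candle's actual types.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct TensorId(usize);

struct Tensor {
    id: TensorId,
    data: Vec<f32>,
}

// Gradient storage keyed by the tensor's internal id.
struct GradStore(HashMap<TensorId, Tensor>);

impl GradStore {
    fn new() -> Self {
        GradStore(HashMap::new())
    }

    // Callers pass the tensor itself; the id lookup stays an internal detail.
    fn get(&self, tensor: &Tensor) -> Option<&Tensor> {
        self.0.get(&tensor.id)
    }

    fn insert(&mut self, tensor: &Tensor, grad: Tensor) {
        self.0.insert(tensor.id, grad);
    }
}

fn main() {
    // x = [3, 1, 4]; with y = x^2 + 5x + 4, dy/dx = 2x + 5 = [11, 7, 13].
    let x = Tensor { id: TensorId(0), data: vec![3.0, 1.0, 4.0] };
    let mut grads = GradStore::new();
    grads.insert(&x, Tensor { id: TensorId(0), data: vec![11.0, 7.0, 13.0] });
    // Lookup by tensor, mirroring `grads.get(&x)` in the updated test above.
    assert_eq!(grads.get(&x).map(|g| g.data.clone()), Some(vec![11.0, 7.0, 13.0]));
}
```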