From 8ad4a21ffcc03b745f33170130298bab79a09795 Mon Sep 17 00:00:00 2001
From: Laurent Mazare
Date: Tue, 15 Aug 2023 17:19:18 +0100
Subject: Add a basic optimizer example. (#454)

---
 candle-nn/examples/basic_optimizer.rs | 33 +++++++++++++++++++++++++++++++++
 1 file changed, 33 insertions(+)
 create mode 100644 candle-nn/examples/basic_optimizer.rs

(limited to 'candle-nn/examples/basic_optimizer.rs')

diff --git a/candle-nn/examples/basic_optimizer.rs b/candle-nn/examples/basic_optimizer.rs
new file mode 100644
index 00000000..3c5665e8
--- /dev/null
+++ b/candle-nn/examples/basic_optimizer.rs
@@ -0,0 +1,33 @@
+use candle::{DType, Device, Result, Tensor};
+use candle_nn::{linear, AdamW, Linear, ParamsAdamW, VarBuilder, VarMap};
+
+fn gen_data() -> Result<(Tensor, Tensor)> {
+    // Generate some sample linear data.
+    let w_gen = Tensor::new(&[[3f32, 1.]], &Device::Cpu)?;
+    let b_gen = Tensor::new(-2f32, &Device::Cpu)?;
+    let gen = Linear::new(w_gen, Some(b_gen));
+    let sample_xs = Tensor::new(&[[2f32, 1.], [7., 4.], [-4., 12.], [5., 8.]], &Device::Cpu)?;
+    let sample_ys = gen.forward(&sample_xs)?;
+    Ok((sample_xs, sample_ys))
+}
+
+fn main() -> Result<()> {
+    let (sample_xs, sample_ys) = gen_data()?;
+
+    // Use backprop to run a linear regression between samples and get the coefficients back.
+    let varmap = VarMap::new();
+    let vb = VarBuilder::from_varmap(&varmap, DType::F32, &Device::Cpu);
+    let model = linear(2, 1, vb.pp("linear"))?;
+    let params = ParamsAdamW {
+        lr: 0.1,
+        ..Default::default()
+    };
+    let mut opt = AdamW::new(varmap.all_vars(), params)?;
+    for step in 0..10000 {
+        let ys = model.forward(&sample_xs)?;
+        let loss = ys.sub(&sample_ys)?.sqr()?.sum_all()?;
+        opt.backward_step(&loss)?;
+        println!("{step} {}", loss.to_vec0::<f32>()?);
+    }
+    Ok(())
+}
-- 
cgit v1.2.3