diff options
author | Juarez Bochi <jbochi@gmail.com> | 2023-09-12 09:10:16 -0700 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-09-12 18:10:16 +0200 |
commit | 805bf9ffa78119a1a7e047b4ddf6b2ea7df4d94f (patch) | |
tree | 0df65e2e6fee356d2345954701ec3d47796ae7ee /candle-examples/examples/llama2-c | |
parent | 42da17694a4214a3e39e0d64afc22635ce83f557 (diff) | |
download | candle-805bf9ffa78119a1a7e047b4ddf6b2ea7df4d94f.tar.gz candle-805bf9ffa78119a1a7e047b4ddf6b2ea7df4d94f.tar.bz2 candle-805bf9ffa78119a1a7e047b4ddf6b2ea7df4d94f.zip |
Implement top_p / nucleus sampling (#819)
* Implement top_p / nucleus sampling
* Update changelog
* rustfmt
* Add tests
* Fix clippy warning
* Fix another clippy error
Diffstat (limited to 'candle-examples/examples/llama2-c')
-rw-r--r-- | candle-examples/examples/llama2-c/main.rs | 7 |
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/candle-examples/examples/llama2-c/main.rs b/candle-examples/examples/llama2-c/main.rs index e0ade322..e752a494 100644 --- a/candle-examples/examples/llama2-c/main.rs +++ b/candle-examples/examples/llama2-c/main.rs @@ -27,6 +27,10 @@ struct InferenceCmd { #[arg(long)] temperature: Option<f64>, + /// Nucleus sampling probability cutoff. + #[arg(long)] + top_p: Option<f64>, + #[arg(long, default_value = "")] prompt: String, @@ -133,6 +137,7 @@ fn main() -> anyhow::Result<()> { None => { let cmd = InferenceCmd { temperature: None, + top_p: None, prompt: "".to_string(), config: None, model_id: "karpathy/tinyllamas".to_string(), @@ -256,7 +261,7 @@ fn run_inference(args: &InferenceCmd, common_args: &Args) -> Result<()> { let model = Llama::load(vb, &cache, config)?; println!("starting the inference loop"); - let mut logits_processor = LogitsProcessor::new(299792458, args.temperature); + let mut logits_processor = LogitsProcessor::new(299792458, args.temperature, args.top_p); let mut index_pos = 0; print!("{}", args.prompt); |