author     Laurent Mazare <laurent.mazare@gmail.com>   2024-08-22 22:22:03 +0100
committer  GitHub <noreply@github.com>                 2024-08-22 23:22:03 +0200
commit     2ec8729d51a8561841d95b55bbe7fc4e7991eab2 (patch)
tree       4634a48657213533e43d32c010f21c4f3ca11884 /candle-transformers
parent     e3c146ada665cd9ba5265a742c502a7309ca879e (diff)
Fix for parler-tts, do not add the last slice of padding tokens. (#2442)
* Fix for parler-tts, do not add the last slice of padding tokens.

* Support for the mini model.
Diffstat (limited to 'candle-transformers')
-rw-r--r--  candle-transformers/src/models/parler_tts.rs  1 -
1 file changed, 0 insertions(+), 1 deletion(-)
diff --git a/candle-transformers/src/models/parler_tts.rs b/candle-transformers/src/models/parler_tts.rs
index 16023a7c..da401247 100644
--- a/candle-transformers/src/models/parler_tts.rs
+++ b/candle-transformers/src/models/parler_tts.rs
@@ -429,7 +429,6 @@ impl Model {
let min_len = all_audio_tokens.iter().map(|v| v.len()).min().unwrap_or(0);
all_audio_tokens.iter_mut().for_each(|v| {
v.resize(min_len, 0);
- v.push(self.pad_token_id)
});
let all_audio_tokens = Tensor::new(all_audio_tokens, &candle::Device::Cpu)?;
Ok(all_audio_tokens)
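
For context, here is a minimal standalone Rust sketch of the truncation logic after this change. It is not the parler-tts model code itself: the token values are made up, the pad-token field is only referenced in a comment, and the final `Tensor::new(..., &candle::Device::Cpu)` conversion is omitted. It shows that each per-codebook vector is resized down to the shortest length, with no extra slice of padding tokens appended afterwards.

```rust
fn main() {
    // Pretend these are the per-codebook audio tokens, with ragged lengths.
    let mut all_audio_tokens: Vec<Vec<u32>> = vec![
        vec![10, 11, 12, 13],
        vec![20, 21, 22],
        vec![30, 31, 32, 33, 34],
    ];

    // Truncate every codebook to the shortest length so the rows form a rectangle.
    let min_len = all_audio_tokens.iter().map(|v| v.len()).min().unwrap_or(0);
    all_audio_tokens.iter_mut().for_each(|v| {
        v.resize(min_len, 0);
        // Before this commit, a `v.push(self.pad_token_id)` here appended one
        // extra padding token to every codebook after the truncation; that
        // line is what the diff above removes.
    });

    assert!(all_audio_tokens.iter().all(|v| v.len() == min_len));
    println!("{min_len} tokens per codebook: {all_audio_tokens:?}");
}
```

With the sample data above this prints 3 tokens per codebook, i.e. every row ends at the shortest generated length rather than at an extra slice of pad tokens.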