summaryrefslogtreecommitdiff
path: root/candle-datasets/src/vision/mnist.rs
diff options
context:
space:
mode:
authorNicolas Patry <patry.nicolas@protonmail.com>2023-08-29 13:10:05 +0200
committerGitHub <noreply@github.com>2023-08-29 13:10:05 +0200
commit14b4d456e80a6fb218c6e3c16b4e5aeffb0c2c6f (patch)
tree11d5c84dedb610b9e4306030ec36929d1f03e980 /candle-datasets/src/vision/mnist.rs
parent62ef494dc17c1f582b28c665e78f2aa78d846bb9 (diff)
parent2d5b7a735d2c9ccb890dae73862dc734ef0950ae (diff)
downloadcandle-14b4d456e80a6fb218c6e3c16b4e5aeffb0c2c6f.tar.gz
candle-14b4d456e80a6fb218c6e3c16b4e5aeffb0c2c6f.tar.bz2
candle-14b4d456e80a6fb218c6e3c16b4e5aeffb0c2c6f.zip
Merge pull request #439 from huggingface/training_hub_dataset
[Book] Add small error management + start training (with generic dataset inclusion).
Diffstat (limited to 'candle-datasets/src/vision/mnist.rs')
-rw-r--r--candle-datasets/src/vision/mnist.rs59
1 file changed, 58 insertions, 1 deletion
diff --git a/candle-datasets/src/vision/mnist.rs b/candle-datasets/src/vision/mnist.rs
index 2267f9a0..1085edd6 100644
--- a/candle-datasets/src/vision/mnist.rs
+++ b/candle-datasets/src/vision/mnist.rs
@@ -2,7 +2,9 @@
//!
//! The files can be obtained from the following link:
//! <http://yann.lecun.com/exdb/mnist/>
-use candle::{DType, Device, Result, Tensor};
+use candle::{DType, Device, Error, Result, Tensor};
+use hf_hub::{api::sync::Api, Repo, RepoType};
+use parquet::file::reader::{FileReader, SerializedFileReader};
use std::fs::File;
use std::io::{self, BufReader, Read};
@@ -63,3 +65,58 @@ pub fn load_dir<T: AsRef<std::path::Path>>(dir: T) -> Result<crate::vision::Data
labels: 10,
})
}
+
+fn load_parquet(parquet: SerializedFileReader<std::fs::File>) -> Result<(Tensor, Tensor)> {
+ let samples = parquet.metadata().file_metadata().num_rows() as usize;
+ let mut buffer_images: Vec<u8> = Vec::with_capacity(samples * 784);
+ let mut buffer_labels: Vec<u8> = Vec::with_capacity(samples);
+ for row in parquet.into_iter().flatten() {
+ for (_name, field) in row.get_column_iter() {
+ if let parquet::record::Field::Group(subrow) = field {
+ for (_name, field) in subrow.get_column_iter() {
+ if let parquet::record::Field::Bytes(value) = field {
+ let image = image::load_from_memory(value.data()).unwrap();
+ buffer_images.extend(image.to_luma8().as_raw());
+ }
+ }
+ } else if let parquet::record::Field::Long(label) = field {
+ buffer_labels.push(*label as u8);
+ }
+ }
+ }
+ let images = (Tensor::from_vec(buffer_images, (samples, 784), &Device::Cpu)?
+ .to_dtype(DType::F32)?
+ / 255.)?;
+ let labels = Tensor::from_vec(buffer_labels, (samples,), &Device::Cpu)?;
+ Ok((images, labels))
+}
+
+pub fn load() -> Result<crate::vision::Dataset> {
+ let api = Api::new().map_err(|e| Error::Msg(format!("Api error: {e}")))?;
+ let dataset_id = "mnist".to_string();
+ let repo = Repo::with_revision(
+ dataset_id,
+ RepoType::Dataset,
+ "refs/convert/parquet".to_string(),
+ );
+ let repo = api.repo(repo);
+ let test_parquet_filename = repo
+ .get("mnist/mnist-test.parquet")
+ .map_err(|e| Error::Msg(format!("Api error: {e}")))?;
+ let train_parquet_filename = repo
+ .get("mnist/mnist-train.parquet")
+ .map_err(|e| Error::Msg(format!("Api error: {e}")))?;
+ let test_parquet = SerializedFileReader::new(std::fs::File::open(test_parquet_filename)?)
+ .map_err(|e| Error::Msg(format!("Parquet error: {e}")))?;
+ let train_parquet = SerializedFileReader::new(std::fs::File::open(train_parquet_filename)?)
+ .map_err(|e| Error::Msg(format!("Parquet error: {e}")))?;
+ let (test_images, test_labels) = load_parquet(test_parquet)?;
+ let (train_images, train_labels) = load_parquet(train_parquet)?;
+ Ok(crate::vision::Dataset {
+ train_images,
+ train_labels,
+ test_images,
+ test_labels,
+ labels: 10,
+ })
+}