diff --git a/splashsurf_lib/tests/integration_tests/io.rs b/splashsurf_lib/tests/integration_tests/io.rs
index f028885c..c000c55e 100644
--- a/splashsurf_lib/tests/integration_tests/io.rs
+++ b/splashsurf_lib/tests/integration_tests/io.rs
@@ -1,96 +1,2 @@
 #[cfg(feature = "vtk_extras")]
-pub mod vtk {
-    use splashsurf_lib::nalgebra::Vector3;
-    use splashsurf_lib::Real;
-
-    use std::fs::create_dir_all;
-    use std::path::Path;
-
-    use anyhow::{anyhow, Context};
-
-    use vtkio::model::{ByteOrder, DataSet, Version, Vtk};
-    use vtkio::IOBuffer;
-
-    pub fn particles_from_vtk<R: Real, P: AsRef<Path>>(
-        vtk_file: P,
-    ) -> Result<Vec<Vector3<R>>, anyhow::Error> {
-        let sph_dataset = read_vtk(vtk_file)?;
-        particles_from_dataset(sph_dataset)
-    }
-
-    pub fn write_vtk<P: AsRef<Path>>(
-        data: impl Into<DataSet>,
-        filename: P,
-        title: &str,
-    ) -> Result<(), anyhow::Error> {
-        let vtk_file = Vtk {
-            version: Version::new((4, 1)),
-            title: title.to_string(),
-            file_path: None,
-            byte_order: ByteOrder::BigEndian,
-            data: data.into(),
-        };
-
-        let filename = filename.as_ref();
-        if let Some(dir) = filename.parent() {
-            create_dir_all(dir).context("Failed to create parent directory of output file")?;
-        }
-        vtk_file
-            .export_ascii(filename)
-            .context("Error while writing VTK output to file")
-    }
-
-    pub fn read_vtk<P: AsRef<Path>>(filename: P) -> Result<DataSet, vtkio::Error> {
-        let filename = filename.as_ref();
-        Vtk::import_legacy_be(filename).map(|vtk| vtk.data)
-    }
-
-    pub fn particles_from_coords<RealOut: Real, RealIn: Real>(
-        coords: &Vec<RealIn>,
-    ) -> Result<Vec<Vector3<RealOut>>, anyhow::Error> {
-        if coords.len() % 3 != 0 {
-            anyhow!("The number of values in the particle data point buffer is not divisible by 3");
-        }
-
-        let num_points = coords.len() / 3;
-        let mut positions = Vec::with_capacity(num_points);
-        for i in 0..num_points {
-            positions.push(Vector3::new(
-                RealOut::from_f64(coords[3 * i + 0].to_f64().unwrap()).unwrap(),
-                RealOut::from_f64(coords[3 * i + 1].to_f64().unwrap()).unwrap(),
-                RealOut::from_f64(coords[3 * i + 2].to_f64().unwrap()).unwrap(),
-            ))
-        }
-
-        Ok(positions)
-    }
-
-    pub fn particles_from_dataset<R: Real>(
-        dataset: DataSet,
-    ) -> Result<Vec<Vector3<R>>, anyhow::Error> {
-        if let DataSet::UnstructuredGrid { pieces, .. } = dataset {
-            if let Some(piece) = pieces.into_iter().next() {
-                let points = piece
-                    .into_loaded_piece_data(None)
-                    .context("Failed to load unstructured grid piece")?
-                    .points;
-
-                match points {
-                    IOBuffer::F64(coords) => particles_from_coords(&coords),
-                    IOBuffer::F32(coords) => particles_from_coords(&coords),
-                    _ => Err(anyhow!(
-                        "Point coordinate IOBuffer does not contain f32 or f64 values"
-                    )),
-                }
-            } else {
-                Err(anyhow!(
-                    "Loaded dataset does not contain an unstructured grid piece"
-                ))
-            }
-        } else {
-            Err(anyhow!(
-                "Loaded dataset does not contain an unstructured grid"
-            ))
-        }
-    }
-}
+pub mod vtk;
diff --git a/splashsurf_lib/tests/integration_tests/io/vtk.rs b/splashsurf_lib/tests/integration_tests/io/vtk.rs
new file mode 100644
index 00000000..1d0e1952
--- /dev/null
+++ b/splashsurf_lib/tests/integration_tests/io/vtk.rs
@@ -0,0 +1,91 @@
+use splashsurf_lib::nalgebra::Vector3;
+use splashsurf_lib::Real;
+
+use std::fs::create_dir_all;
+use std::path::Path;
+
+use anyhow::{anyhow, Context};
+
+use vtkio::model::{ByteOrder, DataSet, Version, Vtk};
+use vtkio::IOBuffer;
+
+pub fn particles_from_vtk<R: Real, P: AsRef<Path>>(
+    vtk_file: P,
+) -> Result<Vec<Vector3<R>>, anyhow::Error> {
+    let sph_dataset = read_vtk(vtk_file)?;
+    particles_from_dataset(sph_dataset)
+}
+
+pub fn write_vtk<P: AsRef<Path>>(
+    data: impl Into<DataSet>,
+    filename: P,
+    title: &str,
+) -> Result<(), anyhow::Error> {
+    let vtk_file = Vtk {
+        version: Version::new((4, 1)),
+        title: title.to_string(),
+        file_path: None,
+        byte_order: ByteOrder::BigEndian,
+        data: data.into(),
+    };
+
+    let filename = filename.as_ref();
+    if let Some(dir) = filename.parent() {
+        create_dir_all(dir).context("Failed to create parent directory of output file")?;
+    }
+    vtk_file
+        .export_ascii(filename)
+        .context("Error while writing VTK output to file")
+}
+
+pub fn read_vtk<P: AsRef<Path>>(filename: P) -> Result<DataSet, vtkio::Error> {
+    let filename = filename.as_ref();
+    Vtk::import_legacy_be(filename).map(|vtk| vtk.data)
+}
+
+pub fn particles_from_coords<RealOut: Real, RealIn: Real>(
+    coords: &Vec<RealIn>,
+) -> Result<Vec<Vector3<RealOut>>, anyhow::Error> {
+    if coords.len() % 3 != 0 {
+        anyhow!("The number of values in the particle data point buffer is not divisible by 3");
+    }
+
+    let num_points = coords.len() / 3;
+    let mut positions = Vec::with_capacity(num_points);
+    for i in 0..num_points {
+        positions.push(Vector3::new(
+            RealOut::from_f64(coords[3 * i + 0].to_f64().unwrap()).unwrap(),
+            RealOut::from_f64(coords[3 * i + 1].to_f64().unwrap()).unwrap(),
+            RealOut::from_f64(coords[3 * i + 2].to_f64().unwrap()).unwrap(),
+        ))
+    }
+
+    Ok(positions)
+}
+
+pub fn particles_from_dataset<R: Real>(dataset: DataSet) -> Result<Vec<Vector3<R>>, anyhow::Error> {
+    if let DataSet::UnstructuredGrid { pieces, .. } = dataset {
+        if let Some(piece) = pieces.into_iter().next() {
+            let points = piece
+                .into_loaded_piece_data(None)
+                .context("Failed to load unstructured grid piece")?
+                .points;
+
+            match points {
+                IOBuffer::F64(coords) => particles_from_coords(&coords),
+                IOBuffer::F32(coords) => particles_from_coords(&coords),
+                _ => Err(anyhow!(
+                    "Point coordinate IOBuffer does not contain f32 or f64 values"
+                )),
+            }
+        } else {
+            Err(anyhow!(
+                "Loaded dataset does not contain an unstructured grid piece"
+            ))
+        }
+    } else {
+        Err(anyhow!(
+            "Loaded dataset does not contain an unstructured grid"
+        ))
+    }
+}
diff --git a/splashsurf_lib/tests/integration_tests/mod.rs b/splashsurf_lib/tests/integration_tests/mod.rs
index ddf2b357..468fdf7d 100644
--- a/splashsurf_lib/tests/integration_tests/mod.rs
+++ b/splashsurf_lib/tests/integration_tests/mod.rs
@@ -1,5 +1,5 @@
+#[cfg(feature = "vtk_extras")]
 pub mod io;
-
 #[cfg(feature = "vtk_extras")]
 pub mod test_full;
 #[cfg(feature = "vtk_extras")]