diff --git a/Cargo.toml b/Cargo.toml
index 674e835b..f12f1737 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -17,7 +17,6 @@ csv = "1.3.0"
 diffsol = "=0.7.0"
 libloading = { version = "0.8.6", optional = true, features = [] }
 nalgebra = "0.34.1"
-ndarray = { version = "0.17.1", features = ["rayon"] }
 rand = "0.9.0"
 rand_distr = "0.5.0"
 rayon = "1.10.0"
@@ -28,6 +27,7 @@ thiserror = "2.0.11"
 argmin = "0.11.0"
 argmin-math = "0.5.1"
 tracing = "0.1.41"
+faer = "0.23.2"

 [dev-dependencies]
 criterion = { version = "0.7.0", features = ["html_reports"] }
diff --git a/examples/gendata.rs b/examples/gendata.rs
index ba214a78..6282e569 100644
--- a/examples/gendata.rs
+++ b/examples/gendata.rs
@@ -63,7 +63,7 @@ fn main() {
     let mut data = vec![];
     for (i, spp) in support_points.iter().enumerate() {
         let trajectories = sde.estimate_predictions(&subject, spp).unwrap();
-        let trajectory = trajectories.row(0);
+        let trajectory = trajectories.row(0).unwrap();
         // dbg!(&trajectory);
         let mut sb = data::Subject::builder(format!("id{}", i)).bolus(0.0, 20.0, 0);
         for (t, point) in trajectory.iter().enumerate() {
diff --git a/examples/likelihood.rs b/examples/likelihood.rs
index a2a049f9..febf5646 100644
--- a/examples/likelihood.rs
+++ b/examples/likelihood.rs
@@ -1,12 +1,10 @@
-use ndarray::Array1;
 const FRAC_1_SQRT_2PI: f64 =
     std::f64::consts::FRAC_2_SQRT_PI * std::f64::consts::FRAC_1_SQRT_2 / 2.0;

-fn main() {
-    use ndarray::array;
-    let predictions = array![1.0, 2.0, 3.0];
-    let observations = array![1.1, 1.9, 3.2];
-    let sigma = array![0.1, 0.2, 0.3];
+fn main() {
+    let predictions = vec![1.0, 2.0, 3.0];
+    let observations = vec![1.1, 1.9, 3.2];
+    let sigma = vec![0.1, 0.2, 0.3];

     let result1 = normal_likelihood1(&predictions, &observations, &sigma);
     let result2 = normal_likelihood2(&predictions, &observations, &sigma);
@@ -15,32 +13,30 @@ fn main() {
     println!("Result 2: {}", result2);
 }

-fn normal_likelihood1(
-    predictions: &Array1<f64>,
-    observations: &Array1<f64>,
-    sigma: &Array1<f64>,
-) -> f64 {
+fn normal_likelihood1(predictions: &[f64], observations: &[f64], sigma: &[f64]) -> f64 {
     const FRAC_1_SQRT_2PI: f64 =
         std::f64::consts::FRAC_2_SQRT_PI * std::f64::consts::FRAC_1_SQRT_2 / 2.0;
-    let diff = (observations - predictions).mapv(|x| x.powi(2));
-    let two_sigma_sq = sigma.mapv(|x| 2.0 * x * x);
-    let exponent = (-&diff / two_sigma_sq).mapv(|x| x.exp());
-    let aux_vec = FRAC_1_SQRT_2PI * exponent / sigma;
-    aux_vec.product()
+
+    predictions
+        .iter()
+        .zip(observations.iter())
+        .zip(sigma.iter())
+        .map(|((pred, obs), sig)| {
+            let diff = (obs - pred).powi(2);
+            let two_sigma_sq = 2.0 * sig * sig;
+            let exponent = (-diff / two_sigma_sq).exp();
+            FRAC_1_SQRT_2PI * exponent / sig
+        })
+        .product()
 }

-fn normal_likelihood2(
-    predictions: &Array1<f64>,
-    observations: &Array1<f64>,
-    sigma: &Array1<f64>,
-) -> f64 {
-    let aux_vec = predictions
+fn normal_likelihood2(predictions: &[f64], observations: &[f64], sigma: &[f64]) -> f64 {
+    predictions
         .iter()
         .zip(observations.iter())
         .zip(sigma.iter())
         .map(|((pred, obs), sig)| normpdf(*obs, *pred, *sig))
-        .collect::<Vec<f64>>();
-    aux_vec.product()
+        .product()
 }

 /// Probability density function
diff --git a/src/error/mod.rs b/src/error/mod.rs
index 9c10fa69..5f8f7d52 100644
--- a/src/error/mod.rs
+++ b/src/error/mod.rs
@@ -5,16 +5,12 @@
 use crate::data::parser::pmetrics::PmetricsError;
 use crate::CovariateError;

-use ndarray::ShapeError;
-
 #[derive(Error, Debug, Clone)]
 pub enum PharmsolError {
     #[error("Error in the error model: {0}")]
     ErrorModelError(#[from] ErrorModelError),
     #[error("Covariate error: {0}")]
     CovariateError(#[from] CovariateError),
-    #[error("Shape error: {0}")]
-    NdarrayShapeError(#[from] ShapeError),
     #[error("Error parsing Pmetrics datafile: {0}")]
     PmetricsError(#[from] PmetricsError),
     #[error("Diffsol error: {0}")]
diff --git a/src/lib.rs b/src/lib.rs
index 58f01d8b..a5907642 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -30,7 +30,7 @@ pub mod prelude {
     pub use crate::simulator::{
         equation,
         equation::Equation,
-        likelihood::{psi, PopulationPredictions, Prediction, SubjectPredictions},
+        likelihood::{psi, Prediction, PredictionMatrix, SubjectPredictions},
     };
 }
 pub mod models {
diff --git a/src/optimize/spp.rs b/src/optimize/spp.rs
index ac19bd3b..673d955d 100644
--- a/src/optimize/spp.rs
+++ b/src/optimize/spp.rs
@@ -3,7 +3,7 @@ use argmin::{
     solver::neldermead::NelderMead,
 };

-use ndarray::{Array1, Axis};
+use faer::Mat;

 use crate::{prelude::simulator::psi, Data, Equation, ErrorModels};

@@ -11,14 +11,15 @@ pub struct SppOptimizer<'a, E: Equation> {
     equation: &'a E,
     data: &'a Data,
     sig: &'a ErrorModels,
-    pyl: &'a Array1<f64>,
+    pyl: &'a Vec<f64>,
 }

 impl<E: Equation> CostFunction for SppOptimizer<'_, E> {
     type Param = Vec<f64>;
     type Output = f64;

     fn cost(&self, spp: &Self::Param) -> Result<Self::Output, Error> {
-        let theta = Array1::from(spp.clone()).insert_axis(Axis(0));
+        // Create a 1xN matrix (1 support point with N parameters)
+        let theta = Mat::from_fn(1, spp.len(), |_, j| spp[j]);
         let psi = psi(self.equation, self.data, &theta, self.sig, false, false)?;

@@ -34,7 +35,9 @@ impl<E: Equation> CostFunction for SppOptimizer<'_, E> {
         }
         let nsub = psi.nrows() as f64;
         let mut sum = -nsub;
-        for (p_i, pyl_i) in psi.iter().zip(self.pyl.iter()) {
+        // Iterate through each row of psi (which has only 1 column)
+        for (i, pyl_i) in self.pyl.iter().enumerate() {
+            let p_i = psi[(i, 0)];
             sum += p_i / pyl_i;
         }
         Ok(-sum)
@@ -42,12 +45,7 @@
     }
 }
 impl<'a, E: Equation> SppOptimizer<'a, E> {
-    pub fn new(
-        equation: &'a E,
-        data: &'a Data,
-        sig: &'a ErrorModels,
-        pyl: &'a Array1<f64>,
-    ) -> Self {
+    pub fn new(equation: &'a E, data: &'a Data, sig: &'a ErrorModels, pyl: &'a Vec<f64>) -> Self {
         Self {
             equation,
             data,
@@ -55,14 +53,14 @@ impl<'a, E: Equation> SppOptimizer<'a, E> {
             pyl,
         }
     }
-    pub fn optimize_point(self, spp: Array1<f64>) -> Result<Array1<f64>, Error> {
-        let simplex = create_initial_simplex(&spp.to_vec());
+    pub fn optimize_point(self, spp: Vec<f64>) -> Result<Vec<f64>, Error> {
+        let simplex = create_initial_simplex(&spp);
         let solver: NelderMead<Vec<f64>, f64> =
             NelderMead::new(simplex).with_sd_tolerance(1e-2)?;
         let res = Executor::new(self, solver)
             .configure(|state| state.max_iters(5))
             // .add_observer(SlogLogger::term(), ObserverMode::Always)
             .run()?;
-        Ok(Array1::from(res.state.best_param.unwrap()))
+        Ok(res.state.best_param.unwrap())
     }
 }
diff --git a/src/simulator/equation/sde/mod.rs b/src/simulator/equation/sde/mod.rs
index 85168378..a922c48e 100644
--- a/src/simulator/equation/sde/mod.rs
+++ b/src/simulator/equation/sde/mod.rs
@@ -2,7 +2,6 @@ mod em;

 use diffsol::{NalgebraContext, Vector};
 use nalgebra::DVector;
-use ndarray::{concatenate, Array2, Axis};
 use rand::{rng, Rng};
 use rayon::prelude::*;

@@ -13,7 +12,7 @@ use crate::{
     data::{Covariates, Infusion},
     error_model::ErrorModels,
     prelude::simulator::Prediction,
-    simulator::{Diffusion, Drift, Fa, Init, Lag, Neqs, Out, V},
+    simulator::{likelihood::PredictionMatrix, Diffusion, Drift, Fa, Init, Lag, Neqs, Out, V},
     Subject,
 };

@@ -151,9 +150,9 @@ impl State for Vec<DVector<f64>> {
 /// Predictions implementation for particle-based SDE simulation outputs.
 ///
 /// This implementation manages and processes predictions from multiple particles.
-impl Predictions for Array2<Prediction> {
+impl Predictions for PredictionMatrix {
     fn new(nparticles: usize) -> Self {
-        Array2::from_shape_fn((nparticles, 0), |_| Prediction::default())
+        PredictionMatrix::new(nparticles, 0)
     }
     fn squared_error(&self) -> f64 {
         unimplemented!();
     }
@@ -171,11 +170,11 @@

             let mean_prediction: f64 = column
                 .iter()
-                .map(|pred: &Prediction| pred.prediction())
+                .map(|pred: &&Prediction| pred.prediction())
                 .sum::<f64>()
                 / self.nrows() as f64;

-            let mut prediction = column.first().unwrap().clone();
+            let mut prediction = (*column.first().unwrap()).clone();
             prediction.set_prediction(mean_prediction);
             result.push(prediction);
         }
@@ -186,7 +185,7 @@

 impl EquationTypes for SDE {
     type S = Vec<DVector<f64>>; // Vec -> particles, DVector -> state
-    type P = Array2<Prediction>; // Rows -> particles, Columns -> time
+    type P = PredictionMatrix; // Rows -> particles, Columns -> time
 }

 impl EquationPriv for SDE {
@@ -286,8 +285,10 @@ impl EquationPriv for SDE {
                 );
                 *p = observation.to_prediction(y[observation.outeq()], x[i].as_slice().to_vec());
             });
-        let out = Array2::from_shape_vec((self.nparticles, 1), pred.clone())?;
-        *output = concatenate(Axis(1), &[output.view(), out.view()]).unwrap();
+
+        // Append the new column of predictions
+        output.append_column(pred.clone())?;
+
         //e = y[t] .- x[:,1]
         // q = pdf.(Distributions.Normal(0, 0.5), e)
         if let Some(em) = error_models {
diff --git a/src/simulator/likelihood/mod.rs b/src/simulator/likelihood/mod.rs
index b4401775..c731e70d 100644
--- a/src/simulator/likelihood/mod.rs
+++ b/src/simulator/likelihood/mod.rs
@@ -5,8 +5,10 @@ use crate::{
     data::error_model::ErrorModels, Data, Equation, ErrorPoly, Observation, PharmsolError,
     Predictions,
 };
-use ndarray::{Array2, Axis, ShapeBuilder};
+use faer::Mat;
 use rayon::prelude::*;
+use serde::Deserialize;
+use serde::Serialize;
 use statrs::distribution::ContinuousCDF;
 use statrs::distribution::Normal;

@@ -122,27 +124,79 @@ impl From<Vec<Prediction>> for SubjectPredictions {
     }
 }

-/// Container for predictions across a population of subjects.
-///
-/// This struct holds predictions for multiple subjects organized in a 2D array.
-pub struct PopulationPredictions {
-    /// 2D array of subject predictions
-    pub subject_predictions: Array2<SubjectPredictions>,
+/// Matrix structure for storing predictions in a 2D layout.
+/// Organized as rows x columns, where each element is a [Prediction].
+#[derive(Clone, Debug, Default, Serialize, Deserialize)]
+pub struct PredictionMatrix {
+    data: Vec<Vec<Prediction>>,
+    nrows: usize,
+    ncols: usize,
 }

-impl Default for PopulationPredictions {
-    fn default() -> Self {
+impl PredictionMatrix {
+    /// Create a new matrix with the given dimensions
+    pub fn new(nrows: usize, ncols: usize) -> Self {
         Self {
-            subject_predictions: Array2::default((0, 0)),
+            data: vec![vec![Prediction::default(); ncols]; nrows],
+            nrows,
+            ncols,
         }
     }
-}

-impl From<Array2<SubjectPredictions>> for PopulationPredictions {
-    fn from(subject_predictions: Array2<SubjectPredictions>) -> Self {
-        Self {
-            subject_predictions,
+    /// Get the number of rows
+    pub fn nrows(&self) -> usize {
+        self.nrows
+    }
+
+    /// Get the number of columns
+    pub fn ncols(&self) -> usize {
+        self.ncols
+    }
+
+    /// Check if the matrix is empty
+    pub fn is_empty(&self) -> bool {
+        self.nrows == 0 || self.ncols == 0
+    }
+
+    /// Get a reference to a specific element
+    pub fn get(&self, row: usize, col: usize) -> Option<&Prediction> {
+        self.data.get(row).and_then(|r| r.get(col))
+    }
+
+    /// Get a mutable reference to a specific element
+    pub fn get_mut(&mut self, row: usize, col: usize) -> Option<&mut Prediction> {
+        self.data.get_mut(row).and_then(|r| r.get_mut(col))
+    }
+
+    /// Get a reference to a row
+    pub fn row(&self, row: usize) -> Option<&Vec<Prediction>> {
+        self.data.get(row)
+    }
+
+    /// Get an iterator over rows
+    pub fn rows(&self) -> impl Iterator<Item = &Vec<Prediction>> {
+        self.data.iter()
+    }
+
+    /// Get a column as a vector
+    pub fn column(&self, col: usize) -> Vec<&Prediction> {
+        self.data.iter().filter_map(|row| row.get(col)).collect()
+    }
+
+    /// Append a column to the matrix
+    pub fn append_column(&mut self, column: Vec<Prediction>) -> Result<(), PharmsolError> {
+        if column.len() != self.nrows {
+            return Err(PharmsolError::OtherError(format!(
+                "Column length {} does not match matrix rows {}",
+                column.len(),
+                self.nrows
+            )));
+        }
+        for (row, item) in self.data.iter_mut().zip(column.into_iter()) {
+            row.push(item);
         }
+        self.ncols += 1;
+        Ok(())
     }
 }

@@ -151,75 +205,80 @@ impl From<Array2<SubjectPredictions>> for PopulationPredictions {
 /// # Parameters
 /// - `equation`: The equation to use for simulation
 /// - `subjects`: The subject data
-/// - `support_points`: The support points to evaluate
+/// - `support_points`: The support points to evaluate (rows = support points, cols = parameters)
 /// - `error_model`: The error model to use
 /// - `progress`: Whether to show a progress bar
 /// - `cache`: Whether to use caching
 ///
 /// # Returns
-/// A 2D array of likelihoods
+/// A 2D matrix of likelihoods
 pub fn psi(
     equation: &impl Equation,
     subjects: &Data,
-    support_points: &Array2<f64>,
+    support_points: &Mat<f64>,
     error_models: &ErrorModels,
     progress: bool,
     cache: bool,
-) -> Result<Array2<f64>, PharmsolError> {
-    let mut psi: Array2<f64> = Array2::default((subjects.len(), support_points.nrows()).f());
+) -> Result<Mat<f64>, PharmsolError> {
+    let nrows = subjects.len();
+    let ncols = support_points.nrows();

-    let subjects = subjects.subjects();
+    let subjects_vec = subjects.subjects();

     let progress_tracker = if progress {
-        let total = subjects.len() * support_points.nrows();
+        let total = nrows * ncols;
         println!(
             "Simulating {} subjects with {} support points each...",
-            subjects.len(),
-            support_points.nrows()
+            nrows, ncols
         );
         Some(ProgressTracker::new(total))
     } else {
         None
     };

-    let result: Result<(), PharmsolError> = psi
-        .axis_iter_mut(Axis(0))
-        .into_par_iter()
-        .enumerate()
-        .try_for_each(|(i, mut row)| {
-            row.axis_iter_mut(Axis(0))
-                .into_par_iter()
-                .enumerate()
-                .try_for_each(|(j, mut element)| {
-                    let subject = subjects.get(i).unwrap();
-                    match equation.estimate_likelihood(
-                        subject,
-                        support_points.row(j).to_vec().as_ref(),
-                        error_models,
-                        cache,
-                    ) {
+    // Collect results into a flat vector in row-major order
+    let mut results: Vec<f64> = vec![0.0; nrows * ncols];
+
+    let result: Result<(), PharmsolError> =
+        results
+            .par_chunks_mut(ncols)
+            .enumerate()
+            .try_for_each(|(i, row)| {
+                row.par_iter_mut().enumerate().try_for_each(|(j, element)| {
+                    let subject = subjects_vec.get(i).unwrap();
+                    // Convert Mat row to Vec
+                    let support_point: Vec<f64> = (0..support_points.ncols())
+                        .map(|k| support_points[(j, k)])
+                        .collect();
+                    match equation.estimate_likelihood(subject, &support_point, error_models, cache)
+                    {
                         Ok(likelihood) => {
-                            element.fill(likelihood);
+                            *element = likelihood;
                             if let Some(ref tracker) = progress_tracker {
                                 tracker.inc();
                             }
+                            Ok(())
                         }
-                        Err(e) => return Err(e),
-                    };
-                    Ok(())
+                        Err(e) => Err(e),
                     }
                 })
-        });
+            });

     if let Some(tracker) = progress_tracker {
         tracker.finish();
     }
     result?;
+
+    // Convert flat vector to faer::Mat
+    // faer uses column-major order by default, so we need to transpose or use from_fn
+    let psi = Mat::from_fn(nrows, ncols, |i, j| results[i * ncols + j]);
+
     Ok(psi)
 }

 /// Prediction holds an observation and its prediction
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Prediction {
     pub(crate) time: f64,
     pub(crate) observation: Option<f64>,
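For reviewers, a minimal standalone sketch (not part of the patch) of the two faer conversions this change relies on: building the 1xN parameter matrix that SppOptimizer::cost now hands to psi, and re-indexing the row-major results buffer into a Mat<f64> the way psi does before returning. Only the faer crate is assumed as a dependency; the values and the local names spp, results, nrows and ncols are illustrative.

    use faer::Mat;

    fn main() {
        // One support point with N parameters, as passed to SppOptimizer::cost.
        let spp: Vec<f64> = vec![0.5, 1.2, 3.4];

        // 1 x N matrix: one row (the support point), N columns (the parameters).
        let theta = Mat::from_fn(1, spp.len(), |_, j| spp[j]);
        assert_eq!(theta.nrows(), 1);
        assert_eq!(theta[(0, 2)], 3.4);

        // Row-major buffer of likelihoods (subjects x support points), filled the
        // way psi() fills `results` before the final conversion.
        let (nrows, ncols) = (2, 3);
        let results: Vec<f64> = vec![
            0.1, 0.2, 0.3, // subject 0
            0.4, 0.5, 0.6, // subject 1
        ];

        // faer stores matrices column-major, so index back into the row-major
        // buffer explicitly instead of reinterpreting it in place.
        let psi = Mat::from_fn(nrows, ncols, |i, j| results[i * ncols + j]);
        assert_eq!(psi[(1, 0)], 0.4);
        assert_eq!(psi[(0, 2)], 0.3);
    }

The results[i * ncols + j] indexing is the detail worth double-checking in review: faer's native layout is column-major while the parallel fill in psi writes row by row, so the buffer has to be re-indexed through Mat::from_fn (as the patch does) rather than handed over as-is.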