src/data/mod.rs (1 change: 1 addition, 0 deletions)
@@ -35,6 +35,7 @@ pub mod error_model;
 pub mod event;
 pub mod parser;
 pub mod residual_error;
+pub mod row;
 pub mod structs;
 pub use covariate::*;
 pub use error_model::*;
src/data/parser/mod.rs (3 changes: 1 addition, 2 deletions)
@@ -1,5 +1,4 @@
-pub mod normalized;
 pub mod pmetrics;
 
-pub use normalized::{build_data, NormalizedRow, NormalizedRowBuilder};
+pub use crate::data::row::{build_data, DataError, DataRow, DataRowBuilder};
 pub use pmetrics::*;
src/data/parser/pmetrics.rs (64 changes: 15 additions, 49 deletions)
@@ -4,45 +4,11 @@ use serde::de::{MapAccess, Visitor};
 use serde::{de, Deserialize, Deserializer, Serialize};
 use std::collections::HashMap;
 
+use crate::data::row::build_data;
+use crate::data::row::DataError;
+use crate::data::row::DataRow;
 use std::fmt;
 use std::str::FromStr;
-use thiserror::Error;
-
-/// Custom error type for the module
-#[allow(private_interfaces)]
-#[derive(Error, Debug, Clone)]
-pub enum PmetricsError {
-    /// Error encountered when reading CSV data
-    #[error("CSV error: {0}")]
-    CSVError(String),
-    /// Error during data deserialization
-    #[error("Parse error: {0}")]
-    SerdeError(String),
-    /// Encountered an unknown EVID value
-    #[error("Unknown EVID: {evid} for ID {id} at time {time}")]
-    UnknownEvid { evid: isize, id: String, time: f64 },
-    /// Required observation value (OUT) is missing
-    #[error("Observation OUT is missing for {id} at time {time}")]
-    MissingObservationOut { id: String, time: f64 },
-    /// Required observation output equation (OUTEQ) is missing
-    #[error("Observation OUTEQ is missing in for {id} at time {time}")]
-    MissingObservationOuteq { id: String, time: f64 },
-    /// Required infusion dose amount is missing
-    #[error("Infusion amount (DOSE) is missing for {id} at time {time}")]
-    MissingInfusionDose { id: String, time: f64 },
-    /// Required infusion input compartment is missing
-    #[error("Infusion compartment (INPUT) is missing for {id} at time {time}")]
-    MissingInfusionInput { id: String, time: f64 },
-    /// Required infusion duration is missing
-    #[error("Infusion duration (DUR) is missing for {id} at time {time}")]
-    MissingInfusionDur { id: String, time: f64 },
-    /// Required bolus dose amount is missing
-    #[error("Bolus amount (DOSE) is missing for {id} at time {time}")]
-    MissingBolusDose { id: String, time: f64 },
-    /// Required bolus input compartment is missing
-    #[error("Bolus compartment (INPUT) is missing for {id} at time {time}")]
-    MissingBolusInput { id: String, time: f64 },
-}
-
 /// Read a Pmetrics datafile and convert it to a [Data] object
 ///
@@ -56,7 +22,7 @@ pub enum PmetricsError,
 ///
 /// # Returns
 ///
-/// * `Result<Data, PmetricsError>` - A result containing either the parsed [Data] object or an error
+/// * `Result<Data, DataError>` - A result containing either the parsed [Data] object or an error
 ///
 /// # Example
 ///
@@ -78,32 +44,32 @@
 ///
 /// For specific column definitions, see the `Row` struct.
 #[allow(dead_code)]
-pub fn read_pmetrics(path: impl Into<String>) -> Result<Data, PmetricsError> {
+pub fn read_pmetrics(path: impl Into<String>) -> Result<Data, DataError> {
     let path = path.into();
 
     let mut reader = csv::ReaderBuilder::new()
         .comment(Some(b'#'))
         .has_headers(true)
         .from_path(&path)
-        .map_err(|e| PmetricsError::CSVError(e.to_string()))?;
+        .map_err(|e| DataError::CSVError(e.to_string()))?;
     // Convert headers to lowercase
     let headers = reader
         .headers()
-        .map_err(|e| PmetricsError::CSVError(e.to_string()))?
+        .map_err(|e| DataError::CSVError(e.to_string()))?
         .iter()
         .map(|h| h.to_lowercase())
         .collect::<Vec<_>>();
     reader.set_headers(csv::StringRecord::from(headers));
 
-    // Parse CSV rows and convert to NormalizedRows
-    let mut normalized_rows: Vec<super::normalized::NormalizedRow> = Vec::new();
+    // Parse CSV rows and convert to DataRows
+    let mut data_rows: Vec<DataRow> = Vec::new();
     for row_result in reader.deserialize() {
-        let row: Row = row_result.map_err(|e| PmetricsError::CSVError(e.to_string()))?;
-        normalized_rows.push(row.to_normalized());
+        let row: Row = row_result.map_err(|e| DataError::CSVError(e.to_string()))?;
+        data_rows.push(row.to_datarow());
     }
 
     // Use the shared build_data logic
-    super::normalized::build_data(normalized_rows)
+    build_data(data_rows)
 }
 
 /// A [Row] represents a row in the Pmetrics data format
@@ -158,9 +124,9 @@ struct Row {
 }
 
 impl Row {
-    /// Convert this Row to a NormalizedRow for parsing
-    fn to_normalized(&self) -> super::normalized::NormalizedRow {
-        super::normalized::NormalizedRow {
+    /// Convert this Row to a DataRow for parsing
+    fn to_datarow(&self) -> DataRow {
+        DataRow {
             id: self.id.clone(),
             time: self.time,
             evid: self.evid as i32,
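With PmetricsError removed, callers of read_pmetrics now handle the shared DataError type from crate::data::row. A minimal usage sketch, assuming read_pmetrics and DataError are re-exported from the parser module as shown in src/data/parser/mod.rs above; the crate name my_crate, the location of the Data type, and the file path are placeholders, not part of this diff:

// Sketch only: crate name, Data import path, and CSV path are illustrative assumptions.
use my_crate::data::parser::{read_pmetrics, DataError};
use my_crate::data::Data; // assumed re-export of the Data type

fn load_subjects() -> Result<Data, DataError> {
    // read_pmetrics now returns DataError instead of the removed PmetricsError
    read_pmetrics("examples/data/subjects.csv")
}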
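Because the CSV layer and the shared build_data step now report through the same type, error handling can also match on DataError directly. A sketch under the assumptions that DataError implements Display and that CSVError, the only variant visible in this diff, carries the underlying csv error as a String; any other variants are covered by a catch-all:

// Sketch only: variant names other than CSVError are not shown in this diff.
match read_pmetrics("examples/data/subjects.csv") {
    Ok(_data) => println!("datafile parsed"),
    // CSVError wraps the csv crate's error message
    Err(DataError::CSVError(msg)) => eprintln!("could not read the datafile: {msg}"),
    Err(other) => eprintln!("datafile could not be parsed: {other}"),
}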