From 4785a42f081533e629f3e9d9a7ace81a853cb5ad Mon Sep 17 00:00:00 2001
From: csaundersshultz
Date: Mon, 15 Sep 2025 09:31:26 -0800
Subject: [PATCH] Update mdccm with dropped stations

Fix the mdccm calculation for LTS so that it reflects which station
pairs were kept and which were dropped. Save the max cross-correlation
values through time, then use element_weights to drop the correct
station pairs and update mdccm. Also correct the ltsva function
description to say 90% confidence intervals.
---
 lts_array/classes/lts_classes.py | 12 +++++++++---
 lts_array/ltsva.py               |  4 ++--
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/lts_array/classes/lts_classes.py b/lts_array/classes/lts_classes.py
index 45142c7..f98827f 100644
--- a/lts_array/classes/lts_classes.py
+++ b/lts_array/classes/lts_classes.py
@@ -829,6 +829,8 @@ def __init__(self, data):
         self.stdict = {}
         # Confidence value for uncertainty calculation
        self.p = 0.90
+        # Pre-allocate max cross-corr values for all pairs and time steps (the median of these is mdccm)
+        self.cij_maxs = np.full((self.co_array_num, data.nits), np.nan)
         # Check co-array rank for least squares problem
         if (np.linalg.matrix_rank(self.xij) < self.dimension_number):
             raise RuntimeError('Co-array is ill posed for the least squares problem. Check array coordinates. xij rank < ' + str(self.dimension_number))
@@ -851,6 +853,7 @@ def calculate_co_array(self, data):
 
     def correlate(self, data):
         """ Cross correlate the time series data.
+        Save the maximum correlations for each time step so that mdccm can be updated for dropped station pairs.
         """
         for jj in range(0, data.nits):
             # Get time from middle of window, except for the end.
@@ -869,14 +872,14 @@ def correlate(self, data):
                 # MATLAB's xcorr w/ 'coeff' normalization:
                 # unit auto-correlations.
                 self.cij[:, k] = (np.correlate(data.data[t0_ind:tf_ind, self.idx_pair[k][0]], data.data[t0_ind:tf_ind, self.idx_pair[k][1]], mode='full') / np.sqrt(np.sum(data.data[t0_ind:tf_ind, self.idx_pair[k][0]] * data.data[t0_ind:tf_ind, self.idx_pair[k][0]]) * np.sum(data.data[t0_ind:tf_ind, self.idx_pair[k][1]] * data.data[t0_ind:tf_ind, self.idx_pair[k][1]])))  # noqa
 
-            # Find the median of the cross-correlation maxima
-            self.mdccm[jj] = np.nanmedian(self.cij.max(axis=0))
+            self.cij_maxs[:, jj] = self.cij.max(axis=0)  # Save cij_maxs for this time step
 
             # Form the time delay vector and save it
             delay = np.argmax(self.cij, axis=0) + 1
             self.tau[:, jj] = (data.winlensamp - delay) / data.sampling_rate
             self.time_delay_mad[jj] = 1.4826 * np.median(
                 np.abs(self.tau[:, jj]))
-
+
+        self.mdccm = np.nanmedian(self.cij_maxs, axis=0)  # Calculate mdccm for all time steps
         self.tau = np.reshape(self.tau, (self.co_array_num, data.nits, 1))
@@ -1023,6 +1026,9 @@ def solve(self, data):
         # Use the best slowness coefficients to determine dropped stations
         # Calculate uncertainties at 90% confidence
         self.lts_vel, self.lts_baz, self.element_weights, self.sigma_tau, self.conf_int_vel, self.conf_int_baz = post_process(self.dimension_number, self.co_array_num, data.alpha, self.h, data.nits, self.tau, self.xij, self.slowness_coeffs, self.lts_vel, self.lts_baz, self.element_weights, self.sigma_tau, self.p, self.conf_int_vel, self.conf_int_baz)  # noqa
+        # Correct the mdccm value according to the dropped station pairs
+        masked_cij = np.where(self.element_weights == 1, self.cij_maxs, np.nan)
+        self.mdccm = np.nanmedian(masked_cij, axis=0)
         # Find dropped stations from weights
         # Map dropped data points back to elements.
         for jj in range(0, data.nits):
diff --git a/lts_array/ltsva.py b/lts_array/ltsva.py
index f2f069f..f88d826 100644
--- a/lts_array/ltsva.py
+++ b/lts_array/ltsva.py
@@ -33,8 +33,8 @@ def ltsva(st, lat_list, lon_list, window_length, window_overlap, alpha=1.0, plot
         ``mdccm`` (array): An array of median cross-correlation maxima.
         ``stdict`` (dict): A dictionary of flagged element pairs.
         ``sigma_tau`` (array): An array of sigma_tau values.
-        ``conf_int_vel`` (array): An array of 95% confidence intervals for the trace velocity.
-        ``conf_int_baz`` (array): An array of 95% confidence intervals for the back-azimuth.
+        ``conf_int_vel`` (array): An array of 90% confidence intervals for the trace velocity.
+        ``conf_int_baz`` (array): An array of 90% confidence intervals for the back-azimuth.
     """
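
For reference, below is a minimal standalone sketch of the mdccm masking step
added in solve(). The names cij_maxs and element_weights and the
np.where/np.nanmedian calls mirror the patch; the array shape (6 station pairs
x 4 time windows), the random values, and the two flagged pairs are made-up
assumptions used only for illustration.

    import numpy as np

    # Toy data: max cross-correlation per station pair, per time window.
    rng = np.random.default_rng(0)
    cij_maxs = rng.uniform(0.2, 1.0, size=(6, 4))

    # LTS weights: 1 = pair kept, 0 = pair dropped (flagged) in that window.
    element_weights = np.ones((6, 4))
    element_weights[2, 1] = 0  # pretend pair 2 was dropped in window 1
    element_weights[5, 3] = 0  # pretend pair 5 was dropped in window 3

    # Previous behaviour: median over all pairs, including dropped ones.
    mdccm_all = np.nanmedian(cij_maxs, axis=0)

    # Patched behaviour: mask dropped pairs with NaN, then take the median
    # over only the kept pairs in each time window.
    masked_cij = np.where(element_weights == 1, cij_maxs, np.nan)
    mdccm_kept = np.nanmedian(masked_cij, axis=0)

    print(mdccm_all)   # mdccm ignoring which pairs were dropped
    print(mdccm_kept)  # mdccm using only the kept pairs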