From 8d4336b651bf668cd4e608f7e844c0d1948100d0 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Wed, 17 Dec 2025 08:45:20 -0500 Subject: [PATCH 1/7] Initial script to import TICON-4 data, Synthesize datums from harmonics --- .gitignore | 1 + package.json | 2 +- tools/datum.ts | 230 ++++++++++++++++++++++++++++++++++++++++++ tools/import-ticon | 12 +++ tools/import-ticon.ts | 128 +++++++++++++++++++++++ 5 files changed, 372 insertions(+), 1 deletion(-) create mode 100644 tools/datum.ts create mode 100755 tools/import-ticon create mode 100755 tools/import-ticon.ts diff --git a/.gitignore b/.gitignore index a1fb6e214..b146c1754 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ node_modules dist package-lock.json +tmp diff --git a/package.json b/package.json index 55bb75317..b901d0956 100644 --- a/package.json +++ b/package.json @@ -36,7 +36,7 @@ "format": "prettier --write ." }, "devDependencies": { - "@neaps/tide-predictor": "^0.2.1", + "@neaps/tide-predictor": "^0.3.0", "@types/geokdbush": "^1.1.5", "@types/make-fetch-happen": "^10.0.4", "@types/node": "^25.0.3", diff --git a/tools/datum.ts b/tools/datum.ts new file mode 100644 index 000000000..8c9d3c5bf --- /dev/null +++ b/tools/datum.ts @@ -0,0 +1,230 @@ +import tidePredictor, { + type TidePredictionOptions, + type HarmonicConstituent, +} from '@neaps/tide-predictor' + +export interface EpochSpec { + start?: Date + end?: Date +} + +export type Datums = Record + +export interface TidalDatumsResult { + epochStart: Date + epochEnd: Date + lengthYears: number + + /** seconds between samples in the synthetic series */ + timeFidelity: number + /** tidal-day length used (hours) */ + tidalDayHours: number + + datums: Datums +} + +export interface DatumsOptions extends TidePredictionOptions { + /** + * Time step in hours for the synthetic series. + * Converted to `timeFidelity` in seconds for neaps. + * Default: 1 hour. + */ + stepHours?: number + + /** + * Length of a "tidal day" in hours.
+ * Typical: 24.8333 (24h 50m). + * Default: 24.8333333. + */ + tidalDayHours?: number +} + +const YEAR_MS = 365.2425 * 24 * 60 * 60 * 1000 +const NINETEEN_YEARS = 19 * YEAR_MS + +/** + * Resolve an EpochSpec to explicit start/end Dates. + */ +export function resolveEpoch({ + end = new Date(), + start = new Date(end.getTime() - NINETEEN_YEARS), +}: EpochSpec): { + start: Date + end: Date + lengthYears: number +} { + let lengthYears = (end.getTime() - start.getTime()) / YEAR_MS + if (lengthYears > 19) { + start = new Date(end.getTime() - NINETEEN_YEARS) + lengthYears = 19 + } + return { start, end, lengthYears } +} + +/** + * Core helper: given a regular timeline of {time, level}, compute datums + */ +function computeDatumsFromTimeline( + times: Date[], + heights: number[], + tidalDayHours: number +): Datums { + if (!times.length || times.length !== heights.length) { + throw new Error('times and heights must be non-empty and of equal length') + } + + const allHighs: number[] = [] + const allLows: number[] = [] + const higherHighs: number[] = [] + const lowerLows: number[] = [] + + const tidalDayMs = tidalDayHours * 60 * 60 * 1000 + + if (times.length === 0) { + throw new Error('times array is empty') + } + const firstTime = times[0] + const lastTime = times[times.length - 1] + if (!firstTime || !lastTime) { + throw new Error('times array is empty') + } + + let dayStartTime = firstTime.getTime() + let idx = 0 + let daysWithHighs = 0 + let daysWithLows = 0 + + while (dayStartTime < lastTime.getTime()) { + const dayEndTime = dayStartTime + tidalDayMs + + const idxStart = idx + while (idx < times.length && times[idx]!.getTime() < dayEndTime) { + idx++ + } + const idxEnd = idx + + if (idxEnd - idxStart >= 3) { + const highs: number[] = [] + const lows: number[] = [] + + for (let i = idxStart + 1; i < idxEnd - 1; i++) { + const hPrev = heights[i - 1] + const hCurr = heights[i] + const hNext = heights[i + 1] + + if ( + hCurr !== undefined && + hPrev !== undefined && + 
hNext !== undefined && + hCurr >= hPrev && + hCurr >= hNext && + (hCurr > hPrev || hCurr > hNext) + ) { + highs.push(hCurr) + } else if ( + hCurr !== undefined && + hPrev !== undefined && + hNext !== undefined && + hCurr <= hPrev && + hCurr <= hNext && + (hCurr < hPrev || hCurr < hNext) + ) { + lows.push(hCurr) + } + } + + if (highs.length > 0) { + daysWithHighs++ + allHighs.push(...highs) + highs.sort((a, b) => a - b) + // higher high + const hhVal = highs[highs.length - 1] + if (hhVal !== undefined) { + higherHighs.push(hhVal) + } + } + + if (lows.length > 0) { + daysWithLows++ + allLows.push(...lows) + lows.sort((a, b) => a - b) + // lower low + const llVal = lows[0] + if (llVal !== undefined) { + lowerLows.push(llVal) + } + } + } + + dayStartTime += tidalDayMs + + // ensure idx keeps up + while (idx < times.length && times[idx]!.getTime() < dayStartTime) { + idx++ + } + } + + const mhw = mean(allHighs) + const mlw = mean(allLows) + + return { + MHHW: toFixed(mean(higherHighs), 3), + MHW: toFixed(mhw, 3), + MSL: toFixed(mean(heights), 3), + MTL: toFixed((mhw + mlw) / 2, 3), + MLW: toFixed(mlw, 3), + MLLW: toFixed(mean(lowerLows), 3), + LAT: toFixed(Math.min(...heights), 3), + } +} + +/** + * Use @neaps/tide-predictor to synthesize a multi-year tidal timeline + * for a given set of constituents, and compute tidal datums from it. 
+ */ +export function computeDatums( + constituents: HarmonicConstituent[], + epochSpec: EpochSpec, + { + stepHours = 1, + tidalDayHours = 24.8333333, + ...tidePredictorOptions + }: DatumsOptions = {} +): TidalDatumsResult { + const { start, end, lengthYears } = resolveEpoch(epochSpec) + + const timeFidelity = stepHours * 60 * 60 + + // Build predictor from @neaps/tide-predictor + const predictor = tidePredictor(constituents, tidePredictorOptions) + + // Ask it for a synthetic timeline over the epoch + const timeline = predictor.getExtremesPrediction({ + start, + end, + timeFidelity, + }) + + const times = timeline.map((pt) => pt.time) + const heights = timeline.map((pt) => pt.level) + + return { + epochStart: start, + epochEnd: end, + lengthYears, + timeFidelity, + tidalDayHours, + datums: computeDatumsFromTimeline(times, heights, tidalDayHours), + } +} + +export function toFixed(num: number, digits: number) { + if (typeof num !== 'number') return num + + const factor = Math.pow(10, digits) + return Math.round(num * factor) / factor +} + +export function mean(arr: number[]): number { + return arr.length ? 
arr.reduce((s, v) => s + v, 0) / arr.length : NaN +} diff --git a/tools/import-ticon b/tools/import-ticon new file mode 100755 index 000000000..25e716ac5 --- /dev/null +++ b/tools/import-ticon @@ -0,0 +1,12 @@ +#!/bin/bash + +set -e + +mkdir -p tmp + +[[ -d tmp/TICON-4 ]] || { + curl -L -o tmp/TICON-4.zip https://github.com/user-attachments/files/24195063/TICON-4.zip + unzip -d tmp tmp/TICON-4.zip +} + +node tools/import-ticon.ts diff --git a/tools/import-ticon.ts b/tools/import-ticon.ts new file mode 100755 index 000000000..1fd0b85e5 --- /dev/null +++ b/tools/import-ticon.ts @@ -0,0 +1,128 @@ +#!/usr/bin/env node + +import { readFile } from 'fs/promises' +import { dirname, join } from 'path' +import { fileURLToPath } from 'url' +import { parseCSV, indexBy, groupBy } from './util.ts' +import { normalize, save } from './station.ts' +import { computeDatums } from './datum.ts' +import constituents from '../src/constituents.json' with { type: 'json' } +import type { Station, HarmonicConstituent } from '../src/index.ts' + +const constituents_by_name = indexBy(constituents, 'name') + +const __dirname = dirname(fileURLToPath(import.meta.url)) +const metaPath = join(__dirname, '..', 'tmp', 'TICON-4', 'meta.csv') +const dataPath = join(__dirname, '..', 'tmp', 'TICON-4', 'data.csv') +const metadata = indexBy( + parseCSV(await readFile(metaPath, 'utf-8')), + 'FILE NAME' +) +const data = await readFile(dataPath, 'utf-8') + +type TiconMetaRow = { + 'FILE NAME': string + 'SITE NAME': string +} + +/** + * Converts TICON-4 CSV files to station JSON format + * + * The script reads a TICON-4 CSV file and creates JSON files in the data/ directory + * that conform to the station schema. Each unique station (by lat/lon/name) becomes + * one JSON file with all its harmonic constituents aggregated. 
+ */ +async function main() { + const stations = Object.values(groupBy(parseCSV(data), (r) => r.tide_gauge_name)) + + let created = 0 + + for (const rows of stations) { + await save( + normalize( + convertStation(rows) + ) + ) + created++ + process.stdout.write('.') + } + + console.log(`\nDone. Created ${created} files`) +} + +interface TiconRow { + lat: string + lon: string + tide_gauge_name: string + type: string + country: string + gesla_source: string + record_quality: string + datum_information: string + years_of_obs: string + start_date: string + end_date: string + con: string + amp: string + pha: string + amp_std: string + pha_std: string + missing_obs: string + no_of_obs: string +} + +function dayMonthYearToDate(date: string) { + const [ day, month, year ] = date.split('/').map((v) => parseInt(v, 10)) + if(!day || !month || !year) { + throw new Error(`Invalid date: ${date}`) + } + return new Date(Date.UTC(year, month - 1, day, 0, 0, 0)) +} + +/** + * Convert a TICON-4 station to our JSON schema format + */ +function convertStation(rows: TiconRow[]): Station { + if(!rows[0]) { + throw new Error('No rows to convert') + } + + const gesla = metadata[rows[0].tide_gauge_name] + + const constituents: HarmonicConstituent[] = rows.map((row) => ({ + name: row.con, + amplitude: parseFloat(row.amp) / 100, // convert cm to m + phase: ((parseFloat(row.pha) % 360) + 360) % 360, // lag in degrees; normalize to [0, 360) + speed: constituents_by_name[row.con]?.speed, + })) + + const start = dayMonthYearToDate(rows[0].start_date) + const end = dayMonthYearToDate(rows[0].end_date) + + const { datums } = computeDatums(constituents, { start, end }) + + // Create the station JSON + return normalize({ + name: gesla['SITE NAME'], + country: rows[0].country, + latitude: parseFloat(rows[0].lat), + longitude: parseFloat(rows[0].lon), + type: 'reference', + disclaimers: rows[0].record_quality, + source: { + name: 'TICON-4', + url: 'https://www.seanoe.org/data/00980/109129/', + id: 
rows[0].tide_gauge_name, + published_harmonics: true, + }, + license: { + type: 'cc-by-4.0', + commercial_use: true, + url: 'https://creativecommons.org/licenses/by/4.0/', + }, + harmonic_constituents: constituents, + datums, + }) +} + +main() From 0d3f841fd9a04a83d392fddd0e41ed6805fb00b2 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Fri, 9 Jan 2026 13:10:36 -0500 Subject: [PATCH 2/7] Update @neaps/tide-predictor to get new constituents --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index b901d0956..77aa0e347 100644 --- a/package.json +++ b/package.json @@ -36,7 +36,7 @@ "format": "prettier --write ." }, "devDependencies": { - "@neaps/tide-predictor": "^0.3.0", + "@neaps/tide-predictor": "^0.4.0", "@types/geokdbush": "^1.1.5", "@types/make-fetch-happen": "^10.0.4", "@types/node": "^25.0.3", From d0059fd6b70837448c3b23b150c71fbdb2917426 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Fri, 9 Jan 2026 13:11:29 -0500 Subject: [PATCH 3/7] Run prettier against new files MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit …so copilot can focus on more meaningful feedback --- tools/datum.ts | 148 +++++++++++++++++++++--------------------- tools/import-ticon.ts | 134 +++++++++++++++++++------------------- 2 files changed, 140 insertions(+), 142 deletions(-) diff --git a/tools/datum.ts b/tools/datum.ts index 8c9d3c5bf..305070ceb 100644 --- a/tools/datum.ts +++ b/tools/datum.ts @@ -1,26 +1,26 @@ import tidePredictor, { type TidePredictionOptions, type HarmonicConstituent, -} from '@neaps/tide-predictor' +} from "@neaps/tide-predictor"; export interface EpochSpec { - start?: Date - end?: Date + start?: Date; + end?: Date; } -export type Datums = Record +export type Datums = Record; export interface TidalDatumsResult { - epochStart: Date - epochEnd: Date - lengthYears: number + epochStart: Date; + epochEnd: Date; + lengthYears: number; /** seconds between samples in 
the synthetic series */ - timeFidelity: number + timeFidelity: number; /** tidal-day length used (hours) */ - tidalDayHours: number + tidalDayHours: number; - datums: Datums + datums: Datums; } export interface DatumsOptions extends TidePredictionOptions { @@ -29,18 +29,18 @@ export interface DatumsOptions extends TidePredictionOptions { * Converted to `timeFidelity` in seconds for neaps. * Default: 1 hour. */ - stepHours?: number + stepHours?: number; /** * Length of a "tidal day" in hours. * Typical: 24.8333 (24h 50m). * Default: 24.8333333. */ - tidalDayHours?: number + tidalDayHours?: number; } -const YEAR_MS = 365.2425 * 24 * 60 * 60 * 1000 -const NINETEEN_YEARS = 19 * YEAR_MS +const YEAR_MS = 365.2425 * 24 * 60 * 60 * 1000; +const NINETEEN_YEARS = 19 * YEAR_MS; /** * Resolve an EpochSpec to explicit start/end Dates. @@ -49,16 +49,16 @@ export function resolveEpoch({ end = new Date(), start = new Date(end.getTime() - NINETEEN_YEARS), }: EpochSpec): { - start: Date - end: Date - lengthYears: number + start: Date; + end: Date; + lengthYears: number; } { - let lengthYears = (end.getTime() - start.getTime()) / YEAR_MS + let lengthYears = (end.getTime() - start.getTime()) / YEAR_MS; if (lengthYears > 19) { - start = new Date(end.getTime() - NINETEEN_YEARS) - lengthYears = 19 + start = new Date(end.getTime() - NINETEEN_YEARS); + lengthYears = 19; } - return { start, end, lengthYears } + return { start, end, lengthYears }; } /** @@ -67,50 +67,50 @@ export function resolveEpoch({ function computeDatumsFromTimeline( times: Date[], heights: number[], - tidalDayHours: number + tidalDayHours: number, ): Datums { if (!times.length || times.length !== heights.length) { - throw new Error('times and heights must be non-empty and of equal length') + throw new Error("times and heights must be non-empty and of equal length"); } - const allHighs: number[] = [] - const allLows: number[] = [] - const higherHighs: number[] = [] - const lowerLows: number[] = [] + const allHighs: 
number[] = []; + const allLows: number[] = []; + const higherHighs: number[] = []; + const lowerLows: number[] = []; - const tidalDayMs = tidalDayHours * 60 * 60 * 1000 + const tidalDayMs = tidalDayHours * 60 * 60 * 1000; if (times.length === 0) { - throw new Error('times array is empty') + throw new Error("times array is empty"); } - const firstTime = times[0] - const lastTime = times[times.length - 1] + const firstTime = times[0]; + const lastTime = times[times.length - 1]; if (!firstTime || !lastTime) { - throw new Error('times array is empty') + throw new Error("times array is empty"); } - let dayStartTime = firstTime.getTime() - let idx = 0 - let daysWithHighs = 0 - let daysWithLows = 0 + let dayStartTime = firstTime.getTime(); + let idx = 0; + let daysWithHighs = 0; + let daysWithLows = 0; while (dayStartTime < lastTime.getTime()) { - const dayEndTime = dayStartTime + tidalDayMs + const dayEndTime = dayStartTime + tidalDayMs; - const idxStart = idx + const idxStart = idx; while (idx < times.length && times[idx]!.getTime() < dayEndTime) { - idx++ + idx++; } - const idxEnd = idx + const idxEnd = idx; if (idxEnd - idxStart >= 3) { - const highs: number[] = [] - const lows: number[] = [] + const highs: number[] = []; + const lows: number[] = []; for (let i = idxStart + 1; i < idxEnd - 1; i++) { - const hPrev = heights[i - 1] - const hCurr = heights[i] - const hNext = heights[i + 1] + const hPrev = heights[i - 1]; + const hCurr = heights[i]; + const hNext = heights[i + 1]; if ( hCurr !== undefined && @@ -120,7 +120,7 @@ function computeDatumsFromTimeline( hCurr >= hNext && (hCurr > hPrev || hCurr > hNext) ) { - highs.push(hCurr) + highs.push(hCurr); } else if ( hCurr !== undefined && hPrev !== undefined && @@ -129,43 +129,43 @@ function computeDatumsFromTimeline( hCurr <= hNext && (hCurr < hPrev || hCurr < hNext) ) { - lows.push(hCurr) + lows.push(hCurr); } } if (highs.length > 0) { - daysWithHighs++ - allHighs.push(...highs) - highs.sort((a, b) => a - b) + 
daysWithHighs++; + allHighs.push(...highs); + highs.sort((a, b) => a - b); // higher high - const hhVal = highs[highs.length - 1] + const hhVal = highs[highs.length - 1]; if (hhVal !== undefined) { - higherHighs.push(hhVal) + higherHighs.push(hhVal); } } if (lows.length > 0) { - daysWithLows++ - allLows.push(...lows) - lows.sort((a, b) => a - b) + daysWithLows++; + allLows.push(...lows); + lows.sort((a, b) => a - b); // lower low - const llVal = lows[0] + const llVal = lows[0]; if (llVal !== undefined) { - lowerLows.push(llVal) + lowerLows.push(llVal); } } } - dayStartTime += tidalDayMs + dayStartTime += tidalDayMs; // ensure idx keeps up while (idx < times.length && times[idx]!.getTime() < dayStartTime) { - idx++ + idx++; } } - const mhw = mean(allHighs) - const mlw = mean(allLows) + const mhw = mean(allHighs); + const mlw = mean(allLows); return { MHHW: toFixed(mean(higherHighs), 3), @@ -175,7 +175,7 @@ function computeDatumsFromTimeline( MLW: toFixed(mlw, 3), MLLW: toFixed(mean(lowerLows), 3), LAT: toFixed(Math.min(...heights), 3), - } + }; } /** @@ -189,24 +189,24 @@ export function computeDatums( stepHours = 1, tidalDayHours = 24.8333333, ...tidePredictorOptions - }: DatumsOptions = {} + }: DatumsOptions = {}, ): TidalDatumsResult { - const { start, end, lengthYears } = resolveEpoch(epochSpec) + const { start, end, lengthYears } = resolveEpoch(epochSpec); - const timeFidelity = stepHours * 60 * 60 + const timeFidelity = stepHours * 60 * 60; // Build predictor from @neaps/tide-predictor - const predictor = tidePredictor(constituents, tidePredictorOptions) + const predictor = tidePredictor(constituents, tidePredictorOptions); // Ask it for a synthetic timeline over the epoch const timeline = predictor.getExtremesPrediction({ start, end, timeFidelity, - }) + }); - const times = timeline.map((pt) => pt.time) - const heights = timeline.map((pt) => pt.level) + const times = timeline.map((pt) => pt.time); + const heights = timeline.map((pt) => pt.level); return { 
epochStart: start, @@ -215,16 +215,16 @@ export function computeDatums( timeFidelity, tidalDayHours, datums: computeDatumsFromTimeline(times, heights, tidalDayHours), - } + }; } export function toFixed(num: number, digits: number) { - if (typeof num !== 'number') return num + if (typeof num !== "number") return num; - const factor = Math.pow(10, digits) - return Math.round(num * factor) / factor + const factor = Math.pow(10, digits); + return Math.round(num * factor) / factor; } export function mean(arr: number[]): number { - return arr.length ? arr.reduce((s, v) => s + v, 0) / arr.length : NaN + return arr.length ? arr.reduce((s, v) => s + v, 0) / arr.length : NaN; } diff --git a/tools/import-ticon.ts b/tools/import-ticon.ts index 1fd0b85e5..a7cd7164b 100755 --- a/tools/import-ticon.ts +++ b/tools/import-ticon.ts @@ -1,29 +1,29 @@ #!/usr/bin/env node -import { readFile } from 'fs/promises' -import { dirname, join } from 'path' -import { fileURLToPath } from 'url' -import { parseCSV, indexBy, groupBy } from './util.ts' -import { normalize, save } from './station.ts' -import { computeDatums } from './datum.ts' -import constituents from '../src/constituents.json' with { type: 'json' } -import type { Station, HarmonicConstituent } from '../src/index.ts' - -const constituents_by_name = indexBy(constituents, 'name') - -const __dirname = dirname(fileURLToPath(import.meta.url)) -const metaPath = join(__dirname, '..', 'tmp', 'TICON-4', 'meta.csv') -const dataPath = join(__dirname, '..', 'tmp', 'TICON-4', 'data.csv') +import { readFile } from "fs/promises"; +import { dirname, join } from "path"; +import { fileURLToPath } from "url"; +import { parseCSV, indexBy, groupBy } from "./util.ts"; +import { normalize, save } from "./station.ts"; +import { computeDatums } from "./datum.ts"; +import constituents from "../src/constituents.json" with { type: "json" }; +import type { Station, HarmonicConstituent } from "../src/index.ts"; + +const constituents_by_name = 
indexBy(constituents, "name"); + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const metaPath = join(__dirname, "..", "tmp", "TICON-4", "meta.csv"); +const dataPath = join(__dirname, "..", "tmp", "TICON-4", "data.csv"); const metadata = indexBy( - parseCSV(await readFile(metaPath, 'utf-8')), - 'FILE NAME' -) -const data = await readFile(dataPath, 'utf-8') + parseCSV(await readFile(metaPath, "utf-8")), + "FILE NAME", +); +const data = await readFile(dataPath, "utf-8"); type TiconMetaRow = { - 'FILE NAME': string - 'SITE NAME': string -} + "FILE NAME": string; + "SITE NAME": string; +}; /** * Converts TICON-4 CSV files to station JSON format @@ -33,96 +33,94 @@ type TiconMetaRow = { * one JSON file with all its harmonic constituents aggregated. */ async function main() { - const stations = Object.values(groupBy(parseCSV(data), (r) => r.tide_gauge_name)) + const stations = Object.values( + groupBy(parseCSV(data), (r) => r.tide_gauge_name), + ); - let created = 0 + let created = 0; for (const rows of stations) { - await save( - normalize( - convertStation(rows) - ) - ) - created++ - process.stdout.write('.') + await save(normalize(convertStation(rows))); + created++; + process.stdout.write("."); } - console.log(`\nDone. Created ${created} files`) + console.log(`\nDone. 
Created ${created} files`); } interface TiconRow { - lat: string - lon: string - tide_gauge_name: string - type: string - country: string - gesla_source: string - record_quality: string - datum_information: string - years_of_obs: string - start_date: string - end_date: string - con: string - amp: string - pha: string - amp_std: string - pha_std: string - missing_obs: string - no_of_obs: string + lat: string; + lon: string; + tide_gauge_name: string; + type: string; + country: string; + gesla_source: string; + record_quality: string; + datum_information: string; + years_of_obs: string; + start_date: string; + end_date: string; + con: string; + amp: string; + pha: string; + amp_std: string; + pha_std: string; + missing_obs: string; + no_of_obs: string; } function dayMonthYearToDate(date: string) { - const [ day, month, year ] = date.split('/').map((v) => parseInt(v, 10)) - if(!day || !month || !year) { - throw new Error(`Invalid date: ${date}`) + const [day, month, year] = date.split("/").map((v) => parseInt(v, 10)); + if (!day || !month || !year) { + throw new Error(`Invalid date: ${date}`); } - return new Date(Date.UTC(year, month - 1, day, 0, 0, 0)) + return new Date(Date.UTC(year, month - 1, day, 0, 0, 0)); } /** * Convert a TICON-4 station to our JSON schema format */ function convertStation(rows: TiconRow[]): Station { - if(!rows[0]) { - throw new Error('No rows to convert') + if (!rows[0]) { + throw new Error("No rows to convert"); } - const gesla = metadata[rows[0].tide_gauge_name] + const gesla = metadata[rows[0].tide_gauge_name]; const constituents: HarmonicConstituent[] = rows.map((row) => ({ name: row.con, amplitude: parseFloat(row.amp) / 100, // convert cm to m phase: ((parseFloat(row.pha) % 360) + 360) % 360, // lag in degrees; normalize to [0, 360) speed: constituents_by_name[row.con]?.speed, - })) + })); - const start = dayMonthYearToDate(rows[0].start_date) - const end = dayMonthYearToDate(rows[0].end_date) + const start = 
dayMonthYearToDate(rows[0].start_date); + const end = dayMonthYearToDate(rows[0].end_date); - const { datums } = computeDatums(constituents, { start, end }) + const { datums } = computeDatums(constituents, { start, end }); // Create the station JSON return normalize({ - name: gesla['SITE NAME'], + name: gesla["SITE NAME"], country: rows[0].country, latitude: parseFloat(rows[0].lat), longitude: parseFloat(rows[0].lon), - type: 'reference', + type: "reference", disclaimers: rows[0].record_quality, source: { - name: 'TICON-4', - url: 'https://www.seanoe.org/data/00980/109129/', + name: "TICON-4", + url: "https://www.seanoe.org/data/00980/109129/", id: rows[0].tide_gauge_name, published_harmonics: true, }, license: { - type: 'cc-by-4.0', + type: "cc-by-4.0", commercial_use: true, - url: 'https://creativecommons.org/licenses/by/4.0/', + url: "https://creativecommons.org/licenses/by/4.0/", }, harmonic_constituents: constituents, datums, - }) + }); } -main() +main(); From 9e8a940f1b1ac612d5ce5394432a4fcc5dedfd90 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Fri, 9 Jan 2026 13:42:23 -0500 Subject: [PATCH 4/7] Remove double normalization --- tools/import-ticon.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/import-ticon.ts b/tools/import-ticon.ts index a7cd7164b..8a5527403 100755 --- a/tools/import-ticon.ts +++ b/tools/import-ticon.ts @@ -40,7 +40,7 @@ async function main() { let created = 0; for (const rows of stations) { - await save(normalize(convertStation(rows))); + await save(convertStation(rows)); created++; process.stdout.write("."); } From d0e0ac7e265e3bc5f04a7802bedf4c9e10c12b34 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Sat, 10 Jan 2026 09:34:18 -0500 Subject: [PATCH 5/7] Bump @neaps/tide-predictor --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 77aa0e347..b921081d7 100644 --- a/package.json +++ b/package.json @@ -36,7 +36,7 @@ "format": "prettier 
--write ." }, "devDependencies": { - "@neaps/tide-predictor": "^0.4.0", + "@neaps/tide-predictor": "^0.4.1", "@types/geokdbush": "^1.1.5", "@types/make-fetch-happen": "^10.0.4", "@types/node": "^25.0.3", From 2eac677cf5272181a47bcae0319dd9a5bc0cab42 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Sun, 11 Jan 2026 10:58:16 -0500 Subject: [PATCH 6/7] Update for stable id --- tools/import-ticon.ts | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/tools/import-ticon.ts b/tools/import-ticon.ts index 8a5527403..281a8cc2a 100755 --- a/tools/import-ticon.ts +++ b/tools/import-ticon.ts @@ -6,10 +6,7 @@ import { fileURLToPath } from "url"; import { parseCSV, indexBy, groupBy } from "./util.ts"; import { normalize, save } from "./station.ts"; import { computeDatums } from "./datum.ts"; -import constituents from "../src/constituents.json" with { type: "json" }; -import type { Station, HarmonicConstituent } from "../src/index.ts"; - -const constituents_by_name = indexBy(constituents, "name"); +import type { StationData, HarmonicConstituent } from "../src/index.ts"; const __dirname = dirname(fileURLToPath(import.meta.url)); const metaPath = join(__dirname, "..", "tmp", "TICON-4", "meta.csv"); @@ -40,7 +37,7 @@ async function main() { let created = 0; for (const rows of stations) { - await save(convertStation(rows)); + await save("ticon", convertStation(rows)); created++; process.stdout.write("."); } @@ -80,7 +77,7 @@ function dayMonthYearToDate(date: string) { /** * Convert a TICON-4 station to our JSON schema format */ -function convertStation(rows: TiconRow[]): Station { +function convertStation(rows: TiconRow[]): StationData { if (!rows[0]) { throw new Error("No rows to convert"); } @@ -91,7 +88,6 @@ function convertStation(rows: TiconRow[]): Station { name: row.con, amplitude: parseFloat(row.amp) / 100, // convert cm to m phase: ((parseFloat(row.pha) % 360) + 360) % 360, // lag in degrees; normalize to [0, 360) - speed: 
constituents_by_name[row.con]?.speed, })); const start = dayMonthYearToDate(rows[0].start_date); From 894ffff1cb975b73ba92d557d24baddabc426034 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Mon, 12 Jan 2026 14:41:56 -0500 Subject: [PATCH 7/7] Update docs --- README.md | 4 ++-- docs/noaa.md => data/noaa/README.md | 2 +- data/ticon/README.md | 25 +++++++++++++++++++++++++ 3 files changed, 28 insertions(+), 3 deletions(-) rename docs/noaa.md => data/noaa/README.md (85%) create mode 100644 data/ticon/README.md diff --git a/README.md b/README.md index 9daed33c4..b3035725b 100644 --- a/README.md +++ b/README.md @@ -6,10 +6,10 @@ This database includes harmonic constituents for tide prediction from various so ## Sources -- ✅ [**NOAA**](https://tidesandcurrents.noaa.gov): National Oceanic and Atmospheric Administration +- ✅ [**NOAA**](data/noaa/README.md): National Oceanic and Atmospheric Administration ~3379 stations, mostly in the United States and its territories. Updated monthly via [NOAA's API](https://api.tidesandcurrents.noaa.gov/mdapi/prod/). -- 🔜 [**TICON-4**](https://www.seanoe.org/data/00980/109129/): TIdal CONstants based on GESLA-4 sea-level records +- ✅ [**TICON-4**](data/ticon/README.md): TIdal CONstants based on GESLA-4 sea-level records 4,838 global stations - ([#16](https://github.com/neaps/tide-database/pull/16)) If you know of other public sources of harmonic constituents, please [open an issue](https://github.com/neaps/tide-database/issues/new) to discuss adding them. diff --git a/docs/noaa.md b/data/noaa/README.md similarity index 85% rename from docs/noaa.md rename to data/noaa/README.md index f3768e112..3781a221a 100644 --- a/docs/noaa.md +++ b/data/noaa/README.md @@ -1,6 +1,6 @@ ## NOAA Tide Station Data Overview -This script fetches tide station metadata from NOAA CO-OPS and converts it into a local, normalized dataset. 
It classifies stations by prediction method, stores harmonic constituents or prediction offsets as appropriate, and records available tidal datums for reference. +This database fetches tide station metadata from NOAA CO-OPS and converts it into a local, normalized dataset. It classifies stations by prediction method, stores harmonic constituents or prediction offsets as appropriate, and records available tidal datums for reference. The goal is to mirror how NOAA operationally produces tide predictions, not just what data exists in their metadata. diff --git a/data/ticon/README.md b/data/ticon/README.md new file mode 100644 index 000000000..9678bc342 --- /dev/null +++ b/data/ticon/README.md @@ -0,0 +1,25 @@ +# TICON-4 Tide Station Data + +[TICON-4](https://www.seanoe.org/data/00980/109129/) is a global dataset of tidal harmonic constituents derived from the **GESLA-4** (Global Extreme Sea Level Analysis v.4) sea-level gauge compilation. It provides tidal characteristics for approximately **4,838 tide stations** worldwide, with emphasis on global coverage outside the United States (which is covered by NOAA's tide database). + +**Key Details:** +- **Source:** [TICON-4 @ SEANOE](https://www.seanoe.org/data/00980/109129/) +- **Manual:** [TICON Documentation](https://www.seanoe.org/data/00980/109129/data/122852.pdf) +- **License:** CC-BY-4.0 (Creative Commons Attribution 4.0) +- **Coverage:** Global tide stations with harmonic constituent analysis from GESLA-4 observations + +Each station in this dataset contains harmonic constituents (amplitude and phase for tidal frequency components such as M2, K1, O1, etc.) extracted from historical sea-level records. + +![](https://www.seanoe.org/data/00980/109129/illustration.jpg) + +## Synthetic Tidal Datums + +TICON-4 does not provide empirically derived tidal datums. 
Instead, this dataset includes **synthetic tidal datums** computed from 19-year harmonic predictions using the harmonic constituents, not from observed water level data. This approach generates theoretical datums that represent long-term average tidal characteristics without the influence of weather events, non-tidal water level changes, or observational gaps. + +These datums should eventually be replaced with water-level-derived datums when available. See [#40](https://github.com/neaps/tide-database/issues/40). + +## References + +- [TICON-4 Dataset](https://www.seanoe.org/data/00980/109129/) +- [TICON Manual](https://www.seanoe.org/data/00980/109129/data/122852.pdf) +- [GESLA-4 Project](https://gesla787883612.wordpress.com)