From 3ba74ab7ddc7148b2e4fbc45fbfa5571f7cde396 Mon Sep 17 00:00:00 2001 From: "Charles Graham, SWT" Date: Mon, 13 Apr 2026 20:33:15 -0500 Subject: [PATCH 01/16] Implement direct timeseries read benchmark path --- .../cwms/cda/data/dao/TimeSeriesDaoImpl.java | 527 +++++++++++++++++- load_data/performance/.gitignore | 2 + .../invoke-timeseries-read-benchmark.ps1 | 454 +++++++++++++++ 3 files changed, 981 insertions(+), 2 deletions(-) create mode 100644 load_data/performance/.gitignore create mode 100644 load_data/performance/invoke-timeseries-read-benchmark.ps1 diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java index 2102f58eb5..9ec37f8e43 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java @@ -88,6 +88,7 @@ import usace.cwms.db.jooq.codegen.tables.AV_TSV_DQU; import usace.cwms.db.jooq.codegen.tables.AV_TS_GRP_ASSGN; import usace.cwms.db.jooq.codegen.udt.records.DATE_TABLE_TYPE; +import usace.cwms.db.jooq.codegen.udt.records.DATE_RANGE_T; import usace.cwms.db.jooq.codegen.udt.records.ZTSV_ARRAY; import usace.cwms.db.jooq.codegen.udt.records.ZTSV_TYPE; @@ -127,6 +128,9 @@ public class TimeSeriesDaoImpl extends JooqDao implements TimeSeries ); public static final String VERSIONED_NAME = "isVersioned"; + private static final long UTC_OFFSET_IRREGULAR = -2147483648L; + private static final long UTC_OFFSET_UNDEFINED = 2147483647L; + private static final String UTC = "UTC"; /** To be able to use a named inner table (otherwise JOOQ creates a random alias which messes * with the planner) we need to use fixed names to be able to reference the required columns. 
@@ -259,8 +263,18 @@ public FilteredTimeSeries getTimeseries(String page, int pageSize, TimeSeriesReq return fts; } - protected TimeSeries getRequestedTimeSeries(String page, int pageSize, @NotNull TimeSeriesRequestParameters requestParameters, - @Nullable FilteredTimeSeriesParameters fp) { + protected TimeSeries getRequestedTimeSeries(String page, int pageSize, + @NotNull TimeSeriesRequestParameters requestParameters, + @Nullable FilteredTimeSeriesParameters fp) { + if (fp != null) { + return getRequestedTimeSeriesLegacy(page, pageSize, requestParameters, fp); + } + return getRequestedTimeSeriesDirect(page, pageSize, requestParameters); + } + + protected TimeSeries getRequestedTimeSeriesLegacy(String page, int pageSize, + @NotNull TimeSeriesRequestParameters requestParameters, + @Nullable FilteredTimeSeriesParameters fp) { String names = requestParameters.getNames(); String office = requestParameters.getOffice(); @@ -636,6 +650,515 @@ private TimeSeries buildTimeSeriesFromMetadata(Record tsMetadata, @Nullable Inte ); } + private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, + @NotNull TimeSeriesRequestParameters requestParameters) { + String names = requestParameters.getNames(); + String office = requestParameters.getOffice(); + String requestedUnits = requestParameters.getUnits(); + ZonedDateTime beginTime = requestParameters.getBeginTime(); + ZonedDateTime endTime = requestParameters.getEndTime(); + ZonedDateTime versionDate = requestParameters.getVersionDate(); + boolean includeEntryDate = requestParameters.isIncludeEntryDate(); + String cursor = null; + Timestamp tsCursor = null; + + validateEntryDateSupport(includeEntryDate); + + if (page != null && !page.isEmpty()) { + final String[] parts = CwmsDTOPaginated.decodeCursor(page); + + logger.atFine().log("Decoded cursor"); + logger.atFinest().log("%s", lazy(() -> { + StringBuilder sb = new StringBuilder(); + for (String p : parts) { + sb.append(p).append("\n"); + } + return sb.toString(); + 
})); + + if (parts.length > 1) { + cursor = parts[0]; + tsCursor = Timestamp.from(Instant.ofEpochMilli(Long.parseLong(parts[0]))); + pageSize = Integer.parseInt(parts[parts.length - 1]); + } + } + + RequestedTimeSeriesMetadata metadata = fetchRequestedTimeSeriesMetadata(requestParameters); + if (metadata == null) { + throw new DataAccessException("Unable to resolve time series metadata for " + names); + } + + String parmPart = metadata.getParmPart(); + String locPart = metadata.getLocPart(); + VerticalDatumInfo verticalDatumInfo = null; + if (shouldFetchVerticalDatum(parmPart)) { + verticalDatumInfo = fetchVerticalDatumInfoSeparately(locPart, requestedUnits, office); + } + + VersionType finalDateVersionType = getVersionType(dsl, names, office, versionDate != null); + if (pageSize == 0) { + return null; + } + + List rawRows = fetchRequestedTimeSeriesRows(metadata, requestParameters); + List expectedTimes = fetchExpectedRegularTimes(metadata, requestParameters, rawRows); + int total = countMergedRows(rawRows, expectedTimes); + + TimeSeries timeseries = new TimeSeries( + cursor, + pageSize, + total, + metadata.getTsId(), + metadata.getOfficeId(), + beginTime, + endTime, + metadata.getUnits(), + Duration.ofMinutes(metadata.getIntervalMinutes()), + verticalDatumInfo, + metadata.getIntervalOffset(), + metadata.getTimeZoneId(), + versionDate, + finalDateVersionType + ); + + populateTimeSeriesValues(timeseries, rawRows, expectedTimes, tsCursor, includeEntryDate); + return timeseries; + } + + private RequestedTimeSeriesMetadata fetchRequestedTimeSeriesMetadata( + TimeSeriesRequestParameters requestParameters) { + String names = requestParameters.getNames(); + String office = requestParameters.getOffice(); + String units = requestParameters.getUnits(); + + final Field officeId = CWMS_UTIL_PACKAGE.call_GET_DB_OFFICE_ID( + office != null ? 
DSL.val(office) : CWMS_UTIL_PACKAGE.call_USER_OFFICE_ID()); + final Field tsId = CWMS_TS_PACKAGE.call_GET_TS_ID__2(DSL.val(names), officeId); + final Field tsCode = CWMS_TS_PACKAGE.call_GET_TS_CODE__2(DSL.val(names), officeId); + + Table> validTs = + select(tsCode.as("tscode"), + tsId.as("tsid"), + officeId.as("office_id")) + .asTable("validts"); + + Field loc = CWMS_UTIL_PACKAGE.call_SPLIT_TEXT( + validTs.field("tsid", String.class), + DSL.val(BigInteger.valueOf(1L)), DSL.val("."), + DSL.val(BigInteger.valueOf(6L))); + Field param = DSL.upper(CWMS_UTIL_PACKAGE.call_SPLIT_TEXT( + validTs.field("tsid", String.class), + DSL.val(BigInteger.valueOf(2L)), DSL.val("."), + DSL.val(BigInteger.valueOf(6L)))); + Field intervalPart = CWMS_UTIL_PACKAGE.call_SPLIT_TEXT( + validTs.field("tsid", String.class), + DSL.val(BigInteger.valueOf(4L)), DSL.val("."), + DSL.val(BigInteger.valueOf(6L))); + + Field unit = units.compareToIgnoreCase("SI") == 0 + || units.compareToIgnoreCase("EN") == 0 + ? CWMS_UTIL_PACKAGE.call_GET_DEFAULT_UNITS( + CWMS_TS_PACKAGE.call_GET_BASE_PARAMETER_ID(tsCode), + DSL.val(units, String.class)) + : DSL.val(units, String.class); + + Field interval = CWMS_TS_PACKAGE.call_GET_TS_INTERVAL__2(validTs.field("tsid", String.class)); + + CommonTableExpression valid = + name("valid").fields("tscode", "tsid", "office_id", "loc_part", "units", + "interval", "parm_part", "interval_part") + .as( + select( + validTs.field("tscode", BigDecimal.class).as("tscode"), + validTs.field("tsid", String.class).as("tsid"), + validTs.field("office_id", String.class).as("office_id"), + loc.as("loc_part"), + unit.as("units"), + interval.as("interval"), + param.as("parm_part"), + intervalPart.as("interval_part")) + .from(validTs)); + + SelectJoinStep metadataQuery = + dsl.with(valid) + .select( + valid.field("tscode", BigDecimal.class).as("tscode"), + valid.field("tsid", String.class).as("tsid"), + valid.field("office_id", String.class).as("office_id"), + valid.field("units", 
String.class).as("units"), + valid.field("interval", BigDecimal.class).as("interval"), + valid.field("loc_part", String.class).as("loc_part"), + valid.field("parm_part", String.class).as("parm_part"), + valid.field("interval_part", String.class).as("interval_part"), + AV_CWMS_TS_ID2.INTERVAL_UTC_OFFSET, + AV_CWMS_TS_ID2.TIME_ZONE_ID) + .from(valid) + .leftOuterJoin(AV_CWMS_TS_ID2) + .on(AV_CWMS_TS_ID2.DB_OFFICE_ID.eq(valid.field("office_id", String.class)) + .and(AV_CWMS_TS_ID2.TS_CODE.eq(valid.field("tscode", BigDecimal.class))) + .and(AV_CWMS_TS_ID2.ALIASED_ITEM.isNull())); + + logger.atFine().log("%s", lazy(() -> metadataQuery.getSQL(ParamType.INLINED))); + + return metadataQuery.fetchOne(tsMetadata -> { + BigDecimal intervalValue = tsMetadata.getValue("interval", BigDecimal.class); + Number offsetValue = tsMetadata.getValue(AV_CWMS_TS_ID2.INTERVAL_UTC_OFFSET); + BigDecimal tsCodeValue = tsMetadata.getValue("tscode", BigDecimal.class); + long tsCodeLong = tsCodeValue.longValue(); + boolean isLrts = parseBool(CWMS_TS_PACKAGE.call_IS_LRTS__2(dsl.configuration(), tsCodeLong)); + return new RequestedTimeSeriesMetadata( + tsCodeLong, + tsMetadata.getValue("tsid", String.class), + tsMetadata.getValue("office_id", String.class), + tsMetadata.getValue("units", String.class), + intervalValue == null ? 0L : intervalValue.longValue(), + offsetValue == null ? UTC_OFFSET_IRREGULAR : offsetValue.longValue(), + tsMetadata.getValue(AV_CWMS_TS_ID2.TIME_ZONE_ID) == null + ? 
UTC + : tsMetadata.getValue(AV_CWMS_TS_ID2.TIME_ZONE_ID), + tsMetadata.getValue("loc_part", String.class), + tsMetadata.getValue("parm_part", String.class), + tsMetadata.getValue("interval_part", String.class), + isLrts + ); + }); + } + + private List fetchRequestedTimeSeriesRows(RequestedTimeSeriesMetadata metadata, + TimeSeriesRequestParameters requestParameters) { + ZonedDateTime beginTime = requestParameters.getBeginTime(); + ZonedDateTime endTime = requestParameters.getEndTime(); + ZonedDateTime versionDate = requestParameters.getVersionDate(); + Timestamp beginTimestamp = Timestamp.from(beginTime.toInstant()); + Timestamp endTimestamp = Timestamp.from(endTime.toInstant()); + + AV_TSV_DQU view = AV_TSV_DQU.AV_TSV_DQU; + Field qualityForNormalization = DSL.nvl( + view.QUALITY_CODE.cast(BigDecimal.class), + DSL.val(BigDecimal.valueOf(5)) + ); + Field normalizedQuality = CWMS_TS_PACKAGE.call_NORMALIZE_QUALITY( + qualityForNormalization).as("quality_norm"); + + Condition baseCondition = view.ALIASED_ITEM.isNull() + .and(view.TS_CODE.eq(metadata.getTsCode())) + .and(view.OFFICE_ID.eq(metadata.getOfficeId())) + .and(view.UNIT_ID.eq(metadata.getUnits())) + .and(view.DATE_TIME.ge(beginTimestamp)) + .and(view.DATE_TIME.le(endTimestamp)) + .and(view.START_DATE.le(endTimestamp)) + .and(view.END_DATE.gt(beginTimestamp)); + + SelectConditionStep> query; + if (versionDate != null) { + query = dsl.select( + view.DATE_TIME, + view.VALUE, + normalizedQuality, + view.DATA_ENTRY_DATE) + .from(view) + .where(baseCondition.and(view.VERSION_DATE.eq(Timestamp.from(versionDate.toInstant())))); + } else { + Table rankedRows = dsl.select( + view.DATE_TIME.as(DATE_TIME), + view.VALUE.as(VALUE), + normalizedQuality, + view.DATA_ENTRY_DATE.as(DATA_ENTRY_DATE), + DSL.rowNumber() + .over(partitionBy(view.DATE_TIME) + .orderBy(view.VERSION_DATE.desc(), view.DATA_ENTRY_DATE.desc())) + .as("version_rank")) + .from(view) + .where(baseCondition) + .asTable("ranked_rows"); + + Field dateTimeCol = 
rankedRows.field(DATE_TIME, Timestamp.class); + Field valueCol = rankedRows.field(VALUE, Double.class); + Field qualityCol = rankedRows.field("quality_norm", BigDecimal.class); + Field dataEntryDateCol = rankedRows.field(DATA_ENTRY_DATE, Timestamp.class); + Field versionRankCol = rankedRows.field("version_rank", Integer.class); + + query = dsl.select(dateTimeCol, valueCol, qualityCol, dataEntryDateCol) + .from(rankedRows) + .where(versionRankCol.eq(1)); + } + + query.orderBy(field(DATE_TIME, Timestamp.class).asc()); + logger.atFine().log("%s", lazy(() -> query.getSQL(ParamType.INLINED))); + + return query.fetch(record -> new RetrievedTimeSeriesValue( + record.getValue(0, Timestamp.class), + record.getValue(1, Double.class), + record.getValue(2, BigDecimal.class).intValue(), + record.getValue(3, Timestamp.class) + )); + } + + private List fetchExpectedRegularTimes(RequestedTimeSeriesMetadata metadata, + TimeSeriesRequestParameters requestParameters, + List rawRows) { + if (!isRegularSeries(metadata)) { + return Collections.emptyList(); + } + if (rawRows.isEmpty() && requestParameters.isShouldTrim()) { + return Collections.emptyList(); + } + + Timestamp rangeStart = requestParameters.isShouldTrim() + ? rawRows.get(0).getDateTime() + : Timestamp.from(requestParameters.getBeginTime().toInstant()); + Timestamp rangeEnd = requestParameters.isShouldTrim() + ? rawRows.get(rawRows.size() - 1).getDateTime() + : Timestamp.from(requestParameters.getEndTime().toInstant()); + + long offsetMinutes = resolveIntervalOffset(metadata, rawRows); + String intervalTimeZone = metadata.isLrts() ? 
metadata.getTimeZoneId() : UTC; + DATE_RANGE_T dateRange = new DATE_RANGE_T(rangeStart, rangeEnd, UTC, "T", "T", null); + DATE_TABLE_TYPE expectedTimeTable = CWMS_TS_PACKAGE.call_GET_REG_TS_TIMES_UTC_F( + dsl.configuration(), + dateRange, + metadata.getIntervalPart(), + String.valueOf(offsetMinutes), + intervalTimeZone + ); + + List retVal = new ArrayList<>(); + if (expectedTimeTable != null) { + expectedTimeTable.forEach(timestamp -> { + if (timestamp != null) { + retVal.add(timestamp); + } + }); + } + return retVal; + } + + private long resolveIntervalOffset(RequestedTimeSeriesMetadata metadata, + List rawRows) { + long intervalOffset = metadata.getIntervalOffset(); + if (intervalOffset != UTC_OFFSET_UNDEFINED) { + return intervalOffset; + } + if (rawRows.isEmpty()) { + return 0L; + } + + String intervalTimeZone = metadata.isLrts() ? metadata.getTimeZoneId() : UTC; + Timestamp topOfInterval = CWMS_TS_PACKAGE.call_TOP_OF_INTERVAL_UTC( + dsl.configuration(), + rawRows.get(0).getDateTime(), + metadata.getIntervalPart(), + intervalTimeZone, + "F" + ); + return (rawRows.get(0).getDateTime().getTime() - topOfInterval.getTime()) / TimeUnit.MINUTES.toMillis(1); + } + + private boolean isRegularSeries(RequestedTimeSeriesMetadata metadata) { + return metadata.getIntervalMinutes() != 0L || metadata.getIntervalOffset() != UTC_OFFSET_IRREGULAR; + } + + private int countMergedRows(List rawRows, List expectedTimes) { + if (expectedTimes.isEmpty()) { + return rawRows.size(); + } + + int total = 0; + int rawIndex = 0; + int expectedIndex = 0; + while (rawIndex < rawRows.size() || expectedIndex < expectedTimes.size()) { + Timestamp rawTime = rawIndex < rawRows.size() ? rawRows.get(rawIndex).getDateTime() : null; + Timestamp expectedTime = expectedIndex < expectedTimes.size() ? 
expectedTimes.get(expectedIndex) : null; + + if (rawTime == null) { + expectedIndex++; + } else if (expectedTime == null) { + rawIndex++; + } else { + int compare = expectedTime.compareTo(rawTime); + if (compare < 0) { + expectedIndex++; + } else if (compare > 0) { + rawIndex++; + } else { + expectedIndex++; + rawIndex++; + } + } + total++; + } + return total; + } + + private void populateTimeSeriesValues(TimeSeries timeseries, + List rawRows, + List expectedTimes, + Timestamp tsCursor, + boolean includeEntryDate) { + int rawIndex = 0; + int expectedIndex = 0; + int collected = 0; + int maxRecords = timeseries.getPageSize() > 0 ? timeseries.getPageSize() + 1 : Integer.MAX_VALUE; + + while ((rawIndex < rawRows.size() || expectedIndex < expectedTimes.size()) && collected < maxRecords) { + RetrievedTimeSeriesValue rawRow = rawIndex < rawRows.size() ? rawRows.get(rawIndex) : null; + Timestamp expectedTime = expectedIndex < expectedTimes.size() ? expectedTimes.get(expectedIndex) : null; + + Timestamp candidateTime; + RetrievedTimeSeriesValue candidateRow = null; + boolean syntheticRow = false; + + if (rawRow == null) { + candidateTime = expectedTime; + syntheticRow = true; + expectedIndex++; + } else if (expectedTime == null) { + candidateTime = rawRow.getDateTime(); + candidateRow = rawRow; + rawIndex++; + } else { + int compare = expectedTime.compareTo(rawRow.getDateTime()); + if (compare < 0) { + candidateTime = expectedTime; + syntheticRow = true; + expectedIndex++; + } else if (compare > 0) { + candidateTime = rawRow.getDateTime(); + candidateRow = rawRow; + rawIndex++; + } else { + candidateTime = rawRow.getDateTime(); + candidateRow = rawRow; + rawIndex++; + expectedIndex++; + } + } + + if (tsCursor != null && candidateTime.before(tsCursor)) { + continue; + } + + if (syntheticRow) { + if (includeEntryDate) { + timeseries.addValue(candidateTime, null, 5, null); + } else { + timeseries.addValue(candidateTime, null, 5); + } + } else if (includeEntryDate) { + 
timeseries.addValue(candidateRow.getDateTime(), candidateRow.getValue(), + candidateRow.getQualityCode(), candidateRow.getDataEntryDate()); + } else { + timeseries.addValue(candidateRow.getDateTime(), candidateRow.getValue(), + candidateRow.getQualityCode()); + } + collected++; + } + } + + private static final class RequestedTimeSeriesMetadata { + private final long tsCode; + private final String tsId; + private final String officeId; + private final String units; + private final long intervalMinutes; + private final long intervalOffset; + private final String timeZoneId; + private final String locPart; + private final String parmPart; + private final String intervalPart; + private final boolean isLrts; + + private RequestedTimeSeriesMetadata(long tsCode, String tsId, String officeId, String units, + long intervalMinutes, long intervalOffset, String timeZoneId, + String locPart, String parmPart, String intervalPart, + boolean isLrts) { + this.tsCode = tsCode; + this.tsId = tsId; + this.officeId = officeId; + this.units = units; + this.intervalMinutes = intervalMinutes; + this.intervalOffset = intervalOffset; + this.timeZoneId = timeZoneId; + this.locPart = locPart; + this.parmPart = parmPart; + this.intervalPart = intervalPart; + this.isLrts = isLrts; + } + + private long getTsCode() { + return tsCode; + } + + private String getTsId() { + return tsId; + } + + private String getOfficeId() { + return officeId; + } + + private String getUnits() { + return units; + } + + private long getIntervalMinutes() { + return intervalMinutes; + } + + private long getIntervalOffset() { + return intervalOffset; + } + + private String getTimeZoneId() { + return timeZoneId; + } + + private String getLocPart() { + return locPart; + } + + private String getParmPart() { + return parmPart; + } + + private String getIntervalPart() { + return intervalPart; + } + + private boolean isLrts() { + return isLrts; + } + } + + private static final class RetrievedTimeSeriesValue { + private final 
Timestamp dateTime; + private final Double value; + private final int qualityCode; + private final Timestamp dataEntryDate; + + private RetrievedTimeSeriesValue(Timestamp dateTime, Double value, int qualityCode, Timestamp dataEntryDate) { + this.dateTime = dateTime; + this.value = value; + this.qualityCode = qualityCode; + this.dataEntryDate = dataEntryDate; + } + + private Timestamp getDateTime() { + return dateTime; + } + + private Double getValue() { + return value; + } + + private int getQualityCode() { + return qualityCode; + } + + private Timestamp getDataEntryDate() { + return dataEntryDate; + } + } private boolean shouldFetchVerticalDatum(String parmPart) { // Check if parameter requires vertical datum (e.g., "ELEV") diff --git a/load_data/performance/.gitignore b/load_data/performance/.gitignore new file mode 100644 index 0000000000..ddbb6df966 --- /dev/null +++ b/load_data/performance/.gitignore @@ -0,0 +1,2 @@ +results/ +responses/ diff --git a/load_data/performance/invoke-timeseries-read-benchmark.ps1 b/load_data/performance/invoke-timeseries-read-benchmark.ps1 new file mode 100644 index 0000000000..ba737d3183 --- /dev/null +++ b/load_data/performance/invoke-timeseries-read-benchmark.ps1 @@ -0,0 +1,454 @@ +[CmdletBinding()] +param( + [string]$Office = "SPK", + [string]$LocationId = "PERF1MREAD", + [string]$SeriesId = "PERF1MREAD.Stage.Inst.1Minute.0.BENCH", + [string]$Units = "ft", + [string]$CdaBaseUrl = "http://localhost:8081/cwms-data", + [string]$DbContainer = "cwms-data-api-db-1", + [string]$DbUser = "CWMS_20", + [string]$DbPassword = "simplecwmspasswD1", + [string]$DbService = "localhost:1521/FREEPDB1", + [string]$StartTime = "2024-01-01T00:00:00Z", + [int]$PointCount = 1000000, + [int]$PageSize = 1000000, + [int]$Runs = 1, + [switch]$Warmup, + [switch]$SkipSeed, + [switch]$ForceReseed, + [switch]$KeepResponses +) + +Set-StrictMode -Version Latest +$ErrorActionPreference = "Stop" + +$SqlPlusPath = "/opt/oracle/product/23ai/dbhomeFree/bin/sqlplus" 
+$ResultsDir = Join-Path $PSScriptRoot "results" +$ResponsesDir = Join-Path $PSScriptRoot "responses" +$NonVersionedDateSql = "date '1111-11-11'" + +function Convert-ToSqlStringLiteral { + param([string]$Value) + return "'" + $Value.Replace("'", "''") + "'" +} + +function Convert-ToOracleDateExpression { + param([datetimeoffset]$Value) + $utc = $Value.ToUniversalTime().ToString("yyyy-MM-dd HH:mm:ss") + return "to_date('$utc', 'yyyy-mm-dd hh24:mi:ss')" +} + +function Invoke-OracleSql { + param( + [string]$Sql, + [string]$Label = "oracle" + ) + + $sqlFile = Join-Path $env:TEMP ("cwms-benchmark-{0}-{1}.sql" -f $Label, [guid]::NewGuid().ToString("N")) + try { + Set-Content -LiteralPath $sqlFile -Value $Sql -Encoding ASCII + + $containerSqlFile = "/tmp/" + [System.IO.Path]::GetFileName($sqlFile) + $null = & docker cp $sqlFile "${DbContainer}:${containerSqlFile}" + if ($LASTEXITCODE -ne 0) { + throw "Failed to copy SQL to container $DbContainer" + } + + $command = "$SqlPlusPath -s -L $DbUser/$DbPassword@$DbService @$containerSqlFile" + $output = & docker exec $DbContainer bash -lc $command 2>&1 + if ($LASTEXITCODE -ne 0) { + throw ("Oracle SQL failed for {0}:`n{1}" -f $Label, ($output -join [Environment]::NewLine)) + } + + return ($output -join [Environment]::NewLine) + } + finally { + if (Test-Path -LiteralPath $sqlFile) { + Remove-Item -LiteralPath $sqlFile -Force + } + } +} + +function Get-YearSegments { + param( + [datetimeoffset]$StartUtc, + [int]$Count + ) + + $segments = @() + $remaining = $Count + $offset = 0 + $cursor = $StartUtc.ToUniversalTime() + + while ($remaining -gt 0) { + $yearStart = [datetimeoffset]::ParseExact( + "{0}-01-01T00:00:00+00:00" -f $cursor.Year, + "yyyy-MM-ddTHH:mm:sszzz", + [System.Globalization.CultureInfo]::InvariantCulture + ) + $nextYear = $yearStart.AddYears(1) + $minutesUntilNextYear = [int][Math]::Floor(($nextYear - $cursor).TotalMinutes) + if ($minutesUntilNextYear -le 0) { + throw "Computed non-positive year segment size for 
$($cursor.Year)" + } + + $segmentCount = [Math]::Min($remaining, $minutesUntilNextYear) + $segments += [pscustomobject]@{ + Year = $cursor.Year + Start = $cursor + Count = $segmentCount + ValueStart = $offset + 1 + } + + $cursor = $cursor.AddMinutes($segmentCount) + $remaining -= $segmentCount + $offset += $segmentCount + } + + return $segments +} + +function Get-SeededPointCount { + $seriesLiteral = Convert-ToSqlStringLiteral $SeriesId + $officeLiteral = Convert-ToSqlStringLiteral $Office + $sql = @" +set heading off feedback off verify off pagesize 0 trimspool on +select count(*) + from av_tsv v + join at_cwms_ts_id t + on t.ts_code = v.ts_code + where t.db_office_id = $officeLiteral + and t.cwms_ts_id = $seriesLiteral; +exit; +"@ + + $raw = Invoke-OracleSql -Sql $sql -Label "count" + $countText = (($raw -split "\r?\n") | ForEach-Object { $_.Trim() } | Where-Object { $_ } | Select-Object -Last 1) + return [int]$countText +} + +function Ensure-BenchmarkSeed { + param( + [datetimeoffset]$StartUtc, + [int]$Count + ) + + if ($SkipSeed) { + return [pscustomobject]@{ + Seeded = $false + ExistingPointCount = Get-SeededPointCount + } + } + + $existingCount = Get-SeededPointCount + if (-not $ForceReseed -and $existingCount -eq $Count) { + return [pscustomobject]@{ + Seeded = $false + ExistingPointCount = $existingCount + } + } + + $seriesLiteral = Convert-ToSqlStringLiteral $SeriesId + $locationLiteral = Convert-ToSqlStringLiteral $LocationId + $officeLiteral = Convert-ToSqlStringLiteral $Office + $locationTypeLiteral = Convert-ToSqlStringLiteral "SITE" + $publicNameLiteral = Convert-ToSqlStringLiteral $LocationId + $longNameLiteral = Convert-ToSqlStringLiteral "$LocationId Benchmark Location" + $descriptionLiteral = Convert-ToSqlStringLiteral "Performance benchmark location" + $timeZoneLiteral = Convert-ToSqlStringLiteral "UTC" + $horizontalDatumLiteral = Convert-ToSqlStringLiteral "NAD83" + $segments = Get-YearSegments -StartUtc $StartUtc -Count $Count + + 
$insertStatements = foreach ($segment in $segments) { + $dateExpr = Convert-ToOracleDateExpression $segment.Start + @" + execute immediate q'[ + insert /*+ APPEND */ into at_tsv_$($segment.Year) + (ts_code, date_time, version_date, data_entry_date, value, quality_code, dest_flag) + select :1, + $dateExpr + numtodsinterval(level - 1, 'MINUTE'), + $NonVersionedDateSql, + systimestamp, + $($segment.ValueStart) + level - 1, + 0, + 0 + from dual + connect by level <= $($segment.Count) + ]' using l_ts_code; +"@ + } + + $seedSql = @" +set serveroutput on feedback on +whenever sqlerror exit failure rollback +declare + location_exists exception; + pragma exception_init(location_exists, -20026); + ts_exists exception; + pragma exception_init(ts_exists, -20003); + l_ts_code number; +begin + begin + cwms_loc.create_location( + p_location_id => $locationLiteral, + p_location_type => $locationTypeLiteral, + p_elevation => null, + p_elev_unit_id => null, + p_vertical_datum => null, + p_latitude => 38.0, + p_longitude => -90.0, + p_horizontal_datum => $horizontalDatumLiteral, + p_public_name => $publicNameLiteral, + p_long_name => $longNameLiteral, + p_description => $descriptionLiteral, + p_time_zone_id => $timeZoneLiteral, + p_county_name => null, + p_state_initial => null, + p_active => 'T', + p_db_office_id => $officeLiteral + ); + exception + when location_exists then null; + end; + + begin + cwms_ts.create_ts($officeLiteral, $seriesLiteral, 0); + exception + when ts_exists then null; + end; + + select ts_code + into l_ts_code + from at_cwms_ts_id + where db_office_id = $officeLiteral + and cwms_ts_id = $seriesLiteral; + + for rec in (select table_name from at_ts_table_properties) loop + execute immediate 'delete from ' || rec.table_name || ' where ts_code = :1' using l_ts_code; + end loop; + + delete from at_ts_extents where ts_code = l_ts_code; + +$($insertStatements -join [Environment]::NewLine) + + cwms_ts.update_ts_extents(l_ts_code, $NonVersionedDateSql); + commit; 
+end;
+/
+set heading off feedback off verify off pagesize 0 trimspool on
+select count(*)
+  from av_tsv v
+  join at_cwms_ts_id t
+    on t.ts_code = v.ts_code
+ where t.db_office_id = $officeLiteral
+   and t.cwms_ts_id = $seriesLiteral;
+exit;
+"@
+
+    $raw = Invoke-OracleSql -Sql $seedSql -Label "seed"
+    $countText = (($raw -split "\r?\n") | ForEach-Object { $_.Trim() } | Where-Object { $_ } | Select-Object -Last 1)
+    return [pscustomobject]@{
+        Seeded             = $true
+        ExistingPointCount = [int]$countText
+    }
+}
+
+function Invoke-CdaRequest {
+    param(
+        [string]$Url,
+        [string]$ResponseFile
+    )
+
+    $format = '{"http_code":%{http_code},"time_total":%{time_total},"time_starttransfer":%{time_starttransfer},"time_connect":%{time_connect},"size_download":%{size_download},"speed_download":%{speed_download}}'
+    $json = & curl.exe -sS -H "Accept: application/json;version=2" -o $ResponseFile -w $format $Url 2>&1
+    if ($LASTEXITCODE -ne 0) {
+        throw ("curl failed: {0}" -f ($json -join [Environment]::NewLine))
+    }
+
+    return ($json | ConvertFrom-Json)
+}
+
+function Wait-ForCdaReady {
+    param(
+        [string]$Url,
+        [int]$MaxAttempts = 30,
+        [int]$DelaySeconds = 1
+    )
+
+    $probeFile = Join-Path $ResponsesDir "readiness-probe.json"
+    try {
+        for ($attempt = 1; $attempt -le $MaxAttempts; $attempt++) {
+            if (Test-Path -LiteralPath $probeFile) {
+                Remove-Item -LiteralPath $probeFile -Force
+            }
+
+            $response = Invoke-CdaRequest -Url $Url -ResponseFile $probeFile
+            if ($response.http_code -eq 200) {
+                return
+            }
+
+            Start-Sleep -Seconds $DelaySeconds
+        }
+    }
+    finally {
+        if (Test-Path -LiteralPath $probeFile) {
+            Remove-Item -LiteralPath $probeFile -Force
+        }
+    }
+
+    throw "CDA did not become ready after $MaxAttempts attempts: $Url"
+}
+
+function Get-ResponseSummary {
+    param([string]$ResponseFile)
+
+    $content = Get-Content -LiteralPath $ResponseFile -Raw
+    $total = $null
+    $pageSize = $null
+    $firstTimestamp = $null
+    $lastTimestamp = $null
+
+    if ($content -match '"total":(?<total>\d+)') {
+        $total = [int]$Matches["total"]
+    }
+    if ($content -match '"page-size":(?<pageSize>\d+)') {
+        $pageSize = [int]$Matches["pageSize"]
+    }
+    if ($content -match '\[\[(?<first>\d+),') {
+        $firstTimestamp = [long]$Matches["first"]
+    }
+    $allMatches = [regex]::Matches($content, '\[(?<ts>\d+),')
+    if ($allMatches.Count -gt 0) {
+        $lastTimestamp = [long]$allMatches[$allMatches.Count - 1].Groups["ts"].Value
+    }
+
+    return [pscustomobject]@{
+        Total          = $total
+        PageSize       = $pageSize
+        FirstTimestamp = $firstTimestamp
+        LastTimestamp  = $lastTimestamp
+        ResponseBytes  = (Get-Item -LiteralPath $ResponseFile).Length
+    }
+}
+
+$startUtc = [datetimeoffset]::Parse($StartTime, [System.Globalization.CultureInfo]::InvariantCulture).ToUniversalTime()
+$endUtc = $startUtc.AddMinutes($PointCount - 1)
+$escapedSeriesId = [uri]::EscapeDataString($SeriesId)
+$escapedOffice = [uri]::EscapeDataString($Office)
+$escapedUnits = [uri]::EscapeDataString($Units)
+$escapedBegin = [uri]::EscapeDataString($startUtc.ToString("yyyy-MM-ddTHH:mm:ssZ"))
+$escapedEnd = [uri]::EscapeDataString($endUtc.ToString("yyyy-MM-ddTHH:mm:ssZ"))
+$requestUrl = "{0}/timeseries?office={1}&name={2}&units={3}&begin={4}&end={5}&page-size={6}" -f `
+    $CdaBaseUrl.TrimEnd("/"), `
+    $escapedOffice, `
+    $escapedSeriesId, `
+    $escapedUnits, `
+    $escapedBegin, `
+    $escapedEnd, `
+    $PageSize
+
+New-Item -ItemType Directory -Path $ResultsDir -Force | Out-Null
+New-Item -ItemType Directory -Path $ResponsesDir -Force | Out-Null
+
+$seedInfo = Ensure-BenchmarkSeed -StartUtc $startUtc -Count $PointCount
+if ($seedInfo.ExistingPointCount -ne $PointCount) {
+    throw "Expected $PointCount seeded points but found $($seedInfo.ExistingPointCount)"
+}
+
+Wait-ForCdaReady -Url ("{0}/offices/{1}" -f $CdaBaseUrl.TrimEnd("/"), $escapedOffice)
+
+if ($Warmup) {
+    $warmupFile = Join-Path $ResponsesDir "warmup.json"
+    $null = Invoke-CdaRequest -Url $requestUrl -ResponseFile $warmupFile
+    if (-not $KeepResponses -and (Test-Path -LiteralPath $warmupFile)) {
+        Remove-Item
-LiteralPath $warmupFile -Force + } +} + +$results = @() +$failedRuns = @() +for ($run = 1; $run -le $Runs; $run++) { + $responseFile = Join-Path $ResponsesDir ("timeseries-read-run-{0}.json" -f $run) + $curlMetrics = Invoke-CdaRequest -Url $requestUrl -ResponseFile $responseFile + $responseSummary = Get-ResponseSummary -ResponseFile $responseFile + $errorBody = $null + if ($curlMetrics.http_code -ne 200) { + $errorBody = [string](Get-Content -LiteralPath $responseFile -Raw) + } + + $result = [pscustomobject]@{ + run = $run + http_code = [int]$curlMetrics.http_code + time_total_seconds = [double]$curlMetrics.time_total + time_starttransfer_seconds = [double]$curlMetrics.time_starttransfer + time_connect_seconds = [double]$curlMetrics.time_connect + size_download_bytes = [double]$curlMetrics.size_download + speed_download_bytes_per_second = [double]$curlMetrics.speed_download + response_bytes_on_disk = [long]$responseSummary.ResponseBytes + reported_total = $responseSummary.Total + reported_page_size = $responseSummary.PageSize + first_timestamp = $responseSummary.FirstTimestamp + last_timestamp = $responseSummary.LastTimestamp + error_body = $errorBody + response_file = $responseFile + } + $results += $result + if ($curlMetrics.http_code -ne 200) { + $failedRuns += $result + } + + if (-not $KeepResponses -and (Test-Path -LiteralPath $responseFile)) { + Remove-Item -LiteralPath $responseFile -Force + $result.response_file = $null + } +} + +$gitBranch = (& git branch --show-current 2>$null) +$gitBranchExitCode = $LASTEXITCODE +$gitCommit = (& git rev-parse HEAD 2>$null) +$gitCommitExitCode = $LASTEXITCODE +$timestamp = Get-Date -Format "yyyyMMdd-HHmmss" +$resultFile = Join-Path $ResultsDir ("timeseries-read-benchmark-{0}.json" -f $timestamp) +$successfulRuns = @($results | Where-Object { $_.http_code -eq 200 }) +$summary = $null +if ($successfulRuns.Count -gt 0) { + $avg = ($successfulRuns | Measure-Object -Property time_total_seconds -Average).Average + $min = 
($successfulRuns | Measure-Object -Property time_total_seconds -Minimum).Minimum + $max = ($successfulRuns | Measure-Object -Property time_total_seconds -Maximum).Maximum + $summary = [pscustomobject]@{ + successful_runs = $successfulRuns.Count + average_time_total_seconds = [math]::Round([double]$avg, 6) + min_time_total_seconds = [math]::Round([double]$min, 6) + max_time_total_seconds = [math]::Round([double]$max, 6) + } +} + +$payload = [pscustomobject]@{ + benchmark = "timeseries-read" + generated_at = (Get-Date).ToUniversalTime().ToString("o") + git_branch = if ($gitBranchExitCode -eq 0) { $gitBranch.Trim() } else { $null } + git_commit = if ($gitCommitExitCode -eq 0) { $gitCommit.Trim() } else { $null } + office = $Office + location_id = $LocationId + series_id = $SeriesId + units = $Units + start_time_utc = $startUtc.ToString("o") + end_time_utc = $endUtc.ToString("o") + point_count = $PointCount + page_size = $PageSize + request_url = $requestUrl + seed = [pscustomobject]@{ + seeded = [bool]$seedInfo.Seeded + point_count = [int]$seedInfo.ExistingPointCount + } + summary = $summary + runs = $results +} + +$payload | ConvertTo-Json -Depth 6 | Set-Content -LiteralPath $resultFile -Encoding ASCII +$payload | ConvertTo-Json -Depth 6 + +if ($failedRuns.Count -gt 0) { + $statusList = ($failedRuns | ForEach-Object { $_.http_code }) -join ", " + throw "Benchmark completed with HTTP failures ($statusList). 
Results saved to $resultFile" +} From 74d5f299fab72ed993f452ccad27329efa42cb46 Mon Sep 17 00:00:00 2001 From: "Charles Graham, SWT" Date: Mon, 13 Apr 2026 21:28:01 -0500 Subject: [PATCH 02/16] Add timeseries parity harness --- .../cwms/cda/data/dao/TimeSeriesDaoImpl.java | 4 +- .../invoke-timeseries-parity-check.ps1 | 751 ++++++++++++++++++ 2 files changed, 753 insertions(+), 2 deletions(-) create mode 100644 load_data/performance/invoke-timeseries-parity-check.ps1 diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java index 9ec37f8e43..3fa9155f69 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java @@ -1040,9 +1040,9 @@ private void populateTimeSeriesValues(TimeSeries timeseries, if (syntheticRow) { if (includeEntryDate) { - timeseries.addValue(candidateTime, null, 5, null); + timeseries.addValue(candidateTime, null, 0, null); } else { - timeseries.addValue(candidateTime, null, 5); + timeseries.addValue(candidateTime, null, 0); } } else if (includeEntryDate) { timeseries.addValue(candidateRow.getDateTime(), candidateRow.getValue(), diff --git a/load_data/performance/invoke-timeseries-parity-check.ps1 b/load_data/performance/invoke-timeseries-parity-check.ps1 new file mode 100644 index 0000000000..6191e56c41 --- /dev/null +++ b/load_data/performance/invoke-timeseries-parity-check.ps1 @@ -0,0 +1,751 @@ +[CmdletBinding()] +param( + [string]$Office = "SPK", + [string]$CdaBaseUrl = "http://localhost:8081/cwms-data", + [string]$DbContainer = "cwms-data-api-db-1", + [string]$DbUser = "CWMS_20", + [string]$DbPassword = "simplecwmspasswD1", + [string]$DbService = "localhost:1521/FREEPDB1", + [string[]]$Scenarios = @( + "dense-regular", + "dense-regular-entry-date", + "gap-regular", + "versioned-max", + "versioned-single", + "irregular" + ), + [switch]$KeepResponses +) + 
+Set-StrictMode -Version Latest +$ErrorActionPreference = "Stop" + +$SqlPlusPath = "/opt/oracle/product/23ai/dbhomeFree/bin/sqlplus" +$ResultsDir = Join-Path $PSScriptRoot "results" +$ResponsesDir = Join-Path $PSScriptRoot "responses" +$NonVersionedDateSql = "date '1111-11-11'" +$FloatTolerance = 1e-9 + +function Convert-ToSqlStringLiteral { + param([string]$Value) + return "'" + $Value.Replace("'", "''") + "'" +} + +function Convert-ToOracleDateExpression { + param([datetimeoffset]$Value) + $utc = $Value.ToUniversalTime().ToString("yyyy-MM-dd HH:mm:ss") + return "to_date('$utc', 'yyyy-mm-dd hh24:mi:ss')" +} + +function Convert-ToOracleTimestampExpression { + param([datetimeoffset]$Value) + $utc = $Value.ToUniversalTime().ToString("yyyy-MM-dd HH:mm:ss") + return "to_timestamp('$utc', 'yyyy-mm-dd hh24:mi:ss')" +} + +function Invoke-OracleSql { + param( + [string]$Sql, + [string]$Label = "oracle" + ) + + $sqlFile = Join-Path $env:TEMP ("cwms-parity-{0}-{1}.sql" -f $Label, [guid]::NewGuid().ToString("N")) + try { + Set-Content -LiteralPath $sqlFile -Value $Sql -Encoding ASCII + + $containerSqlFile = "/tmp/" + [System.IO.Path]::GetFileName($sqlFile) + $null = & docker cp $sqlFile "${DbContainer}:${containerSqlFile}" + if ($LASTEXITCODE -ne 0) { + throw "Failed to copy SQL to container $DbContainer" + } + + $command = "$SqlPlusPath -s -L $DbUser/$DbPassword@$DbService @$containerSqlFile" + $output = & docker exec $DbContainer bash -lc $command 2>&1 + if ($LASTEXITCODE -ne 0) { + throw ("Oracle SQL failed for {0}:`n{1}" -f $Label, ($output -join [Environment]::NewLine)) + } + + return ($output -join [Environment]::NewLine) + } + finally { + if (Test-Path -LiteralPath $sqlFile) { + Remove-Item -LiteralPath $sqlFile -Force + } + } +} + +function Invoke-CdaRequest { + param( + [string]$Url, + [string]$ResponseFile + ) + + $format = 
'{"http_code":%{http_code},"time_total":%{time_total},"time_starttransfer":%{time_starttransfer},"time_connect":%{time_connect},"size_download":%{size_download},"speed_download":%{speed_download}}' + $json = & curl.exe -sS -H "Accept: application/json;version=2" -o $ResponseFile -w $format $Url 2>&1 + if ($LASTEXITCODE -ne 0) { + throw ("curl failed: {0}" -f ($json -join [Environment]::NewLine)) + } + + return ($json | ConvertFrom-Json) +} + +function Wait-ForCdaReady { + param( + [string]$Url, + [int]$MaxAttempts = 30, + [int]$DelaySeconds = 1 + ) + + $probeFile = Join-Path $ResponsesDir "parity-readiness-probe.json" + try { + for ($attempt = 1; $attempt -le $MaxAttempts; $attempt++) { + if (Test-Path -LiteralPath $probeFile) { + Remove-Item -LiteralPath $probeFile -Force + } + + $response = Invoke-CdaRequest -Url $Url -ResponseFile $probeFile + if ($response.http_code -eq 200) { + return + } + + Start-Sleep -Seconds $DelaySeconds + } + } + finally { + if (Test-Path -LiteralPath $probeFile) { + Remove-Item -LiteralPath $probeFile -Force + } + } + + throw "CDA did not become ready after $MaxAttempts attempts: $Url" +} + +function New-SeedRow { + param( + [datetimeoffset]$DateTime, + [double]$Value, + [int]$QualityCode = 0, + [datetimeoffset]$DataEntryDate, + [Nullable[datetimeoffset]]$VersionDate = $null + ) + + return [pscustomobject]@{ + DateTime = $DateTime.ToUniversalTime() + Value = $Value + QualityCode = $QualityCode + DataEntryDate = $DataEntryDate.ToUniversalTime() + VersionDate = $VersionDate + } +} + +function New-Scenario { + param( + [string]$Name, + [string]$LocationId, + [string]$SeriesId, + [string]$Units, + [datetimeoffset]$BeginTime, + [datetimeoffset]$EndTime, + [object[]]$Rows, + [bool]$Versioned, + [bool]$IncludeEntryDate, + [string]$ExpectedDateVersionType, + [long]$ExpectedIntervalOffset, + [string]$ExpectedInterval, + [Nullable[datetimeoffset]]$VersionDate = $null + ) + + return [pscustomobject]@{ + Name = $Name + LocationId = $LocationId + 
SeriesId = $SeriesId + Units = $Units + BeginTime = $BeginTime.ToUniversalTime() + EndTime = $EndTime.ToUniversalTime() + Rows = $Rows + Versioned = $Versioned + IncludeEntryDate = $IncludeEntryDate + ExpectedDateVersionType = $ExpectedDateVersionType + ExpectedIntervalOffset = $ExpectedIntervalOffset + ExpectedInterval = $ExpectedInterval + VersionDate = $VersionDate + } +} + +function Get-ScenarioDefinitions { + $denseRows = @( + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:00:00Z") -Value 1 -DataEntryDate ([datetimeoffset]"2024-01-02T00:00:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:01:00Z") -Value 2 -DataEntryDate ([datetimeoffset]"2024-01-02T00:01:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:02:00Z") -Value 3 -DataEntryDate ([datetimeoffset]"2024-01-02T00:02:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:03:00Z") -Value 4 -DataEntryDate ([datetimeoffset]"2024-01-02T00:03:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:04:00Z") -Value 5 -DataEntryDate ([datetimeoffset]"2024-01-02T00:04:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:05:00Z") -Value 6 -DataEntryDate ([datetimeoffset]"2024-01-02T00:05:00Z")) + ) + + $gapRows = @( + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:00:00Z") -Value 1 -DataEntryDate ([datetimeoffset]"2024-01-03T00:00:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:01:00Z") -Value 2 -DataEntryDate ([datetimeoffset]"2024-01-03T00:01:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:02:00Z") -Value 3 -DataEntryDate ([datetimeoffset]"2024-01-03T00:02:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:05:00Z") -Value 6 -DataEntryDate ([datetimeoffset]"2024-01-03T00:05:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:06:00Z") -Value 7 -DataEntryDate ([datetimeoffset]"2024-01-03T00:06:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:07:00Z") -Value 8 
-DataEntryDate ([datetimeoffset]"2024-01-03T00:07:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:08:00Z") -Value 9 -DataEntryDate ([datetimeoffset]"2024-01-03T00:08:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:09:00Z") -Value 10 -DataEntryDate ([datetimeoffset]"2024-01-03T00:09:00Z")) + ) + + $versionDateOlder = [datetimeoffset]"2024-06-20T08:00:00Z" + $versionDateNewer = [datetimeoffset]"2024-06-21T08:00:00Z" + $versionedRows = @( + (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T15:00:00Z") -Value 4 -DataEntryDate ([datetimeoffset]"2024-06-20T09:00:00Z") -VersionDate $versionDateOlder), + (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T16:00:00Z") -Value 4 -DataEntryDate ([datetimeoffset]"2024-06-20T09:01:00Z") -VersionDate $versionDateOlder), + (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T17:00:00Z") -Value 4 -DataEntryDate ([datetimeoffset]"2024-06-20T09:02:00Z") -VersionDate $versionDateOlder), + (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T18:00:00Z") -Value 3 -DataEntryDate ([datetimeoffset]"2024-06-20T09:03:00Z") -VersionDate $versionDateOlder), + (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T15:00:00Z") -Value 1 -DataEntryDate ([datetimeoffset]"2024-06-21T09:00:00Z") -VersionDate $versionDateNewer), + (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T16:00:00Z") -Value 1 -DataEntryDate ([datetimeoffset]"2024-06-21T09:01:00Z") -VersionDate $versionDateNewer), + (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T17:00:00Z") -Value 1 -DataEntryDate ([datetimeoffset]"2024-06-21T09:02:00Z") -VersionDate $versionDateNewer) + ) + + $irregularRows = @( + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-05T12:00:00Z") -Value 10 -DataEntryDate ([datetimeoffset]"2024-01-06T00:00:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-05T12:07:20Z") -Value 20 -DataEntryDate ([datetimeoffset]"2024-01-06T00:01:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-05T12:19:45Z") -Value 30 
-DataEntryDate ([datetimeoffset]"2024-01-06T00:02:00Z")), + (New-SeedRow -DateTime ([datetimeoffset]"2024-01-05T12:33:10Z") -Value 40 -DataEntryDate ([datetimeoffset]"2024-01-06T00:03:00Z")) + ) + + return @( + (New-Scenario -Name "dense-regular" -LocationId "PARREG" -SeriesId "PARREG.Stage.Inst.1Minute.0.BENCH" -Units "ft" -BeginTime ([datetimeoffset]"2024-01-01T00:00:00Z") -EndTime ([datetimeoffset]"2024-01-01T00:05:00Z") -Rows $denseRows -Versioned $false -IncludeEntryDate $false -ExpectedDateVersionType "UNVERSIONED" -ExpectedIntervalOffset 0 -ExpectedInterval "PT1M"), + (New-Scenario -Name "dense-regular-entry-date" -LocationId "PARREG" -SeriesId "PARREG.Stage.Inst.1Minute.0.BENCH" -Units "ft" -BeginTime ([datetimeoffset]"2024-01-01T00:00:00Z") -EndTime ([datetimeoffset]"2024-01-01T00:05:00Z") -Rows $denseRows -Versioned $false -IncludeEntryDate $true -ExpectedDateVersionType "UNVERSIONED" -ExpectedIntervalOffset 0 -ExpectedInterval "PT1M"), + (New-Scenario -Name "gap-regular" -LocationId "PARGAP" -SeriesId "PARGAP.Stage.Inst.1Minute.0.BENCH" -Units "ft" -BeginTime ([datetimeoffset]"2024-01-01T00:00:00Z") -EndTime ([datetimeoffset]"2024-01-01T00:09:00Z") -Rows $gapRows -Versioned $false -IncludeEntryDate $false -ExpectedDateVersionType "UNVERSIONED" -ExpectedIntervalOffset 0 -ExpectedInterval "PT1M"), + (New-Scenario -Name "versioned-max" -LocationId "PARVER" -SeriesId "PARVER.Flow.Inst.1Hour.0.BENCH" -Units "cfs" -BeginTime ([datetimeoffset]"2024-05-01T15:00:00Z") -EndTime ([datetimeoffset]"2024-05-01T18:00:00Z") -Rows $versionedRows -Versioned $true -IncludeEntryDate $false -ExpectedDateVersionType "MAX_AGGREGATE" -ExpectedIntervalOffset 0 -ExpectedInterval "PT1H"), + (New-Scenario -Name "versioned-single" -LocationId "PARVER" -SeriesId "PARVER.Flow.Inst.1Hour.0.BENCH" -Units "cfs" -BeginTime ([datetimeoffset]"2024-05-01T15:00:00Z") -EndTime ([datetimeoffset]"2024-05-01T18:00:00Z") -Rows $versionedRows -Versioned $true -IncludeEntryDate $false 
-ExpectedDateVersionType "SINGLE_VERSION" -ExpectedIntervalOffset 0 -ExpectedInterval "PT1H" -VersionDate $versionDateNewer), + (New-Scenario -Name "irregular" -LocationId "PARIRR" -SeriesId "PARIRR.Flow.Inst.0.0.BENCH" -Units "cfs" -BeginTime ([datetimeoffset]"2024-01-05T12:00:00Z") -EndTime ([datetimeoffset]"2024-01-05T12:33:10Z") -Rows $irregularRows -Versioned $false -IncludeEntryDate $false -ExpectedDateVersionType "UNVERSIONED" -ExpectedIntervalOffset (-2147483648L) -ExpectedInterval "PT0S") + ) +} + +function Convert-SeedValueToSqlLiteral { + param([double]$Value) + return ([System.Globalization.CultureInfo]::InvariantCulture.TextInfo.ToLower($Value.ToString("0.################", [System.Globalization.CultureInfo]::InvariantCulture))) +} + +function Get-SeedSql { + param($Scenario) + + $seriesLiteral = Convert-ToSqlStringLiteral $Scenario.SeriesId + $locationLiteral = Convert-ToSqlStringLiteral $Scenario.LocationId + $officeLiteral = Convert-ToSqlStringLiteral $Office + $locationTypeLiteral = Convert-ToSqlStringLiteral "SITE" + $publicNameLiteral = Convert-ToSqlStringLiteral $Scenario.LocationId + $longNameLiteral = Convert-ToSqlStringLiteral "$($Scenario.LocationId) Parity Location" + $descriptionLiteral = Convert-ToSqlStringLiteral "Parity harness location" + $timeZoneLiteral = Convert-ToSqlStringLiteral "UTC" + $horizontalDatumLiteral = Convert-ToSqlStringLiteral "NAD83" + $versionedFlagLiteral = if ($Scenario.Versioned) { "'T'" } else { "'F'" } + + $groupedRows = $Scenario.Rows | Group-Object { $_.DateTime.Year } + $insertStatements = foreach ($group in $groupedRows) { + $intoStatements = foreach ($row in $group.Group) { + $dateExpr = Convert-ToOracleDateExpression $row.DateTime + $versionExpr = if ($null -ne $row.VersionDate) { + Convert-ToOracleDateExpression $row.VersionDate + } else { + $NonVersionedDateSql + } + $entryExpr = Convert-ToOracleTimestampExpression $row.DataEntryDate + $valueExpr = Convert-SeedValueToSqlLiteral $row.Value + " into 
at_tsv_$($group.Name) (ts_code, date_time, version_date, data_entry_date, value, quality_code, dest_flag) values (l_ts_code, $dateExpr, $versionExpr, $entryExpr, $valueExpr, $($row.QualityCode), 0)" + } + + @" +insert all +$($intoStatements -join [Environment]::NewLine) +select 1 from dual; +"@ + } + + $distinctVersionDates = @($Scenario.Rows | + ForEach-Object { $_.VersionDate } | + Where-Object { $null -ne $_ } | + Sort-Object | + Get-Unique) + + $extentStatements = if ($distinctVersionDates.Count -gt 0) { + foreach ($versionDate in $distinctVersionDates) { + " cwms_ts.update_ts_extents(l_ts_code, $(Convert-ToOracleDateExpression $versionDate));" + } + } else { + " cwms_ts.update_ts_extents(l_ts_code, $NonVersionedDateSql);" + } + + return @" +set serveroutput on feedback on +whenever sqlerror exit failure rollback +declare + location_exists exception; + pragma exception_init(location_exists, -20026); + ts_exists exception; + pragma exception_init(ts_exists, -20003); + l_ts_code number; +begin + begin + cwms_loc.create_location( + p_location_id => $locationLiteral, + p_location_type => $locationTypeLiteral, + p_elevation => null, + p_elev_unit_id => null, + p_vertical_datum => null, + p_latitude => 38.0, + p_longitude => -90.0, + p_horizontal_datum => $horizontalDatumLiteral, + p_public_name => $publicNameLiteral, + p_long_name => $longNameLiteral, + p_description => $descriptionLiteral, + p_time_zone_id => $timeZoneLiteral, + p_county_name => null, + p_state_initial => null, + p_active => 'T', + p_db_office_id => $officeLiteral + ); + exception + when location_exists then null; + end; + + begin + cwms_ts.create_ts($officeLiteral, $seriesLiteral, 0); + exception + when ts_exists then null; + end; + + cwms_ts.set_tsid_versioned($seriesLiteral, $versionedFlagLiteral, $officeLiteral); + + select ts_code + into l_ts_code + from at_cwms_ts_id + where db_office_id = $officeLiteral + and cwms_ts_id = $seriesLiteral; + + for rec in (select table_name from 
at_ts_table_properties) loop + execute immediate 'delete from ' || rec.table_name || ' where ts_code = :1' using l_ts_code; + end loop; + + delete from at_ts_extents where ts_code = l_ts_code; + +$($insertStatements -join [Environment]::NewLine) + +$($extentStatements -join [Environment]::NewLine) + commit; +end; +/ +exit; +"@ +} + +function Convert-CdaResponseToRows { + param( + [object]$Payload, + [bool]$IncludeEntryDate + ) + + $rows = @() + foreach ($entry in $Payload.values) { + $row = [ordered]@{ + date_time = [long]$entry[0] + value = if ($null -eq $entry[1]) { $null } else { [double]$entry[1] } + quality_code = [int]$entry[2] + } + if ($IncludeEntryDate) { + $row.data_entry_date = if ($entry.Count -gt 3 -and $null -ne $entry[3]) { + [long]$entry[3] + } else { + $null + } + } + $rows += [pscustomobject]$row + } + + return @($rows | Sort-Object date_time) +} + +function Get-CdaScenarioResult { + param($Scenario) + + $responseFile = Join-Path $ResponsesDir ("parity-{0}-cda.json" -f $Scenario.Name) + $escapedOffice = [uri]::EscapeDataString($Office) + $escapedSeriesId = [uri]::EscapeDataString($Scenario.SeriesId) + $escapedUnits = [uri]::EscapeDataString($Scenario.Units) + $escapedBegin = [uri]::EscapeDataString($Scenario.BeginTime.ToString("yyyy-MM-ddTHH:mm:ssZ")) + $escapedEnd = [uri]::EscapeDataString($Scenario.EndTime.ToString("yyyy-MM-ddTHH:mm:ssZ")) + $requestUrl = "{0}/timeseries?office={1}&name={2}&units={3}&begin={4}&end={5}&page-size=1000" -f ` + $CdaBaseUrl.TrimEnd("/"), ` + $escapedOffice, ` + $escapedSeriesId, ` + $escapedUnits, ` + $escapedBegin, ` + $escapedEnd + + if ($Scenario.IncludeEntryDate) { + $requestUrl += "&include-entry-date=true" + } + + if ($null -ne $Scenario.VersionDate) { + $escapedVersionDate = [uri]::EscapeDataString($Scenario.VersionDate.ToString("yyyy-MM-ddTHH:mm:ssZ")) + $requestUrl += "&version-date=$escapedVersionDate" + } + + $curlMetrics = Invoke-CdaRequest -Url $requestUrl -ResponseFile $responseFile + $payload = 
Get-Content -LiteralPath $responseFile -Raw | ConvertFrom-Json + $rows = Convert-CdaResponseToRows -Payload $payload -IncludeEntryDate $Scenario.IncludeEntryDate + + if (-not $KeepResponses -and (Test-Path -LiteralPath $responseFile)) { + Remove-Item -LiteralPath $responseFile -Force + $responseFile = $null + } + + return [pscustomobject]@{ + RequestUrl = $requestUrl + HttpCode = [int]$curlMetrics.http_code + TimeTotalSeconds = [double]$curlMetrics.time_total + Payload = $payload + Rows = $rows + ResponseFile = $responseFile + } +} + +function Get-OracleRowsSql { + param($Scenario) + + $seriesLiteral = Convert-ToSqlStringLiteral $Scenario.SeriesId + $unitsLiteral = Convert-ToSqlStringLiteral $Scenario.Units + $officeLiteral = Convert-ToSqlStringLiteral $Office + $beginExpr = Convert-ToOracleDateExpression $Scenario.BeginTime + $endExpr = Convert-ToOracleDateExpression $Scenario.EndTime + $versionDateExpr = if ($null -ne $Scenario.VersionDate) { + Convert-ToOracleDateExpression $Scenario.VersionDate + } else { + "null" + } + $maxVersionLiteral = if ($null -ne $Scenario.VersionDate) { "'F'" } else { "'T'" } + $retrieveFunction = if ($Scenario.IncludeEntryDate) { + "cwms_20.cwms_ts.retrieve_ts_entry_out_tab" + } else { + "cwms_20.cwms_ts.retrieve_ts_out_tab" + } + + $rowProjection = if ($Scenario.IncludeEntryDate) { + @" +json_object( + 'date_time' value round((date_time - date '1970-01-01') * 86400000), + 'value' value value, + 'quality_code' value quality_code, + 'data_entry_date' value case + when data_entry_date is null then null + else round((cast(data_entry_date as date) - date '1970-01-01') * 86400000) + end null on null +) +"@ + } else { + @" +json_object( + 'date_time' value round((date_time - date '1970-01-01') * 86400000), + 'value' value value, + 'quality_code' value quality_code +) +"@ + } + + return @" +set heading off feedback off verify off pagesize 0 linesize 32767 long 1000000 longchunksize 1000000 trimspool on +with oracle_rows as ( + select * + 
from table($retrieveFunction( + $seriesLiteral, + $unitsLiteral, + $beginExpr, + $endExpr, + 'UTC', + 'T', + 'T', + 'T', + 'F', + 'F', + $versionDateExpr, + $maxVersionLiteral, + $officeLiteral + )) +) +select json_object( + 'row_count' value (select count(*) from oracle_rows), + 'rows' value nvl( + ( + select json_arrayagg( + $rowProjection + returning clob + ) + from ( + select * + from oracle_rows + order by date_time + ) + ), + '[]' + ) format json + returning clob +) +from dual; +exit; +"@ +} + +function Get-OracleScenarioResult { + param($Scenario) + + $responseFile = Join-Path $ResponsesDir ("parity-{0}-oracle.json" -f $Scenario.Name) + $raw = Invoke-OracleSql -Sql (Get-OracleRowsSql -Scenario $Scenario) -Label ("oracle-{0}" -f $Scenario.Name) + $json = (($raw -split "\r?\n") | ForEach-Object { $_.Trim() } | Where-Object { $_ }) -join "" + Set-Content -LiteralPath $responseFile -Value $json -Encoding ASCII + $payload = $json | ConvertFrom-Json + $rows = @() + foreach ($entry in $payload.rows) { + $row = [ordered]@{ + date_time = [long]$entry.date_time + value = if ($null -eq $entry.value) { $null } else { [double]$entry.value } + quality_code = [int]$entry.quality_code + } + if ($Scenario.IncludeEntryDate) { + $row.data_entry_date = if ($null -ne $entry.PSObject.Properties["data_entry_date"] -and $null -ne $entry.data_entry_date) { + [long]$entry.data_entry_date + } else { + $null + } + } + $rows += [pscustomobject]$row + } + + if (-not $KeepResponses -and (Test-Path -LiteralPath $responseFile)) { + Remove-Item -LiteralPath $responseFile -Force + $responseFile = $null + } + + return [pscustomobject]@{ + Payload = $payload + Rows = @($rows | Sort-Object date_time) + ResponseFile = $responseFile + } +} + +function Test-RowEquality { + param( + $Expected, + $Actual, + [bool]$IncludeEntryDate + ) + + if ($Expected.date_time -ne $Actual.date_time) { + return $false + } + + if ($Expected.quality_code -ne $Actual.quality_code) { + return $false + } + + if ($null 
-eq $Expected.value -and $null -ne $Actual.value) { + return $false + } + + if ($null -ne $Expected.value -and $null -eq $Actual.value) { + return $false + } + + if ($null -ne $Expected.value -and $null -ne $Actual.value) { + if ([math]::Abs([double]$Expected.value - [double]$Actual.value) -gt $FloatTolerance) { + return $false + } + } + + if ($IncludeEntryDate) { + if ($Expected.data_entry_date -ne $Actual.data_entry_date) { + return $false + } + } + + return $true +} + +function Compare-ScenarioRows { + param( + [object[]]$ExpectedRows, + [object[]]$ActualRows, + [bool]$IncludeEntryDate + ) + + $mismatchCount = 0 + $firstMismatch = $null + $maxLength = [math]::Max($ExpectedRows.Count, $ActualRows.Count) + + for ($index = 0; $index -lt $maxLength; $index++) { + $expected = if ($index -lt $ExpectedRows.Count) { $ExpectedRows[$index] } else { $null } + $actual = if ($index -lt $ActualRows.Count) { $ActualRows[$index] } else { $null } + + $equal = $false + if ($null -ne $expected -and $null -ne $actual) { + $equal = Test-RowEquality -Expected $expected -Actual $actual -IncludeEntryDate $IncludeEntryDate + } + + if (-not $equal) { + $mismatchCount++ + if ($null -eq $firstMismatch) { + $firstMismatch = [pscustomobject]@{ + index = $index + expected = $expected + actual = $actual + } + } + } + } + + return [pscustomobject]@{ + mismatch_count = $mismatchCount + first_mismatch = $firstMismatch + } +} + +function Test-MetadataExpectation { + param( + $Scenario, + $CdaResult, + $OracleResult + ) + + $metadataMismatches = @() + if ($CdaResult.Payload.total -ne $OracleResult.Payload.row_count) { + $metadataMismatches += [pscustomobject]@{ + field = "total" + expected = [int]$OracleResult.Payload.row_count + actual = $CdaResult.Payload.total + } + } + + if ($CdaResult.Payload.'date-version-type' -ne $Scenario.ExpectedDateVersionType) { + $metadataMismatches += [pscustomobject]@{ + field = "date-version-type" + expected = $Scenario.ExpectedDateVersionType + actual = 
$CdaResult.Payload.'date-version-type' + } + } + + if ($CdaResult.Payload.'interval-offset' -ne $Scenario.ExpectedIntervalOffset) { + $metadataMismatches += [pscustomobject]@{ + field = "interval-offset" + expected = $Scenario.ExpectedIntervalOffset + actual = $CdaResult.Payload.'interval-offset' + } + } + + if ($CdaResult.Payload.interval -ne $Scenario.ExpectedInterval) { + $metadataMismatches += [pscustomobject]@{ + field = "interval" + expected = $Scenario.ExpectedInterval + actual = $CdaResult.Payload.interval + } + } + + if ($null -ne $Scenario.VersionDate) { + $expectedVersionDate = $Scenario.VersionDate.ToString("yyyy-MM-ddTHH:mm:ssZ") + if ($CdaResult.Payload.'version-date' -ne $expectedVersionDate) { + $metadataMismatches += [pscustomobject]@{ + field = "version-date" + expected = $expectedVersionDate + actual = $CdaResult.Payload.'version-date' + } + } + } + + return @($metadataMismatches) +} + +New-Item -ItemType Directory -Path $ResultsDir -Force | Out-Null +New-Item -ItemType Directory -Path $ResponsesDir -Force | Out-Null + +$scenarioMap = @{} +foreach ($scenario in Get-ScenarioDefinitions) { + $scenarioMap[$scenario.Name] = $scenario +} + +$requestedScenarios = foreach ($scenarioName in $Scenarios) { + if (-not $scenarioMap.ContainsKey($scenarioName)) { + throw "Unknown scenario '$scenarioName'. 
Available scenarios: $($scenarioMap.Keys -join ', ')" + } + $scenarioMap[$scenarioName] +} + +Wait-ForCdaReady -Url ("{0}/offices/{1}" -f $CdaBaseUrl.TrimEnd("/"), [uri]::EscapeDataString($Office)) + +$results = @() +$failedScenarios = @() +foreach ($scenario in $requestedScenarios) { + Invoke-OracleSql -Sql (Get-SeedSql -Scenario $scenario) -Label ("seed-{0}" -f $scenario.Name) | Out-Null + + $oracleResult = Get-OracleScenarioResult -Scenario $scenario + $cdaResult = Get-CdaScenarioResult -Scenario $scenario + $rowComparison = Compare-ScenarioRows -ExpectedRows $oracleResult.Rows -ActualRows $cdaResult.Rows -IncludeEntryDate $scenario.IncludeEntryDate + $metadataMismatches = @(Test-MetadataExpectation -Scenario $scenario -CdaResult $cdaResult -OracleResult $oracleResult) + $passed = $cdaResult.HttpCode -eq 200 -and $rowComparison.mismatch_count -eq 0 -and $metadataMismatches.Count -eq 0 + + $result = [pscustomobject]@{ + scenario = $scenario.Name + http_code = $cdaResult.HttpCode + time_total_seconds = $cdaResult.TimeTotalSeconds + request_url = $cdaResult.RequestUrl + include_entry_date = [bool]$scenario.IncludeEntryDate + version_date = if ($null -ne $scenario.VersionDate) { $scenario.VersionDate.ToString("o") } else { $null } + expected_row_count = [int]$oracleResult.Payload.row_count + actual_row_count = $cdaResult.Rows.Count + reported_total = $cdaResult.Payload.total + expected_date_version_type = $scenario.ExpectedDateVersionType + actual_date_version_type = $cdaResult.Payload.'date-version-type' + expected_interval = $scenario.ExpectedInterval + actual_interval = $cdaResult.Payload.interval + expected_interval_offset = $scenario.ExpectedIntervalOffset + actual_interval_offset = $cdaResult.Payload.'interval-offset' + metadata_mismatches = $metadataMismatches + row_mismatch_count = $rowComparison.mismatch_count + first_row_mismatch = $rowComparison.first_mismatch + oracle_response_file = $oracleResult.ResponseFile + cda_response_file = 
$cdaResult.ResponseFile + passed = $passed + } + + $results += $result + if (-not $passed) { + $failedScenarios += $result + } +} + +$gitBranch = (& git branch --show-current 2>$null) +$gitBranchExitCode = $LASTEXITCODE +$gitCommit = (& git rev-parse HEAD 2>$null) +$gitCommitExitCode = $LASTEXITCODE +$timestamp = Get-Date -Format "yyyyMMdd-HHmmss" +$resultFile = Join-Path $ResultsDir ("timeseries-parity-{0}.json" -f $timestamp) +$summary = [pscustomobject]@{ + total_scenarios = $results.Count + passed_scenarios = @($results | Where-Object { $_.passed }).Count + failed_scenarios = @($results | Where-Object { -not $_.passed }).Count +} + +$payload = [pscustomobject]@{ + parity = "timeseries" + generated_at = (Get-Date).ToUniversalTime().ToString("o") + git_branch = if ($gitBranchExitCode -eq 0 -and $null -ne $gitBranch) { $gitBranch.Trim() } else { $null } + git_commit = if ($gitCommitExitCode -eq 0 -and $null -ne $gitCommit) { $gitCommit.Trim() } else { $null } + office = $Office + summary = $summary + results = $results +} + +$payload | ConvertTo-Json -Depth 8 | Set-Content -LiteralPath $resultFile -Encoding ASCII +$payload | ConvertTo-Json -Depth 8 + +if ($failedScenarios.Count -gt 0) { + $failedNames = ($failedScenarios | ForEach-Object { $_.scenario }) -join ", " + throw "Parity check found mismatches in: $failedNames. 
Results saved to $resultFile" +} From 74a8635a219a5774298b4c3bac0ca46b6419b3fe Mon Sep 17 00:00:00 2001 From: "Charles Graham, SWT" Date: Mon, 13 Apr 2026 23:34:12 -0500 Subject: [PATCH 03/16] Add direct timeseries parity integration tests --- .../cwms/cda/data/dao/TimeSeriesDaoImpl.java | 46 +- .../cda/api/TimeSeriesDirectReadParityIT.java | 569 ++++++++++++++++++ 2 files changed, 606 insertions(+), 9 deletions(-) create mode 100644 cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java index 3fa9155f69..10e4d27b01 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java @@ -51,7 +51,9 @@ import java.time.Duration; import java.time.Instant; import java.time.LocalDate; +import java.time.LocalDateTime; import java.time.ZoneId; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; @@ -786,6 +788,7 @@ private RequestedTimeSeriesMetadata fetchRequestedTimeSeriesMetadata( valid.field("tsid", String.class).as("tsid"), valid.field("office_id", String.class).as("office_id"), valid.field("units", String.class).as("units"), + AV_CWMS_TS_ID2.UNIT_ID.as("source_unit"), valid.field("interval", BigDecimal.class).as("interval"), valid.field("loc_part", String.class).as("loc_part"), valid.field("parm_part", String.class).as("parm_part"), @@ -805,12 +808,15 @@ private RequestedTimeSeriesMetadata fetchRequestedTimeSeriesMetadata( Number offsetValue = tsMetadata.getValue(AV_CWMS_TS_ID2.INTERVAL_UTC_OFFSET); BigDecimal tsCodeValue = tsMetadata.getValue("tscode", BigDecimal.class); long tsCodeLong = tsCodeValue.longValue(); + String requestedUnit = tsMetadata.getValue("units", String.class); + String sourceUnit = 
tsMetadata.getValue("source_unit", String.class); + validateRequestedUnits(sourceUnit, requestedUnit); boolean isLrts = parseBool(CWMS_TS_PACKAGE.call_IS_LRTS__2(dsl.configuration(), tsCodeLong)); return new RequestedTimeSeriesMetadata( tsCodeLong, tsMetadata.getValue("tsid", String.class), tsMetadata.getValue("office_id", String.class), - tsMetadata.getValue("units", String.class), + requestedUnit, intervalValue == null ? 0L : intervalValue.longValue(), offsetValue == null ? UTC_OFFSET_IRREGULAR : offsetValue.longValue(), tsMetadata.getValue(AV_CWMS_TS_ID2.TIME_ZONE_ID) == null @@ -843,7 +849,7 @@ private List fetchRequestedTimeSeriesRows(RequestedTim Condition baseCondition = view.ALIASED_ITEM.isNull() .and(view.TS_CODE.eq(metadata.getTsCode())) .and(view.OFFICE_ID.eq(metadata.getOfficeId())) - .and(view.UNIT_ID.eq(metadata.getUnits())) + .and(view.UNIT_ID.equalIgnoreCase(metadata.getUnits())) .and(view.DATE_TIME.ge(beginTimestamp)) .and(view.DATE_TIME.le(endTimestamp)) .and(view.START_DATE.le(endTimestamp)) @@ -851,13 +857,15 @@ private List fetchRequestedTimeSeriesRows(RequestedTim SelectConditionStep> query; if (versionDate != null) { + Field versionTimestamp = CWMS_UTIL_PACKAGE.call_TO_TIMESTAMP__2( + DSL.val(versionDate.toInstant().toEpochMilli())); query = dsl.select( view.DATE_TIME, view.VALUE, normalizedQuality, view.DATA_ENTRY_DATE) .from(view) - .where(baseCondition.and(view.VERSION_DATE.eq(Timestamp.from(versionDate.toInstant())))); + .where(baseCondition.and(view.VERSION_DATE.eq(versionTimestamp))); } else { Table rankedRows = dsl.select( view.DATE_TIME.as(DATE_TIME), @@ -926,7 +934,7 @@ private List fetchExpectedRegularTimes(RequestedTimeSeriesMetadata me if (expectedTimeTable != null) { expectedTimeTable.forEach(timestamp -> { if (timestamp != null) { - retVal.add(timestamp); + retVal.add(normalizeOracleUtcTimestamp(timestamp)); } }); } @@ -944,13 +952,13 @@ private long resolveIntervalOffset(RequestedTimeSeriesMetadata metadata, } String 
intervalTimeZone = metadata.isLrts() ? metadata.getTimeZoneId() : UTC; - Timestamp topOfInterval = CWMS_TS_PACKAGE.call_TOP_OF_INTERVAL_UTC( + Timestamp topOfInterval = normalizeOracleUtcTimestamp(CWMS_TS_PACKAGE.call_TOP_OF_INTERVAL_UTC( dsl.configuration(), rawRows.get(0).getDateTime(), metadata.getIntervalPart(), intervalTimeZone, "F" - ); + )); return (rawRows.get(0).getDateTime().getTime() - topOfInterval.getTime()) / TimeUnit.MINUTES.toMillis(1); } @@ -975,7 +983,7 @@ private int countMergedRows(List rawRows, List 0) { @@ -1017,7 +1025,7 @@ private void populateTimeSeriesValues(TimeSeries timeseries, candidateRow = rawRow; rawIndex++; } else { - int compare = expectedTime.compareTo(rawRow.getDateTime()); + int compare = compareTimestampOrder(expectedTime, rawRow.getDateTime()); if (compare < 0) { candidateTime = expectedTime; syntheticRow = true; @@ -1034,7 +1042,7 @@ private void populateTimeSeriesValues(TimeSeries timeseries, } } - if (tsCursor != null && candidateTime.before(tsCursor)) { + if (tsCursor != null && compareTimestampOrder(candidateTime, tsCursor) < 0) { continue; } @@ -1055,6 +1063,26 @@ private void populateTimeSeriesValues(TimeSeries timeseries, } } + private int compareTimestampOrder(Timestamp left, Timestamp right) { + return Long.compare(left.getTime(), right.getTime()); + } + + private Timestamp normalizeOracleUtcTimestamp(Timestamp timestamp) { + LocalDateTime utcWallTime = timestamp.toLocalDateTime(); + return Timestamp.from(utcWallTime.toInstant(ZoneOffset.UTC)); + } + + private void validateRequestedUnits(String sourceUnit, String requestedUnit) { + if (sourceUnit == null || requestedUnit == null || sourceUnit.equalsIgnoreCase(requestedUnit)) { + return; + } + dsl.select(CWMS_UTIL_PACKAGE.call_CONVERT_UNITS( + DSL.val(0.0d), + DSL.val(sourceUnit), + DSL.val(requestedUnit))) + .fetchOne(0, Double.class); + } + private static final class RequestedTimeSeriesMetadata { private final long tsCode; private final String tsId; diff --git 
a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java new file mode 100644 index 0000000000..43e55241ce --- /dev/null +++ b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java @@ -0,0 +1,569 @@ +package cwms.cda.api; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.is; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import cwms.cda.formatters.Formats; +import fixtures.CwmsDataApiSetupCallback; +import io.restassured.filter.log.LogDetail; +import io.restassured.response.ExtractableResponse; +import io.restassured.response.Response; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.OffsetDateTime; +import java.time.ZoneOffset; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.servlet.http.HttpServletResponse; +import mil.army.usace.hec.test.database.CwmsDatabaseContainer; +import org.jooq.impl.DSL; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import usace.cwms.db.jooq.codegen.packages.CWMS_TS_PACKAGE; +import io.restassured.specification.RequestSpecification; + +@Tag("integration") +final class TimeSeriesDirectReadParityIT extends DataApiTestIT { + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + private 
static final String OFFICE = "SPK"; + private static final double DOUBLE_TOLERANCE = 1e-9; + + @ParameterizedTest(name = "{0}") + @MethodSource("scenarios") + void directReadMatchesOracleRetrieveTs(Scenario scenario) throws Exception { + seedScenario(scenario); + + List expectedRows = fetchOracleRows(scenario); + TimeSeriesResponse actualResponse = fetchCdaRows(scenario); + String mismatchSummary = buildMismatchSummary(expectedRows, actualResponse); + + assertEquals(expectedRows.size(), actualResponse.total, "Reported total " + mismatchSummary); + assertEquals(scenario.expectedDateVersionType, actualResponse.dateVersionType, "Date version type"); + assertEquals(scenario.expectedInterval, actualResponse.interval, "Interval"); + assertEquals(scenario.expectedIntervalOffset, actualResponse.intervalOffset, "Interval offset"); + + if (scenario.versionDate != null) { + assertNotNull(actualResponse.versionDate, "Version date"); + assertEquals(scenario.versionDate, actualResponse.versionDate, "Version date"); + } else { + assertNull(actualResponse.versionDate, "Version date"); + } + + assertEquals(expectedRows.size(), actualResponse.rows.size(), "Row count " + mismatchSummary); + for (int i = 0; i < expectedRows.size(); i++) { + assertRowsEqual(expectedRows.get(i), actualResponse.rows.get(i), i); + } + } + + private static String buildMismatchSummary(List expectedRows, TimeSeriesResponse actualResponse) { + return "expectedRows=" + summarizeRows(expectedRows) + + " actualRows=" + summarizeRows(actualResponse.rows) + + " actualTotal=" + actualResponse.total; + } + + private static String summarizeRows(List rows) { + return rows.stream() + .limit(12) + .map(row -> "{t=" + row.dateTimeMillis + + ",v=" + row.value + + ",q=" + row.qualityCode + + ",e=" + row.dataEntryDateMillis + + "}") + .collect(Collectors.joining(", ", "[", rows.size() > 12 ? 
", ...]" : "]")); + } + + private static Stream scenarios() { + Instant olderVersion = Instant.parse("2024-06-20T08:00:00Z"); + Instant newerVersion = Instant.parse("2024-06-21T08:00:00Z"); + + List denseRows = List.of( + row("2024-01-01T00:00:00Z", 1.0, 0, "2024-01-02T00:00:00Z", null), + row("2024-01-01T00:01:00Z", 2.0, 0, "2024-01-02T00:01:00Z", null), + row("2024-01-01T00:02:00Z", 3.0, 0, "2024-01-02T00:02:00Z", null), + row("2024-01-01T00:03:00Z", 4.0, 0, "2024-01-02T00:03:00Z", null), + row("2024-01-01T00:04:00Z", 5.0, 0, "2024-01-02T00:04:00Z", null), + row("2024-01-01T00:05:00Z", 6.0, 0, "2024-01-02T00:05:00Z", null) + ); + + List gapRows = List.of( + row("2024-01-01T00:00:00Z", 1.0, 0, "2024-01-03T00:00:00Z", null), + row("2024-01-01T00:01:00Z", 2.0, 0, "2024-01-03T00:01:00Z", null), + row("2024-01-01T00:02:00Z", 3.0, 0, "2024-01-03T00:02:00Z", null), + row("2024-01-01T00:05:00Z", 6.0, 0, "2024-01-03T00:05:00Z", null), + row("2024-01-01T00:06:00Z", 7.0, 0, "2024-01-03T00:06:00Z", null), + row("2024-01-01T00:07:00Z", 8.0, 0, "2024-01-03T00:07:00Z", null), + row("2024-01-01T00:08:00Z", 9.0, 0, "2024-01-03T00:08:00Z", null), + row("2024-01-01T00:09:00Z", 10.0, 0, "2024-01-03T00:09:00Z", null) + ); + + List versionedRows = List.of( + row("2024-05-01T15:00:00Z", 4.0, 0, "2024-06-20T09:00:00Z", olderVersion), + row("2024-05-01T16:00:00Z", 4.0, 0, "2024-06-20T09:01:00Z", olderVersion), + row("2024-05-01T17:00:00Z", 4.0, 0, "2024-06-20T09:02:00Z", olderVersion), + row("2024-05-01T18:00:00Z", 3.0, 0, "2024-06-20T09:03:00Z", olderVersion), + row("2024-05-01T15:00:00Z", 1.0, 0, "2024-06-21T09:00:00Z", newerVersion), + row("2024-05-01T16:00:00Z", 1.0, 0, "2024-06-21T09:01:00Z", newerVersion), + row("2024-05-01T17:00:00Z", 1.0, 0, "2024-06-21T09:02:00Z", newerVersion) + ); + + List irregularRows = List.of( + row("2024-01-05T12:00:00Z", 10.0, 0, "2024-01-06T00:00:00Z", null), + row("2024-01-05T12:07:20Z", 20.0, 0, "2024-01-06T00:01:00Z", null), + 
row("2024-01-05T12:19:45Z", 30.0, 0, "2024-01-06T00:02:00Z", null), + row("2024-01-05T12:33:10Z", 40.0, 0, "2024-01-06T00:03:00Z", null) + ); + + return Stream.of( + new Scenario("dense-regular", + "ITPARREG", + "ITPARREG.Stage.Inst.1Minute.0.BENCH", + "ft", + Instant.parse("2024-01-01T00:00:00Z"), + Instant.parse("2024-01-01T00:05:00Z"), + denseRows, + false, + false, + "UNVERSIONED", + "PT1M", + 0L, + null), + new Scenario("dense-regular-entry-date", + "ITPARREG", + "ITPARREG.Stage.Inst.1Minute.0.BENCH", + "ft", + Instant.parse("2024-01-01T00:00:00Z"), + Instant.parse("2024-01-01T00:05:00Z"), + denseRows, + false, + true, + "UNVERSIONED", + "PT1M", + 0L, + null), + new Scenario("gap-regular", + "ITPARGAP", + "ITPARGAP.Stage.Inst.1Minute.0.BENCH", + "ft", + Instant.parse("2024-01-01T00:00:00Z"), + Instant.parse("2024-01-01T00:09:00Z"), + gapRows, + false, + false, + "UNVERSIONED", + "PT1M", + 0L, + null), + new Scenario("versioned-max", + "ITPARVER", + "ITPARVER.Flow.Inst.1Hour.0.BENCH", + "cfs", + Instant.parse("2024-05-01T15:00:00Z"), + Instant.parse("2024-05-01T18:00:00Z"), + versionedRows, + true, + false, + "MAX_AGGREGATE", + "PT1H", + 0L, + null), + new Scenario("versioned-single", + "ITPARVER", + "ITPARVER.Flow.Inst.1Hour.0.BENCH", + "cfs", + Instant.parse("2024-05-01T15:00:00Z"), + Instant.parse("2024-05-01T18:00:00Z"), + versionedRows, + true, + false, + "SINGLE_VERSION", + "PT1H", + 0L, + newerVersion), + new Scenario("irregular", + "ITPARIRR", + "ITPARIRR.Flow.Inst.0.0.BENCH", + "cfs", + Instant.parse("2024-01-05T12:00:00Z"), + Instant.parse("2024-01-05T12:33:10Z"), + irregularRows, + false, + false, + "UNVERSIONED", + "PT0S", + Integer.MIN_VALUE, + null) + ); + } + + private static SeedRow row(String dateTime, Double value, int qualityCode, String dataEntryDate, Instant versionDate) { + return new SeedRow( + Instant.parse(dateTime), + value, + qualityCode, + Instant.parse(dataEntryDate), + versionDate + ); + } + + private static void 
assertRowsEqual(RetrievedRow expected, RetrievedRow actual, int index) { + assertEquals(expected.dateTimeMillis, actual.dateTimeMillis, "Row " + index + " timestamp"); + assertEquals(expected.qualityCode, actual.qualityCode, "Row " + index + " quality"); + + if (expected.value == null) { + assertNull(actual.value, "Row " + index + " value"); + } else { + assertNotNull(actual.value, "Row " + index + " value"); + assertEquals(expected.value, actual.value, DOUBLE_TOLERANCE, "Row " + index + " value"); + } + + if (expected.dataEntryDateMillis == null) { + assertNull(actual.dataEntryDateMillis, "Row " + index + " entry date"); + } else { + assertEquals(expected.dataEntryDateMillis, actual.dataEntryDateMillis, "Row " + index + " entry date"); + } + } + + private static void seedScenario(Scenario scenario) throws SQLException { + createLocation(scenario.locationId, true, OFFICE); + createTimeseries(OFFICE, scenario.seriesId, 0); + + CwmsDatabaseContainer database = CwmsDataApiSetupCallback.getDatabaseLink(); + database.connection(connection -> { + try { + CWMS_TS_PACKAGE.call_SET_TSID_VERSIONED(DSL.using(connection).configuration(), + scenario.seriesId, + scenario.versioned ? "T" : "F", + OFFICE); + + long tsCode = findTsCode(connection, scenario.seriesId); + List years = scenario.rows.stream() + .map(row -> OffsetDateTime.ofInstant(row.dateTime, ZoneOffset.UTC).getYear()) + .distinct() + .collect(Collectors.toList()); + + clearScenarioRows(connection, tsCode, years); + insertScenarioRows(connection, tsCode, scenario.rows); + updateScenarioExtents(connection, tsCode, scenario.rows); + } catch (SQLException e) { + throw new RuntimeException("Unable to seed scenario " + scenario.name, e); + } + }, "cwms_20"); + } + + private static long findTsCode(Connection connection, String seriesId) throws SQLException { + String sql = "select ts_code from at_cwms_ts_id where db_office_id = ? 
and cwms_ts_id = ?"; + try (PreparedStatement statement = connection.prepareStatement(sql)) { + statement.setString(1, OFFICE); + statement.setString(2, seriesId); + try (ResultSet resultSet = statement.executeQuery()) { + if (!resultSet.next()) { + throw new IllegalStateException("Unable to find ts_code for " + seriesId); + } + return resultSet.getLong(1); + } + } + } + + private static void clearScenarioRows(Connection connection, long tsCode, List years) throws SQLException { + for (Integer year : years) { + try (PreparedStatement statement = connection.prepareStatement( + "delete from at_tsv_" + year + " where ts_code = ?")) { + statement.setLong(1, tsCode); + statement.executeUpdate(); + } + } + + try (PreparedStatement statement = connection.prepareStatement( + "delete from at_ts_extents where ts_code = ?")) { + statement.setLong(1, tsCode); + statement.executeUpdate(); + } + } + + private static void insertScenarioRows(Connection connection, long tsCode, List rows) throws SQLException { + List sortedRows = new ArrayList<>(rows); + sortedRows.sort(Comparator.comparing(seedRow -> seedRow.dateTime)); + + for (SeedRow row : sortedRows) { + int year = OffsetDateTime.ofInstant(row.dateTime, ZoneOffset.UTC).getYear(); + String sql = "insert into at_tsv_" + year + + " (ts_code, date_time, version_date, data_entry_date, value, quality_code, dest_flag)" + + " values (" + + tsCode + ", " + + toOracleDateExpression(row.dateTime) + ", " + + (row.versionDate != null ? toOracleDateExpression(row.versionDate) : "date '1111-11-11'") + ", " + + (row.dataEntryDate != null ? toOracleTimestampExpression(row.dataEntryDate) : "null") + ", " + + (row.value != null ? 
Double.toString(row.value) : "null") + ", " + + row.qualityCode + + ", 0)"; + try (PreparedStatement statement = connection.prepareStatement(sql)) { + statement.executeUpdate(); + } + } + } + + private static void updateScenarioExtents(Connection connection, long tsCode, List rows) throws SQLException { + Set distinctVersionDates = rows.stream() + .map(seedRow -> seedRow.versionDate) + .filter(Objects::nonNull) + .collect(Collectors.toCollection(LinkedHashSet::new)); + + if (distinctVersionDates.isEmpty()) { + updateTsExtents(connection, tsCode, "date '1111-11-11'"); + return; + } + + for (Instant versionDate : distinctVersionDates) { + updateTsExtents(connection, tsCode, toOracleDateExpression(versionDate)); + } + } + + private static void updateTsExtents(Connection connection, long tsCode, String versionDateExpression) throws SQLException { + String sql = "begin cwms_ts.update_ts_extents(" + tsCode + ", " + versionDateExpression + "); end;"; + try (PreparedStatement statement = connection.prepareStatement(sql)) { + statement.execute(); + } + } + + private static List fetchOracleRows(Scenario scenario) throws SQLException { + CwmsDatabaseContainer database = CwmsDataApiSetupCallback.getDatabaseLink(); + return database.connection(connection -> { + try { + String functionName = scenario.includeEntryDate + ? "cwms_20.cwms_ts.retrieve_ts_entry_out_tab" + : "cwms_20.cwms_ts.retrieve_ts_out_tab"; + String rowProjection = scenario.includeEntryDate + ? ", case when data_entry_date is null then null else round((cast(data_entry_date as date) - date '1970-01-01') * 86400000) end as data_entry_date_ms" + : ""; + String versionDateExpression = scenario.versionDate != null + ? toOracleDateExpression(scenario.versionDate) + : "null"; + String maxVersionFlag = scenario.versionDate != null ? 
"'F'" : "'T'"; + String sql = "select round((date_time - date '1970-01-01') * 86400000) as date_time_ms," + + " value," + + " quality_code" + + rowProjection + + " from table(" + functionName + "(" + + toSqlStringLiteral(scenario.seriesId) + ", " + + toSqlStringLiteral(scenario.units) + ", " + + toOracleDateExpression(scenario.beginTime) + ", " + + toOracleDateExpression(scenario.endTime) + ", " + + "'UTC', 'T', 'T', 'T', 'F', 'F', " + + versionDateExpression + ", " + + maxVersionFlag + ", " + + toSqlStringLiteral(OFFICE) + + "))" + + " order by date_time"; + + try (PreparedStatement statement = connection.prepareStatement(sql)) { + try (ResultSet resultSet = statement.executeQuery()) { + List rows = new ArrayList<>(); + while (resultSet.next()) { + Double value = resultSet.getDouble("value"); + if (resultSet.wasNull()) { + value = null; + } + + Long dataEntryDateMillis = null; + if (scenario.includeEntryDate) { + long entryMillis = resultSet.getLong("data_entry_date_ms"); + if (!resultSet.wasNull()) { + dataEntryDateMillis = entryMillis; + } + } + + rows.add(new RetrievedRow( + resultSet.getLong("date_time_ms"), + value, + resultSet.getInt("quality_code"), + dataEntryDateMillis + )); + } + return rows; + } + } + } catch (SQLException e) { + throw new RuntimeException("Unable to fetch Oracle rows for " + scenario.name, e); + } + }, "cwms_20"); + } + + private static TimeSeriesResponse fetchCdaRows(Scenario scenario) throws Exception { + RequestSpecification request = given() + .log().ifValidationFails(LogDetail.ALL, true) + .accept(Formats.JSONV2) + .queryParam(Controllers.OFFICE, OFFICE) + .queryParam(Controllers.NAME, scenario.seriesId) + .queryParam(Controllers.UNIT, scenario.units) + .queryParam(Controllers.BEGIN, scenario.beginTime.toString()) + .queryParam(Controllers.END, scenario.endTime.toString()) + .queryParam("page-size", 1000) + .queryParam(Controllers.INCLUDE_ENTRY_DATE, scenario.includeEntryDate); + if (scenario.versionDate != null) { + request = 
request.queryParam(Controllers.VERSION_DATE, scenario.versionDate.toString()); + } + + ExtractableResponse response = request.when() + .redirects().follow(true) + .redirects().max(3) + .get("/timeseries/") + .then() + .log().ifValidationFails(LogDetail.ALL, true) + .assertThat() + .statusCode(is(HttpServletResponse.SC_OK)) + .extract(); + + JsonNode payload = OBJECT_MAPPER.readTree(response.asString()); + List rows = new ArrayList<>(); + for (JsonNode entry : payload.get("values")) { + Double value = entry.get(1).isNull() ? null : entry.get(1).asDouble(); + Long dataEntryDateMillis = null; + if (scenario.includeEntryDate && entry.size() > 3 && !entry.get(3).isNull()) { + dataEntryDateMillis = entry.get(3).asLong(); + } + rows.add(new RetrievedRow( + entry.get(0).asLong(), + value, + entry.get(2).asInt(), + dataEntryDateMillis + )); + } + + Instant versionDate = null; + JsonNode versionDateNode = payload.get("version-date"); + if (versionDateNode != null && !versionDateNode.isNull()) { + versionDate = OffsetDateTime.parse(versionDateNode.asText()).toInstant(); + } + + return new TimeSeriesResponse( + rows, + payload.get("total").asInt(), + payload.get("date-version-type").asText(), + payload.get("interval").asText(), + payload.get("interval-offset").asLong(), + versionDate + ); + } + + private static String toSqlStringLiteral(String value) { + return "'" + value.replace("'", "''") + "'"; + } + + private static String toOracleDateExpression(Instant instant) { + LocalDateTime utc = LocalDateTime.ofInstant(instant, ZoneOffset.UTC); + return "to_date('" + utc.format(java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")) + + "', 'yyyy-mm-dd hh24:mi:ss')"; + } + + private static String toOracleTimestampExpression(Instant instant) { + LocalDateTime utc = LocalDateTime.ofInstant(instant, ZoneOffset.UTC); + return "to_timestamp('" + utc.format(java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")) + + "', 'yyyy-mm-dd hh24:mi:ss')"; + } + + private 
static final class Scenario { + private final String name; + private final String locationId; + private final String seriesId; + private final String units; + private final Instant beginTime; + private final Instant endTime; + private final List rows; + private final boolean versioned; + private final boolean includeEntryDate; + private final String expectedDateVersionType; + private final String expectedInterval; + private final long expectedIntervalOffset; + private final Instant versionDate; + + private Scenario(String name, String locationId, String seriesId, String units, Instant beginTime, + Instant endTime, List rows, boolean versioned, boolean includeEntryDate, + String expectedDateVersionType, String expectedInterval, long expectedIntervalOffset, + Instant versionDate) { + this.name = name; + this.locationId = locationId; + this.seriesId = seriesId; + this.units = units; + this.beginTime = beginTime; + this.endTime = endTime; + this.rows = rows; + this.versioned = versioned; + this.includeEntryDate = includeEntryDate; + this.expectedDateVersionType = expectedDateVersionType; + this.expectedInterval = expectedInterval; + this.expectedIntervalOffset = expectedIntervalOffset; + this.versionDate = versionDate; + } + + @Override + public String toString() { + return name; + } + } + + private static final class SeedRow { + private final Instant dateTime; + private final Double value; + private final int qualityCode; + private final Instant dataEntryDate; + private final Instant versionDate; + + private SeedRow(Instant dateTime, Double value, int qualityCode, Instant dataEntryDate, + Instant versionDate) { + this.dateTime = dateTime; + this.value = value; + this.qualityCode = qualityCode; + this.dataEntryDate = dataEntryDate; + this.versionDate = versionDate; + } + } + + private static final class RetrievedRow { + private final long dateTimeMillis; + private final Double value; + private final int qualityCode; + private final Long dataEntryDateMillis; + + private 
RetrievedRow(long dateTimeMillis, Double value, int qualityCode, Long dataEntryDateMillis) { + this.dateTimeMillis = dateTimeMillis; + this.value = value; + this.qualityCode = qualityCode; + this.dataEntryDateMillis = dataEntryDateMillis; + } + } + + private static final class TimeSeriesResponse { + private final List rows; + private final int total; + private final String dateVersionType; + private final String interval; + private final long intervalOffset; + private final Instant versionDate; + + private TimeSeriesResponse(List rows, int total, String dateVersionType, + String interval, long intervalOffset, Instant versionDate) { + this.rows = rows; + this.total = total; + this.dateVersionType = dateVersionType; + this.interval = interval; + this.intervalOffset = intervalOffset; + this.versionDate = versionDate; + } + } +} From baae043791194d96c6c3b81dca1d5331c766e12b Mon Sep 17 00:00:00 2001 From: "Charles Graham, SWT" Date: Tue, 14 Apr 2026 01:35:01 -0500 Subject: [PATCH 04/16] Port timeseries benchmark harness to Java --- cwms-data-api/build.gradle | 29 + .../cwms/cda/data/dao/TimeSeriesDaoImpl.java | 56 ++ .../cda/api/TimeSeriesDirectReadParityIT.java | 33 +- .../fixtures/CwmsDataApiSetupCallback.java | 33 + .../test/java/fixtures/KeyCloakExtension.java | 10 + .../test/java/fixtures/MinIOExtension.java | 6 + .../java/helpers/TimeSeriesReadBenchmark.java | 705 ++++++++++++++++ .../invoke-timeseries-parity-check.ps1 | 751 ------------------ .../invoke-timeseries-read-benchmark.ps1 | 454 ----------- 9 files changed, 871 insertions(+), 1206 deletions(-) create mode 100644 cwms-data-api/src/test/java/helpers/TimeSeriesReadBenchmark.java delete mode 100644 load_data/performance/invoke-timeseries-parity-check.ps1 delete mode 100644 load_data/performance/invoke-timeseries-read-benchmark.ps1 diff --git a/cwms-data-api/build.gradle b/cwms-data-api/build.gradle index f89f388f14..dfc00e2b89 100644 --- a/cwms-data-api/build.gradle +++ b/cwms-data-api/build.gradle @@ 
-304,6 +304,35 @@ task integrationTests(type: Test) { jvmArgs += "-Dcatalina.base=$buildDir/tomcat" } +task timeseriesReadBenchmark(type: JavaExec) { + group "verification" + description = "Run the local time-series read benchmark harness" + dependsOn generateConfig + dependsOn war + dependsOn testClasses + + workingDir = projectDir + classpath = sourceSets.test.runtimeClasspath + classpath += configurations.baseLibs + classpath += configurations.tomcatLibs + + mainClass = "helpers.TimeSeriesReadBenchmark" + + systemProperties += project.properties.findAll { k, v -> k.startsWith("RADAR") && !k.startsWith("RADAR_JDBC") } + systemProperties += project.properties.findAll { k, v -> k.startsWith("CDA") && !k.startsWith("CDA_JDBC") } + systemProperties += project.properties.findAll { k, v -> k.startsWith("testcontainer") } + systemProperties += project.properties.findAll { k, v -> k.startsWith("benchmark.") } + + jvmArgs += "-DwarFile=$buildDir/libs/${project.name}-${project.version}.war" + jvmArgs += "-DwarContext=/cwms-data" + jvmArgs += "-Djava.util.logging.manager=org.apache.juli.ClassLoaderLogManager" + jvmArgs += "-Djava.util.logging.config.file=$projectDir/logging.properties" + jvmArgs += "-Dorg.apache.tomcat.util.digester.PROPERTY_SOURCE=org.apache.tomcat.util.digester.EnvironmentPropertySource" + jvmArgs += "-Dcwms.dataapi.access.provider=MultipleAccessManager" + jvmArgs += "-Dcwms.dataapi.access.providers=KeyAccessManager,CwmsAccessManager" + jvmArgs += "-Dcatalina.base=$buildDir/tomcat" +} + task prepareDockerBuild(type: Copy, dependsOn: war) { doFirst { project.mkdir("$buildDir/docker") diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java index 10e4d27b01..d9db374174 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java @@ -64,6 +64,7 @@ import 
java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletableFuture; @@ -920,6 +921,10 @@ private List fetchExpectedRegularTimes(RequestedTimeSeriesMetadata me : Timestamp.from(requestParameters.getEndTime().toInstant()); long offsetMinutes = resolveIntervalOffset(metadata, rawRows); + if (canGenerateExpectedTimesInJava(metadata)) { + return buildExpectedRegularTimesUtc(rangeStart, rangeEnd, metadata.getIntervalMinutes(), offsetMinutes); + } + String intervalTimeZone = metadata.isLrts() ? metadata.getTimeZoneId() : UTC; DATE_RANGE_T dateRange = new DATE_RANGE_T(rangeStart, rangeEnd, UTC, "T", "T", null); DATE_TABLE_TYPE expectedTimeTable = CWMS_TS_PACKAGE.call_GET_REG_TS_TIMES_UTC_F( @@ -951,6 +956,11 @@ private long resolveIntervalOffset(RequestedTimeSeriesMetadata metadata, return 0L; } + if (canGenerateExpectedTimesInJava(metadata)) { + long intervalMillis = TimeUnit.MINUTES.toMillis(metadata.getIntervalMinutes()); + return TimeUnit.MILLISECONDS.toMinutes(Math.floorMod(rawRows.get(0).getDateTime().getTime(), intervalMillis)); + } + String intervalTimeZone = metadata.isLrts() ? 
metadata.getTimeZoneId() : UTC; Timestamp topOfInterval = normalizeOracleUtcTimestamp(CWMS_TS_PACKAGE.call_TOP_OF_INTERVAL_UTC( dsl.configuration(), @@ -1072,6 +1082,52 @@ private Timestamp normalizeOracleUtcTimestamp(Timestamp timestamp) { return Timestamp.from(utcWallTime.toInstant(ZoneOffset.UTC)); } + private boolean canGenerateExpectedTimesInJava(RequestedTimeSeriesMetadata metadata) { + if (metadata.isLrts() || metadata.getIntervalMinutes() <= 0L) { + return false; + } + + String intervalPart = metadata.getIntervalPart(); + if (intervalPart == null) { + return false; + } + + String normalizedInterval = intervalPart.toLowerCase(Locale.ENGLISH); + return normalizedInterval.endsWith("minute") + || normalizedInterval.endsWith("minutes") + || normalizedInterval.endsWith("hour") + || normalizedInterval.endsWith("hours") + || normalizedInterval.endsWith("day") + || normalizedInterval.endsWith("days") + || normalizedInterval.endsWith("week") + || normalizedInterval.endsWith("weeks"); + } + + private List buildExpectedRegularTimesUtc(Timestamp rangeStart, + Timestamp rangeEnd, + long intervalMinutes, + long offsetMinutes) { + long intervalMillis = TimeUnit.MINUTES.toMillis(intervalMinutes); + long offsetMillis = TimeUnit.MINUTES.toMillis(Math.floorMod(offsetMinutes, intervalMinutes)); + long startMillis = rangeStart.getTime(); + long endMillis = rangeEnd.getTime(); + long firstMillis = alignToInterval(startMillis, intervalMillis, offsetMillis); + + List expectedTimes = new ArrayList<>(); + for (long millis = firstMillis; millis <= endMillis; millis += intervalMillis) { + expectedTimes.add(new Timestamp(millis)); + } + return expectedTimes; + } + + private long alignToInterval(long timestampMillis, long intervalMillis, long offsetMillis) { + long remainder = Math.floorMod(timestampMillis - offsetMillis, intervalMillis); + if (remainder == 0L) { + return timestampMillis; + } + return timestampMillis + (intervalMillis - remainder); + } + private void 
validateRequestedUnits(String sourceUnit, String requestedUnit) { if (sourceUnit == null || requestedUnit == null || sourceUnit.equalsIgnoreCase(requestedUnit)) { return; diff --git a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java index 43e55241ce..b3e9d68834 100644 --- a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java +++ b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java @@ -17,6 +17,7 @@ import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; +import java.time.Duration; import java.time.Instant; import java.time.LocalDateTime; import java.time.OffsetDateTime; @@ -29,6 +30,7 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; +import java.util.stream.IntStream; import javax.servlet.http.HttpServletResponse; import mil.army.usace.hec.test.database.CwmsDatabaseContainer; import org.jooq.impl.DSL; @@ -129,6 +131,9 @@ private static Stream scenarios() { row("2024-01-05T12:33:10Z", 40.0, 0, "2024-01-06T00:03:00Z", null) ); + Instant dstStart = Instant.parse("2024-03-09T00:00:00Z"); + List dstRows = regularRows(dstStart, 5000, 1.0, Duration.ofDays(1)); + return Stream.of( new Scenario("dense-regular", "ITPARREG", @@ -207,6 +212,19 @@ private static Stream scenarios() { "UNVERSIONED", "PT0S", Integer.MIN_VALUE, + null), + new Scenario("dense-regular-dst-window", + "ITPARDST", + "ITPARDST.Stage.Inst.1Minute.0.BENCH", + "ft", + dstStart, + dstStart.plus(Duration.ofMinutes(4999)), + dstRows, + false, + false, + "UNVERSIONED", + "PT1M", + 0L, null) ); } @@ -221,6 +239,18 @@ private static SeedRow row(String dateTime, Double value, int qualityCode, Strin ); } + private static List regularRows(Instant start, int count, double firstValue, Duration entryDateOffset) { + return IntStream.range(0, count) + .mapToObj(index -> new SeedRow( + 
start.plusSeconds(index * 60L), + firstValue + index, + 0, + start.plus(entryDateOffset).plusSeconds(index * 60L), + null + )) + .collect(Collectors.toList()); + } + private static void assertRowsEqual(RetrievedRow expected, RetrievedRow actual, int index) { assertEquals(expected.dateTimeMillis, actual.dateTimeMillis, "Row " + index + " timestamp"); assertEquals(expected.qualityCode, actual.qualityCode, "Row " + index + " quality"); @@ -405,6 +435,7 @@ private static List fetchOracleRows(Scenario scenario) throws SQLE } private static TimeSeriesResponse fetchCdaRows(Scenario scenario) throws Exception { + int pageSize = Math.max(1000, scenario.rows.size() * 2); RequestSpecification request = given() .log().ifValidationFails(LogDetail.ALL, true) .accept(Formats.JSONV2) @@ -413,7 +444,7 @@ private static TimeSeriesResponse fetchCdaRows(Scenario scenario) throws Excepti .queryParam(Controllers.UNIT, scenario.units) .queryParam(Controllers.BEGIN, scenario.beginTime.toString()) .queryParam(Controllers.END, scenario.endTime.toString()) - .queryParam("page-size", 1000) + .queryParam("page-size", pageSize) .queryParam(Controllers.INCLUDE_ENTRY_DATE, scenario.includeEntryDate); if (scenario.versionDate != null) { request = request.queryParam(Controllers.VERSION_DATE, scenario.versionDate.toString()); diff --git a/cwms-data-api/src/test/java/fixtures/CwmsDataApiSetupCallback.java b/cwms-data-api/src/test/java/fixtures/CwmsDataApiSetupCallback.java index 7781b7ca0c..03994a2321 100644 --- a/cwms-data-api/src/test/java/fixtures/CwmsDataApiSetupCallback.java +++ b/cwms-data-api/src/test/java/fixtures/CwmsDataApiSetupCallback.java @@ -266,6 +266,39 @@ public static CwmsDatabaseContainer getDatabaseLink() { return cwmsDb; } + public static void shutdown() throws Exception { + Exception failure = null; + if (cdaInstance != null) { + try { + cdaInstance.stop(); + } catch (Exception e) { + failure = e; + } finally { + cdaInstance = null; + } + } + + if (cwmsDb != null) { + try { + 
cwmsDb.stop(); + } catch (Exception e) { + if (failure == null) { + failure = e; + } else { + failure.addSuppressed(e); + } + } finally { + cwmsDb = null; + } + } + + webUser = null; + + if (failure != null) { + throw failure; + } + } + private String loadResourceAsString(String fileName) { try { return IOUtils.toString( diff --git a/cwms-data-api/src/test/java/fixtures/KeyCloakExtension.java b/cwms-data-api/src/test/java/fixtures/KeyCloakExtension.java index 4949b27186..70a0bd2232 100644 --- a/cwms-data-api/src/test/java/fixtures/KeyCloakExtension.java +++ b/cwms-data-api/src/test/java/fixtures/KeyCloakExtension.java @@ -121,6 +121,16 @@ public static String getCodeUrl() { public static String getTokenUrl() { return tokenUrl; } + + public static void shutdown() { + if (kcc.isRunning()) { + kcc.stop(); + } + authUrl = null; + issuer = null; + codeUrl = null; + tokenUrl = null; + } /** * Retrieve the Access token for the user. diff --git a/cwms-data-api/src/test/java/fixtures/MinIOExtension.java b/cwms-data-api/src/test/java/fixtures/MinIOExtension.java index 15dce3f721..8eeacb4455 100644 --- a/cwms-data-api/src/test/java/fixtures/MinIOExtension.java +++ b/cwms-data-api/src/test/java/fixtures/MinIOExtension.java @@ -52,5 +52,11 @@ private static void createTestBucket() { } } + public static void shutdown() { + if (MINIO_CONTAINER.isRunning()) { + MINIO_CONTAINER.stop(); + } + } + } diff --git a/cwms-data-api/src/test/java/helpers/TimeSeriesReadBenchmark.java b/cwms-data-api/src/test/java/helpers/TimeSeriesReadBenchmark.java new file mode 100644 index 0000000000..1ae74aae4f --- /dev/null +++ b/cwms-data-api/src/test/java/helpers/TimeSeriesReadBenchmark.java @@ -0,0 +1,705 @@ +package helpers; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.databind.ObjectMapper; +import fixtures.CwmsDataApiSetupCallback; +import fixtures.KeyCloakExtension; 
+import fixtures.MinIOExtension; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URLEncoder; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.time.Duration; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.List; +import mil.army.usace.hec.test.database.CwmsDatabaseContainer; +import org.jooq.impl.DSL; +import usace.cwms.db.jooq.codegen.packages.CWMS_TS_PACKAGE; + +public final class TimeSeriesReadBenchmark { + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + private static final JsonFactory JSON_FACTORY = new JsonFactory(); + private static final DateTimeFormatter REQUEST_TIME_FORMAT = + DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").withZone(ZoneOffset.UTC); + private static final DateTimeFormatter ORACLE_DATE_TIME_FORMAT = + DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneOffset.UTC); + private static final String ACCEPT_JSON_V2 = "application/json;version=2"; + private static final String NON_VERSIONED_DATE_SQL = "date '1111-11-11'"; + + private TimeSeriesReadBenchmark() { + } + + public static void main(String[] args) throws Exception { + BenchmarkConfig config = BenchmarkConfig.fromSystemProperties(); + System.out.println("Starting benchmark fixtures..."); + + try { + new KeyCloakExtension().beforeAll(null); + new MinIOExtension().beforeAll(null); + new CwmsDataApiSetupCallback().beforeAll(null); + + System.out.println("Running benchmark..."); + BenchmarkReport report = runBenchmark(config); + + Files.createDirectories(config.resultsDir); + Path 
resultFile = config.resultsDir.resolve("timeseries-read-benchmark-" + + DateTimeFormatter.ofPattern("yyyyMMdd-HHmmss").withZone(ZoneOffset.UTC).format(Instant.now()) + + ".json"); + + OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValue(resultFile.toFile(), report); + OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValue(System.out, report); + System.out.println(); + System.out.println("Benchmark report written to " + resultFile); + + for (BenchmarkRun run : report.runs) { + if (run.httpCode != 200) { + throw new IllegalStateException( + "Benchmark completed with HTTP failures. Results saved to " + resultFile); + } + } + } finally { + System.out.println("Shutting down benchmark fixtures..."); + shutdownFixtures(); + } + } + + private static BenchmarkReport runBenchmark(BenchmarkConfig config) throws Exception { + Files.createDirectories(config.resultsDir); + Files.createDirectories(config.responsesDir); + + SeedInfo seed = ensureBenchmarkSeed(config); + if (seed.pointCount != config.pointCount) { + throw new IllegalStateException("Expected " + config.pointCount + " seeded points but found " + + seed.pointCount); + } + + waitForCdaReady(config); + if (config.warmup) { + Path warmupFile = config.responsesDir.resolve("warmup.json"); + executeRequest(config, warmupFile); + if (!config.keepResponses) { + Files.deleteIfExists(warmupFile); + } + } + + List runs = new ArrayList<>(); + for (int runIndex = 1; runIndex <= config.runs; runIndex++) { + runs.add(executeRun(config, runIndex)); + } + + return new BenchmarkReport( + "timeseries-read", + Instant.now().toString(), + resolveGitValue("git", "branch", "--show-current"), + resolveGitValue("git", "rev-parse", "HEAD"), + config.office, + config.locationId, + config.seriesId, + config.units, + config.startTime.toString(), + config.endTime.toString(), + config.pointCount, + config.pageSize, + config.requestUrl().toString(), + seed, + BenchmarkSummary.fromRuns(runs), + runs + ); + } + + private static SeedInfo 
ensureBenchmarkSeed(BenchmarkConfig config) throws SQLException { + long existingCount = getSeededPointCount(config); + if (config.skipSeed) { + return new SeedInfo(false, existingCount); + } + if (!config.forceReseed && existingCount == config.pointCount) { + return new SeedInfo(false, existingCount); + } + + CwmsDatabaseContainer database = CwmsDataApiSetupCallback.getDatabaseLink(); + database.connection(connection -> { + try { + ensureLocationExists(connection, config); + ensureTimeSeriesExists(connection, config); + CWMS_TS_PACKAGE.call_SET_TSID_VERSIONED( + DSL.using(connection).configuration(), config.seriesId, "F", config.office); + + long tsCode = findTsCode(connection, config.office, config.seriesId); + List segments = buildYearSegments(config.startTime, config.pointCount); + clearSeededRows(connection, tsCode, segments); + insertSeededRows(connection, tsCode, segments); + updateTsExtents(connection, tsCode); + if (!connection.getAutoCommit()) { + connection.commit(); + } + } catch (SQLException e) { + throw new RuntimeException("Unable to seed benchmark series " + config.seriesId, e); + } + }, "cwms_20"); + + return new SeedInfo(true, getSeededPointCount(config)); + } + + private static void ensureLocationExists(Connection connection, BenchmarkConfig config) throws SQLException { + String sql = "declare " + + "location_exists exception; " + + "pragma exception_init(location_exists, -20026); " + + "begin " + + "cwms_loc.create_location(?, ?, null, null, null, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); " + + "exception when location_exists then null; " + + "end;"; + try (PreparedStatement statement = connection.prepareStatement(sql)) { + statement.setString(1, config.locationId); + statement.setString(2, "SITE"); + statement.setDouble(3, 38.0d); + statement.setDouble(4, -90.0d); + statement.setString(5, "NAD83"); + statement.setString(6, config.locationId); + statement.setString(7, config.locationId + " Benchmark Location"); + statement.setString(8, "Performance 
benchmark location"); + statement.setString(9, "UTC"); + statement.setString(10, null); + statement.setString(11, null); + statement.setString(12, "T"); + statement.setString(13, config.office); + statement.execute(); + } + } + + private static void ensureTimeSeriesExists(Connection connection, BenchmarkConfig config) throws SQLException { + String sql = "declare " + + "ts_exists exception; " + + "pragma exception_init(ts_exists, -20003); " + + "begin " + + "cwms_ts.create_ts(?, ?, 0); " + + "exception when ts_exists then null; " + + "end;"; + try (PreparedStatement statement = connection.prepareStatement(sql)) { + statement.setString(1, config.office); + statement.setString(2, config.seriesId); + statement.execute(); + } + } + + private static long findTsCode(Connection connection, String office, String seriesId) throws SQLException { + try (PreparedStatement statement = connection.prepareStatement( + "select ts_code from at_cwms_ts_id where db_office_id = ? and cwms_ts_id = ?")) { + statement.setString(1, office); + statement.setString(2, seriesId); + try (ResultSet resultSet = statement.executeQuery()) { + if (!resultSet.next()) { + throw new IllegalStateException("Unable to find ts_code for " + seriesId); + } + return resultSet.getLong(1); + } + } + } + + private static void clearSeededRows(Connection connection, long tsCode, List segments) throws SQLException { + for (YearSegment segment : segments) { + try (PreparedStatement statement = connection.prepareStatement( + "delete from at_tsv_" + segment.year + " where ts_code = ?")) { + statement.setLong(1, tsCode); + statement.executeUpdate(); + } + } + try (PreparedStatement statement = connection.prepareStatement( + "delete from at_ts_extents where ts_code = ?")) { + statement.setLong(1, tsCode); + statement.executeUpdate(); + } + } + + private static void insertSeededRows(Connection connection, long tsCode, List segments) throws SQLException { + for (YearSegment segment : segments) { + String sql = "insert /*+ 
APPEND */ into at_tsv_" + segment.year + + " (ts_code, date_time, version_date, data_entry_date, value, quality_code, dest_flag) " + + "select ?, to_date(?, 'yyyy-mm-dd hh24:mi:ss') + numtodsinterval(level - 1, 'MINUTE'), " + + NON_VERSIONED_DATE_SQL + ", systimestamp, ? + level - 1, 0, 0 " + + "from dual connect by level <= ?"; + try (PreparedStatement statement = connection.prepareStatement(sql)) { + statement.setLong(1, tsCode); + statement.setString(2, ORACLE_DATE_TIME_FORMAT.format(segment.startTime)); + statement.setLong(3, segment.valueStart); + statement.setInt(4, segment.count); + statement.executeUpdate(); + } + } + } + + private static void updateTsExtents(Connection connection, long tsCode) throws SQLException { + try (PreparedStatement statement = connection.prepareStatement( + "begin cwms_ts.update_ts_extents(?, " + NON_VERSIONED_DATE_SQL + "); end;")) { + statement.setLong(1, tsCode); + statement.execute(); + } + } + + private static long getSeededPointCount(BenchmarkConfig config) throws SQLException { + CwmsDatabaseContainer database = CwmsDataApiSetupCallback.getDatabaseLink(); + return database.connection(connection -> { + try (PreparedStatement statement = connection.prepareStatement( + "select count(*) from av_tsv v " + + "join at_cwms_ts_id t on t.ts_code = v.ts_code " + + "where t.db_office_id = ? 
and t.cwms_ts_id = ?")) { + statement.setString(1, config.office); + statement.setString(2, config.seriesId); + try (ResultSet resultSet = statement.executeQuery()) { + resultSet.next(); + return resultSet.getLong(1); + } + } catch (SQLException e) { + throw new RuntimeException("Unable to count seeded rows for " + config.seriesId, e); + } + }, "cwms_20"); + } + + private static List buildYearSegments(Instant startTime, int pointCount) { + List segments = new ArrayList<>(); + Instant cursor = startTime; + int remaining = pointCount; + long valueStart = 1L; + while (remaining > 0) { + Instant nextYear = cursor.atOffset(ZoneOffset.UTC) + .withDayOfYear(1) + .withHour(0) + .withMinute(0) + .withSecond(0) + .withNano(0) + .plusYears(1) + .toInstant(); + long minutesUntilNextYear = Math.max(1L, Duration.between(cursor, nextYear).toMinutes()); + int segmentCount = (int) Math.min(remaining, minutesUntilNextYear); + segments.add(new YearSegment(cursor.atOffset(ZoneOffset.UTC).getYear(), cursor, segmentCount, valueStart)); + cursor = cursor.plusSeconds(segmentCount * 60L); + valueStart += segmentCount; + remaining -= segmentCount; + } + return segments; + } + + private static void waitForCdaReady(BenchmarkConfig config) throws Exception { + HttpClient client = HttpClient.newHttpClient(); + URI readinessUri = URI.create(config.resolvedBaseUrl() + "/offices/" + urlEncode(config.office)); + for (int attempt = 0; attempt < 30; attempt++) { + HttpRequest request = HttpRequest.newBuilder(readinessUri) + .header("Accept", ACCEPT_JSON_V2) + .GET() + .build(); + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofInputStream()); + try (InputStream ignored = response.body()) { + if (response.statusCode() == 200) { + return; + } + } + Thread.sleep(1000L); + } + throw new IllegalStateException("CDA did not become ready at " + readinessUri); + } + + private static BenchmarkRun executeRun(BenchmarkConfig config, int runIndex) throws Exception { + Path responseFile = 
config.responsesDir.resolve("timeseries-read-run-" + runIndex + ".json"); + RequestResult requestResult = executeRequest(config, responseFile); + ResponseSummary responseSummary = summarizeResponse(responseFile); + String responseFileValue = responseFile.toAbsolutePath().toString(); + if (!config.keepResponses && requestResult.httpCode == 200) { + Files.deleteIfExists(responseFile); + responseFileValue = null; + } + return new BenchmarkRun( + runIndex, + requestResult.httpCode, + roundSeconds(requestResult.timeTotalNanos), + responseSummary.responseBytes, + responseSummary.reportedTotal, + responseSummary.reportedPageSize, + responseSummary.firstTimestamp, + responseSummary.lastTimestamp, + requestResult.httpCode == 200 ? null : Files.readString(responseFile), + responseFileValue + ); + } + + private static RequestResult executeRequest(BenchmarkConfig config, Path responseFile) throws Exception { + HttpClient client = HttpClient.newHttpClient(); + HttpRequest request = HttpRequest.newBuilder(config.requestUrl()) + .header("Accept", ACCEPT_JSON_V2) + .GET() + .build(); + long startNanos = System.nanoTime(); + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofFile(responseFile)); + long endNanos = System.nanoTime(); + return new RequestResult(response.statusCode(), endNanos - startNanos); + } + + private static ResponseSummary summarizeResponse(Path responseFile) throws IOException { + Integer reportedTotal = null; + Integer reportedPageSize = null; + Long firstTimestamp = null; + Long lastTimestamp = null; + + try (InputStream inputStream = Files.newInputStream(responseFile); + JsonParser parser = JSON_FACTORY.createParser(inputStream)) { + while (parser.nextToken() != null) { + if (parser.currentToken() != JsonToken.FIELD_NAME) { + continue; + } + String fieldName = parser.currentName(); + JsonToken valueToken = parser.nextToken(); + if ("total".equals(fieldName) && valueToken != JsonToken.VALUE_NULL) { + reportedTotal = parser.getIntValue(); 
+ } else if ("page-size".equals(fieldName) && valueToken != JsonToken.VALUE_NULL) { + reportedPageSize = parser.getIntValue(); + } else if ("values".equals(fieldName) && valueToken == JsonToken.START_ARRAY) { + while (parser.nextToken() != JsonToken.END_ARRAY) { + if (parser.currentToken() != JsonToken.START_ARRAY) { + parser.skipChildren(); + continue; + } + parser.nextToken(); + long timestamp = parser.getLongValue(); + if (firstTimestamp == null) { + firstTimestamp = timestamp; + } + lastTimestamp = timestamp; + while (parser.nextToken() != JsonToken.END_ARRAY) { + parser.skipChildren(); + } + } + } else { + parser.skipChildren(); + } + } + } + + return new ResponseSummary( + Files.size(responseFile), + reportedTotal, + reportedPageSize, + firstTimestamp, + lastTimestamp + ); + } + + private static String resolveGitValue(String... command) { + ProcessBuilder processBuilder = new ProcessBuilder(command); + processBuilder.redirectErrorStream(true); + try { + Process process = processBuilder.start(); + byte[] outputBytes = process.getInputStream().readAllBytes(); + int exitCode = process.waitFor(); + if (exitCode != 0) { + return null; + } + String value = new String(outputBytes, StandardCharsets.UTF_8).trim(); + return value.isEmpty() ? 
null : value; + } catch (IOException e) { + return null; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return null; + } + } + + private static double roundSeconds(long nanos) { + return Math.round((nanos / 1_000_000_000.0d) * 1_000_000.0d) / 1_000_000.0d; + } + + private static String urlEncode(String value) { + return URLEncoder.encode(value, StandardCharsets.UTF_8); + } + + private static void shutdownFixtures() throws Exception { + Exception failure = null; + try { + CwmsDataApiSetupCallback.shutdown(); + } catch (Exception e) { + failure = e; + } + + try { + MinIOExtension.shutdown(); + } catch (Exception e) { + if (failure == null) { + failure = e; + } else { + failure.addSuppressed(e); + } + } + + try { + KeyCloakExtension.shutdown(); + } catch (Exception e) { + if (failure == null) { + failure = e; + } else { + failure.addSuppressed(e); + } + } + + if (failure != null) { + throw failure; + } + } + + private static final class BenchmarkConfig { + private final String office; + private final String locationId; + private final String seriesId; + private final String units; + private final String baseUrl; + private final Instant startTime; + private final Instant endTime; + private final int pointCount; + private final int pageSize; + private final int runs; + private final boolean warmup; + private final boolean skipSeed; + private final boolean forceReseed; + private final boolean keepResponses; + private final Path resultsDir; + private final Path responsesDir; + + private BenchmarkConfig(String office, String locationId, String seriesId, String units, String baseUrl, + Instant startTime, int pointCount, int pageSize, int runs, boolean warmup, + boolean skipSeed, boolean forceReseed, boolean keepResponses, Path resultsDir, + Path responsesDir) { + this.office = office; + this.locationId = locationId; + this.seriesId = seriesId; + this.units = units; + this.baseUrl = baseUrl; + this.startTime = startTime; + this.endTime = 
startTime.plusSeconds(Math.max(0L, pointCount - 1L) * 60L); + this.pointCount = pointCount; + this.pageSize = pageSize; + this.runs = runs; + this.warmup = warmup; + this.skipSeed = skipSeed; + this.forceReseed = forceReseed; + this.keepResponses = keepResponses; + this.resultsDir = resultsDir; + this.responsesDir = responsesDir; + } + + private static BenchmarkConfig fromSystemProperties() { + String office = System.getProperty("benchmark.office", "SPK"); + String locationId = System.getProperty("benchmark.locationId", "PERF1MREAD"); + String seriesId = System.getProperty("benchmark.seriesId", "PERF1MREAD.Stage.Inst.1Minute.0.BENCH"); + String units = System.getProperty("benchmark.units", "ft"); + String baseUrl = System.getProperty("benchmark.baseUrl"); + Instant startTime = Instant.parse(System.getProperty("benchmark.startTime", "2024-01-01T00:00:00Z")); + int pointCount = Integer.parseInt(System.getProperty("benchmark.pointCount", "1000000")); + int pageSize = Integer.parseInt(System.getProperty("benchmark.pageSize", String.valueOf(pointCount))); + int runs = Integer.parseInt(System.getProperty("benchmark.runs", "1")); + boolean warmup = Boolean.parseBoolean(System.getProperty("benchmark.warmup", "false")); + boolean skipSeed = Boolean.parseBoolean(System.getProperty("benchmark.skipSeed", "false")); + boolean forceReseed = Boolean.parseBoolean(System.getProperty("benchmark.forceReseed", "false")); + boolean keepResponses = Boolean.parseBoolean(System.getProperty("benchmark.keepResponses", "false")); + Path resultsDir = Paths.get(System.getProperty("benchmark.resultsDir", + "..\\load_data\\performance\\results")).normalize().toAbsolutePath(); + Path responsesDir = Paths.get(System.getProperty("benchmark.responsesDir", + "..\\load_data\\performance\\responses")).normalize().toAbsolutePath(); + return new BenchmarkConfig(office, locationId, seriesId, units, baseUrl, startTime, pointCount, + pageSize, runs, warmup, skipSeed, forceReseed, keepResponses, resultsDir, 
responsesDir); + } + + private URI requestUrl() { + StringBuilder builder = new StringBuilder(resolvedBaseUrl()); + builder.append("/timeseries?office=").append(urlEncode(office)); + builder.append("&name=").append(urlEncode(seriesId)); + builder.append("&units=").append(urlEncode(units)); + builder.append("&begin=").append(urlEncode(REQUEST_TIME_FORMAT.format(startTime))); + builder.append("&end=").append(urlEncode(REQUEST_TIME_FORMAT.format(endTime))); + builder.append("&page-size=").append(pageSize); + return URI.create(builder.toString()); + } + + private String resolvedBaseUrl() { + if (baseUrl != null && !baseUrl.isBlank()) { + return baseUrl; + } + return CwmsDataApiSetupCallback.httpUrl() + ":" + CwmsDataApiSetupCallback.httpPort() + + System.getProperty("warContext"); + } + } + + private static final class YearSegment { + private final int year; + private final Instant startTime; + private final int count; + private final long valueStart; + + private YearSegment(int year, Instant startTime, int count, long valueStart) { + this.year = year; + this.startTime = startTime; + this.count = count; + this.valueStart = valueStart; + } + } + + private static final class RequestResult { + private final int httpCode; + private final long timeTotalNanos; + + private RequestResult(int httpCode, long timeTotalNanos) { + this.httpCode = httpCode; + this.timeTotalNanos = timeTotalNanos; + } + } + + private static final class ResponseSummary { + private final long responseBytes; + private final Integer reportedTotal; + private final Integer reportedPageSize; + private final Long firstTimestamp; + private final Long lastTimestamp; + + private ResponseSummary(long responseBytes, Integer reportedTotal, Integer reportedPageSize, + Long firstTimestamp, Long lastTimestamp) { + this.responseBytes = responseBytes; + this.reportedTotal = reportedTotal; + this.reportedPageSize = reportedPageSize; + this.firstTimestamp = firstTimestamp; + this.lastTimestamp = lastTimestamp; + } + } + 
+ public static final class SeedInfo { + public final boolean seeded; + public final long pointCount; + + private SeedInfo(boolean seeded, long pointCount) { + this.seeded = seeded; + this.pointCount = pointCount; + } + } + + public static final class BenchmarkSummary { + public final int successfulRuns; + public final Double averageTimeTotalSeconds; + public final Double minTimeTotalSeconds; + public final Double maxTimeTotalSeconds; + + private BenchmarkSummary(int successfulRuns, Double averageTimeTotalSeconds, + Double minTimeTotalSeconds, Double maxTimeTotalSeconds) { + this.successfulRuns = successfulRuns; + this.averageTimeTotalSeconds = averageTimeTotalSeconds; + this.minTimeTotalSeconds = minTimeTotalSeconds; + this.maxTimeTotalSeconds = maxTimeTotalSeconds; + } + + private static BenchmarkSummary fromRuns(List runs) { + List successfulRuns = new ArrayList<>(); + for (BenchmarkRun run : runs) { + if (run.httpCode == 200) { + successfulRuns.add(run); + } + } + if (successfulRuns.isEmpty()) { + return new BenchmarkSummary(0, null, null, null); + } + + double total = 0.0d; + double min = Double.MAX_VALUE; + double max = Double.MIN_VALUE; + for (BenchmarkRun run : successfulRuns) { + total += run.timeTotalSeconds; + min = Math.min(min, run.timeTotalSeconds); + max = Math.max(max, run.timeTotalSeconds); + } + return new BenchmarkSummary( + successfulRuns.size(), + Math.round((total / successfulRuns.size()) * 1_000_000.0d) / 1_000_000.0d, + Math.round(min * 1_000_000.0d) / 1_000_000.0d, + Math.round(max * 1_000_000.0d) / 1_000_000.0d + ); + } + } + + public static final class BenchmarkRun { + public final int run; + public final int httpCode; + public final double timeTotalSeconds; + public final long responseBytesOnDisk; + public final Integer reportedTotal; + public final Integer reportedPageSize; + public final Long firstTimestamp; + public final Long lastTimestamp; + public final String errorBody; + public final String responseFile; + + private 
BenchmarkRun(int run, int httpCode, double timeTotalSeconds, long responseBytesOnDisk, + Integer reportedTotal, Integer reportedPageSize, Long firstTimestamp, + Long lastTimestamp, String errorBody, String responseFile) { + this.run = run; + this.httpCode = httpCode; + this.timeTotalSeconds = timeTotalSeconds; + this.responseBytesOnDisk = responseBytesOnDisk; + this.reportedTotal = reportedTotal; + this.reportedPageSize = reportedPageSize; + this.firstTimestamp = firstTimestamp; + this.lastTimestamp = lastTimestamp; + this.errorBody = errorBody; + this.responseFile = responseFile; + } + } + + public static final class BenchmarkReport { + public final String benchmark; + public final String generatedAt; + public final String gitBranch; + public final String gitCommit; + public final String office; + public final String locationId; + public final String seriesId; + public final String units; + public final String startTimeUtc; + public final String endTimeUtc; + public final int pointCount; + public final int pageSize; + public final String requestUrl; + public final SeedInfo seed; + public final BenchmarkSummary summary; + public final List runs; + + private BenchmarkReport(String benchmark, String generatedAt, String gitBranch, String gitCommit, + String office, String locationId, String seriesId, String units, + String startTimeUtc, String endTimeUtc, int pointCount, int pageSize, + String requestUrl, SeedInfo seed, BenchmarkSummary summary, + List runs) { + this.benchmark = benchmark; + this.generatedAt = generatedAt; + this.gitBranch = gitBranch; + this.gitCommit = gitCommit; + this.office = office; + this.locationId = locationId; + this.seriesId = seriesId; + this.units = units; + this.startTimeUtc = startTimeUtc; + this.endTimeUtc = endTimeUtc; + this.pointCount = pointCount; + this.pageSize = pageSize; + this.requestUrl = requestUrl; + this.seed = seed; + this.summary = summary; + this.runs = runs; + } + } +} diff --git 
a/load_data/performance/invoke-timeseries-parity-check.ps1 b/load_data/performance/invoke-timeseries-parity-check.ps1 deleted file mode 100644 index 6191e56c41..0000000000 --- a/load_data/performance/invoke-timeseries-parity-check.ps1 +++ /dev/null @@ -1,751 +0,0 @@ -[CmdletBinding()] -param( - [string]$Office = "SPK", - [string]$CdaBaseUrl = "http://localhost:8081/cwms-data", - [string]$DbContainer = "cwms-data-api-db-1", - [string]$DbUser = "CWMS_20", - [string]$DbPassword = "simplecwmspasswD1", - [string]$DbService = "localhost:1521/FREEPDB1", - [string[]]$Scenarios = @( - "dense-regular", - "dense-regular-entry-date", - "gap-regular", - "versioned-max", - "versioned-single", - "irregular" - ), - [switch]$KeepResponses -) - -Set-StrictMode -Version Latest -$ErrorActionPreference = "Stop" - -$SqlPlusPath = "/opt/oracle/product/23ai/dbhomeFree/bin/sqlplus" -$ResultsDir = Join-Path $PSScriptRoot "results" -$ResponsesDir = Join-Path $PSScriptRoot "responses" -$NonVersionedDateSql = "date '1111-11-11'" -$FloatTolerance = 1e-9 - -function Convert-ToSqlStringLiteral { - param([string]$Value) - return "'" + $Value.Replace("'", "''") + "'" -} - -function Convert-ToOracleDateExpression { - param([datetimeoffset]$Value) - $utc = $Value.ToUniversalTime().ToString("yyyy-MM-dd HH:mm:ss") - return "to_date('$utc', 'yyyy-mm-dd hh24:mi:ss')" -} - -function Convert-ToOracleTimestampExpression { - param([datetimeoffset]$Value) - $utc = $Value.ToUniversalTime().ToString("yyyy-MM-dd HH:mm:ss") - return "to_timestamp('$utc', 'yyyy-mm-dd hh24:mi:ss')" -} - -function Invoke-OracleSql { - param( - [string]$Sql, - [string]$Label = "oracle" - ) - - $sqlFile = Join-Path $env:TEMP ("cwms-parity-{0}-{1}.sql" -f $Label, [guid]::NewGuid().ToString("N")) - try { - Set-Content -LiteralPath $sqlFile -Value $Sql -Encoding ASCII - - $containerSqlFile = "/tmp/" + [System.IO.Path]::GetFileName($sqlFile) - $null = & docker cp $sqlFile "${DbContainer}:${containerSqlFile}" - if ($LASTEXITCODE -ne 0) { - 
throw "Failed to copy SQL to container $DbContainer" - } - - $command = "$SqlPlusPath -s -L $DbUser/$DbPassword@$DbService @$containerSqlFile" - $output = & docker exec $DbContainer bash -lc $command 2>&1 - if ($LASTEXITCODE -ne 0) { - throw ("Oracle SQL failed for {0}:`n{1}" -f $Label, ($output -join [Environment]::NewLine)) - } - - return ($output -join [Environment]::NewLine) - } - finally { - if (Test-Path -LiteralPath $sqlFile) { - Remove-Item -LiteralPath $sqlFile -Force - } - } -} - -function Invoke-CdaRequest { - param( - [string]$Url, - [string]$ResponseFile - ) - - $format = '{"http_code":%{http_code},"time_total":%{time_total},"time_starttransfer":%{time_starttransfer},"time_connect":%{time_connect},"size_download":%{size_download},"speed_download":%{speed_download}}' - $json = & curl.exe -sS -H "Accept: application/json;version=2" -o $ResponseFile -w $format $Url 2>&1 - if ($LASTEXITCODE -ne 0) { - throw ("curl failed: {0}" -f ($json -join [Environment]::NewLine)) - } - - return ($json | ConvertFrom-Json) -} - -function Wait-ForCdaReady { - param( - [string]$Url, - [int]$MaxAttempts = 30, - [int]$DelaySeconds = 1 - ) - - $probeFile = Join-Path $ResponsesDir "parity-readiness-probe.json" - try { - for ($attempt = 1; $attempt -le $MaxAttempts; $attempt++) { - if (Test-Path -LiteralPath $probeFile) { - Remove-Item -LiteralPath $probeFile -Force - } - - $response = Invoke-CdaRequest -Url $Url -ResponseFile $probeFile - if ($response.http_code -eq 200) { - return - } - - Start-Sleep -Seconds $DelaySeconds - } - } - finally { - if (Test-Path -LiteralPath $probeFile) { - Remove-Item -LiteralPath $probeFile -Force - } - } - - throw "CDA did not become ready after $MaxAttempts attempts: $Url" -} - -function New-SeedRow { - param( - [datetimeoffset]$DateTime, - [double]$Value, - [int]$QualityCode = 0, - [datetimeoffset]$DataEntryDate, - [Nullable[datetimeoffset]]$VersionDate = $null - ) - - return [pscustomobject]@{ - DateTime = $DateTime.ToUniversalTime() - 
Value = $Value - QualityCode = $QualityCode - DataEntryDate = $DataEntryDate.ToUniversalTime() - VersionDate = $VersionDate - } -} - -function New-Scenario { - param( - [string]$Name, - [string]$LocationId, - [string]$SeriesId, - [string]$Units, - [datetimeoffset]$BeginTime, - [datetimeoffset]$EndTime, - [object[]]$Rows, - [bool]$Versioned, - [bool]$IncludeEntryDate, - [string]$ExpectedDateVersionType, - [long]$ExpectedIntervalOffset, - [string]$ExpectedInterval, - [Nullable[datetimeoffset]]$VersionDate = $null - ) - - return [pscustomobject]@{ - Name = $Name - LocationId = $LocationId - SeriesId = $SeriesId - Units = $Units - BeginTime = $BeginTime.ToUniversalTime() - EndTime = $EndTime.ToUniversalTime() - Rows = $Rows - Versioned = $Versioned - IncludeEntryDate = $IncludeEntryDate - ExpectedDateVersionType = $ExpectedDateVersionType - ExpectedIntervalOffset = $ExpectedIntervalOffset - ExpectedInterval = $ExpectedInterval - VersionDate = $VersionDate - } -} - -function Get-ScenarioDefinitions { - $denseRows = @( - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:00:00Z") -Value 1 -DataEntryDate ([datetimeoffset]"2024-01-02T00:00:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:01:00Z") -Value 2 -DataEntryDate ([datetimeoffset]"2024-01-02T00:01:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:02:00Z") -Value 3 -DataEntryDate ([datetimeoffset]"2024-01-02T00:02:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:03:00Z") -Value 4 -DataEntryDate ([datetimeoffset]"2024-01-02T00:03:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:04:00Z") -Value 5 -DataEntryDate ([datetimeoffset]"2024-01-02T00:04:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:05:00Z") -Value 6 -DataEntryDate ([datetimeoffset]"2024-01-02T00:05:00Z")) - ) - - $gapRows = @( - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:00:00Z") -Value 1 -DataEntryDate ([datetimeoffset]"2024-01-03T00:00:00Z")), - (New-SeedRow 
-DateTime ([datetimeoffset]"2024-01-01T00:01:00Z") -Value 2 -DataEntryDate ([datetimeoffset]"2024-01-03T00:01:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:02:00Z") -Value 3 -DataEntryDate ([datetimeoffset]"2024-01-03T00:02:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:05:00Z") -Value 6 -DataEntryDate ([datetimeoffset]"2024-01-03T00:05:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:06:00Z") -Value 7 -DataEntryDate ([datetimeoffset]"2024-01-03T00:06:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:07:00Z") -Value 8 -DataEntryDate ([datetimeoffset]"2024-01-03T00:07:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:08:00Z") -Value 9 -DataEntryDate ([datetimeoffset]"2024-01-03T00:08:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-01T00:09:00Z") -Value 10 -DataEntryDate ([datetimeoffset]"2024-01-03T00:09:00Z")) - ) - - $versionDateOlder = [datetimeoffset]"2024-06-20T08:00:00Z" - $versionDateNewer = [datetimeoffset]"2024-06-21T08:00:00Z" - $versionedRows = @( - (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T15:00:00Z") -Value 4 -DataEntryDate ([datetimeoffset]"2024-06-20T09:00:00Z") -VersionDate $versionDateOlder), - (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T16:00:00Z") -Value 4 -DataEntryDate ([datetimeoffset]"2024-06-20T09:01:00Z") -VersionDate $versionDateOlder), - (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T17:00:00Z") -Value 4 -DataEntryDate ([datetimeoffset]"2024-06-20T09:02:00Z") -VersionDate $versionDateOlder), - (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T18:00:00Z") -Value 3 -DataEntryDate ([datetimeoffset]"2024-06-20T09:03:00Z") -VersionDate $versionDateOlder), - (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T15:00:00Z") -Value 1 -DataEntryDate ([datetimeoffset]"2024-06-21T09:00:00Z") -VersionDate $versionDateNewer), - (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T16:00:00Z") -Value 1 -DataEntryDate 
([datetimeoffset]"2024-06-21T09:01:00Z") -VersionDate $versionDateNewer), - (New-SeedRow -DateTime ([datetimeoffset]"2024-05-01T17:00:00Z") -Value 1 -DataEntryDate ([datetimeoffset]"2024-06-21T09:02:00Z") -VersionDate $versionDateNewer) - ) - - $irregularRows = @( - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-05T12:00:00Z") -Value 10 -DataEntryDate ([datetimeoffset]"2024-01-06T00:00:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-05T12:07:20Z") -Value 20 -DataEntryDate ([datetimeoffset]"2024-01-06T00:01:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-05T12:19:45Z") -Value 30 -DataEntryDate ([datetimeoffset]"2024-01-06T00:02:00Z")), - (New-SeedRow -DateTime ([datetimeoffset]"2024-01-05T12:33:10Z") -Value 40 -DataEntryDate ([datetimeoffset]"2024-01-06T00:03:00Z")) - ) - - return @( - (New-Scenario -Name "dense-regular" -LocationId "PARREG" -SeriesId "PARREG.Stage.Inst.1Minute.0.BENCH" -Units "ft" -BeginTime ([datetimeoffset]"2024-01-01T00:00:00Z") -EndTime ([datetimeoffset]"2024-01-01T00:05:00Z") -Rows $denseRows -Versioned $false -IncludeEntryDate $false -ExpectedDateVersionType "UNVERSIONED" -ExpectedIntervalOffset 0 -ExpectedInterval "PT1M"), - (New-Scenario -Name "dense-regular-entry-date" -LocationId "PARREG" -SeriesId "PARREG.Stage.Inst.1Minute.0.BENCH" -Units "ft" -BeginTime ([datetimeoffset]"2024-01-01T00:00:00Z") -EndTime ([datetimeoffset]"2024-01-01T00:05:00Z") -Rows $denseRows -Versioned $false -IncludeEntryDate $true -ExpectedDateVersionType "UNVERSIONED" -ExpectedIntervalOffset 0 -ExpectedInterval "PT1M"), - (New-Scenario -Name "gap-regular" -LocationId "PARGAP" -SeriesId "PARGAP.Stage.Inst.1Minute.0.BENCH" -Units "ft" -BeginTime ([datetimeoffset]"2024-01-01T00:00:00Z") -EndTime ([datetimeoffset]"2024-01-01T00:09:00Z") -Rows $gapRows -Versioned $false -IncludeEntryDate $false -ExpectedDateVersionType "UNVERSIONED" -ExpectedIntervalOffset 0 -ExpectedInterval "PT1M"), - (New-Scenario -Name "versioned-max" -LocationId "PARVER" 
-SeriesId "PARVER.Flow.Inst.1Hour.0.BENCH" -Units "cfs" -BeginTime ([datetimeoffset]"2024-05-01T15:00:00Z") -EndTime ([datetimeoffset]"2024-05-01T18:00:00Z") -Rows $versionedRows -Versioned $true -IncludeEntryDate $false -ExpectedDateVersionType "MAX_AGGREGATE" -ExpectedIntervalOffset 0 -ExpectedInterval "PT1H"), - (New-Scenario -Name "versioned-single" -LocationId "PARVER" -SeriesId "PARVER.Flow.Inst.1Hour.0.BENCH" -Units "cfs" -BeginTime ([datetimeoffset]"2024-05-01T15:00:00Z") -EndTime ([datetimeoffset]"2024-05-01T18:00:00Z") -Rows $versionedRows -Versioned $true -IncludeEntryDate $false -ExpectedDateVersionType "SINGLE_VERSION" -ExpectedIntervalOffset 0 -ExpectedInterval "PT1H" -VersionDate $versionDateNewer), - (New-Scenario -Name "irregular" -LocationId "PARIRR" -SeriesId "PARIRR.Flow.Inst.0.0.BENCH" -Units "cfs" -BeginTime ([datetimeoffset]"2024-01-05T12:00:00Z") -EndTime ([datetimeoffset]"2024-01-05T12:33:10Z") -Rows $irregularRows -Versioned $false -IncludeEntryDate $false -ExpectedDateVersionType "UNVERSIONED" -ExpectedIntervalOffset (-2147483648L) -ExpectedInterval "PT0S") - ) -} - -function Convert-SeedValueToSqlLiteral { - param([double]$Value) - return ([System.Globalization.CultureInfo]::InvariantCulture.TextInfo.ToLower($Value.ToString("0.################", [System.Globalization.CultureInfo]::InvariantCulture))) -} - -function Get-SeedSql { - param($Scenario) - - $seriesLiteral = Convert-ToSqlStringLiteral $Scenario.SeriesId - $locationLiteral = Convert-ToSqlStringLiteral $Scenario.LocationId - $officeLiteral = Convert-ToSqlStringLiteral $Office - $locationTypeLiteral = Convert-ToSqlStringLiteral "SITE" - $publicNameLiteral = Convert-ToSqlStringLiteral $Scenario.LocationId - $longNameLiteral = Convert-ToSqlStringLiteral "$($Scenario.LocationId) Parity Location" - $descriptionLiteral = Convert-ToSqlStringLiteral "Parity harness location" - $timeZoneLiteral = Convert-ToSqlStringLiteral "UTC" - $horizontalDatumLiteral = Convert-ToSqlStringLiteral 
"NAD83" - $versionedFlagLiteral = if ($Scenario.Versioned) { "'T'" } else { "'F'" } - - $groupedRows = $Scenario.Rows | Group-Object { $_.DateTime.Year } - $insertStatements = foreach ($group in $groupedRows) { - $intoStatements = foreach ($row in $group.Group) { - $dateExpr = Convert-ToOracleDateExpression $row.DateTime - $versionExpr = if ($null -ne $row.VersionDate) { - Convert-ToOracleDateExpression $row.VersionDate - } else { - $NonVersionedDateSql - } - $entryExpr = Convert-ToOracleTimestampExpression $row.DataEntryDate - $valueExpr = Convert-SeedValueToSqlLiteral $row.Value - " into at_tsv_$($group.Name) (ts_code, date_time, version_date, data_entry_date, value, quality_code, dest_flag) values (l_ts_code, $dateExpr, $versionExpr, $entryExpr, $valueExpr, $($row.QualityCode), 0)" - } - - @" -insert all -$($intoStatements -join [Environment]::NewLine) -select 1 from dual; -"@ - } - - $distinctVersionDates = @($Scenario.Rows | - ForEach-Object { $_.VersionDate } | - Where-Object { $null -ne $_ } | - Sort-Object | - Get-Unique) - - $extentStatements = if ($distinctVersionDates.Count -gt 0) { - foreach ($versionDate in $distinctVersionDates) { - " cwms_ts.update_ts_extents(l_ts_code, $(Convert-ToOracleDateExpression $versionDate));" - } - } else { - " cwms_ts.update_ts_extents(l_ts_code, $NonVersionedDateSql);" - } - - return @" -set serveroutput on feedback on -whenever sqlerror exit failure rollback -declare - location_exists exception; - pragma exception_init(location_exists, -20026); - ts_exists exception; - pragma exception_init(ts_exists, -20003); - l_ts_code number; -begin - begin - cwms_loc.create_location( - p_location_id => $locationLiteral, - p_location_type => $locationTypeLiteral, - p_elevation => null, - p_elev_unit_id => null, - p_vertical_datum => null, - p_latitude => 38.0, - p_longitude => -90.0, - p_horizontal_datum => $horizontalDatumLiteral, - p_public_name => $publicNameLiteral, - p_long_name => $longNameLiteral, - p_description => 
$descriptionLiteral, - p_time_zone_id => $timeZoneLiteral, - p_county_name => null, - p_state_initial => null, - p_active => 'T', - p_db_office_id => $officeLiteral - ); - exception - when location_exists then null; - end; - - begin - cwms_ts.create_ts($officeLiteral, $seriesLiteral, 0); - exception - when ts_exists then null; - end; - - cwms_ts.set_tsid_versioned($seriesLiteral, $versionedFlagLiteral, $officeLiteral); - - select ts_code - into l_ts_code - from at_cwms_ts_id - where db_office_id = $officeLiteral - and cwms_ts_id = $seriesLiteral; - - for rec in (select table_name from at_ts_table_properties) loop - execute immediate 'delete from ' || rec.table_name || ' where ts_code = :1' using l_ts_code; - end loop; - - delete from at_ts_extents where ts_code = l_ts_code; - -$($insertStatements -join [Environment]::NewLine) - -$($extentStatements -join [Environment]::NewLine) - commit; -end; -/ -exit; -"@ -} - -function Convert-CdaResponseToRows { - param( - [object]$Payload, - [bool]$IncludeEntryDate - ) - - $rows = @() - foreach ($entry in $Payload.values) { - $row = [ordered]@{ - date_time = [long]$entry[0] - value = if ($null -eq $entry[1]) { $null } else { [double]$entry[1] } - quality_code = [int]$entry[2] - } - if ($IncludeEntryDate) { - $row.data_entry_date = if ($entry.Count -gt 3 -and $null -ne $entry[3]) { - [long]$entry[3] - } else { - $null - } - } - $rows += [pscustomobject]$row - } - - return @($rows | Sort-Object date_time) -} - -function Get-CdaScenarioResult { - param($Scenario) - - $responseFile = Join-Path $ResponsesDir ("parity-{0}-cda.json" -f $Scenario.Name) - $escapedOffice = [uri]::EscapeDataString($Office) - $escapedSeriesId = [uri]::EscapeDataString($Scenario.SeriesId) - $escapedUnits = [uri]::EscapeDataString($Scenario.Units) - $escapedBegin = [uri]::EscapeDataString($Scenario.BeginTime.ToString("yyyy-MM-ddTHH:mm:ssZ")) - $escapedEnd = [uri]::EscapeDataString($Scenario.EndTime.ToString("yyyy-MM-ddTHH:mm:ssZ")) - $requestUrl = 
"{0}/timeseries?office={1}&name={2}&units={3}&begin={4}&end={5}&page-size=1000" -f ` - $CdaBaseUrl.TrimEnd("/"), ` - $escapedOffice, ` - $escapedSeriesId, ` - $escapedUnits, ` - $escapedBegin, ` - $escapedEnd - - if ($Scenario.IncludeEntryDate) { - $requestUrl += "&include-entry-date=true" - } - - if ($null -ne $Scenario.VersionDate) { - $escapedVersionDate = [uri]::EscapeDataString($Scenario.VersionDate.ToString("yyyy-MM-ddTHH:mm:ssZ")) - $requestUrl += "&version-date=$escapedVersionDate" - } - - $curlMetrics = Invoke-CdaRequest -Url $requestUrl -ResponseFile $responseFile - $payload = Get-Content -LiteralPath $responseFile -Raw | ConvertFrom-Json - $rows = Convert-CdaResponseToRows -Payload $payload -IncludeEntryDate $Scenario.IncludeEntryDate - - if (-not $KeepResponses -and (Test-Path -LiteralPath $responseFile)) { - Remove-Item -LiteralPath $responseFile -Force - $responseFile = $null - } - - return [pscustomobject]@{ - RequestUrl = $requestUrl - HttpCode = [int]$curlMetrics.http_code - TimeTotalSeconds = [double]$curlMetrics.time_total - Payload = $payload - Rows = $rows - ResponseFile = $responseFile - } -} - -function Get-OracleRowsSql { - param($Scenario) - - $seriesLiteral = Convert-ToSqlStringLiteral $Scenario.SeriesId - $unitsLiteral = Convert-ToSqlStringLiteral $Scenario.Units - $officeLiteral = Convert-ToSqlStringLiteral $Office - $beginExpr = Convert-ToOracleDateExpression $Scenario.BeginTime - $endExpr = Convert-ToOracleDateExpression $Scenario.EndTime - $versionDateExpr = if ($null -ne $Scenario.VersionDate) { - Convert-ToOracleDateExpression $Scenario.VersionDate - } else { - "null" - } - $maxVersionLiteral = if ($null -ne $Scenario.VersionDate) { "'F'" } else { "'T'" } - $retrieveFunction = if ($Scenario.IncludeEntryDate) { - "cwms_20.cwms_ts.retrieve_ts_entry_out_tab" - } else { - "cwms_20.cwms_ts.retrieve_ts_out_tab" - } - - $rowProjection = if ($Scenario.IncludeEntryDate) { - @" -json_object( - 'date_time' value round((date_time - date 
'1970-01-01') * 86400000), - 'value' value value, - 'quality_code' value quality_code, - 'data_entry_date' value case - when data_entry_date is null then null - else round((cast(data_entry_date as date) - date '1970-01-01') * 86400000) - end null on null -) -"@ - } else { - @" -json_object( - 'date_time' value round((date_time - date '1970-01-01') * 86400000), - 'value' value value, - 'quality_code' value quality_code -) -"@ - } - - return @" -set heading off feedback off verify off pagesize 0 linesize 32767 long 1000000 longchunksize 1000000 trimspool on -with oracle_rows as ( - select * - from table($retrieveFunction( - $seriesLiteral, - $unitsLiteral, - $beginExpr, - $endExpr, - 'UTC', - 'T', - 'T', - 'T', - 'F', - 'F', - $versionDateExpr, - $maxVersionLiteral, - $officeLiteral - )) -) -select json_object( - 'row_count' value (select count(*) from oracle_rows), - 'rows' value nvl( - ( - select json_arrayagg( - $rowProjection - returning clob - ) - from ( - select * - from oracle_rows - order by date_time - ) - ), - '[]' - ) format json - returning clob -) -from dual; -exit; -"@ -} - -function Get-OracleScenarioResult { - param($Scenario) - - $responseFile = Join-Path $ResponsesDir ("parity-{0}-oracle.json" -f $Scenario.Name) - $raw = Invoke-OracleSql -Sql (Get-OracleRowsSql -Scenario $Scenario) -Label ("oracle-{0}" -f $Scenario.Name) - $json = (($raw -split "\r?\n") | ForEach-Object { $_.Trim() } | Where-Object { $_ }) -join "" - Set-Content -LiteralPath $responseFile -Value $json -Encoding ASCII - $payload = $json | ConvertFrom-Json - $rows = @() - foreach ($entry in $payload.rows) { - $row = [ordered]@{ - date_time = [long]$entry.date_time - value = if ($null -eq $entry.value) { $null } else { [double]$entry.value } - quality_code = [int]$entry.quality_code - } - if ($Scenario.IncludeEntryDate) { - $row.data_entry_date = if ($null -ne $entry.PSObject.Properties["data_entry_date"] -and $null -ne $entry.data_entry_date) { - [long]$entry.data_entry_date - } else 
{ - $null - } - } - $rows += [pscustomobject]$row - } - - if (-not $KeepResponses -and (Test-Path -LiteralPath $responseFile)) { - Remove-Item -LiteralPath $responseFile -Force - $responseFile = $null - } - - return [pscustomobject]@{ - Payload = $payload - Rows = @($rows | Sort-Object date_time) - ResponseFile = $responseFile - } -} - -function Test-RowEquality { - param( - $Expected, - $Actual, - [bool]$IncludeEntryDate - ) - - if ($Expected.date_time -ne $Actual.date_time) { - return $false - } - - if ($Expected.quality_code -ne $Actual.quality_code) { - return $false - } - - if ($null -eq $Expected.value -and $null -ne $Actual.value) { - return $false - } - - if ($null -ne $Expected.value -and $null -eq $Actual.value) { - return $false - } - - if ($null -ne $Expected.value -and $null -ne $Actual.value) { - if ([math]::Abs([double]$Expected.value - [double]$Actual.value) -gt $FloatTolerance) { - return $false - } - } - - if ($IncludeEntryDate) { - if ($Expected.data_entry_date -ne $Actual.data_entry_date) { - return $false - } - } - - return $true -} - -function Compare-ScenarioRows { - param( - [object[]]$ExpectedRows, - [object[]]$ActualRows, - [bool]$IncludeEntryDate - ) - - $mismatchCount = 0 - $firstMismatch = $null - $maxLength = [math]::Max($ExpectedRows.Count, $ActualRows.Count) - - for ($index = 0; $index -lt $maxLength; $index++) { - $expected = if ($index -lt $ExpectedRows.Count) { $ExpectedRows[$index] } else { $null } - $actual = if ($index -lt $ActualRows.Count) { $ActualRows[$index] } else { $null } - - $equal = $false - if ($null -ne $expected -and $null -ne $actual) { - $equal = Test-RowEquality -Expected $expected -Actual $actual -IncludeEntryDate $IncludeEntryDate - } - - if (-not $equal) { - $mismatchCount++ - if ($null -eq $firstMismatch) { - $firstMismatch = [pscustomobject]@{ - index = $index - expected = $expected - actual = $actual - } - } - } - } - - return [pscustomobject]@{ - mismatch_count = $mismatchCount - first_mismatch = 
$firstMismatch - } -} - -function Test-MetadataExpectation { - param( - $Scenario, - $CdaResult, - $OracleResult - ) - - $metadataMismatches = @() - if ($CdaResult.Payload.total -ne $OracleResult.Payload.row_count) { - $metadataMismatches += [pscustomobject]@{ - field = "total" - expected = [int]$OracleResult.Payload.row_count - actual = $CdaResult.Payload.total - } - } - - if ($CdaResult.Payload.'date-version-type' -ne $Scenario.ExpectedDateVersionType) { - $metadataMismatches += [pscustomobject]@{ - field = "date-version-type" - expected = $Scenario.ExpectedDateVersionType - actual = $CdaResult.Payload.'date-version-type' - } - } - - if ($CdaResult.Payload.'interval-offset' -ne $Scenario.ExpectedIntervalOffset) { - $metadataMismatches += [pscustomobject]@{ - field = "interval-offset" - expected = $Scenario.ExpectedIntervalOffset - actual = $CdaResult.Payload.'interval-offset' - } - } - - if ($CdaResult.Payload.interval -ne $Scenario.ExpectedInterval) { - $metadataMismatches += [pscustomobject]@{ - field = "interval" - expected = $Scenario.ExpectedInterval - actual = $CdaResult.Payload.interval - } - } - - if ($null -ne $Scenario.VersionDate) { - $expectedVersionDate = $Scenario.VersionDate.ToString("yyyy-MM-ddTHH:mm:ssZ") - if ($CdaResult.Payload.'version-date' -ne $expectedVersionDate) { - $metadataMismatches += [pscustomobject]@{ - field = "version-date" - expected = $expectedVersionDate - actual = $CdaResult.Payload.'version-date' - } - } - } - - return @($metadataMismatches) -} - -New-Item -ItemType Directory -Path $ResultsDir -Force | Out-Null -New-Item -ItemType Directory -Path $ResponsesDir -Force | Out-Null - -$scenarioMap = @{} -foreach ($scenario in Get-ScenarioDefinitions) { - $scenarioMap[$scenario.Name] = $scenario -} - -$requestedScenarios = foreach ($scenarioName in $Scenarios) { - if (-not $scenarioMap.ContainsKey($scenarioName)) { - throw "Unknown scenario '$scenarioName'. 
Available scenarios: $($scenarioMap.Keys -join ', ')" - } - $scenarioMap[$scenarioName] -} - -Wait-ForCdaReady -Url ("{0}/offices/{1}" -f $CdaBaseUrl.TrimEnd("/"), [uri]::EscapeDataString($Office)) - -$results = @() -$failedScenarios = @() -foreach ($scenario in $requestedScenarios) { - Invoke-OracleSql -Sql (Get-SeedSql -Scenario $scenario) -Label ("seed-{0}" -f $scenario.Name) | Out-Null - - $oracleResult = Get-OracleScenarioResult -Scenario $scenario - $cdaResult = Get-CdaScenarioResult -Scenario $scenario - $rowComparison = Compare-ScenarioRows -ExpectedRows $oracleResult.Rows -ActualRows $cdaResult.Rows -IncludeEntryDate $scenario.IncludeEntryDate - $metadataMismatches = @(Test-MetadataExpectation -Scenario $scenario -CdaResult $cdaResult -OracleResult $oracleResult) - $passed = $cdaResult.HttpCode -eq 200 -and $rowComparison.mismatch_count -eq 0 -and $metadataMismatches.Count -eq 0 - - $result = [pscustomobject]@{ - scenario = $scenario.Name - http_code = $cdaResult.HttpCode - time_total_seconds = $cdaResult.TimeTotalSeconds - request_url = $cdaResult.RequestUrl - include_entry_date = [bool]$scenario.IncludeEntryDate - version_date = if ($null -ne $scenario.VersionDate) { $scenario.VersionDate.ToString("o") } else { $null } - expected_row_count = [int]$oracleResult.Payload.row_count - actual_row_count = $cdaResult.Rows.Count - reported_total = $cdaResult.Payload.total - expected_date_version_type = $scenario.ExpectedDateVersionType - actual_date_version_type = $cdaResult.Payload.'date-version-type' - expected_interval = $scenario.ExpectedInterval - actual_interval = $cdaResult.Payload.interval - expected_interval_offset = $scenario.ExpectedIntervalOffset - actual_interval_offset = $cdaResult.Payload.'interval-offset' - metadata_mismatches = $metadataMismatches - row_mismatch_count = $rowComparison.mismatch_count - first_row_mismatch = $rowComparison.first_mismatch - oracle_response_file = $oracleResult.ResponseFile - cda_response_file = 
$cdaResult.ResponseFile - passed = $passed - } - - $results += $result - if (-not $passed) { - $failedScenarios += $result - } -} - -$gitBranch = (& git branch --show-current 2>$null) -$gitBranchExitCode = $LASTEXITCODE -$gitCommit = (& git rev-parse HEAD 2>$null) -$gitCommitExitCode = $LASTEXITCODE -$timestamp = Get-Date -Format "yyyyMMdd-HHmmss" -$resultFile = Join-Path $ResultsDir ("timeseries-parity-{0}.json" -f $timestamp) -$summary = [pscustomobject]@{ - total_scenarios = $results.Count - passed_scenarios = @($results | Where-Object { $_.passed }).Count - failed_scenarios = @($results | Where-Object { -not $_.passed }).Count -} - -$payload = [pscustomobject]@{ - parity = "timeseries" - generated_at = (Get-Date).ToUniversalTime().ToString("o") - git_branch = if ($gitBranchExitCode -eq 0 -and $null -ne $gitBranch) { $gitBranch.Trim() } else { $null } - git_commit = if ($gitCommitExitCode -eq 0 -and $null -ne $gitCommit) { $gitCommit.Trim() } else { $null } - office = $Office - summary = $summary - results = $results -} - -$payload | ConvertTo-Json -Depth 8 | Set-Content -LiteralPath $resultFile -Encoding ASCII -$payload | ConvertTo-Json -Depth 8 - -if ($failedScenarios.Count -gt 0) { - $failedNames = ($failedScenarios | ForEach-Object { $_.scenario }) -join ", " - throw "Parity check found mismatches in: $failedNames. 
Results saved to $resultFile" -} diff --git a/load_data/performance/invoke-timeseries-read-benchmark.ps1 b/load_data/performance/invoke-timeseries-read-benchmark.ps1 deleted file mode 100644 index ba737d3183..0000000000 --- a/load_data/performance/invoke-timeseries-read-benchmark.ps1 +++ /dev/null @@ -1,454 +0,0 @@ -[CmdletBinding()] -param( - [string]$Office = "SPK", - [string]$LocationId = "PERF1MREAD", - [string]$SeriesId = "PERF1MREAD.Stage.Inst.1Minute.0.BENCH", - [string]$Units = "ft", - [string]$CdaBaseUrl = "http://localhost:8081/cwms-data", - [string]$DbContainer = "cwms-data-api-db-1", - [string]$DbUser = "CWMS_20", - [string]$DbPassword = "simplecwmspasswD1", - [string]$DbService = "localhost:1521/FREEPDB1", - [string]$StartTime = "2024-01-01T00:00:00Z", - [int]$PointCount = 1000000, - [int]$PageSize = 1000000, - [int]$Runs = 1, - [switch]$Warmup, - [switch]$SkipSeed, - [switch]$ForceReseed, - [switch]$KeepResponses -) - -Set-StrictMode -Version Latest -$ErrorActionPreference = "Stop" - -$SqlPlusPath = "/opt/oracle/product/23ai/dbhomeFree/bin/sqlplus" -$ResultsDir = Join-Path $PSScriptRoot "results" -$ResponsesDir = Join-Path $PSScriptRoot "responses" -$NonVersionedDateSql = "date '1111-11-11'" - -function Convert-ToSqlStringLiteral { - param([string]$Value) - return "'" + $Value.Replace("'", "''") + "'" -} - -function Convert-ToOracleDateExpression { - param([datetimeoffset]$Value) - $utc = $Value.ToUniversalTime().ToString("yyyy-MM-dd HH:mm:ss") - return "to_date('$utc', 'yyyy-mm-dd hh24:mi:ss')" -} - -function Invoke-OracleSql { - param( - [string]$Sql, - [string]$Label = "oracle" - ) - - $sqlFile = Join-Path $env:TEMP ("cwms-benchmark-{0}-{1}.sql" -f $Label, [guid]::NewGuid().ToString("N")) - try { - Set-Content -LiteralPath $sqlFile -Value $Sql -Encoding ASCII - - $containerSqlFile = "/tmp/" + [System.IO.Path]::GetFileName($sqlFile) - $null = & docker cp $sqlFile "${DbContainer}:${containerSqlFile}" - if ($LASTEXITCODE -ne 0) { - throw "Failed to 
copy SQL to container $DbContainer" - } - - $command = "$SqlPlusPath -s -L $DbUser/$DbPassword@$DbService @$containerSqlFile" - $output = & docker exec $DbContainer bash -lc $command 2>&1 - if ($LASTEXITCODE -ne 0) { - throw ("Oracle SQL failed for {0}:`n{1}" -f $Label, ($output -join [Environment]::NewLine)) - } - - return ($output -join [Environment]::NewLine) - } - finally { - if (Test-Path -LiteralPath $sqlFile) { - Remove-Item -LiteralPath $sqlFile -Force - } - } -} - -function Get-YearSegments { - param( - [datetimeoffset]$StartUtc, - [int]$Count - ) - - $segments = @() - $remaining = $Count - $offset = 0 - $cursor = $StartUtc.ToUniversalTime() - - while ($remaining -gt 0) { - $yearStart = [datetimeoffset]::ParseExact( - "{0}-01-01T00:00:00+00:00" -f $cursor.Year, - "yyyy-MM-ddTHH:mm:sszzz", - [System.Globalization.CultureInfo]::InvariantCulture - ) - $nextYear = $yearStart.AddYears(1) - $minutesUntilNextYear = [int][Math]::Floor(($nextYear - $cursor).TotalMinutes) - if ($minutesUntilNextYear -le 0) { - throw "Computed non-positive year segment size for $($cursor.Year)" - } - - $segmentCount = [Math]::Min($remaining, $minutesUntilNextYear) - $segments += [pscustomobject]@{ - Year = $cursor.Year - Start = $cursor - Count = $segmentCount - ValueStart = $offset + 1 - } - - $cursor = $cursor.AddMinutes($segmentCount) - $remaining -= $segmentCount - $offset += $segmentCount - } - - return $segments -} - -function Get-SeededPointCount { - $seriesLiteral = Convert-ToSqlStringLiteral $SeriesId - $officeLiteral = Convert-ToSqlStringLiteral $Office - $sql = @" -set heading off feedback off verify off pagesize 0 trimspool on -select count(*) - from av_tsv v - join at_cwms_ts_id t - on t.ts_code = v.ts_code - where t.db_office_id = $officeLiteral - and t.cwms_ts_id = $seriesLiteral; -exit; -"@ - - $raw = Invoke-OracleSql -Sql $sql -Label "count" - $countText = (($raw -split "\r?\n") | ForEach-Object { $_.Trim() } | Where-Object { $_ } | Select-Object -Last 1) - return 
[int]$countText -} - -function Ensure-BenchmarkSeed { - param( - [datetimeoffset]$StartUtc, - [int]$Count - ) - - if ($SkipSeed) { - return [pscustomobject]@{ - Seeded = $false - ExistingPointCount = Get-SeededPointCount - } - } - - $existingCount = Get-SeededPointCount - if (-not $ForceReseed -and $existingCount -eq $Count) { - return [pscustomobject]@{ - Seeded = $false - ExistingPointCount = $existingCount - } - } - - $seriesLiteral = Convert-ToSqlStringLiteral $SeriesId - $locationLiteral = Convert-ToSqlStringLiteral $LocationId - $officeLiteral = Convert-ToSqlStringLiteral $Office - $locationTypeLiteral = Convert-ToSqlStringLiteral "SITE" - $publicNameLiteral = Convert-ToSqlStringLiteral $LocationId - $longNameLiteral = Convert-ToSqlStringLiteral "$LocationId Benchmark Location" - $descriptionLiteral = Convert-ToSqlStringLiteral "Performance benchmark location" - $timeZoneLiteral = Convert-ToSqlStringLiteral "UTC" - $horizontalDatumLiteral = Convert-ToSqlStringLiteral "NAD83" - $segments = Get-YearSegments -StartUtc $StartUtc -Count $Count - - $insertStatements = foreach ($segment in $segments) { - $dateExpr = Convert-ToOracleDateExpression $segment.Start - @" - execute immediate q'[ - insert /*+ APPEND */ into at_tsv_$($segment.Year) - (ts_code, date_time, version_date, data_entry_date, value, quality_code, dest_flag) - select :1, - $dateExpr + numtodsinterval(level - 1, 'MINUTE'), - $NonVersionedDateSql, - systimestamp, - $($segment.ValueStart) + level - 1, - 0, - 0 - from dual - connect by level <= $($segment.Count) - ]' using l_ts_code; -"@ - } - - $seedSql = @" -set serveroutput on feedback on -whenever sqlerror exit failure rollback -declare - location_exists exception; - pragma exception_init(location_exists, -20026); - ts_exists exception; - pragma exception_init(ts_exists, -20003); - l_ts_code number; -begin - begin - cwms_loc.create_location( - p_location_id => $locationLiteral, - p_location_type => $locationTypeLiteral, - p_elevation => null, - 
p_elev_unit_id => null, - p_vertical_datum => null, - p_latitude => 38.0, - p_longitude => -90.0, - p_horizontal_datum => $horizontalDatumLiteral, - p_public_name => $publicNameLiteral, - p_long_name => $longNameLiteral, - p_description => $descriptionLiteral, - p_time_zone_id => $timeZoneLiteral, - p_county_name => null, - p_state_initial => null, - p_active => 'T', - p_db_office_id => $officeLiteral - ); - exception - when location_exists then null; - end; - - begin - cwms_ts.create_ts($officeLiteral, $seriesLiteral, 0); - exception - when ts_exists then null; - end; - - select ts_code - into l_ts_code - from at_cwms_ts_id - where db_office_id = $officeLiteral - and cwms_ts_id = $seriesLiteral; - - for rec in (select table_name from at_ts_table_properties) loop - execute immediate 'delete from ' || rec.table_name || ' where ts_code = :1' using l_ts_code; - end loop; - - delete from at_ts_extents where ts_code = l_ts_code; - -$($insertStatements -join [Environment]::NewLine) - - cwms_ts.update_ts_extents(l_ts_code, $NonVersionedDateSql); - commit; -end; -/ -set heading off feedback off verify off pagesize 0 trimspool on -select count(*) - from av_tsv v - join at_cwms_ts_id t - on t.ts_code = v.ts_code - where t.db_office_id = $officeLiteral - and t.cwms_ts_id = $seriesLiteral; -exit; -"@ - - $raw = Invoke-OracleSql -Sql $seedSql -Label "seed" - $countText = (($raw -split "\r?\n") | ForEach-Object { $_.Trim() } | Where-Object { $_ } | Select-Object -Last 1) - return [pscustomobject]@{ - Seeded = $true - ExistingPointCount = [int]$countText - } -} - -function Invoke-CdaRequest { - param( - [string]$Url, - [string]$ResponseFile - ) - - $format = '{"http_code":%{http_code},"time_total":%{time_total},"time_starttransfer":%{time_starttransfer},"time_connect":%{time_connect},"size_download":%{size_download},"speed_download":%{speed_download}}' - $json = & curl.exe -sS -H "Accept: application/json;version=2" -o $ResponseFile -w $format $Url 2>&1 - if ($LASTEXITCODE -ne 0) 
{ - throw ("curl failed: {0}" -f ($json -join [Environment]::NewLine)) - } - - return ($json | ConvertFrom-Json) -} - -function Wait-ForCdaReady { - param( - [string]$Url, - [int]$MaxAttempts = 30, - [int]$DelaySeconds = 1 - ) - - $probeFile = Join-Path $ResponsesDir "readiness-probe.json" - try { - for ($attempt = 1; $attempt -le $MaxAttempts; $attempt++) { - if (Test-Path -LiteralPath $probeFile) { - Remove-Item -LiteralPath $probeFile -Force - } - - $response = Invoke-CdaRequest -Url $Url -ResponseFile $probeFile - if ($response.http_code -eq 200) { - return - } - - Start-Sleep -Seconds $DelaySeconds - } - } - finally { - if (Test-Path -LiteralPath $probeFile) { - Remove-Item -LiteralPath $probeFile -Force - } - } - - throw "CDA did not become ready after $MaxAttempts attempts: $Url" -} - -function Get-ResponseSummary { - param([string]$ResponseFile) - - $content = Get-Content -LiteralPath $ResponseFile -Raw - $total = $null - $pageSize = $null - $firstTimestamp = $null - $lastTimestamp = $null - - if ($content -match '"total":(?\d+)') { - $total = [int]$Matches["total"] - } - if ($content -match '"page-size":(?\d+)') { - $pageSize = [int]$Matches["pageSize"] - } - if ($content -match '\[\[(?\d+),') { - $firstTimestamp = [long]$Matches["first"] - } - $allMatches = [regex]::Matches($content, '\[(?\d+),') - if ($allMatches.Count -gt 0) { - $lastTimestamp = [long]$allMatches[$allMatches.Count - 1].Groups["ts"].Value - } - - return [pscustomobject]@{ - Total = $total - PageSize = $pageSize - FirstTimestamp = $firstTimestamp - LastTimestamp = $lastTimestamp - ResponseBytes = (Get-Item -LiteralPath $ResponseFile).Length - } -} - -$startUtc = [datetimeoffset]::Parse($StartTime, [System.Globalization.CultureInfo]::InvariantCulture).ToUniversalTime() -$endUtc = $startUtc.AddMinutes($PointCount - 1) -$escapedSeriesId = [uri]::EscapeDataString($SeriesId) -$escapedOffice = [uri]::EscapeDataString($Office) -$escapedUnits = [uri]::EscapeDataString($Units) -$escapedBegin = 
[uri]::EscapeDataString($startUtc.ToString("yyyy-MM-ddTHH:mm:ssZ")) -$escapedEnd = [uri]::EscapeDataString($endUtc.ToString("yyyy-MM-ddTHH:mm:ssZ")) -$requestUrl = "{0}/timeseries?office={1}&name={2}&units={3}&begin={4}&end={5}&page-size={6}" -f ` - $CdaBaseUrl.TrimEnd("/"), ` - $escapedOffice, ` - $escapedSeriesId, ` - $escapedUnits, ` - $escapedBegin, ` - $escapedEnd, ` - $PageSize - -New-Item -ItemType Directory -Path $ResultsDir -Force | Out-Null -New-Item -ItemType Directory -Path $ResponsesDir -Force | Out-Null - -$seedInfo = Ensure-BenchmarkSeed -StartUtc $startUtc -Count $PointCount -if ($seedInfo.ExistingPointCount -ne $PointCount) { - throw "Expected $PointCount seeded points but found $($seedInfo.ExistingPointCount)" -} - -Wait-ForCdaReady -Url ("{0}/offices/{1}" -f $CdaBaseUrl.TrimEnd("/"), $escapedOffice) - -if ($Warmup) { - $warmupFile = Join-Path $ResponsesDir "warmup.json" - $null = Invoke-CdaRequest -Url $requestUrl -ResponseFile $warmupFile - if (-not $KeepResponses -and (Test-Path -LiteralPath $warmupFile)) { - Remove-Item -LiteralPath $warmupFile -Force - } -} - -$results = @() -$failedRuns = @() -for ($run = 1; $run -le $Runs; $run++) { - $responseFile = Join-Path $ResponsesDir ("timeseries-read-run-{0}.json" -f $run) - $curlMetrics = Invoke-CdaRequest -Url $requestUrl -ResponseFile $responseFile - $responseSummary = Get-ResponseSummary -ResponseFile $responseFile - $errorBody = $null - if ($curlMetrics.http_code -ne 200) { - $errorBody = [string](Get-Content -LiteralPath $responseFile -Raw) - } - - $result = [pscustomobject]@{ - run = $run - http_code = [int]$curlMetrics.http_code - time_total_seconds = [double]$curlMetrics.time_total - time_starttransfer_seconds = [double]$curlMetrics.time_starttransfer - time_connect_seconds = [double]$curlMetrics.time_connect - size_download_bytes = [double]$curlMetrics.size_download - speed_download_bytes_per_second = [double]$curlMetrics.speed_download - response_bytes_on_disk = 
[long]$responseSummary.ResponseBytes - reported_total = $responseSummary.Total - reported_page_size = $responseSummary.PageSize - first_timestamp = $responseSummary.FirstTimestamp - last_timestamp = $responseSummary.LastTimestamp - error_body = $errorBody - response_file = $responseFile - } - $results += $result - if ($curlMetrics.http_code -ne 200) { - $failedRuns += $result - } - - if (-not $KeepResponses -and (Test-Path -LiteralPath $responseFile)) { - Remove-Item -LiteralPath $responseFile -Force - $result.response_file = $null - } -} - -$gitBranch = (& git branch --show-current 2>$null) -$gitBranchExitCode = $LASTEXITCODE -$gitCommit = (& git rev-parse HEAD 2>$null) -$gitCommitExitCode = $LASTEXITCODE -$timestamp = Get-Date -Format "yyyyMMdd-HHmmss" -$resultFile = Join-Path $ResultsDir ("timeseries-read-benchmark-{0}.json" -f $timestamp) -$successfulRuns = @($results | Where-Object { $_.http_code -eq 200 }) -$summary = $null -if ($successfulRuns.Count -gt 0) { - $avg = ($successfulRuns | Measure-Object -Property time_total_seconds -Average).Average - $min = ($successfulRuns | Measure-Object -Property time_total_seconds -Minimum).Minimum - $max = ($successfulRuns | Measure-Object -Property time_total_seconds -Maximum).Maximum - $summary = [pscustomobject]@{ - successful_runs = $successfulRuns.Count - average_time_total_seconds = [math]::Round([double]$avg, 6) - min_time_total_seconds = [math]::Round([double]$min, 6) - max_time_total_seconds = [math]::Round([double]$max, 6) - } -} - -$payload = [pscustomobject]@{ - benchmark = "timeseries-read" - generated_at = (Get-Date).ToUniversalTime().ToString("o") - git_branch = if ($gitBranchExitCode -eq 0) { $gitBranch.Trim() } else { $null } - git_commit = if ($gitCommitExitCode -eq 0) { $gitCommit.Trim() } else { $null } - office = $Office - location_id = $LocationId - series_id = $SeriesId - units = $Units - start_time_utc = $startUtc.ToString("o") - end_time_utc = $endUtc.ToString("o") - point_count = $PointCount - 
page_size = $PageSize - request_url = $requestUrl - seed = [pscustomobject]@{ - seeded = [bool]$seedInfo.Seeded - point_count = [int]$seedInfo.ExistingPointCount - } - summary = $summary - runs = $results -} - -$payload | ConvertTo-Json -Depth 6 | Set-Content -LiteralPath $resultFile -Encoding ASCII -$payload | ConvertTo-Json -Depth 6 - -if ($failedRuns.Count -gt 0) { - $statusList = ($failedRuns | ForEach-Object { $_.http_code }) -join ", " - throw "Benchmark completed with HTTP failures ($statusList). Results saved to $resultFile" -} From 4e45eded7fce064102597638b2be66a8d43b4a33 Mon Sep 17 00:00:00 2001 From: "Charles Graham, SWT" Date: Tue, 21 Apr 2026 00:29:00 -0500 Subject: [PATCH 05/16] Reuse existing TimeSeries types in in tests/Dao --- .../cwms/cda/data/dao/TimeSeriesDaoImpl.java | 249 +++----- .../cda/api/TimeSeriesDirectReadParityIT.java | 561 +++++++++--------- 2 files changed, 346 insertions(+), 464 deletions(-) diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java index d9db374174..e79c6877dd 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java @@ -686,13 +686,31 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, } } - RequestedTimeSeriesMetadata metadata = fetchRequestedTimeSeriesMetadata(requestParameters); + Record metadata = fetchRequestedTimeSeriesMetadataRecord(requestParameters); if (metadata == null) { throw new DataAccessException("Unable to resolve time series metadata for " + names); } - String parmPart = metadata.getParmPart(); - String locPart = metadata.getLocPart(); + BigDecimal intervalValue = metadata.getValue("interval", BigDecimal.class); + Number offsetValue = metadata.getValue(AV_CWMS_TS_ID2.INTERVAL_UTC_OFFSET); + BigDecimal tsCodeValue = metadata.getValue("tscode", BigDecimal.class); + long tsCode = 
tsCodeValue.longValue(); + String tsId = metadata.getValue("tsid", String.class); + String metadataOfficeId = metadata.getValue("office_id", String.class); + String metadataUnits = metadata.getValue("units", String.class); + String sourceUnit = metadata.getValue("source_unit", String.class); + String locPart = metadata.getValue("loc_part", String.class); + String parmPart = metadata.getValue("parm_part", String.class); + String intervalPart = metadata.getValue("interval_part", String.class); + long intervalMinutes = intervalValue == null ? 0L : intervalValue.longValue(); + long intervalOffset = offsetValue == null ? UTC_OFFSET_IRREGULAR : offsetValue.longValue(); + String timeZoneId = metadata.getValue(AV_CWMS_TS_ID2.TIME_ZONE_ID) == null + ? UTC + : metadata.getValue(AV_CWMS_TS_ID2.TIME_ZONE_ID); + boolean isLrts = parseBool(CWMS_TS_PACKAGE.call_IS_LRTS__2(dsl.configuration(), tsCode)); + + validateRequestedUnits(sourceUnit, metadataUnits); + VerticalDatumInfo verticalDatumInfo = null; if (shouldFetchVerticalDatum(parmPart)) { verticalDatumInfo = fetchVerticalDatumInfoSeparately(locPart, requestedUnits, office); @@ -703,23 +721,25 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, return null; } - List rawRows = fetchRequestedTimeSeriesRows(metadata, requestParameters); - List expectedTimes = fetchExpectedRegularTimes(metadata, requestParameters, rawRows); + List rawRows = fetchRequestedTimeSeriesRows(tsCode, metadataOfficeId, metadataUnits, + requestParameters); + List expectedTimes = fetchExpectedRegularTimes(intervalMinutes, intervalOffset, timeZoneId, + intervalPart, isLrts, requestParameters, rawRows); int total = countMergedRows(rawRows, expectedTimes); TimeSeries timeseries = new TimeSeries( cursor, pageSize, total, - metadata.getTsId(), - metadata.getOfficeId(), + tsId, + metadataOfficeId, beginTime, endTime, - metadata.getUnits(), - Duration.ofMinutes(metadata.getIntervalMinutes()), + metadataUnits, + 
Duration.ofMinutes(intervalMinutes), verticalDatumInfo, - metadata.getIntervalOffset(), - metadata.getTimeZoneId(), + intervalOffset, + timeZoneId, versionDate, finalDateVersionType ); @@ -728,7 +748,7 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, return timeseries; } - private RequestedTimeSeriesMetadata fetchRequestedTimeSeriesMetadata( + private Record fetchRequestedTimeSeriesMetadataRecord( TimeSeriesRequestParameters requestParameters) { String names = requestParameters.getNames(); String office = requestParameters.getOffice(); @@ -804,35 +824,11 @@ private RequestedTimeSeriesMetadata fetchRequestedTimeSeriesMetadata( logger.atFine().log("%s", lazy(() -> metadataQuery.getSQL(ParamType.INLINED))); - return metadataQuery.fetchOne(tsMetadata -> { - BigDecimal intervalValue = tsMetadata.getValue("interval", BigDecimal.class); - Number offsetValue = tsMetadata.getValue(AV_CWMS_TS_ID2.INTERVAL_UTC_OFFSET); - BigDecimal tsCodeValue = tsMetadata.getValue("tscode", BigDecimal.class); - long tsCodeLong = tsCodeValue.longValue(); - String requestedUnit = tsMetadata.getValue("units", String.class); - String sourceUnit = tsMetadata.getValue("source_unit", String.class); - validateRequestedUnits(sourceUnit, requestedUnit); - boolean isLrts = parseBool(CWMS_TS_PACKAGE.call_IS_LRTS__2(dsl.configuration(), tsCodeLong)); - return new RequestedTimeSeriesMetadata( - tsCodeLong, - tsMetadata.getValue("tsid", String.class), - tsMetadata.getValue("office_id", String.class), - requestedUnit, - intervalValue == null ? 0L : intervalValue.longValue(), - offsetValue == null ? UTC_OFFSET_IRREGULAR : offsetValue.longValue(), - tsMetadata.getValue(AV_CWMS_TS_ID2.TIME_ZONE_ID) == null - ? 
UTC - : tsMetadata.getValue(AV_CWMS_TS_ID2.TIME_ZONE_ID), - tsMetadata.getValue("loc_part", String.class), - tsMetadata.getValue("parm_part", String.class), - tsMetadata.getValue("interval_part", String.class), - isLrts - ); - }); + return metadataQuery.fetchOne(); } - private List fetchRequestedTimeSeriesRows(RequestedTimeSeriesMetadata metadata, - TimeSeriesRequestParameters requestParameters) { + private List fetchRequestedTimeSeriesRows(long tsCode, String officeId, String units, + TimeSeriesRequestParameters requestParameters) { ZonedDateTime beginTime = requestParameters.getBeginTime(); ZonedDateTime endTime = requestParameters.getEndTime(); ZonedDateTime versionDate = requestParameters.getVersionDate(); @@ -848,9 +844,9 @@ private List fetchRequestedTimeSeriesRows(RequestedTim qualityForNormalization).as("quality_norm"); Condition baseCondition = view.ALIASED_ITEM.isNull() - .and(view.TS_CODE.eq(metadata.getTsCode())) - .and(view.OFFICE_ID.eq(metadata.getOfficeId())) - .and(view.UNIT_ID.equalIgnoreCase(metadata.getUnits())) + .and(view.TS_CODE.eq(tsCode)) + .and(view.OFFICE_ID.eq(officeId)) + .and(view.UNIT_ID.equalIgnoreCase(units)) .and(view.DATE_TIME.ge(beginTimestamp)) .and(view.DATE_TIME.le(endTimestamp)) .and(view.START_DATE.le(endTimestamp)) @@ -895,18 +891,23 @@ private List fetchRequestedTimeSeriesRows(RequestedTim query.orderBy(field(DATE_TIME, Timestamp.class).asc()); logger.atFine().log("%s", lazy(() -> query.getSQL(ParamType.INLINED))); - return query.fetch(record -> new RetrievedTimeSeriesValue( - record.getValue(0, Timestamp.class), - record.getValue(1, Double.class), - record.getValue(2, BigDecimal.class).intValue(), - record.getValue(3, Timestamp.class) - )); + return query.fetch(record -> { + Timestamp dateTime = record.getValue(0, Timestamp.class); + Double value = record.getValue(1, Double.class); + int qualityCode = record.getValue(2, BigDecimal.class).intValue(); + Timestamp dataEntryDate = record.getValue(3, Timestamp.class); + if 
(dataEntryDate != null) { + return new TimeSeries.Record(dateTime, value, qualityCode, dataEntryDate); + } + return new TimeSeries.Record(dateTime, value, qualityCode); + }); } - private List fetchExpectedRegularTimes(RequestedTimeSeriesMetadata metadata, + private List fetchExpectedRegularTimes(long intervalMinutes, long intervalOffset, String timeZoneId, + String intervalPart, boolean isLrts, TimeSeriesRequestParameters requestParameters, - List rawRows) { - if (!isRegularSeries(metadata)) { + List rawRows) { + if (!isRegularSeries(intervalMinutes, intervalOffset)) { return Collections.emptyList(); } if (rawRows.isEmpty() && requestParameters.isShouldTrim()) { @@ -920,17 +921,18 @@ private List fetchExpectedRegularTimes(RequestedTimeSeriesMetadata me ? rawRows.get(rawRows.size() - 1).getDateTime() : Timestamp.from(requestParameters.getEndTime().toInstant()); - long offsetMinutes = resolveIntervalOffset(metadata, rawRows); - if (canGenerateExpectedTimesInJava(metadata)) { - return buildExpectedRegularTimesUtc(rangeStart, rangeEnd, metadata.getIntervalMinutes(), offsetMinutes); + long offsetMinutes = resolveIntervalOffset(intervalMinutes, intervalOffset, timeZoneId, intervalPart, isLrts, + rawRows); + if (canGenerateExpectedTimesInJava(intervalMinutes, intervalPart, isLrts)) { + return buildExpectedRegularTimesUtc(rangeStart, rangeEnd, intervalMinutes, offsetMinutes); } - String intervalTimeZone = metadata.isLrts() ? metadata.getTimeZoneId() : UTC; + String intervalTimeZone = isLrts ? 
timeZoneId : UTC; DATE_RANGE_T dateRange = new DATE_RANGE_T(rangeStart, rangeEnd, UTC, "T", "T", null); DATE_TABLE_TYPE expectedTimeTable = CWMS_TS_PACKAGE.call_GET_REG_TS_TIMES_UTC_F( dsl.configuration(), dateRange, - metadata.getIntervalPart(), + intervalPart, String.valueOf(offsetMinutes), intervalTimeZone ); @@ -946,9 +948,8 @@ private List fetchExpectedRegularTimes(RequestedTimeSeriesMetadata me return retVal; } - private long resolveIntervalOffset(RequestedTimeSeriesMetadata metadata, - List rawRows) { - long intervalOffset = metadata.getIntervalOffset(); + private long resolveIntervalOffset(long intervalMinutes, long intervalOffset, String timeZoneId, + String intervalPart, boolean isLrts, List rawRows) { if (intervalOffset != UTC_OFFSET_UNDEFINED) { return intervalOffset; } @@ -956,27 +957,27 @@ private long resolveIntervalOffset(RequestedTimeSeriesMetadata metadata, return 0L; } - if (canGenerateExpectedTimesInJava(metadata)) { - long intervalMillis = TimeUnit.MINUTES.toMillis(metadata.getIntervalMinutes()); + if (canGenerateExpectedTimesInJava(intervalMinutes, intervalPart, isLrts)) { + long intervalMillis = TimeUnit.MINUTES.toMillis(intervalMinutes); return TimeUnit.MILLISECONDS.toMinutes(Math.floorMod(rawRows.get(0).getDateTime().getTime(), intervalMillis)); } - String intervalTimeZone = metadata.isLrts() ? metadata.getTimeZoneId() : UTC; + String intervalTimeZone = isLrts ? 
timeZoneId : UTC; Timestamp topOfInterval = normalizeOracleUtcTimestamp(CWMS_TS_PACKAGE.call_TOP_OF_INTERVAL_UTC( dsl.configuration(), rawRows.get(0).getDateTime(), - metadata.getIntervalPart(), + intervalPart, intervalTimeZone, "F" )); return (rawRows.get(0).getDateTime().getTime() - topOfInterval.getTime()) / TimeUnit.MINUTES.toMillis(1); } - private boolean isRegularSeries(RequestedTimeSeriesMetadata metadata) { - return metadata.getIntervalMinutes() != 0L || metadata.getIntervalOffset() != UTC_OFFSET_IRREGULAR; + private boolean isRegularSeries(long intervalMinutes, long intervalOffset) { + return intervalMinutes != 0L || intervalOffset != UTC_OFFSET_IRREGULAR; } - private int countMergedRows(List rawRows, List expectedTimes) { + private int countMergedRows(List rawRows, List expectedTimes) { if (expectedTimes.isEmpty()) { return rawRows.size(); } @@ -1009,7 +1010,7 @@ private int countMergedRows(List rawRows, List rawRows, + List rawRows, List expectedTimes, Timestamp tsCursor, boolean includeEntryDate) { @@ -1019,11 +1020,11 @@ private void populateTimeSeriesValues(TimeSeries timeseries, int maxRecords = timeseries.getPageSize() > 0 ? timeseries.getPageSize() + 1 : Integer.MAX_VALUE; while ((rawIndex < rawRows.size() || expectedIndex < expectedTimes.size()) && collected < maxRecords) { - RetrievedTimeSeriesValue rawRow = rawIndex < rawRows.size() ? rawRows.get(rawIndex) : null; + TimeSeries.Record rawRow = rawIndex < rawRows.size() ? rawRows.get(rawIndex) : null; Timestamp expectedTime = expectedIndex < expectedTimes.size() ? 
expectedTimes.get(expectedIndex) : null; Timestamp candidateTime; - RetrievedTimeSeriesValue candidateRow = null; + TimeSeries.Record candidateRow = null; boolean syntheticRow = false; if (rawRow == null) { @@ -1082,12 +1083,11 @@ private Timestamp normalizeOracleUtcTimestamp(Timestamp timestamp) { return Timestamp.from(utcWallTime.toInstant(ZoneOffset.UTC)); } - private boolean canGenerateExpectedTimesInJava(RequestedTimeSeriesMetadata metadata) { - if (metadata.isLrts() || metadata.getIntervalMinutes() <= 0L) { + private boolean canGenerateExpectedTimesInJava(long intervalMinutes, String intervalPart, boolean isLrts) { + if (isLrts || intervalMinutes <= 0L) { return false; } - String intervalPart = metadata.getIntervalPart(); if (intervalPart == null) { return false; } @@ -1139,111 +1139,6 @@ private void validateRequestedUnits(String sourceUnit, String requestedUnit) { .fetchOne(0, Double.class); } - private static final class RequestedTimeSeriesMetadata { - private final long tsCode; - private final String tsId; - private final String officeId; - private final String units; - private final long intervalMinutes; - private final long intervalOffset; - private final String timeZoneId; - private final String locPart; - private final String parmPart; - private final String intervalPart; - private final boolean isLrts; - - private RequestedTimeSeriesMetadata(long tsCode, String tsId, String officeId, String units, - long intervalMinutes, long intervalOffset, String timeZoneId, - String locPart, String parmPart, String intervalPart, - boolean isLrts) { - this.tsCode = tsCode; - this.tsId = tsId; - this.officeId = officeId; - this.units = units; - this.intervalMinutes = intervalMinutes; - this.intervalOffset = intervalOffset; - this.timeZoneId = timeZoneId; - this.locPart = locPart; - this.parmPart = parmPart; - this.intervalPart = intervalPart; - this.isLrts = isLrts; - } - - private long getTsCode() { - return tsCode; - } - - private String getTsId() { - return tsId; 
- } - - private String getOfficeId() { - return officeId; - } - - private String getUnits() { - return units; - } - - private long getIntervalMinutes() { - return intervalMinutes; - } - - private long getIntervalOffset() { - return intervalOffset; - } - - private String getTimeZoneId() { - return timeZoneId; - } - - private String getLocPart() { - return locPart; - } - - private String getParmPart() { - return parmPart; - } - - private String getIntervalPart() { - return intervalPart; - } - - private boolean isLrts() { - return isLrts; - } - } - - private static final class RetrievedTimeSeriesValue { - private final Timestamp dateTime; - private final Double value; - private final int qualityCode; - private final Timestamp dataEntryDate; - - private RetrievedTimeSeriesValue(Timestamp dateTime, Double value, int qualityCode, Timestamp dataEntryDate) { - this.dateTime = dateTime; - this.value = value; - this.qualityCode = qualityCode; - this.dataEntryDate = dataEntryDate; - } - - private Timestamp getDateTime() { - return dateTime; - } - - private Double getValue() { - return value; - } - - private int getQualityCode() { - return qualityCode; - } - - private Timestamp getDataEntryDate() { - return dataEntryDate; - } - } - private boolean shouldFetchVerticalDatum(String parmPart) { // Check if parameter requires vertical datum (e.g., "ELEV") if (parmPart == null) { diff --git a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java index b3e9d68834..7a121250f4 100644 --- a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java +++ b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java @@ -8,15 +8,20 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import cwms.cda.api.enums.VersionType; +import cwms.cda.data.dto.TimeSeries; import cwms.cda.formatters.Formats; +import 
cwms.cda.formatters.json.JsonV2; import fixtures.CwmsDataApiSetupCallback; import io.restassured.filter.log.LogDetail; import io.restassured.response.ExtractableResponse; import io.restassured.response.Response; +import io.restassured.specification.RequestSpecification; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; +import java.sql.Timestamp; import java.time.Duration; import java.time.Instant; import java.time.LocalDateTime; @@ -29,72 +34,184 @@ import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; -import java.util.stream.Stream; import java.util.stream.IntStream; import javax.servlet.http.HttpServletResponse; import mil.army.usace.hec.test.database.CwmsDatabaseContainer; import org.jooq.impl.DSL; import org.junit.jupiter.api.Tag; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.api.Test; import usace.cwms.db.jooq.codegen.packages.CWMS_TS_PACKAGE; -import io.restassured.specification.RequestSpecification; @Tag("integration") final class TimeSeriesDirectReadParityIT extends DataApiTestIT { - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + private static final ObjectMapper OBJECT_MAPPER = JsonV2.buildObjectMapper(); private static final String OFFICE = "SPK"; private static final double DOUBLE_TOLERANCE = 1e-9; - @ParameterizedTest(name = "{0}") - @MethodSource("scenarios") - void directReadMatchesOracleRetrieveTs(Scenario scenario) throws Exception { - seedScenario(scenario); + @Test + void denseRegularReadMatchesRetrieveTs() throws Exception { + assertDirectReadMatchesOracle( + "ITPARREG", + "ITPARREG.Stage.Inst.1Minute.0.BENCH", + "ft", + Instant.parse("2024-01-01T00:00:00Z"), + Instant.parse("2024-01-01T00:05:00Z"), + denseRows(), + false, + false, + VersionType.UNVERSIONED, + Duration.ofMinutes(1), + 0L, + null + ); + } - List expectedRows = 
fetchOracleRows(scenario); - TimeSeriesResponse actualResponse = fetchCdaRows(scenario); - String mismatchSummary = buildMismatchSummary(expectedRows, actualResponse); + @Test + void denseRegularEntryDateReadMatchesRetrieveTs() throws Exception { + assertDirectReadMatchesOracle( + "ITPARREG", + "ITPARREG.Stage.Inst.1Minute.0.BENCH", + "ft", + Instant.parse("2024-01-01T00:00:00Z"), + Instant.parse("2024-01-01T00:05:00Z"), + denseRows(), + false, + true, + VersionType.UNVERSIONED, + Duration.ofMinutes(1), + 0L, + null + ); + } - assertEquals(expectedRows.size(), actualResponse.total, "Reported total " + mismatchSummary); - assertEquals(scenario.expectedDateVersionType, actualResponse.dateVersionType, "Date version type"); - assertEquals(scenario.expectedInterval, actualResponse.interval, "Interval"); - assertEquals(scenario.expectedIntervalOffset, actualResponse.intervalOffset, "Interval offset"); + @Test + void gapFilledRegularReadMatchesRetrieveTs() throws Exception { + assertDirectReadMatchesOracle( + "ITPARGAP", + "ITPARGAP.Stage.Inst.1Minute.0.BENCH", + "ft", + Instant.parse("2024-01-01T00:00:00Z"), + Instant.parse("2024-01-01T00:09:00Z"), + gapRows(), + false, + false, + VersionType.UNVERSIONED, + Duration.ofMinutes(1), + 0L, + null + ); + } - if (scenario.versionDate != null) { - assertNotNull(actualResponse.versionDate, "Version date"); - assertEquals(scenario.versionDate, actualResponse.versionDate, "Version date"); - } else { - assertNull(actualResponse.versionDate, "Version date"); - } + @Test + void maxVersionReadMatchesRetrieveTs() throws Exception { + assertDirectReadMatchesOracle( + "ITPARVER", + "ITPARVER.Flow.Inst.1Hour.0.BENCH", + "cfs", + Instant.parse("2024-05-01T15:00:00Z"), + Instant.parse("2024-05-01T18:00:00Z"), + versionedRows(), + true, + false, + VersionType.MAX_AGGREGATE, + Duration.ofHours(1), + 0L, + null + ); + } - assertEquals(expectedRows.size(), actualResponse.rows.size(), "Row count " + mismatchSummary); - for (int i = 0; i < 
expectedRows.size(); i++) { - assertRowsEqual(expectedRows.get(i), actualResponse.rows.get(i), i); - } + @Test + void specificVersionReadMatchesRetrieveTs() throws Exception { + Instant newerVersion = Instant.parse("2024-06-21T08:00:00Z"); + assertDirectReadMatchesOracle( + "ITPARVER", + "ITPARVER.Flow.Inst.1Hour.0.BENCH", + "cfs", + Instant.parse("2024-05-01T15:00:00Z"), + Instant.parse("2024-05-01T18:00:00Z"), + versionedRows(), + true, + false, + VersionType.SINGLE_VERSION, + Duration.ofHours(1), + 0L, + newerVersion + ); } - private static String buildMismatchSummary(List expectedRows, TimeSeriesResponse actualResponse) { - return "expectedRows=" + summarizeRows(expectedRows) - + " actualRows=" + summarizeRows(actualResponse.rows) - + " actualTotal=" + actualResponse.total; + @Test + void irregularReadMatchesRetrieveTs() throws Exception { + assertDirectReadMatchesOracle( + "ITPARIRR", + "ITPARIRR.Flow.Inst.0.0.BENCH", + "cfs", + Instant.parse("2024-01-05T12:00:00Z"), + Instant.parse("2024-01-05T12:33:10Z"), + irregularRows(), + false, + false, + VersionType.UNVERSIONED, + Duration.ZERO, + (long) Integer.MIN_VALUE, + null + ); } - private static String summarizeRows(List rows) { - return rows.stream() - .limit(12) - .map(row -> "{t=" + row.dateTimeMillis - + ",v=" + row.value - + ",q=" + row.qualityCode - + ",e=" + row.dataEntryDateMillis - + "}") - .collect(Collectors.joining(", ", "[", rows.size() > 12 ? 
", ...]" : "]")); + @Test + void dstWindowRegularReadMatchesRetrieveTs() throws Exception { + Instant dstStart = Instant.parse("2024-03-09T00:00:00Z"); + assertDirectReadMatchesOracle( + "ITPARDST", + "ITPARDST.Stage.Inst.1Minute.0.BENCH", + "ft", + dstStart, + dstStart.plus(Duration.ofMinutes(4999)), + regularRows(dstStart, 5000, 1.0, Duration.ofDays(1)), + false, + false, + VersionType.UNVERSIONED, + Duration.ofMinutes(1), + 0L, + null + ); } - private static Stream scenarios() { - Instant olderVersion = Instant.parse("2024-06-20T08:00:00Z"); - Instant newerVersion = Instant.parse("2024-06-21T08:00:00Z"); + private static void assertDirectReadMatchesOracle(String locationId, String seriesId, String units, + Instant beginTime, Instant endTime, List rows, + boolean versioned, boolean includeEntryDate, + VersionType expectedDateVersionType, + Duration expectedInterval, long expectedIntervalOffset, + Instant versionDate) throws Exception { + seedTimeSeries(locationId, seriesId, rows, versioned); + + List expectedRows = fetchOracleRows(seriesId, units, beginTime, endTime, + includeEntryDate, versionDate); + TimeSeries actualResponse = fetchCdaRows(seriesId, units, beginTime, endTime, rows.size(), + includeEntryDate, versionDate); + String mismatchSummary = buildMismatchSummary(expectedRows, actualResponse); + + assertNotNull(actualResponse.getTotal(), "Reported total " + mismatchSummary); + assertEquals(expectedRows.size(), actualResponse.getTotal(), "Reported total " + mismatchSummary); + assertEquals(expectedDateVersionType, actualResponse.getDateVersionType(), "Date version type"); + assertEquals(expectedInterval, actualResponse.getInterval(), "Interval"); + assertEquals(expectedIntervalOffset, actualResponse.getIntervalOffset(), "Interval offset"); + + if (versionDate != null) { + assertNotNull(actualResponse.getVersionDate(), "Version date"); + assertEquals(versionDate, actualResponse.getVersionDate().toInstant(), "Version date"); + } else { + 
assertNull(actualResponse.getVersionDate(), "Version date"); + } + + assertNotNull(actualResponse.getValues(), "Values " + mismatchSummary); + assertEquals(expectedRows.size(), actualResponse.getValues().size(), "Row count " + mismatchSummary); + for (int index = 0; index < expectedRows.size(); index++) { + assertRecordsEqual(expectedRows.get(index), actualResponse.getValues().get(index), index); + } + } - List denseRows = List.of( + private static List denseRows() { + return List.of( row("2024-01-01T00:00:00Z", 1.0, 0, "2024-01-02T00:00:00Z", null), row("2024-01-01T00:01:00Z", 2.0, 0, "2024-01-02T00:01:00Z", null), row("2024-01-01T00:02:00Z", 3.0, 0, "2024-01-02T00:02:00Z", null), @@ -102,8 +219,10 @@ private static Stream scenarios() { row("2024-01-01T00:04:00Z", 5.0, 0, "2024-01-02T00:04:00Z", null), row("2024-01-01T00:05:00Z", 6.0, 0, "2024-01-02T00:05:00Z", null) ); + } - List gapRows = List.of( + private static List gapRows() { + return List.of( row("2024-01-01T00:00:00Z", 1.0, 0, "2024-01-03T00:00:00Z", null), row("2024-01-01T00:01:00Z", 2.0, 0, "2024-01-03T00:01:00Z", null), row("2024-01-01T00:02:00Z", 3.0, 0, "2024-01-03T00:02:00Z", null), @@ -113,8 +232,12 @@ private static Stream scenarios() { row("2024-01-01T00:08:00Z", 9.0, 0, "2024-01-03T00:08:00Z", null), row("2024-01-01T00:09:00Z", 10.0, 0, "2024-01-03T00:09:00Z", null) ); + } - List versionedRows = List.of( + private static List versionedRows() { + Instant olderVersion = Instant.parse("2024-06-20T08:00:00Z"); + Instant newerVersion = Instant.parse("2024-06-21T08:00:00Z"); + return List.of( row("2024-05-01T15:00:00Z", 4.0, 0, "2024-06-20T09:00:00Z", olderVersion), row("2024-05-01T16:00:00Z", 4.0, 0, "2024-06-20T09:01:00Z", olderVersion), row("2024-05-01T17:00:00Z", 4.0, 0, "2024-06-20T09:02:00Z", olderVersion), @@ -123,113 +246,19 @@ private static Stream scenarios() { row("2024-05-01T16:00:00Z", 1.0, 0, "2024-06-21T09:01:00Z", newerVersion), row("2024-05-01T17:00:00Z", 1.0, 0, 
"2024-06-21T09:02:00Z", newerVersion) ); + } - List irregularRows = List.of( + private static List irregularRows() { + return List.of( row("2024-01-05T12:00:00Z", 10.0, 0, "2024-01-06T00:00:00Z", null), row("2024-01-05T12:07:20Z", 20.0, 0, "2024-01-06T00:01:00Z", null), row("2024-01-05T12:19:45Z", 30.0, 0, "2024-01-06T00:02:00Z", null), row("2024-01-05T12:33:10Z", 40.0, 0, "2024-01-06T00:03:00Z", null) ); - - Instant dstStart = Instant.parse("2024-03-09T00:00:00Z"); - List dstRows = regularRows(dstStart, 5000, 1.0, Duration.ofDays(1)); - - return Stream.of( - new Scenario("dense-regular", - "ITPARREG", - "ITPARREG.Stage.Inst.1Minute.0.BENCH", - "ft", - Instant.parse("2024-01-01T00:00:00Z"), - Instant.parse("2024-01-01T00:05:00Z"), - denseRows, - false, - false, - "UNVERSIONED", - "PT1M", - 0L, - null), - new Scenario("dense-regular-entry-date", - "ITPARREG", - "ITPARREG.Stage.Inst.1Minute.0.BENCH", - "ft", - Instant.parse("2024-01-01T00:00:00Z"), - Instant.parse("2024-01-01T00:05:00Z"), - denseRows, - false, - true, - "UNVERSIONED", - "PT1M", - 0L, - null), - new Scenario("gap-regular", - "ITPARGAP", - "ITPARGAP.Stage.Inst.1Minute.0.BENCH", - "ft", - Instant.parse("2024-01-01T00:00:00Z"), - Instant.parse("2024-01-01T00:09:00Z"), - gapRows, - false, - false, - "UNVERSIONED", - "PT1M", - 0L, - null), - new Scenario("versioned-max", - "ITPARVER", - "ITPARVER.Flow.Inst.1Hour.0.BENCH", - "cfs", - Instant.parse("2024-05-01T15:00:00Z"), - Instant.parse("2024-05-01T18:00:00Z"), - versionedRows, - true, - false, - "MAX_AGGREGATE", - "PT1H", - 0L, - null), - new Scenario("versioned-single", - "ITPARVER", - "ITPARVER.Flow.Inst.1Hour.0.BENCH", - "cfs", - Instant.parse("2024-05-01T15:00:00Z"), - Instant.parse("2024-05-01T18:00:00Z"), - versionedRows, - true, - false, - "SINGLE_VERSION", - "PT1H", - 0L, - newerVersion), - new Scenario("irregular", - "ITPARIRR", - "ITPARIRR.Flow.Inst.0.0.BENCH", - "cfs", - Instant.parse("2024-01-05T12:00:00Z"), - 
Instant.parse("2024-01-05T12:33:10Z"), - irregularRows, - false, - false, - "UNVERSIONED", - "PT0S", - Integer.MIN_VALUE, - null), - new Scenario("dense-regular-dst-window", - "ITPARDST", - "ITPARDST.Stage.Inst.1Minute.0.BENCH", - "ft", - dstStart, - dstStart.plus(Duration.ofMinutes(4999)), - dstRows, - false, - false, - "UNVERSIONED", - "PT1M", - 0L, - null) - ); } - private static SeedRow row(String dateTime, Double value, int qualityCode, String dataEntryDate, Instant versionDate) { + private static SeedRow row(String dateTime, Double value, int qualityCode, String dataEntryDate, + Instant versionDate) { return new SeedRow( Instant.parse(dateTime), value, @@ -239,7 +268,8 @@ private static SeedRow row(String dateTime, Double value, int qualityCode, Strin ); } - private static List regularRows(Instant start, int count, double firstValue, Duration entryDateOffset) { + private static List regularRows(Instant start, int count, double firstValue, + Duration entryDateOffset) { return IntStream.range(0, count) .mapToObj(index -> new SeedRow( start.plusSeconds(index * 60L), @@ -251,47 +281,69 @@ private static List regularRows(Instant start, int count, double firstV .collect(Collectors.toList()); } - private static void assertRowsEqual(RetrievedRow expected, RetrievedRow actual, int index) { - assertEquals(expected.dateTimeMillis, actual.dateTimeMillis, "Row " + index + " timestamp"); - assertEquals(expected.qualityCode, actual.qualityCode, "Row " + index + " quality"); + private static String buildMismatchSummary(List expectedRows, TimeSeries actualResponse) { + return "expectedRows=" + summarizeRows(expectedRows) + + " actualRows=" + summarizeRows(actualResponse.getValues()) + + " actualTotal=" + actualResponse.getTotal(); + } - if (expected.value == null) { - assertNull(actual.value, "Row " + index + " value"); - } else { - assertNotNull(actual.value, "Row " + index + " value"); - assertEquals(expected.value, actual.value, DOUBLE_TOLERANCE, "Row " + index + " 
value"); + private static String summarizeRows(List rows) { + if (rows == null) { + return "null"; } - if (expected.dataEntryDateMillis == null) { - assertNull(actual.dataEntryDateMillis, "Row " + index + " entry date"); + return rows.stream() + .limit(12) + .map(row -> "{t=" + toMillis(row.getDateTime()) + + ",v=" + row.getValue() + + ",q=" + row.getQualityCode() + + ",e=" + toMillis(row.getDataEntryDate()) + + "}") + .collect(Collectors.joining(", ", "[", rows.size() > 12 ? ", ...]" : "]")); + } + + private static long toMillis(Timestamp timestamp) { + return timestamp != null ? timestamp.getTime() : Long.MIN_VALUE; + } + + private static void assertRecordsEqual(TimeSeries.Record expected, TimeSeries.Record actual, int index) { + assertEquals(expected.getDateTime(), actual.getDateTime(), "Row " + index + " timestamp"); + assertEquals(expected.getQualityCode(), actual.getQualityCode(), "Row " + index + " quality"); + + if (expected.getValue() == null) { + assertNull(actual.getValue(), "Row " + index + " value"); } else { - assertEquals(expected.dataEntryDateMillis, actual.dataEntryDateMillis, "Row " + index + " entry date"); + assertNotNull(actual.getValue(), "Row " + index + " value"); + assertEquals(expected.getValue(), actual.getValue(), DOUBLE_TOLERANCE, "Row " + index + " value"); } + + assertEquals(expected.getDataEntryDate(), actual.getDataEntryDate(), "Row " + index + " entry date"); } - private static void seedScenario(Scenario scenario) throws SQLException { - createLocation(scenario.locationId, true, OFFICE); - createTimeseries(OFFICE, scenario.seriesId, 0); + private static void seedTimeSeries(String locationId, String seriesId, List rows, + boolean versioned) throws SQLException { + createLocation(locationId, true, OFFICE); + createTimeseries(OFFICE, seriesId, 0); CwmsDatabaseContainer database = CwmsDataApiSetupCallback.getDatabaseLink(); database.connection(connection -> { try { 
CWMS_TS_PACKAGE.call_SET_TSID_VERSIONED(DSL.using(connection).configuration(), - scenario.seriesId, - scenario.versioned ? "T" : "F", + seriesId, + versioned ? "T" : "F", OFFICE); - long tsCode = findTsCode(connection, scenario.seriesId); - List years = scenario.rows.stream() - .map(row -> OffsetDateTime.ofInstant(row.dateTime, ZoneOffset.UTC).getYear()) + long tsCode = findTsCode(connection, seriesId); + List years = rows.stream() + .map(seedRow -> OffsetDateTime.ofInstant(seedRow.dateTime, ZoneOffset.UTC).getYear()) .distinct() .collect(Collectors.toList()); clearScenarioRows(connection, tsCode, years); - insertScenarioRows(connection, tsCode, scenario.rows); - updateScenarioExtents(connection, tsCode, scenario.rows); + insertScenarioRows(connection, tsCode, rows); + updateScenarioExtents(connection, tsCode, rows); } catch (SQLException e) { - throw new RuntimeException("Unable to seed scenario " + scenario.name, e); + throw new RuntimeException("Unable to seed time series " + seriesId, e); } }, "cwms_20"); } @@ -326,7 +378,8 @@ private static void clearScenarioRows(Connection connection, long tsCode, List rows) throws SQLException { + private static void insertScenarioRows(Connection connection, long tsCode, List rows) + throws SQLException { List sortedRows = new ArrayList<>(rows); sortedRows.sort(Comparator.comparing(seedRow -> seedRow.dateTime)); @@ -348,7 +401,8 @@ private static void insertScenarioRows(Connection connection, long tsCode, List< } } - private static void updateScenarioExtents(Connection connection, long tsCode, List rows) throws SQLException { + private static void updateScenarioExtents(Connection connection, long tsCode, List rows) + throws SQLException { Set distinctVersionDates = rows.stream() .map(seedRow -> seedRow.versionDate) .filter(Objects::nonNull) @@ -364,36 +418,39 @@ private static void updateScenarioExtents(Connection connection, long tsCode, Li } } - private static void updateTsExtents(Connection connection, long tsCode, String 
versionDateExpression) throws SQLException { + private static void updateTsExtents(Connection connection, long tsCode, String versionDateExpression) + throws SQLException { String sql = "begin cwms_ts.update_ts_extents(" + tsCode + ", " + versionDateExpression + "); end;"; try (PreparedStatement statement = connection.prepareStatement(sql)) { statement.execute(); } } - private static List fetchOracleRows(Scenario scenario) throws SQLException { + private static List fetchOracleRows(String seriesId, String units, Instant beginTime, + Instant endTime, boolean includeEntryDate, + Instant versionDate) throws SQLException { CwmsDatabaseContainer database = CwmsDataApiSetupCallback.getDatabaseLink(); return database.connection(connection -> { try { - String functionName = scenario.includeEntryDate + String functionName = includeEntryDate ? "cwms_20.cwms_ts.retrieve_ts_entry_out_tab" : "cwms_20.cwms_ts.retrieve_ts_out_tab"; - String rowProjection = scenario.includeEntryDate + String rowProjection = includeEntryDate ? ", case when data_entry_date is null then null else round((cast(data_entry_date as date) - date '1970-01-01') * 86400000) end as data_entry_date_ms" : ""; - String versionDateExpression = scenario.versionDate != null - ? toOracleDateExpression(scenario.versionDate) + String versionDateExpression = versionDate != null + ? toOracleDateExpression(versionDate) : "null"; - String maxVersionFlag = scenario.versionDate != null ? "'F'" : "'T'"; + String maxVersionFlag = versionDate != null ? 
"'F'" : "'T'"; String sql = "select round((date_time - date '1970-01-01') * 86400000) as date_time_ms," + " value," + " quality_code" + rowProjection + " from table(" + functionName + "(" - + toSqlStringLiteral(scenario.seriesId) + ", " - + toSqlStringLiteral(scenario.units) + ", " - + toOracleDateExpression(scenario.beginTime) + ", " - + toOracleDateExpression(scenario.endTime) + ", " + + toSqlStringLiteral(seriesId) + ", " + + toSqlStringLiteral(units) + ", " + + toOracleDateExpression(beginTime) + ", " + + toOracleDateExpression(endTime) + ", " + "'UTC', 'T', 'T', 'T', 'F', 'F', " + versionDateExpression + ", " + maxVersionFlag + ", " @@ -403,7 +460,7 @@ private static List fetchOracleRows(Scenario scenario) throws SQLE try (PreparedStatement statement = connection.prepareStatement(sql)) { try (ResultSet resultSet = statement.executeQuery()) { - List rows = new ArrayList<>(); + List rows = new ArrayList<>(); while (resultSet.next()) { Double value = resultSet.getDouble("value"); if (resultSet.wasNull()) { @@ -411,14 +468,14 @@ private static List fetchOracleRows(Scenario scenario) throws SQLE } Long dataEntryDateMillis = null; - if (scenario.includeEntryDate) { + if (includeEntryDate) { long entryMillis = resultSet.getLong("data_entry_date_ms"); if (!resultSet.wasNull()) { dataEntryDateMillis = entryMillis; } } - rows.add(new RetrievedRow( + rows.add(toRecord( resultSet.getLong("date_time_ms"), value, resultSet.getInt("quality_code"), @@ -429,25 +486,37 @@ private static List fetchOracleRows(Scenario scenario) throws SQLE } } } catch (SQLException e) { - throw new RuntimeException("Unable to fetch Oracle rows for " + scenario.name, e); + throw new RuntimeException("Unable to fetch Oracle rows for " + seriesId, e); } }, "cwms_20"); } - private static TimeSeriesResponse fetchCdaRows(Scenario scenario) throws Exception { - int pageSize = Math.max(1000, scenario.rows.size() * 2); + private static TimeSeries.Record toRecord(long dateTimeMillis, Double value, int 
qualityCode, + Long dataEntryDateMillis) { + Timestamp dateTime = Timestamp.from(Instant.ofEpochMilli(dateTimeMillis)); + if (dataEntryDateMillis != null) { + return new TimeSeries.Record(dateTime, value, qualityCode, + Timestamp.from(Instant.ofEpochMilli(dataEntryDateMillis))); + } + return new TimeSeries.Record(dateTime, value, qualityCode); + } + + private static TimeSeries fetchCdaRows(String seriesId, String units, Instant beginTime, Instant endTime, + int seedRowCount, boolean includeEntryDate, Instant versionDate) + throws Exception { + int pageSize = Math.max(1000, seedRowCount * 2); RequestSpecification request = given() .log().ifValidationFails(LogDetail.ALL, true) .accept(Formats.JSONV2) .queryParam(Controllers.OFFICE, OFFICE) - .queryParam(Controllers.NAME, scenario.seriesId) - .queryParam(Controllers.UNIT, scenario.units) - .queryParam(Controllers.BEGIN, scenario.beginTime.toString()) - .queryParam(Controllers.END, scenario.endTime.toString()) + .queryParam(Controllers.NAME, seriesId) + .queryParam(Controllers.UNIT, units) + .queryParam(Controllers.BEGIN, beginTime.toString()) + .queryParam(Controllers.END, endTime.toString()) .queryParam("page-size", pageSize) - .queryParam(Controllers.INCLUDE_ENTRY_DATE, scenario.includeEntryDate); - if (scenario.versionDate != null) { - request = request.queryParam(Controllers.VERSION_DATE, scenario.versionDate.toString()); + .queryParam(Controllers.INCLUDE_ENTRY_DATE, includeEntryDate); + if (versionDate != null) { + request = request.queryParam(Controllers.VERSION_DATE, versionDate.toString()); } ExtractableResponse response = request.when() @@ -460,36 +529,27 @@ private static TimeSeriesResponse fetchCdaRows(Scenario scenario) throws Excepti .statusCode(is(HttpServletResponse.SC_OK)) .extract(); - JsonNode payload = OBJECT_MAPPER.readTree(response.asString()); - List rows = new ArrayList<>(); + String responseBody = response.asString(); + TimeSeries timeSeries = OBJECT_MAPPER.readValue(responseBody, 
TimeSeries.class); + if (!includeEntryDate) { + return timeSeries; + } + + JsonNode payload = OBJECT_MAPPER.readTree(responseBody); + List values = new ArrayList<>(); for (JsonNode entry : payload.get("values")) { - Double value = entry.get(1).isNull() ? null : entry.get(1).asDouble(); Long dataEntryDateMillis = null; - if (scenario.includeEntryDate && entry.size() > 3 && !entry.get(3).isNull()) { + if (entry.size() > 3 && !entry.get(3).isNull()) { dataEntryDateMillis = entry.get(3).asLong(); } - rows.add(new RetrievedRow( + values.add(toRecord( entry.get(0).asLong(), - value, + entry.get(1).isNull() ? null : entry.get(1).asDouble(), entry.get(2).asInt(), dataEntryDateMillis )); } - - Instant versionDate = null; - JsonNode versionDateNode = payload.get("version-date"); - if (versionDateNode != null && !versionDateNode.isNull()) { - versionDate = OffsetDateTime.parse(versionDateNode.asText()).toInstant(); - } - - return new TimeSeriesResponse( - rows, - payload.get("total").asInt(), - payload.get("date-version-type").asText(), - payload.get("interval").asText(), - payload.get("interval-offset").asLong(), - versionDate - ); + return timeSeries.withValues(values); } private static String toSqlStringLiteral(String value) { @@ -508,46 +568,6 @@ private static String toOracleTimestampExpression(Instant instant) { + "', 'yyyy-mm-dd hh24:mi:ss')"; } - private static final class Scenario { - private final String name; - private final String locationId; - private final String seriesId; - private final String units; - private final Instant beginTime; - private final Instant endTime; - private final List rows; - private final boolean versioned; - private final boolean includeEntryDate; - private final String expectedDateVersionType; - private final String expectedInterval; - private final long expectedIntervalOffset; - private final Instant versionDate; - - private Scenario(String name, String locationId, String seriesId, String units, Instant beginTime, - Instant endTime, 
List rows, boolean versioned, boolean includeEntryDate, - String expectedDateVersionType, String expectedInterval, long expectedIntervalOffset, - Instant versionDate) { - this.name = name; - this.locationId = locationId; - this.seriesId = seriesId; - this.units = units; - this.beginTime = beginTime; - this.endTime = endTime; - this.rows = rows; - this.versioned = versioned; - this.includeEntryDate = includeEntryDate; - this.expectedDateVersionType = expectedDateVersionType; - this.expectedInterval = expectedInterval; - this.expectedIntervalOffset = expectedIntervalOffset; - this.versionDate = versionDate; - } - - @Override - public String toString() { - return name; - } - } - private static final class SeedRow { private final Instant dateTime; private final Double value; @@ -564,37 +584,4 @@ private SeedRow(Instant dateTime, Double value, int qualityCode, Instant dataEnt this.versionDate = versionDate; } } - - private static final class RetrievedRow { - private final long dateTimeMillis; - private final Double value; - private final int qualityCode; - private final Long dataEntryDateMillis; - - private RetrievedRow(long dateTimeMillis, Double value, int qualityCode, Long dataEntryDateMillis) { - this.dateTimeMillis = dateTimeMillis; - this.value = value; - this.qualityCode = qualityCode; - this.dataEntryDateMillis = dataEntryDateMillis; - } - } - - private static final class TimeSeriesResponse { - private final List rows; - private final int total; - private final String dateVersionType; - private final String interval; - private final long intervalOffset; - private final Instant versionDate; - - private TimeSeriesResponse(List rows, int total, String dateVersionType, - String interval, long intervalOffset, Instant versionDate) { - this.rows = rows; - this.total = total; - this.dateVersionType = dateVersionType; - this.interval = interval; - this.intervalOffset = intervalOffset; - this.versionDate = versionDate; - } - } } From 
378e72e7da9a03456bbcc34803dbce1f7c2aa152 Mon Sep 17 00:00:00 2001 From: "Charles Graham, SWT" Date: Wed, 22 Apr 2026 21:11:12 -0500 Subject: [PATCH 06/16] Simplify direct read metadata query --- .../cwms/cda/data/dao/TimeSeriesDaoImpl.java | 126 +++++++++--------- 1 file changed, 60 insertions(+), 66 deletions(-) diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java index e79c6877dd..6a4b6103d6 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java @@ -665,8 +665,6 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, String cursor = null; Timestamp tsCursor = null; - validateEntryDateSupport(includeEntryDate); - if (page != null && !page.isEmpty()) { final String[] parts = CwmsDTOPaginated.decodeCursor(page); @@ -692,37 +690,36 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, } BigDecimal intervalValue = metadata.getValue("interval", BigDecimal.class); - Number offsetValue = metadata.getValue(AV_CWMS_TS_ID2.INTERVAL_UTC_OFFSET); + Number offsetValue = metadata.getValue("interval_utc_offset", Number.class); BigDecimal tsCodeValue = metadata.getValue("tscode", BigDecimal.class); long tsCode = tsCodeValue.longValue(); String tsId = metadata.getValue("tsid", String.class); + String[] tsIdParts = splitTimeSeriesId(tsId); String metadataOfficeId = metadata.getValue("office_id", String.class); String metadataUnits = metadata.getValue("units", String.class); - String sourceUnit = metadata.getValue("source_unit", String.class); - String locPart = metadata.getValue("loc_part", String.class); - String parmPart = metadata.getValue("parm_part", String.class); - String intervalPart = metadata.getValue("interval_part", String.class); + String locPart = getTimeSeriesIdPart(tsIdParts, 0); + String parmPart = 
getTimeSeriesIdPart(tsIdParts, 1); + String intervalPart = getTimeSeriesIdPart(tsIdParts, 3); long intervalMinutes = intervalValue == null ? 0L : intervalValue.longValue(); long intervalOffset = offsetValue == null ? UTC_OFFSET_IRREGULAR : offsetValue.longValue(); - String timeZoneId = metadata.getValue(AV_CWMS_TS_ID2.TIME_ZONE_ID) == null + String timeZoneId = metadata.getValue("time_zone_id", String.class) == null ? UTC - : metadata.getValue(AV_CWMS_TS_ID2.TIME_ZONE_ID); + : metadata.getValue("time_zone_id", String.class); boolean isLrts = parseBool(CWMS_TS_PACKAGE.call_IS_LRTS__2(dsl.configuration(), tsCode)); - validateRequestedUnits(sourceUnit, metadataUnits); - VerticalDatumInfo verticalDatumInfo = null; if (shouldFetchVerticalDatum(parmPart)) { verticalDatumInfo = fetchVerticalDatumInfoSeparately(locPart, requestedUnits, office); } - VersionType finalDateVersionType = getVersionType(dsl, names, office, versionDate != null); + VersionType finalDateVersionType = getDirectReadVersionType( + metadata.getValue("version_flag", String.class), versionDate != null); if (pageSize == 0) { return null; } List rawRows = fetchRequestedTimeSeriesRows(tsCode, metadataOfficeId, metadataUnits, - requestParameters); + requestParameters, includeEntryDate); List expectedTimes = fetchExpectedRegularTimes(intervalMinutes, intervalOffset, timeZoneId, intervalPart, isLrts, requestParameters, rawRows); int total = countMergedRows(rawRows, expectedTimes); @@ -765,19 +762,6 @@ private Record fetchRequestedTimeSeriesMetadataRecord( officeId.as("office_id")) .asTable("validts"); - Field loc = CWMS_UTIL_PACKAGE.call_SPLIT_TEXT( - validTs.field("tsid", String.class), - DSL.val(BigInteger.valueOf(1L)), DSL.val("."), - DSL.val(BigInteger.valueOf(6L))); - Field param = DSL.upper(CWMS_UTIL_PACKAGE.call_SPLIT_TEXT( - validTs.field("tsid", String.class), - DSL.val(BigInteger.valueOf(2L)), DSL.val("."), - DSL.val(BigInteger.valueOf(6L)))); - Field intervalPart = CWMS_UTIL_PACKAGE.call_SPLIT_TEXT( 
- validTs.field("tsid", String.class), - DSL.val(BigInteger.valueOf(4L)), DSL.val("."), - DSL.val(BigInteger.valueOf(6L))); - Field unit = units.compareToIgnoreCase("SI") == 0 || units.compareToIgnoreCase("EN") == 0 ? CWMS_UTIL_PACKAGE.call_GET_DEFAULT_UNITS( @@ -788,20 +772,18 @@ private Record fetchRequestedTimeSeriesMetadataRecord( Field interval = CWMS_TS_PACKAGE.call_GET_TS_INTERVAL__2(validTs.field("tsid", String.class)); CommonTableExpression valid = - name("valid").fields("tscode", "tsid", "office_id", "loc_part", "units", - "interval", "parm_part", "interval_part") + name("valid").fields("tscode", "tsid", "office_id", "units", "interval") .as( select( validTs.field("tscode", BigDecimal.class).as("tscode"), validTs.field("tsid", String.class).as("tsid"), validTs.field("office_id", String.class).as("office_id"), - loc.as("loc_part"), unit.as("units"), - interval.as("interval"), - param.as("parm_part"), - intervalPart.as("interval_part")) + interval.as("interval")) .from(validTs)); + var tsIdView = AV_CWMS_TS_ID.AV_CWMS_TS_ID; + SelectJoinStep metadataQuery = dsl.with(valid) .select( @@ -809,18 +791,14 @@ private Record fetchRequestedTimeSeriesMetadataRecord( valid.field("tsid", String.class).as("tsid"), valid.field("office_id", String.class).as("office_id"), valid.field("units", String.class).as("units"), - AV_CWMS_TS_ID2.UNIT_ID.as("source_unit"), valid.field("interval", BigDecimal.class).as("interval"), - valid.field("loc_part", String.class).as("loc_part"), - valid.field("parm_part", String.class).as("parm_part"), - valid.field("interval_part", String.class).as("interval_part"), - AV_CWMS_TS_ID2.INTERVAL_UTC_OFFSET, - AV_CWMS_TS_ID2.TIME_ZONE_ID) + tsIdView.INTERVAL_UTC_OFFSET.as("interval_utc_offset"), + tsIdView.TIME_ZONE_ID.as("time_zone_id"), + tsIdView.field("VERSION_FLAG", String.class).as("version_flag")) .from(valid) - .leftOuterJoin(AV_CWMS_TS_ID2) - .on(AV_CWMS_TS_ID2.DB_OFFICE_ID.eq(valid.field("office_id", String.class)) - 
.and(AV_CWMS_TS_ID2.TS_CODE.eq(valid.field("tscode", BigDecimal.class))) - .and(AV_CWMS_TS_ID2.ALIASED_ITEM.isNull())); + .leftOuterJoin(tsIdView) + .on(tsIdView.DB_OFFICE_ID.eq(valid.field("office_id", String.class)) + .and(tsIdView.TS_CODE.eq(valid.field("tscode", BigDecimal.class)))); logger.atFine().log("%s", lazy(() -> metadataQuery.getSQL(ParamType.INLINED))); @@ -828,7 +806,8 @@ private Record fetchRequestedTimeSeriesMetadataRecord( } private List fetchRequestedTimeSeriesRows(long tsCode, String officeId, String units, - TimeSeriesRequestParameters requestParameters) { + TimeSeriesRequestParameters requestParameters, + boolean includeEntryDate) { ZonedDateTime beginTime = requestParameters.getBeginTime(); ZonedDateTime endTime = requestParameters.getEndTime(); ZonedDateTime versionDate = requestParameters.getVersionDate(); @@ -856,19 +835,31 @@ private List fetchRequestedTimeSeriesRows(long tsCode, String if (versionDate != null) { Field versionTimestamp = CWMS_UTIL_PACKAGE.call_TO_TIMESTAMP__2( DSL.val(versionDate.toInstant().toEpochMilli())); - query = dsl.select( - view.DATE_TIME, - view.VALUE, - normalizedQuality, - view.DATA_ENTRY_DATE) - .from(view) - .where(baseCondition.and(view.VERSION_DATE.eq(versionTimestamp))); + if (includeEntryDate) { + query = dsl.select( + view.DATE_TIME, + view.VALUE, + normalizedQuality, + view.DATA_ENTRY_DATE) + .from(view) + .where(baseCondition.and(view.VERSION_DATE.eq(versionTimestamp))); + } else { + query = dsl.select( + view.DATE_TIME, + view.VALUE, + normalizedQuality, + DSL.castNull(Timestamp.class).as(DATA_ENTRY_DATE)) + .from(view) + .where(baseCondition.and(view.VERSION_DATE.eq(versionTimestamp))); + } } else { - Table rankedRows = dsl.select( + var rankedRows = dsl.select( view.DATE_TIME.as(DATE_TIME), view.VALUE.as(VALUE), normalizedQuality, - view.DATA_ENTRY_DATE.as(DATA_ENTRY_DATE), + includeEntryDate + ? 
view.DATA_ENTRY_DATE.as(DATA_ENTRY_DATE) + : DSL.castNull(Timestamp.class).as(DATA_ENTRY_DATE), DSL.rowNumber() .over(partitionBy(view.DATE_TIME) .orderBy(view.VERSION_DATE.desc(), view.DATA_ENTRY_DATE.desc())) @@ -974,7 +965,7 @@ private long resolveIntervalOffset(long intervalMinutes, long intervalOffset, St } private boolean isRegularSeries(long intervalMinutes, long intervalOffset) { - return intervalMinutes != 0L || intervalOffset != UTC_OFFSET_IRREGULAR; + return intervalMinutes != 0L; } private int countMergedRows(List rawRows, List expectedTimes) { @@ -1128,17 +1119,6 @@ private long alignToInterval(long timestampMillis, long intervalMillis, long off return timestampMillis + (intervalMillis - remainder); } - private void validateRequestedUnits(String sourceUnit, String requestedUnit) { - if (sourceUnit == null || requestedUnit == null || sourceUnit.equalsIgnoreCase(requestedUnit)) { - return; - } - dsl.select(CWMS_UTIL_PACKAGE.call_CONVERT_UNITS( - DSL.val(0.0d), - DSL.val(sourceUnit), - DSL.val(requestedUnit))) - .fetchOne(0, Double.class); - } - private boolean shouldFetchVerticalDatum(String parmPart) { // Check if parameter requires vertical datum (e.g., "ELEV") if (parmPart == null) { @@ -1183,9 +1163,23 @@ private static String getVersionPart(ZonedDateTime versionDate) { return "?"; } + private static VersionType getDirectReadVersionType(String versionFlag, boolean versionDateProvided) { + if (versionDateProvided) { + return VersionType.SINGLE_VERSION; + } + return parseBool(versionFlag) ? VersionType.MAX_AGGREGATE : VersionType.UNVERSIONED; + } + + private static String[] splitTimeSeriesId(String tsId) { + return tsId.split("\\.", 6); + } + + private static String getTimeSeriesIdPart(String[] tsIdParts, int index) { + return tsIdParts.length > index ? 
tsIdParts[index] : null; + } + public static String parseLocFromTimeSeriesId(String tsId) { - String[] parts = tsId.split("\\."); - return parts[0]; + return getTimeSeriesIdPart(splitTimeSeriesId(tsId), 0); } public static String getTimeZoneId(DSLContext dsl, String tsId, String officeId) { From 7b2bf283599f83b23387f4f7f05544bb12b9a885 Mon Sep 17 00:00:00 2001 From: "Charles Graham, SWT" Date: Wed, 22 Apr 2026 21:50:44 -0500 Subject: [PATCH 07/16] Clean up benchmark task review items --- cwms-data-api/build.gradle | 1 - .../src/test/java/helpers/TimeSeriesReadBenchmark.java | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/cwms-data-api/build.gradle b/cwms-data-api/build.gradle index dfc00e2b89..130f69ac8f 100644 --- a/cwms-data-api/build.gradle +++ b/cwms-data-api/build.gradle @@ -318,7 +318,6 @@ task timeseriesReadBenchmark(type: JavaExec) { mainClass = "helpers.TimeSeriesReadBenchmark" - systemProperties += project.properties.findAll { k, v -> k.startsWith("RADAR") && !k.startsWith("RADAR_JDBC") } systemProperties += project.properties.findAll { k, v -> k.startsWith("CDA") && !k.startsWith("CDA_JDBC") } systemProperties += project.properties.findAll { k, v -> k.startsWith("testcontainer") } systemProperties += project.properties.findAll { k, v -> k.startsWith("benchmark.") } diff --git a/cwms-data-api/src/test/java/helpers/TimeSeriesReadBenchmark.java b/cwms-data-api/src/test/java/helpers/TimeSeriesReadBenchmark.java index 1ae74aae4f..b86ca3a50a 100644 --- a/cwms-data-api/src/test/java/helpers/TimeSeriesReadBenchmark.java +++ b/cwms-data-api/src/test/java/helpers/TimeSeriesReadBenchmark.java @@ -67,7 +67,7 @@ public static void main(String[] args) throws Exception { System.out.println(); System.out.println("Benchmark report written to " + resultFile); - for (BenchmarkRun run : report.runs) { + for (BenchmarkRun run: report.runs) { if (run.httpCode != 200) { throw new IllegalStateException( "Benchmark completed with HTTP failures. 
Results saved to " + resultFile); From 2b09c938437b022bd76a6f57416853d8435f6540 Mon Sep 17 00:00:00 2001 From: "Charles Graham, SWT" Date: Wed, 22 Apr 2026 23:07:54 -0500 Subject: [PATCH 08/16] Use HEC intervals for direct read gaps --- .../cwms/cda/data/dao/TimeSeriesDaoImpl.java | 142 ++++++++++++------ .../cda/api/TimeSeriesDirectReadParityIT.java | 33 +++- 2 files changed, 122 insertions(+), 53 deletions(-) diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java index 6a4b6103d6..fcb3b8098c 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java @@ -43,6 +43,7 @@ import cwms.cda.data.dto.filteredtimeseries.FilteredTimeSeries; import cwms.cda.formatters.xml.XMLv1; import cwms.cda.helpers.DateUtils; +import cwms.cda.helpers.ZoneIdHelper; import java.math.BigDecimal; import java.math.BigInteger; import java.sql.Connection; @@ -64,7 +65,6 @@ import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletableFuture; @@ -75,6 +75,9 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.stream.Collectors; +import mil.army.usace.hec.metadata.Interval; +import mil.army.usace.hec.metadata.IntervalFactory; +import mil.army.usace.hec.metadata.IntervalOffset; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jooq.*; @@ -720,7 +723,12 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, List rawRows = fetchRequestedTimeSeriesRows(tsCode, metadataOfficeId, metadataUnits, requestParameters, includeEntryDate); - List expectedTimes = fetchExpectedRegularTimes(intervalMinutes, intervalOffset, timeZoneId, + long effectiveIntervalOffset = 
intervalOffset; + if (isRegularSeries(intervalMinutes, intervalPart)) { + effectiveIntervalOffset = resolveIntervalOffset(intervalOffset, timeZoneId, intervalPart, isLrts, rawRows); + } + + List expectedTimes = fetchExpectedRegularTimes(intervalMinutes, effectiveIntervalOffset, timeZoneId, intervalPart, isLrts, requestParameters, rawRows); int total = countMergedRows(rawRows, expectedTimes); @@ -733,9 +741,9 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, beginTime, endTime, metadataUnits, - Duration.ofMinutes(intervalMinutes), + resolveIntervalDuration(intervalMinutes, intervalPart), verticalDatumInfo, - intervalOffset, + effectiveIntervalOffset, timeZoneId, versionDate, finalDateVersionType @@ -898,7 +906,7 @@ private List fetchExpectedRegularTimes(long intervalMinutes, long int String intervalPart, boolean isLrts, TimeSeriesRequestParameters requestParameters, List rawRows) { - if (!isRegularSeries(intervalMinutes, intervalOffset)) { + if (!isRegularSeries(intervalMinutes, intervalPart)) { return Collections.emptyList(); } if (rawRows.isEmpty() && requestParameters.isShouldTrim()) { @@ -912,10 +920,10 @@ private List fetchExpectedRegularTimes(long intervalMinutes, long int ? rawRows.get(rawRows.size() - 1).getDateTime() : Timestamp.from(requestParameters.getEndTime().toInstant()); - long offsetMinutes = resolveIntervalOffset(intervalMinutes, intervalOffset, timeZoneId, intervalPart, isLrts, - rawRows); - if (canGenerateExpectedTimesInJava(intervalMinutes, intervalPart, isLrts)) { - return buildExpectedRegularTimesUtc(rangeStart, rangeEnd, intervalMinutes, offsetMinutes); + Interval expectedInterval = resolveExpectedInterval(intervalPart); + if (expectedInterval != null) { + return buildExpectedRegularTimes(rangeStart, rangeEnd, intervalOffset, expectedInterval, + getExpectedTimeZone(timeZoneId, isLrts)); } String intervalTimeZone = isLrts ? 
timeZoneId : UTC; @@ -924,7 +932,7 @@ private List fetchExpectedRegularTimes(long intervalMinutes, long int dsl.configuration(), dateRange, intervalPart, - String.valueOf(offsetMinutes), + String.valueOf(intervalOffset), intervalTimeZone ); @@ -939,18 +947,28 @@ private List fetchExpectedRegularTimes(long intervalMinutes, long int return retVal; } - private long resolveIntervalOffset(long intervalMinutes, long intervalOffset, String timeZoneId, + private long resolveIntervalOffset(long intervalOffset, String timeZoneId, String intervalPart, boolean isLrts, List rawRows) { - if (intervalOffset != UTC_OFFSET_UNDEFINED) { + if (intervalOffset != UTC_OFFSET_UNDEFINED && intervalOffset != UTC_OFFSET_IRREGULAR) { return intervalOffset; } if (rawRows.isEmpty()) { return 0L; } - if (canGenerateExpectedTimesInJava(intervalMinutes, intervalPart, isLrts)) { - long intervalMillis = TimeUnit.MINUTES.toMillis(intervalMinutes); - return TimeUnit.MILLISECONDS.toMinutes(Math.floorMod(rawRows.get(0).getDateTime().getTime(), intervalMillis)); + Interval expectedInterval = resolveExpectedInterval(intervalPart); + if (expectedInterval != null) { + try { + Instant firstTime = rawRows.get(0).getDateTime().toInstant(); + Instant topOfInterval = expectedInterval.getTimeOnPreviousOrCurrentInterval( + firstTime, + IntervalOffset.zeroOffset(), + getExpectedTimeZone(timeZoneId, isLrts) + ); + return TimeUnit.MILLISECONDS.toMinutes(firstTime.toEpochMilli() - topOfInterval.toEpochMilli()); + } catch (mil.army.usace.hec.metadata.DataSetIllegalArgumentException ex) { + throw new IllegalArgumentException("Unable to resolve interval offset for " + intervalPart, ex); + } } String intervalTimeZone = isLrts ? 
timeZoneId : UTC; @@ -964,8 +982,21 @@ private long resolveIntervalOffset(long intervalMinutes, long intervalOffset, St return (rawRows.get(0).getDateTime().getTime() - topOfInterval.getTime()) / TimeUnit.MINUTES.toMillis(1); } - private boolean isRegularSeries(long intervalMinutes, long intervalOffset) { - return intervalMinutes != 0L; + private boolean isRegularSeries(long intervalMinutes, String intervalPart) { + return intervalMinutes != 0L || isLocalRegularInterval(intervalPart); + } + + private Duration resolveIntervalDuration(long intervalMinutes, String intervalPart) { + if (intervalMinutes != 0L) { + return Duration.ofMinutes(intervalMinutes); + } + + Interval interval = resolveExpectedInterval(intervalPart); + if (interval != null) { + return Duration.ofSeconds(interval.getSeconds()); + } + + return Duration.ZERO; } private int countMergedRows(List rawRows, List expectedTimes) { @@ -1074,49 +1105,62 @@ private Timestamp normalizeOracleUtcTimestamp(Timestamp timestamp) { return Timestamp.from(utcWallTime.toInstant(ZoneOffset.UTC)); } - private boolean canGenerateExpectedTimesInJava(long intervalMinutes, String intervalPart, boolean isLrts) { - if (isLrts || intervalMinutes <= 0L) { - return false; - } - + @Nullable + private Interval resolveExpectedInterval(String intervalPart) { if (intervalPart == null) { - return false; + return null; } - String normalizedInterval = intervalPart.toLowerCase(Locale.ENGLISH); - return normalizedInterval.endsWith("minute") - || normalizedInterval.endsWith("minutes") - || normalizedInterval.endsWith("hour") - || normalizedInterval.endsWith("hours") - || normalizedInterval.endsWith("day") - || normalizedInterval.endsWith("days") - || normalizedInterval.endsWith("week") - || normalizedInterval.endsWith("weeks"); + return IntervalFactory.findAny(IntervalFactory.equalsName(normalizeIntervalNameForNucleus(intervalPart))) + .orElse(null); } - private List buildExpectedRegularTimesUtc(Timestamp rangeStart, - Timestamp rangeEnd, - 
long intervalMinutes, - long offsetMinutes) { - long intervalMillis = TimeUnit.MINUTES.toMillis(intervalMinutes); - long offsetMillis = TimeUnit.MINUTES.toMillis(Math.floorMod(offsetMinutes, intervalMinutes)); - long startMillis = rangeStart.getTime(); - long endMillis = rangeEnd.getTime(); - long firstMillis = alignToInterval(startMillis, intervalMillis, offsetMillis); - + private List buildExpectedRegularTimes(Timestamp rangeStart, + Timestamp rangeEnd, + long offsetMinutes, + Interval interval, + ZoneId intervalTimeZone) { List expectedTimes = new ArrayList<>(); - for (long millis = firstMillis; millis <= endMillis; millis += intervalMillis) { - expectedTimes.add(new Timestamp(millis)); + IntervalOffset intervalOffset = IntervalOffset.fromSeconds(Math.toIntExact( + TimeUnit.MINUTES.toSeconds(offsetMinutes))); + Instant endTime = rangeEnd.toInstant(); + + try { + Instant nextTime = interval.getTimeOnNextOrCurrentInterval(rangeStart.toInstant(), intervalOffset, + intervalTimeZone); + while (!nextTime.isAfter(endTime)) { + expectedTimes.add(Timestamp.from(nextTime)); + nextTime = interval.getNextIntervalTime(nextTime, intervalTimeZone); + } + } catch (mil.army.usace.hec.metadata.DataSetIllegalArgumentException ex) { + throw new IllegalArgumentException("Unable to build expected times for " + interval.getInterval(), ex); } return expectedTimes; } - private long alignToInterval(long timestampMillis, long intervalMillis, long offsetMillis) { - long remainder = Math.floorMod(timestampMillis - offsetMillis, intervalMillis); - if (remainder == 0L) { - return timestampMillis; + private ZoneId getExpectedTimeZone(String timeZoneId, boolean isLrts) { + if (!isLrts) { + return ZoneOffset.UTC; + } + return ZoneIdHelper.parseZoneIdWithAliases(timeZoneId); + } + + private String normalizeIntervalNameForNucleus(String intervalPart) { + if (intervalPart.startsWith("~")) { + return intervalPart; + } + if (intervalPart.length() > 5 + && intervalPart.regionMatches(true, 
intervalPart.length() - 5, "Local", 0, 5)) { + return "~" + intervalPart.substring(0, intervalPart.length() - 5); + } + return intervalPart; + } + + private boolean isLocalRegularInterval(String intervalPart) { + if (intervalPart == null) { + return false; } - return timestampMillis + (intervalMillis - remainder); + return normalizeIntervalNameForNucleus(intervalPart).startsWith("~"); } private boolean shouldFetchVerticalDatum(String parmPart) { diff --git a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java index 7a121250f4..13f2d51584 100644 --- a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java +++ b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java @@ -176,6 +176,29 @@ void dstWindowRegularReadMatchesRetrieveTs() throws Exception { ); } + @Test + void localRegularGapReadMatchesRetrieveTs() throws Exception { + assertDirectReadMatchesOracle( + "ITPARLCL", + "ITPARLCL.Flow.Inst.~1Day.0.BENCH", + "cfs", + Instant.parse("2024-01-01T00:00:00Z"), + Instant.parse("2024-01-05T00:00:00Z"), + List.of( + row("2024-01-01T00:00:00Z", 1.0, 0, "2024-01-06T00:00:00Z", null), + row("2024-01-02T00:00:00Z", 2.0, 0, "2024-01-06T00:00:00Z", null), + row("2024-01-04T00:00:00Z", 4.0, 0, "2024-01-06T00:00:00Z", null), + row("2024-01-05T00:00:00Z", 5.0, 0, "2024-01-06T00:00:00Z", null) + ), + false, + false, + VersionType.UNVERSIONED, + Duration.ofDays(1), + 0L, + null + ); + } + private static void assertDirectReadMatchesOracle(String locationId, String seriesId, String units, Instant beginTime, Instant endTime, List rows, boolean versioned, boolean includeEntryDate, @@ -328,10 +351,12 @@ private static void seedTimeSeries(String locationId, String seriesId, List database = CwmsDataApiSetupCallback.getDatabaseLink(); database.connection(connection -> { try { - 
CWMS_TS_PACKAGE.call_SET_TSID_VERSIONED(DSL.using(connection).configuration(), - seriesId, - versioned ? "T" : "F", - OFFICE); + if (versioned) { + CWMS_TS_PACKAGE.call_SET_TSID_VERSIONED(DSL.using(connection).configuration(), + seriesId, + "T", + OFFICE); + } long tsCode = findTsCode(connection, seriesId); List years = rows.stream() From c2c9a044e5111de853fe6bb44e60a0c979da527d Mon Sep 17 00:00:00 2001 From: "Charles Graham, SWT" Date: Wed, 22 Apr 2026 23:32:17 -0500 Subject: [PATCH 09/16] Refactor direct read row queries --- .../cwms/cda/data/dao/TimeSeriesDaoImpl.java | 107 ++++++++++-------- 1 file changed, 60 insertions(+), 47 deletions(-) diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java index fcb3b8098c..e133fa4156 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java @@ -833,61 +833,20 @@ private List fetchRequestedTimeSeriesRows(long tsCode, String Condition baseCondition = view.ALIASED_ITEM.isNull() .and(view.TS_CODE.eq(tsCode)) .and(view.OFFICE_ID.eq(officeId)) + // Invalid unit requests surface as a database error rather than an empty result set. 
.and(view.UNIT_ID.equalIgnoreCase(units)) .and(view.DATE_TIME.ge(beginTimestamp)) .and(view.DATE_TIME.le(endTimestamp)) .and(view.START_DATE.le(endTimestamp)) .and(view.END_DATE.gt(beginTimestamp)); - SelectConditionStep> query; + ResultQuery> query; if (versionDate != null) { - Field versionTimestamp = CWMS_UTIL_PACKAGE.call_TO_TIMESTAMP__2( - DSL.val(versionDate.toInstant().toEpochMilli())); - if (includeEntryDate) { - query = dsl.select( - view.DATE_TIME, - view.VALUE, - normalizedQuality, - view.DATA_ENTRY_DATE) - .from(view) - .where(baseCondition.and(view.VERSION_DATE.eq(versionTimestamp))); - } else { - query = dsl.select( - view.DATE_TIME, - view.VALUE, - normalizedQuality, - DSL.castNull(Timestamp.class).as(DATA_ENTRY_DATE)) - .from(view) - .where(baseCondition.and(view.VERSION_DATE.eq(versionTimestamp))); - } + query = buildVersionedRowsQuery(view, normalizedQuality, baseCondition, versionDate, includeEntryDate); } else { - var rankedRows = dsl.select( - view.DATE_TIME.as(DATE_TIME), - view.VALUE.as(VALUE), - normalizedQuality, - includeEntryDate - ? 
view.DATA_ENTRY_DATE.as(DATA_ENTRY_DATE) - : DSL.castNull(Timestamp.class).as(DATA_ENTRY_DATE), - DSL.rowNumber() - .over(partitionBy(view.DATE_TIME) - .orderBy(view.VERSION_DATE.desc(), view.DATA_ENTRY_DATE.desc())) - .as("version_rank")) - .from(view) - .where(baseCondition) - .asTable("ranked_rows"); - - Field dateTimeCol = rankedRows.field(DATE_TIME, Timestamp.class); - Field valueCol = rankedRows.field(VALUE, Double.class); - Field qualityCol = rankedRows.field("quality_norm", BigDecimal.class); - Field dataEntryDateCol = rankedRows.field(DATA_ENTRY_DATE, Timestamp.class); - Field versionRankCol = rankedRows.field("version_rank", Integer.class); - - query = dsl.select(dateTimeCol, valueCol, qualityCol, dataEntryDateCol) - .from(rankedRows) - .where(versionRankCol.eq(1)); - } - - query.orderBy(field(DATE_TIME, Timestamp.class).asc()); + query = buildMaxVersionRowsQuery(view, normalizedQuality, baseCondition, includeEntryDate); + } + logger.atFine().log("%s", lazy(() -> query.getSQL(ParamType.INLINED))); return query.fetch(record -> { @@ -902,6 +861,60 @@ private List fetchRequestedTimeSeriesRows(long tsCode, String }); } + private ResultQuery> buildVersionedRowsQuery( + AV_TSV_DQU view, + Field normalizedQuality, + Condition baseCondition, + ZonedDateTime versionDate, + boolean includeEntryDate) { + Field versionTimestamp = CWMS_UTIL_PACKAGE.call_TO_TIMESTAMP__2( + DSL.val(versionDate.toInstant().toEpochMilli())); + Field dataEntryDateField = includeEntryDate + ? 
view.DATA_ENTRY_DATE + : DSL.castNull(Timestamp.class).as(DATA_ENTRY_DATE); + + return dsl.select( + view.DATE_TIME, + view.VALUE, + normalizedQuality, + dataEntryDateField) + .from(view) + .where(baseCondition.and(view.VERSION_DATE.eq(versionTimestamp))) + .orderBy(view.DATE_TIME.asc()); + } + + private ResultQuery> buildMaxVersionRowsQuery( + AV_TSV_DQU view, + Field normalizedQuality, + Condition baseCondition, + boolean includeEntryDate) { + var rankedRows = dsl.select( + view.DATE_TIME.as(DATE_TIME), + view.VALUE.as(VALUE), + normalizedQuality, + includeEntryDate + ? view.DATA_ENTRY_DATE.as(DATA_ENTRY_DATE) + : DSL.castNull(Timestamp.class).as(DATA_ENTRY_DATE), + DSL.rowNumber() + .over(partitionBy(view.DATE_TIME) + .orderBy(view.VERSION_DATE.desc(), view.DATA_ENTRY_DATE.desc())) + .as("version_rank")) + .from(view) + .where(baseCondition) + .asTable("ranked_rows"); + + Field dateTimeCol = rankedRows.field(DATE_TIME, Timestamp.class); + Field valueCol = rankedRows.field(VALUE, Double.class); + Field qualityCol = rankedRows.field("quality_norm", BigDecimal.class); + Field dataEntryDateCol = rankedRows.field(DATA_ENTRY_DATE, Timestamp.class); + Field versionRankCol = rankedRows.field("version_rank", Integer.class); + + return dsl.select(dateTimeCol, valueCol, qualityCol, dataEntryDateCol) + .from(rankedRows) + .where(versionRankCol.eq(1)) + .orderBy(dateTimeCol.asc()); + } + private List fetchExpectedRegularTimes(long intervalMinutes, long intervalOffset, String timeZoneId, String intervalPart, boolean isLrts, TimeSeriesRequestParameters requestParameters, From 0a76b9b284861a8617fec3600cb42e49b01ae48f Mon Sep 17 00:00:00 2001 From: "Charles Graham, SWT" Date: Wed, 22 Apr 2026 23:47:29 -0500 Subject: [PATCH 10/16] Type direct read metadata internally --- .../cwms/cda/data/dao/TimeSeriesDaoImpl.java | 64 ++++++++++++++----- 1 file changed, 48 insertions(+), 16 deletions(-) diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java 
b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java index e133fa4156..f442793a5d 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java @@ -164,6 +164,29 @@ public class TimeSeriesDaoImpl extends JooqDao implements TimeSeries private static final FieldMapping AV_CWMS_TS_ID2_FIELD_MAP = new CwmsTsId2FieldMapping(); private static final FieldMapping AV_CWMS_TS_ID_FIELD_MAP = new CwmsTsIdFieldMapping(); + private static final class DirectReadMetadata { + private final long tsCode; + private final String tsId; + private final String officeId; + private final String units; + private final long intervalMinutes; + private final long intervalUtcOffset; + private final String timeZoneId; + private final String versionFlag; + + private DirectReadMetadata(long tsCode, String tsId, String officeId, String units, + long intervalMinutes, long intervalUtcOffset, + String timeZoneId, String versionFlag) { + this.tsCode = tsCode; + this.tsId = tsId; + this.officeId = officeId; + this.units = units; + this.intervalMinutes = intervalMinutes; + this.intervalUtcOffset = intervalUtcOffset; + this.timeZoneId = timeZoneId; + this.versionFlag = versionFlag; + } + } @NotNull private final Timer getRequestedTimeSeriesTotalQueryTimer; @@ -687,27 +710,22 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, } } - Record metadata = fetchRequestedTimeSeriesMetadataRecord(requestParameters); + DirectReadMetadata metadata = fetchRequestedTimeSeriesMetadataRecord(requestParameters); if (metadata == null) { throw new DataAccessException("Unable to resolve time series metadata for " + names); } - BigDecimal intervalValue = metadata.getValue("interval", BigDecimal.class); - Number offsetValue = metadata.getValue("interval_utc_offset", Number.class); - BigDecimal tsCodeValue = metadata.getValue("tscode", BigDecimal.class); - long tsCode = 
tsCodeValue.longValue(); - String tsId = metadata.getValue("tsid", String.class); + long tsCode = metadata.tsCode; + String tsId = metadata.tsId; String[] tsIdParts = splitTimeSeriesId(tsId); - String metadataOfficeId = metadata.getValue("office_id", String.class); - String metadataUnits = metadata.getValue("units", String.class); + String metadataOfficeId = metadata.officeId; + String metadataUnits = metadata.units; String locPart = getTimeSeriesIdPart(tsIdParts, 0); String parmPart = getTimeSeriesIdPart(tsIdParts, 1); String intervalPart = getTimeSeriesIdPart(tsIdParts, 3); - long intervalMinutes = intervalValue == null ? 0L : intervalValue.longValue(); - long intervalOffset = offsetValue == null ? UTC_OFFSET_IRREGULAR : offsetValue.longValue(); - String timeZoneId = metadata.getValue("time_zone_id", String.class) == null - ? UTC - : metadata.getValue("time_zone_id", String.class); + long intervalMinutes = metadata.intervalMinutes; + long intervalOffset = metadata.intervalUtcOffset; + String timeZoneId = metadata.timeZoneId; boolean isLrts = parseBool(CWMS_TS_PACKAGE.call_IS_LRTS__2(dsl.configuration(), tsCode)); VerticalDatumInfo verticalDatumInfo = null; @@ -716,7 +734,7 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, } VersionType finalDateVersionType = getDirectReadVersionType( - metadata.getValue("version_flag", String.class), versionDate != null); + metadata.versionFlag, versionDate != null); if (pageSize == 0) { return null; } @@ -753,7 +771,7 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, return timeseries; } - private Record fetchRequestedTimeSeriesMetadataRecord( + private DirectReadMetadata fetchRequestedTimeSeriesMetadataRecord( TimeSeriesRequestParameters requestParameters) { String names = requestParameters.getNames(); String office = requestParameters.getOffice(); @@ -810,7 +828,21 @@ private Record fetchRequestedTimeSeriesMetadataRecord( logger.atFine().log("%s", lazy(() -> 
metadataQuery.getSQL(ParamType.INLINED))); - return metadataQuery.fetchOne(); + return metadataQuery.fetchOne(record -> new DirectReadMetadata( + record.getValue("tscode", BigDecimal.class).longValue(), + record.getValue("tsid", String.class), + record.getValue("office_id", String.class), + record.getValue("units", String.class), + record.getValue("interval", BigDecimal.class) == null + ? 0L + : record.getValue("interval", BigDecimal.class).longValue(), + record.getValue("interval_utc_offset", Number.class) == null + ? UTC_OFFSET_IRREGULAR + : record.getValue("interval_utc_offset", Number.class).longValue(), + record.getValue("time_zone_id", String.class) == null + ? UTC + : record.getValue("time_zone_id", String.class), + record.getValue("version_flag", String.class))); } private List fetchRequestedTimeSeriesRows(long tsCode, String officeId, String units, From 0c690380cc391d10eea68e177c00b3716006d918 Mon Sep 17 00:00:00 2001 From: "Charles Graham, SWT" Date: Thu, 23 Apr 2026 00:01:40 -0500 Subject: [PATCH 11/16] Clarify direct read pagination flow --- .../java/cwms/cda/data/dao/TimeSeriesDaoImpl.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java index f442793a5d..1ef0c6b4f4 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java @@ -739,6 +739,8 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, return null; } + // Pagination happens after regular-interval gap rows are merged + // fetch the full raw window first List rawRows = fetchRequestedTimeSeriesRows(tsCode, metadataOfficeId, metadataUnits, requestParameters, includeEntryDate); long effectiveIntervalOffset = intervalOffset; @@ -951,17 +953,20 @@ private List fetchExpectedRegularTimes(long intervalMinutes, long int 
String intervalPart, boolean isLrts, TimeSeriesRequestParameters requestParameters, List rawRows) { + boolean shouldTrim = requestParameters.isShouldTrim(); if (!isRegularSeries(intervalMinutes, intervalPart)) { return Collections.emptyList(); } - if (rawRows.isEmpty() && requestParameters.isShouldTrim()) { + // Trimmed requests collapse to the observed data window + // there is nothing to expand if no rows matched + if (rawRows.isEmpty() && shouldTrim) { return Collections.emptyList(); } - Timestamp rangeStart = requestParameters.isShouldTrim() + Timestamp rangeStart = shouldTrim ? rawRows.get(0).getDateTime() : Timestamp.from(requestParameters.getBeginTime().toInstant()); - Timestamp rangeEnd = requestParameters.isShouldTrim() + Timestamp rangeEnd = shouldTrim ? rawRows.get(rawRows.size() - 1).getDateTime() : Timestamp.from(requestParameters.getEndTime().toInstant()); From 5696cd71a39ddb52dd5db223c26139982b6b8ed1 Mon Sep 17 00:00:00 2001 From: "Charles Graham, SWT" Date: Thu, 23 Apr 2026 00:33:27 -0500 Subject: [PATCH 12/16] Define timeseries page-size and trim semantics --- .../main/java/cwms/cda/api/Controllers.java | 7 ++ .../cwms/cda/api/TimeSeriesController.java | 12 ++- .../cda/api/TimeSeriesFilteredController.java | 9 +- .../cwms/cda/data/dao/TimeSeriesDaoImpl.java | 20 ++++- .../java/cwms/cda/data/dto/TimeSeries.java | 21 ++++- .../cda/api/TimeSeriesDirectReadParityIT.java | 84 ++++++++++++++++++- .../TimeSeriesFilteredControllerTestIT.java | 80 ++++++++++++++++++ 7 files changed, 219 insertions(+), 14 deletions(-) diff --git a/cwms-data-api/src/main/java/cwms/cda/api/Controllers.java b/cwms-data-api/src/main/java/cwms/cda/api/Controllers.java index 04bcf6eb82..fd9d40553b 100644 --- a/cwms-data-api/src/main/java/cwms/cda/api/Controllers.java +++ b/cwms-data-api/src/main/java/cwms/cda/api/Controllers.java @@ -257,6 +257,13 @@ private Controllers() { } + public static int validateTimeSeriesPageSize(int pageSize) { + if (pageSize < -1) { + throw new 
IllegalArgumentException(PAGE_SIZE + " must be -1, 0, or a positive integer"); + } + return pageSize; + } + /** * Marks a meter and starts a timer. * diff --git a/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java b/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java index d729568380..45893807fc 100644 --- a/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java +++ b/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java @@ -354,7 +354,8 @@ public void delete(@NotNull Context ctx, @NotNull String timeseries) { + "offset and timezone."), @OpenApiParam(name = Controllers.TRIM, type = Boolean.class, description = "Specifies " + "whether to trim missing values from the beginning and end of the " - + "retrieved values. " + + "retrieved values. When true and values are returned, the response " + + BEGIN + " and " + END + " fields reflect the returned data window. " + "Only supported for:" + Formats.JSONV2 + " and " + Formats.XMLV2 + ". " + "Default is true."), @OpenApiParam(name = FORMAT, description = "Specifies the" @@ -383,7 +384,9 @@ public void delete(@NotNull Context ctx, @NotNull String timeseries) { @OpenApiParam(name = PAGE_SIZE, type = Integer.class, description = "How many entries per page returned. " - + "Default " + DEFAULT_PAGE_SIZE + ".") + + "Default " + DEFAULT_PAGE_SIZE + ". Use 0 to return an empty values array, " + + "or -1 to return the entire window in one response without a next-page cursor. 
" + + "Values less than -1 are invalid.") }, responses = { @OpenApiResponse(status = STATUS_200, @@ -438,6 +441,8 @@ public void getAll(@NotNull Context ctx) { int pageSize = queryParamAsClass(ctx, new String[]{PAGE_SIZE }, Integer.class, DEFAULT_PAGE_SIZE, metrics, name(TimeSeriesController.class.getName(), GET_ALL)); + pageSize = Controllers.validateTimeSeriesPageSize(pageSize); + final int validatedPageSize = pageSize; String acceptHeader = ctx.header(Header.ACCEPT); ContentType contentType = Formats.parseHeaderAndQueryParm(acceptHeader, format, TimeSeries.class); @@ -468,7 +473,8 @@ public void getAll(@NotNull Context ctx) { .build(); // Execute DAO call with a timeout so we can return a clearer message instead of a generic 500 int apiTimeoutMs = Integer.getInteger("cwms.cda.api.apiTimeoutMs", 45000); - CompletableFuture daoFuture = CompletableFuture.supplyAsync(() -> dao.getTimeseries(cursor, pageSize, requestParameters)); + CompletableFuture daoFuture = CompletableFuture.supplyAsync( + () -> dao.getTimeseries(cursor, validatedPageSize, requestParameters)); TimeSeries ts; try { ts = daoFuture.get(apiTimeoutMs, TimeUnit.MILLISECONDS); diff --git a/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesFilteredController.java b/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesFilteredController.java index 3393c23898..9dc34ab180 100644 --- a/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesFilteredController.java +++ b/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesFilteredController.java @@ -117,7 +117,9 @@ private TimeSeriesDao getTimeSeriesDao(DSLContext dsl) { + "offset and timezone."), @OpenApiParam(name = Controllers.TRIM, type = Boolean.class, description = "Specifies " + "whether to trim missing values from the beginning and end of the " - + "retrieved values. " + + "retrieved values. When true and values are returned, the contained time-series " + + Controllers.BEGIN + " and " + Controllers.END + + " fields reflect the returned data window. 
" + "Only supported for:" + Formats.JSONV2 + " and " + Formats.XMLV2 + ". " + "Default is true."), @OpenApiParam(name = INCLUDE_ENTRY_DATE, type = Boolean.class, description = "Specifies " @@ -149,7 +151,9 @@ private TimeSeriesDao getTimeSeriesDao(DSLContext dsl) { @OpenApiParam(name = PAGE_SIZE, type = Integer.class, description = "How many entries per page returned. " - + "Default " + DEFAULT_PAGE_SIZE + ".") + + "Default " + DEFAULT_PAGE_SIZE + + ". Use 0 to return an empty values array, or -1 to return the entire window " + + "in one response without a next-page cursor. Values less than -1 are invalid.") }, responses = { @OpenApiResponse(status = STATUS_200, @@ -202,6 +206,7 @@ public void handle(@NotNull Context ctx) { int pageSize = queryParamAsClass(ctx, new String[]{PAGE_SIZE}, Integer.class, DEFAULT_PAGE_SIZE, metrics, name(TimeSeriesController.class.getName(), GET_ALL)); + pageSize = Controllers.validateTimeSeriesPageSize(pageSize); String acceptHeader = ctx.header(Header.ACCEPT); ContentType contentType = Formats.parseHeaderAndQueryParm(acceptHeader, format, TimeSeries.class); diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java index 1ef0c6b4f4..02de50ec45 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java @@ -498,6 +498,13 @@ protected TimeSeries getRequestedTimeSeriesLegacy(String page, int pageSize, Record tsMetadata = metadataQuery.fetchOne(); + if (pageSize == 0) { + Integer resolvedTotal = resolveTotalQueryFuture(totalQueryFuture, totalQueryDeadlineNanos, + names, office, beginTime, endTime); + return buildTimeSeriesFromMetadata(tsMetadata, resolvedTotal, names, office, + beginTime, endTime, units, versionDate, recordCursor, recordPageSize, tzName); + } + String retrievalMethod; if (includeEntryDate) { retrievalMethod = 
"cwms_20.cwms_ts.retrieve_ts_entry_out_tab"; // New method that supports entry date @@ -599,6 +606,10 @@ protected TimeSeries getRequestedTimeSeriesLegacy(String page, int pageSize, getRequestedTimeSeriesResultsReturnedHistogram.update(timeseries.getValues().size()); } + if (retVal != null) { + retVal.alignWindowToReturnedValues(shouldTrim); + } + return retVal; } @@ -735,9 +746,6 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, VersionType finalDateVersionType = getDirectReadVersionType( metadata.versionFlag, versionDate != null); - if (pageSize == 0) { - return null; - } // Pagination happens after regular-interval gap rows are merged // fetch the full raw window first @@ -769,8 +777,12 @@ private TimeSeries getRequestedTimeSeriesDirect(String page, int pageSize, finalDateVersionType ); + if (pageSize == 0) { + return timeseries; + } + populateTimeSeriesValues(timeseries, rawRows, expectedTimes, tsCursor, includeEntryDate); - return timeseries; + return timeseries.alignWindowToReturnedValues(requestParameters.isShouldTrim()); } private DirectReadMetadata fetchRequestedTimeSeriesMetadataRecord( diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeries.java b/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeries.java index 05ee21c8f7..ef6856ce07 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeries.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeries.java @@ -24,6 +24,7 @@ import java.lang.reflect.Field; import java.sql.Timestamp; import java.time.Duration; +import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.List; @@ -68,14 +69,16 @@ public class TimeSeries extends CwmsDTOPaginated { @JsonFormat(shape = Shape.STRING) @Schema( accessMode = AccessMode.READ_ONLY, - description = "The requested start time of the data, in ISO-8601 format with offset and timezone ('" + ZONED_DATE_TIME_FORMAT + "')" + description = "The start time represented 
by the values in this response, in ISO-8601 format with offset and timezone ('" + + ZONED_DATE_TIME_FORMAT + "'). When trim=true and values are returned, this reflects the first returned value." ) ZonedDateTime begin; @JsonFormat(shape = Shape.STRING) @Schema( accessMode = AccessMode.READ_ONLY, - description = "The requested end time of the data, in ISO-8601 format with offset and timezone ('" + ZONED_DATE_TIME_FORMAT + "')" + description = "The end time represented by the values in this response, in ISO-8601 format with offset and timezone ('" + + ZONED_DATE_TIME_FORMAT + "'). When trim=true and values are returned, this reflects the last returned value." ) ZonedDateTime end; @@ -229,8 +232,8 @@ public void addValue(Timestamp dateTime, Double value, int qualityCode, Timestam } public void addValue(Record record) { - // Set the current page, if not set - if ((page == null || page.isEmpty()) && (values == null || values.isEmpty())) { + // Only paged responses expose cursors. page-size=-1 requests the entire window. 
+ if (pageSize > 0 && (page == null || page.isEmpty()) && (values == null || values.isEmpty())) { page = encodeCursor(String.format("%d", record.dateTime.getTime()), pageSize, total); } if (pageSize > 0 && values.size() == pageSize) { @@ -246,6 +249,16 @@ public TimeSeries withValues(List values) { return this; } + public TimeSeries alignWindowToReturnedValues(boolean trim) { + if (!trim || values == null || values.isEmpty()) { + return this; + } + + begin = values.get(0).getDateTime().toInstant().atZone(ZoneOffset.UTC); + end = values.get(values.size() - 1).getDateTime().toInstant().atZone(ZoneOffset.UTC); + return this; + } + public static List getColumnDescriptor() { List columns = new ArrayList<>(); for (Field f: Record.class.getDeclaredFields()) { diff --git a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java index 13f2d51584..c587d295be 100644 --- a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java +++ b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java @@ -199,6 +199,79 @@ void localRegularGapReadMatchesRetrieveTs() throws Exception { ); } + @Test + void pageSizeZeroReturnsEmptyValuesArray() throws Exception { + List rows = denseRows(); + Instant beginTime = Instant.parse("2024-01-01T00:00:00Z"); + Instant endTime = Instant.parse("2024-01-01T00:05:00Z"); + seedTimeSeries("ITPARPZ0", "ITPARPZ0.Stage.Inst.1Minute.0.BENCH", rows, false); + + TimeSeries response = fetchCdaRowsWithPageSize( + "ITPARPZ0.Stage.Inst.1Minute.0.BENCH", + "ft", + beginTime, + endTime, + 0, + false, + null, + true + ); + + assertEquals(0, response.getPageSize(), "page-size"); + assertNotNull(response.getValues(), "values"); + assertEquals(0, response.getValues().size(), "values size"); + assertEquals(rows.size(), response.getTotal(), "total"); + assertNull(response.getPage(), "page"); + assertNull(response.getNextPage(), "next-page"); + 
} + + @Test + void pageSizeNegativeOneReturnsWholeWindowWithoutPagination() throws Exception { + List rows = denseRows(); + Instant beginTime = Instant.parse("2024-01-01T00:00:00Z"); + Instant endTime = Instant.parse("2024-01-01T00:05:00Z"); + seedTimeSeries("ITPARALL", "ITPARALL.Stage.Inst.1Minute.0.BENCH", rows, false); + + TimeSeries response = fetchCdaRowsWithPageSize( + "ITPARALL.Stage.Inst.1Minute.0.BENCH", + "ft", + beginTime, + endTime, + -1, + false, + null, + true + ); + + assertEquals(-1, response.getPageSize(), "page-size"); + assertEquals(rows.size(), response.getValues().size(), "values size"); + assertEquals(rows.size(), response.getTotal(), "total"); + assertNull(response.getPage(), "page"); + assertNull(response.getNextPage(), "next-page"); + } + + @Test + void trimmedResponseWindowMatchesReturnedValues() throws Exception { + List rows = gapRows(); + seedTimeSeries("ITPARTRM", "ITPARTRM.Stage.Inst.1Minute.0.BENCH", rows, false); + + TimeSeries response = fetchCdaRowsWithPageSize( + "ITPARTRM.Stage.Inst.1Minute.0.BENCH", + "ft", + Instant.parse("2023-12-31T23:59:00Z"), + Instant.parse("2024-01-01T00:10:00Z"), + 1000, + false, + null, + true + ); + + assertNotNull(response.getBegin(), "begin"); + assertNotNull(response.getEnd(), "end"); + assertEquals(Instant.parse("2024-01-01T00:00:00Z"), response.getBegin().toInstant(), "begin"); + assertEquals(Instant.parse("2024-01-01T00:09:00Z"), response.getEnd().toInstant(), "end"); + } + private static void assertDirectReadMatchesOracle(String locationId, String seriesId, String units, Instant beginTime, Instant endTime, List rows, boolean versioned, boolean includeEntryDate, @@ -530,6 +603,14 @@ private static TimeSeries fetchCdaRows(String seriesId, String units, Instant be int seedRowCount, boolean includeEntryDate, Instant versionDate) throws Exception { int pageSize = Math.max(1000, seedRowCount * 2); + return fetchCdaRowsWithPageSize(seriesId, units, beginTime, endTime, pageSize, includeEntryDate, + 
versionDate, true); + } + + private static TimeSeries fetchCdaRowsWithPageSize(String seriesId, String units, Instant beginTime, + Instant endTime, int pageSize, boolean includeEntryDate, + Instant versionDate, boolean trim) + throws Exception { RequestSpecification request = given() .log().ifValidationFails(LogDetail.ALL, true) .accept(Formats.JSONV2) @@ -538,7 +619,8 @@ private static TimeSeries fetchCdaRows(String seriesId, String units, Instant be .queryParam(Controllers.UNIT, units) .queryParam(Controllers.BEGIN, beginTime.toString()) .queryParam(Controllers.END, endTime.toString()) - .queryParam("page-size", pageSize) + .queryParam(Controllers.PAGE_SIZE, pageSize) + .queryParam(Controllers.TRIM, trim) .queryParam(Controllers.INCLUDE_ENTRY_DATE, includeEntryDate); if (versionDate != null) { request = request.queryParam(Controllers.VERSION_DATE, versionDate.toString()); diff --git a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesFilteredControllerTestIT.java b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesFilteredControllerTestIT.java index 4c9d43a0bb..56bdb49786 100644 --- a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesFilteredControllerTestIT.java +++ b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesFilteredControllerTestIT.java @@ -32,6 +32,86 @@ class TimeSeriesFilteredControllerTestIT extends DataApiTestIT { static FluentLogger logger = FluentLogger.forEnclosingClass(); public static final String JSON_FILE = "/cwms/cda/api/lrl/1hour.json"; + @ParameterizedTest + @ValueSource(strings = {Formats.JSONV2, Formats.DEFAULT}) + void test_page_size_special_cases(String format) throws Exception { + ObjectMapper mapper = new ObjectMapper(); + + InputStream resource = this.getClass().getResourceAsStream(JSON_FILE); + assertNotNull(resource); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); + + JsonNode ts = mapper.readTree(tsData); + String location = ts.get(Controllers.NAME).asText().split("\\.")[0]; + String officeId = 
ts.get("office-id").asText(); + + try { + createLocation(location, true, officeId); + + TestAccounts.KeyUser user = TestAccounts.KeyUser.SPK_NORMAL; + + given() + .log().ifValidationFails(LogDetail.ALL,true) + .accept(format) + .contentType(Formats.JSONV2) + .body(tsData) + .header("Authorization",user.toHeaderValue()) + .queryParam(Controllers.OFFICE, officeId) + .when() + .redirects().follow(true) + .redirects().max(3) + .post("/timeseries/") + .then() + .log().ifValidationFails(LogDetail.ALL,true) + .assertThat() + .statusCode(is(HttpServletResponse.SC_OK)); + + given() + .config(RestAssured.config().jsonConfig(jsonConfig().numberReturnType(JsonPathConfig.NumberReturnType.DOUBLE))) + .log().ifValidationFails(LogDetail.ALL,true) + .accept(format) + .queryParam(Controllers.OFFICE, officeId) + .queryParam(Controllers.UNIT,"cfs") + .queryParam(Controllers.NAME, ts.get(Controllers.NAME).asText()) + .queryParam(Controllers.BEGIN,"2023-01-11T12:00:00-00:00") + .queryParam(Controllers.END,"2023-01-11T15:00:00-00:00") + .queryParam(Controllers.PAGE_SIZE, 0) + .when() + .redirects().follow(true) + .redirects().max(3) + .get("/timeseries/filtered/") + .then() + .log().ifValidationFails(LogDetail.ALL,true) + .assertThat() + .statusCode(is(HttpServletResponse.SC_OK)) + .body("page-size", equalTo(0)) + .body("time-series.values.size()", equalTo(0)); + + given() + .config(RestAssured.config().jsonConfig(jsonConfig().numberReturnType(JsonPathConfig.NumberReturnType.DOUBLE))) + .log().ifValidationFails(LogDetail.ALL,true) + .accept(format) + .queryParam(Controllers.OFFICE, officeId) + .queryParam(Controllers.UNIT,"cfs") + .queryParam(Controllers.NAME, ts.get(Controllers.NAME).asText()) + .queryParam(Controllers.BEGIN,"2023-01-11T12:00:00-00:00") + .queryParam(Controllers.END,"2023-01-11T15:00:00-00:00") + .queryParam(Controllers.PAGE_SIZE, -1) + .when() + .redirects().follow(true) + .redirects().max(3) + .get("/timeseries/filtered/") + .then() + 
.log().ifValidationFails(LogDetail.ALL,true) + .assertThat() + .statusCode(is(HttpServletResponse.SC_OK)) + .body("page-size", equalTo(-1)) + .body("time-series.values.size()", equalTo(4)); + } catch (SQLException ex) { + throw new RuntimeException("Unable to create location for TS", ex); + } + } + @ParameterizedTest @ValueSource(strings = {Formats.JSONV2, Formats.DEFAULT}) void test_filter_nulls(String format) throws Exception { From ffeada829e6b869561c20fca0a1aa466e920d50e Mon Sep 17 00:00:00 2001 From: Charles Graham SWT Date: Wed, 6 May 2026 09:56:27 -0500 Subject: [PATCH 13/16] Make timeseries page size final --- .../src/main/java/cwms/cda/api/TimeSeriesController.java | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java b/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java index 45893807fc..80594f448d 100644 --- a/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java +++ b/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java @@ -438,11 +438,10 @@ public void getAll(@NotNull Context ctx) { String.class, "", metrics, name(TimeSeriesController.class.getName(), GET_ALL)); - int pageSize = queryParamAsClass(ctx, new String[]{PAGE_SIZE }, + final int pageSize = Controllers.validateTimeSeriesPageSize(queryParamAsClass(ctx, + new String[]{PAGE_SIZE}, Integer.class, DEFAULT_PAGE_SIZE, metrics, - name(TimeSeriesController.class.getName(), GET_ALL)); - pageSize = Controllers.validateTimeSeriesPageSize(pageSize); - final int validatedPageSize = pageSize; + name(TimeSeriesController.class.getName(), GET_ALL))); String acceptHeader = ctx.header(Header.ACCEPT); ContentType contentType = Formats.parseHeaderAndQueryParm(acceptHeader, format, TimeSeries.class); @@ -474,7 +473,7 @@ public void getAll(@NotNull Context ctx) { // Execute DAO call with a timeout so we can return a clearer message instead of a generic 500 int apiTimeoutMs = 
Integer.getInteger("cwms.cda.api.apiTimeoutMs", 45000); CompletableFuture daoFuture = CompletableFuture.supplyAsync( - () -> dao.getTimeseries(cursor, validatedPageSize, requestParameters)); + () -> dao.getTimeseries(cursor, pageSize, requestParameters)); TimeSeries ts; try { ts = daoFuture.get(apiTimeoutMs, TimeUnit.MILLISECONDS); From d178e1eda47944ba242eef75447699c516353f8d Mon Sep 17 00:00:00 2001 From: Charles Graham SWT Date: Wed, 6 May 2026 09:57:19 -0500 Subject: [PATCH 14/16] Bind seeded timeseries row inserts --- .../cda/api/TimeSeriesDirectReadParityIT.java | 26 +++++++++++++------ 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java index c587d295be..06456e41ff 100644 --- a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java +++ b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java @@ -22,6 +22,7 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Timestamp; +import java.sql.Types; import java.time.Duration; import java.time.Instant; import java.time.LocalDateTime; @@ -485,15 +486,24 @@ private static void insertScenarioRows(Connection connection, long tsCode, List< int year = OffsetDateTime.ofInstant(row.dateTime, ZoneOffset.UTC).getYear(); String sql = "insert into at_tsv_" + year + " (ts_code, date_time, version_date, data_entry_date, value, quality_code, dest_flag)" - + " values (" - + tsCode + ", " - + toOracleDateExpression(row.dateTime) + ", " - + (row.versionDate != null ? toOracleDateExpression(row.versionDate) : "date '1111-11-11'") + ", " - + (row.dataEntryDate != null ? toOracleTimestampExpression(row.dataEntryDate) : "null") + ", " - + (row.value != null ? 
Double.toString(row.value) : "null") + ", " - + row.qualityCode - + ", 0)"; + + " values (?, ?, ?, ?, ?, ?, 0)"; try (PreparedStatement statement = connection.prepareStatement(sql)) { + statement.setLong(1, tsCode); + statement.setTimestamp(2, Timestamp.from(row.dateTime)); + statement.setTimestamp(3, Timestamp.from(row.versionDate != null + ? row.versionDate + : Instant.parse("1111-11-11T00:00:00Z"))); + if (row.dataEntryDate != null) { + statement.setTimestamp(4, Timestamp.from(row.dataEntryDate)); + } else { + statement.setNull(4, Types.TIMESTAMP); + } + if (row.value != null) { + statement.setDouble(5, row.value); + } else { + statement.setNull(5, Types.DOUBLE); + } + statement.setInt(6, row.qualityCode); statement.executeUpdate(); } } From 0b718639dbf8702514d583e75d93b77812adc6f5 Mon Sep 17 00:00:00 2001 From: Charles Graham SWT Date: Wed, 6 May 2026 09:58:00 -0500 Subject: [PATCH 15/16] Batch seeded timeseries row inserts --- .../cda/api/TimeSeriesDirectReadParityIT.java | 56 +++++++++++++------ 1 file changed, 38 insertions(+), 18 deletions(-) diff --git a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java index 06456e41ff..26a10f81bd 100644 --- a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java +++ b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java @@ -30,8 +30,10 @@ import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Comparator; +import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; @@ -482,33 +484,51 @@ private static void insertScenarioRows(Connection connection, long tsCode, List< List sortedRows = new ArrayList<>(rows); sortedRows.sort(Comparator.comparing(seedRow -> seedRow.dateTime)); - for (SeedRow row : sortedRows) { + Map> rowsByYear = 
new LinkedHashMap<>(); + for (SeedRow row: sortedRows) { int year = OffsetDateTime.ofInstant(row.dateTime, ZoneOffset.UTC).getYear(); - String sql = "insert into at_tsv_" + year + rowsByYear.computeIfAbsent(year, ignored -> new ArrayList<>()).add(row); + } + + for (Map.Entry> entry: rowsByYear.entrySet()) { + String sql = "insert into at_tsv_" + entry.getKey() + " (ts_code, date_time, version_date, data_entry_date, value, quality_code, dest_flag)" + " values (?, ?, ?, ?, ?, ?, 0)"; try (PreparedStatement statement = connection.prepareStatement(sql)) { - statement.setLong(1, tsCode); - statement.setTimestamp(2, Timestamp.from(row.dateTime)); - statement.setTimestamp(3, Timestamp.from(row.versionDate != null - ? row.versionDate - : Instant.parse("1111-11-11T00:00:00Z"))); - if (row.dataEntryDate != null) { - statement.setTimestamp(4, Timestamp.from(row.dataEntryDate)); - } else { - statement.setNull(4, Types.TIMESTAMP); - } - if (row.value != null) { - statement.setDouble(5, row.value); - } else { - statement.setNull(5, Types.DOUBLE); + int batchCount = 0; + for (SeedRow row: entry.getValue()) { + bindScenarioInsert(statement, tsCode, row); + statement.addBatch(); + batchCount++; + if (batchCount % 1000 == 0) { + statement.executeBatch(); + } } - statement.setInt(6, row.qualityCode); - statement.executeUpdate(); + statement.executeBatch(); } } } + private static void bindScenarioInsert(PreparedStatement statement, long tsCode, SeedRow row) + throws SQLException { + statement.setLong(1, tsCode); + statement.setTimestamp(2, Timestamp.from(row.dateTime)); + statement.setTimestamp(3, Timestamp.from(row.versionDate != null + ? 
row.versionDate + : Instant.parse("1111-11-11T00:00:00Z"))); + if (row.dataEntryDate != null) { + statement.setTimestamp(4, Timestamp.from(row.dataEntryDate)); + } else { + statement.setNull(4, Types.TIMESTAMP); + } + if (row.value != null) { + statement.setDouble(5, row.value); + } else { + statement.setNull(5, Types.DOUBLE); + } + statement.setInt(6, row.qualityCode); + } + private static void updateScenarioExtents(Connection connection, long tsCode, List rows) throws SQLException { Set distinctVersionDates = rows.stream() From 8737548c9826205692d370f8f8e1d546b403dc86 Mon Sep 17 00:00:00 2001 From: Charles Graham SWT Date: Wed, 6 May 2026 09:58:54 -0500 Subject: [PATCH 16/16] Bind Oracle parity comparison query --- .../cda/api/TimeSeriesDirectReadParityIT.java | 40 +++++++++---------- 1 file changed, 19 insertions(+), 21 deletions(-) diff --git a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java index 26a10f81bd..f30b152b21 100644 --- a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java +++ b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesDirectReadParityIT.java @@ -566,27 +566,35 @@ private static List fetchOracleRows(String seriesId, String u String rowProjection = includeEntryDate ? ", case when data_entry_date is null then null else round((cast(data_entry_date as date) - date '1970-01-01') * 86400000) end as data_entry_date_ms" : ""; - String versionDateExpression = versionDate != null - ? toOracleDateExpression(versionDate) - : "null"; - String maxVersionFlag = versionDate != null ? "'F'" : "'T'"; + String maxVersionFlag = versionDate != null ? 
"F" : "T"; String sql = "select round((date_time - date '1970-01-01') * 86400000) as date_time_ms," + " value," + " quality_code" + rowProjection + " from table(" + functionName + "(" - + toSqlStringLiteral(seriesId) + ", " - + toSqlStringLiteral(units) + ", " - + toOracleDateExpression(beginTime) + ", " - + toOracleDateExpression(endTime) + ", " + + "?, " + + "?, " + + "?, " + + "?, " + "'UTC', 'T', 'T', 'T', 'F', 'F', " - + versionDateExpression + ", " - + maxVersionFlag + ", " - + toSqlStringLiteral(OFFICE) + + "?, " + + "?, " + + "?" + "))" + " order by date_time"; try (PreparedStatement statement = connection.prepareStatement(sql)) { + statement.setString(1, seriesId); + statement.setString(2, units); + statement.setTimestamp(3, Timestamp.from(beginTime)); + statement.setTimestamp(4, Timestamp.from(endTime)); + if (versionDate != null) { + statement.setTimestamp(5, Timestamp.from(versionDate)); + } else { + statement.setNull(5, Types.TIMESTAMP); + } + statement.setString(6, maxVersionFlag); + statement.setString(7, OFFICE); try (ResultSet resultSet = statement.executeQuery()) { List rows = new ArrayList<>(); while (resultSet.next()) { @@ -689,22 +697,12 @@ private static TimeSeries fetchCdaRowsWithPageSize(String seriesId, String units return timeSeries.withValues(values); } - private static String toSqlStringLiteral(String value) { - return "'" + value.replace("'", "''") + "'"; - } - private static String toOracleDateExpression(Instant instant) { LocalDateTime utc = LocalDateTime.ofInstant(instant, ZoneOffset.UTC); return "to_date('" + utc.format(java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")) + "', 'yyyy-mm-dd hh24:mi:ss')"; } - private static String toOracleTimestampExpression(Instant instant) { - LocalDateTime utc = LocalDateTime.ofInstant(instant, ZoneOffset.UTC); - return "to_timestamp('" + utc.format(java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")) - + "', 'yyyy-mm-dd hh24:mi:ss')"; - } - private static final 
class SeedRow { private final Instant dateTime; private final Double value;