output)
+ {
+ if (geometry.isEmpty()) {
+ return;
+ }
+ if (geometry instanceof GeometryCollection gc) {
+ for (int i = 0; i < gc.getNumGeometries(); i++) {
+ flattenGeometry(gc.getGeometryN(i), output);
+ }
+ }
+ else {
+ output.add(geometry);
}
}
- public static String jsonFromJtsGeometry(org.locationtech.jts.geom.Geometry geometry)
+ private static long getCoordinateSequenceMemorySize(CoordinateSequence coordinateSequence)
{
- GeoJsonWriter geoJsonWriter = new GeoJsonWriter();
- geoJsonWriter.setEncodeCRS(false);
- return geoJsonWriter.write(geometry);
+ return COORDINATE_ARRAY_SEQUENCE_INSTANCE_SIZE +
+ sizeOfObjectArray(coordinateSequence.size()) +
+ (long) coordinateSequence.size() * COORDINATE_INSTANCE_SIZE;
}
}
diff --git a/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/EsriShapeReader.java b/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/EsriShapeReader.java
new file mode 100644
index 000000000000..2cdaaca43d76
--- /dev/null
+++ b/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/EsriShapeReader.java
@@ -0,0 +1,303 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.geospatial.serde;
+
+import io.airlift.slice.BasicSliceInput;
+import io.airlift.slice.Slice;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.LinearRing;
+import org.locationtech.jts.geom.Polygon;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Parser for ESRI Shape binary format.
+ *
+ * ESRI Shape format is used in ESRI Shapefiles and by the ESRI Geometry API.
+ * This parser supports the 2D shape types required for Hadoop Spatial Framework compatibility.
+ *
+ * <p>Supported shape types:
+ * <ul>
+ * <li>0 - Null Shape (empty geometry)</li>
+ * <li>1 - Point</li>
+ * <li>3 - PolyLine (becomes LineString or MultiLineString)</li>
+ * <li>5 - Polygon (becomes Polygon or MultiPolygon)</li>
+ * <li>8 - MultiPoint</li>
+ * </ul>
+ * Note: Z and M variants (PointZ, PolyLineZ, etc.) are not supported.
+ *
+ * @see <a href="https://www.esri.com/content/dam/esrisites/sitecore-archive/Files/Pdfs/library/whitepapers/pdfs/shapefile.pdf">ESRI Shapefile Technical Description</a>
+ */
+public final class EsriShapeReader
+{
+ // Shape types from ESRI Shapefile specification
+ private static final int NULL_SHAPE = 0;
+ private static final int POINT = 1;
+ private static final int POLYLINE = 3;
+ private static final int POLYGON = 5;
+ private static final int MULTIPOINT = 8;
+
+ // Bounding box size: 4 doubles (Xmin, Ymin, Xmax, Ymax)
+ private static final int BOUNDING_BOX_SIZE = 32;
+
+ private static final GeometryFactory GEOMETRY_FACTORY = new GeometryFactory();
+
+ private EsriShapeReader() {}
+
+ public static Geometry read(Slice slice)
+ {
+ BasicSliceInput input = slice.getInput();
+
+ int shapeType = input.readInt();
+
+ return switch (shapeType) {
+ case NULL_SHAPE -> GEOMETRY_FACTORY.createPoint();
+ case POINT -> readPoint(input);
+ case POLYLINE -> readPolyLine(input);
+ case POLYGON -> readPolygon(input);
+ case MULTIPOINT -> readMultiPoint(input);
+ default -> throw new IllegalArgumentException("Unsupported ESRI shape type: " + shapeType);
+ };
+ }
+
+ private static Geometry readPoint(BasicSliceInput input)
+ {
+ double x = input.readDouble();
+ double y = input.readDouble();
+
+ // ESRI represents empty points using NaN or extreme values
+ if (Double.isNaN(x) || Double.isNaN(y) || isEmptyValue(x) || isEmptyValue(y)) {
+ return GEOMETRY_FACTORY.createPoint();
+ }
+
+ return GEOMETRY_FACTORY.createPoint(new Coordinate(x, y));
+ }
+
+ /**
+ * Checks if a value represents an empty coordinate in ESRI format.
+ * ESRI uses extreme values close to Double.MAX_VALUE to indicate empty.
+ */
+ private static boolean isEmptyValue(double value)
+ {
+ return value <= -Double.MAX_VALUE || value >= Double.MAX_VALUE;
+ }
+
+ private static Geometry readMultiPoint(BasicSliceInput input)
+ {
+ // Check bounding box for empty (4 doubles: Xmin, Ymin, Xmax, Ymax)
+ if (isEmptyBoundingBox(input)) {
+ input.skip(BOUNDING_BOX_SIZE);
+ input.readInt(); // skip numPoints (should be 0)
+ return GEOMETRY_FACTORY.createMultiPoint();
+ }
+ input.skip(BOUNDING_BOX_SIZE);
+
+ int numPoints = input.readInt();
+ if (numPoints == 0) {
+ return GEOMETRY_FACTORY.createMultiPoint();
+ }
+
+ Coordinate[] coords = new Coordinate[numPoints];
+ for (int i = 0; i < numPoints; i++) {
+ double x = input.readDouble();
+ double y = input.readDouble();
+ coords[i] = new Coordinate(x, y);
+ }
+
+ return GEOMETRY_FACTORY.createMultiPointFromCoords(coords);
+ }
+
+ /**
+ * Checks if the bounding box indicates an empty geometry.
+ * Does not advance the input position.
+ */
+ private static boolean isEmptyBoundingBox(BasicSliceInput input)
+ {
+ long pos = input.position();
+ double xMin = input.readDouble();
+ input.setPosition(pos); // reset to original position
+ return Double.isNaN(xMin) || isEmptyValue(xMin);
+ }
+
+ private static Geometry readPolyLine(BasicSliceInput input)
+ {
+ // Check bounding box for empty
+ if (isEmptyBoundingBox(input)) {
+ input.skip(BOUNDING_BOX_SIZE);
+ input.readInt(); // skip numParts
+ input.readInt(); // skip numPoints (should be 0)
+ return GEOMETRY_FACTORY.createLineString();
+ }
+ input.skip(BOUNDING_BOX_SIZE);
+
+ int numParts = input.readInt();
+ int numPoints = input.readInt();
+
+ if (numParts == 0 || numPoints == 0) {
+ return GEOMETRY_FACTORY.createLineString();
+ }
+
+ // Read part indices
+ int[] partIndices = new int[numParts];
+ for (int i = 0; i < numParts; i++) {
+ partIndices[i] = input.readInt();
+ }
+ validatePartIndices(partIndices, numPoints);
+
+ // Read all points
+ Coordinate[] allCoords = new Coordinate[numPoints];
+ for (int i = 0; i < numPoints; i++) {
+ double x = input.readDouble();
+ double y = input.readDouble();
+ allCoords[i] = new Coordinate(x, y);
+ }
+
+ // Build LineStrings for each part
+ LineString[] lineStrings = new LineString[numParts];
+ for (int i = 0; i < numParts; i++) {
+ int startIndex = partIndices[i];
+ int endIndex = (i + 1 < numParts) ? partIndices[i + 1] : numPoints;
+ int partLength = endIndex - startIndex;
+
+ Coordinate[] partCoords = new Coordinate[partLength];
+ System.arraycopy(allCoords, startIndex, partCoords, 0, partLength);
+ lineStrings[i] = GEOMETRY_FACTORY.createLineString(partCoords);
+ }
+
+ if (numParts == 1) {
+ return lineStrings[0];
+ }
+ return GEOMETRY_FACTORY.createMultiLineString(lineStrings);
+ }
+
+ private static Geometry readPolygon(BasicSliceInput input)
+ {
+ // Check bounding box for empty
+ if (isEmptyBoundingBox(input)) {
+ input.skip(BOUNDING_BOX_SIZE);
+ input.readInt(); // skip numParts
+ input.readInt(); // skip numPoints (should be 0)
+ return GEOMETRY_FACTORY.createPolygon();
+ }
+ input.skip(BOUNDING_BOX_SIZE);
+
+ int numParts = input.readInt();
+ int numPoints = input.readInt();
+
+ if (numParts == 0 || numPoints == 0) {
+ return GEOMETRY_FACTORY.createPolygon();
+ }
+
+ // Read part indices
+ int[] partIndices = new int[numParts];
+ for (int i = 0; i < numParts; i++) {
+ partIndices[i] = input.readInt();
+ }
+ validatePartIndices(partIndices, numPoints);
+
+ // Read all points
+ Coordinate[] allCoords = new Coordinate[numPoints];
+ for (int i = 0; i < numPoints; i++) {
+ double x = input.readDouble();
+ double y = input.readDouble();
+ allCoords[i] = new Coordinate(x, y);
+ }
+
+ // Build rings for each part
+ LinearRing[] rings = new LinearRing[numParts];
+ for (int i = 0; i < numParts; i++) {
+ int startIndex = partIndices[i];
+ int endIndex = (i + 1 < numParts) ? partIndices[i + 1] : numPoints;
+ int partLength = endIndex - startIndex;
+
+ Coordinate[] partCoords = new Coordinate[partLength];
+ System.arraycopy(allCoords, startIndex, partCoords, 0, partLength);
+ rings[i] = GEOMETRY_FACTORY.createLinearRing(partCoords);
+ }
+
+ // Organize rings into polygons
+ // ESRI format: exterior rings are clockwise, interior rings (holes) are counter-clockwise
+ // JTS format: exterior rings are counter-clockwise, interior rings are clockwise
+ // We need to identify which rings are exterior (shells) and which are interior (holes)
+ return createPolygonsFromRings(rings);
+ }
+
+ private static void validatePartIndices(int[] partIndices, int numPoints)
+ {
+ int previousIndex = -1;
+ for (int partIndex : partIndices) {
+ if (partIndex < 0 || partIndex < previousIndex || partIndex > numPoints) {
+ throw new IllegalArgumentException("Invalid ESRI shape part index");
+ }
+ previousIndex = partIndex;
+ }
+ }
+
+ private static Geometry createPolygonsFromRings(LinearRing[] rings)
+ {
+ if (rings.length == 1) {
+ return GEOMETRY_FACTORY.createPolygon(rings[0]);
+ }
+
+ // Preserve the old ESRI importer behavior by treating the first ring's
+ // orientation as the shell orientation for the polygon stream.
+ boolean shellOrientation = isClockwise(rings[0].getCoordinates());
+ List polygons = new ArrayList<>();
+ LinearRing currentShell = null;
+ List currentHoles = new ArrayList<>();
+
+ for (LinearRing ring : rings) {
+ boolean isShell = isClockwise(ring.getCoordinates()) == shellOrientation;
+
+ if (isShell) {
+ if (currentShell != null) {
+ polygons.add(GEOMETRY_FACTORY.createPolygon(currentShell, currentHoles.toArray(new LinearRing[0])));
+ currentHoles.clear();
+ }
+ currentShell = ring;
+ }
+ else if (currentShell != null) {
+ currentHoles.add(ring);
+ }
+ }
+
+ // Don't forget the last polygon
+ if (currentShell != null) {
+ polygons.add(GEOMETRY_FACTORY.createPolygon(currentShell, currentHoles.toArray(new LinearRing[0])));
+ }
+
+ if (polygons.size() == 1) {
+ return polygons.getFirst();
+ }
+ return GEOMETRY_FACTORY.createMultiPolygon(polygons.toArray(new Polygon[0]));
+ }
+
+ /**
+ * Determines if a ring is clockwise using the shoelace formula.
+ * Positive signed area = counter-clockwise, negative signed area = clockwise.
+ */
+ private static boolean isClockwise(Coordinate[] ring)
+ {
+ double sum = 0;
+ for (int i = 0; i < ring.length - 1; i++) {
+ sum += (ring[i + 1].x - ring[i].x) * (ring[i + 1].y + ring[i].y);
+ }
+ return sum < 0;
+ }
+}
diff --git a/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/GeometrySerde.java b/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/GeometrySerde.java
deleted file mode 100644
index 2b3dba0a911b..000000000000
--- a/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/GeometrySerde.java
+++ /dev/null
@@ -1,350 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.geospatial.serde;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.MultiPoint;
-import com.esri.core.geometry.OperatorImportFromESRIShape;
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.Polygon;
-import com.esri.core.geometry.Polyline;
-import com.esri.core.geometry.VertexDescription;
-import com.esri.core.geometry.ogc.OGCConcreteGeometryCollection;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCGeometryCollection;
-import com.esri.core.geometry.ogc.OGCLineString;
-import com.esri.core.geometry.ogc.OGCMultiLineString;
-import com.esri.core.geometry.ogc.OGCMultiPoint;
-import com.esri.core.geometry.ogc.OGCMultiPolygon;
-import com.esri.core.geometry.ogc.OGCPoint;
-import com.esri.core.geometry.ogc.OGCPolygon;
-import io.airlift.slice.BasicSliceInput;
-import io.airlift.slice.DynamicSliceOutput;
-import io.airlift.slice.Slice;
-import io.airlift.slice.SliceInput;
-import io.trino.geospatial.GeometryType;
-import jakarta.annotation.Nullable;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-
-import static com.esri.core.geometry.Geometry.Type.Unknown;
-import static com.esri.core.geometry.GeometryEngine.geometryToEsriShape;
-import static com.google.common.base.Verify.verify;
-import static io.trino.geospatial.GeometryUtils.isEsriNaN;
-import static java.lang.Double.NaN;
-import static java.lang.Double.isNaN;
-import static java.lang.Math.toIntExact;
-import static java.util.Objects.requireNonNull;
-
-public final class GeometrySerde
-{
- private GeometrySerde() {}
-
- public static Slice serialize(OGCGeometry input)
- {
- requireNonNull(input, "input is null");
- DynamicSliceOutput output = new DynamicSliceOutput(100);
- writeGeometry(output, input);
- return output.slice();
- }
-
- public static Slice serialize(Envelope envelope)
- {
- requireNonNull(envelope, "envelope is null");
- verify(!envelope.isEmpty());
- DynamicSliceOutput output = new DynamicSliceOutput(100);
- output.appendByte(GeometrySerializationType.ENVELOPE.code());
- writeEnvelopeCoordinates(output, envelope);
- return output.slice();
- }
-
- public static GeometryType getGeometryType(Slice shape)
- {
- return deserializeType(shape).geometryType();
- }
-
- private static void writeGeometry(DynamicSliceOutput output, OGCGeometry geometry)
- {
- GeometryType type = GeometryType.getForEsriGeometryType(geometry.geometryType());
- switch (type) {
- case POINT -> writePoint(output, geometry);
- case MULTI_POINT -> writeSimpleGeometry(output, GeometrySerializationType.MULTI_POINT, geometry);
- case LINE_STRING -> writeSimpleGeometry(output, GeometrySerializationType.LINE_STRING, geometry);
- case MULTI_LINE_STRING -> writeSimpleGeometry(output, GeometrySerializationType.MULTI_LINE_STRING, geometry);
- case POLYGON -> writeSimpleGeometry(output, GeometrySerializationType.POLYGON, geometry);
- case MULTI_POLYGON -> writeSimpleGeometry(output, GeometrySerializationType.MULTI_POLYGON, geometry);
- case GEOMETRY_COLLECTION -> {
- verify(geometry instanceof OGCConcreteGeometryCollection);
- writeGeometryCollection(output, (OGCConcreteGeometryCollection) geometry);
- }
- default -> throw new IllegalArgumentException("Unexpected type: " + type);
- }
- }
-
- private static void writeGeometryCollection(DynamicSliceOutput output, OGCGeometryCollection collection)
- {
- output.appendByte(GeometrySerializationType.GEOMETRY_COLLECTION.code());
- for (int geometryIndex = 0; geometryIndex < collection.numGeometries(); geometryIndex++) {
- OGCGeometry geometry = collection.geometryN(geometryIndex);
- int startPosition = output.size();
-
- // leave 4 bytes for the shape length
- output.appendInt(0);
- writeGeometry(output, geometry);
-
- int endPosition = output.size();
- int length = endPosition - startPosition - Integer.BYTES;
-
- output.getUnderlyingSlice().setInt(startPosition, length);
- }
- }
-
- private static void writeSimpleGeometry(DynamicSliceOutput output, GeometrySerializationType type, OGCGeometry geometry)
- {
- output.appendByte(type.code());
- Geometry esriGeometry = requireNonNull(geometry.getEsriGeometry(), "esriGeometry is null");
- byte[] shape = geometryToEsriShape(esriGeometry);
- output.appendBytes(shape);
- }
-
- private static void writePoint(DynamicSliceOutput output, OGCGeometry geometry)
- {
- Geometry esriGeometry = geometry.getEsriGeometry();
- verify(esriGeometry instanceof Point, "geometry is expected to be an instance of Point");
- Point point = (Point) esriGeometry;
- verify(!point.hasAttribute(VertexDescription.Semantics.Z) &&
- !point.hasAttribute(VertexDescription.Semantics.M) &&
- !point.hasAttribute(VertexDescription.Semantics.ID),
- "Only 2D points with no ID nor M attribute are supported");
- output.appendByte(GeometrySerializationType.POINT.code());
- if (!point.isEmpty()) {
- output.appendDouble(point.getX());
- output.appendDouble(point.getY());
- }
- else {
- output.appendDouble(NaN);
- output.appendDouble(NaN);
- }
- }
-
- public static GeometrySerializationType deserializeType(Slice shape)
- {
- requireNonNull(shape, "shape is null");
- BasicSliceInput input = shape.getInput();
- verify(input.available() > 0);
- return GeometrySerializationType.getForCode(input.readByte());
- }
-
- public static OGCGeometry deserialize(Slice shape)
- {
- requireNonNull(shape, "shape is null");
- BasicSliceInput input = shape.getInput();
- verify(input.available() > 0);
- int length = input.available() - 1;
- GeometrySerializationType type = GeometrySerializationType.getForCode(input.readByte());
- return readGeometry(input, shape, type, length);
- }
-
- private static OGCGeometry readGeometry(BasicSliceInput input, Slice inputSlice, GeometrySerializationType type, int length)
- {
- return switch (type) {
- case POINT -> readPoint(input);
- case MULTI_POINT, LINE_STRING, MULTI_LINE_STRING, POLYGON, MULTI_POLYGON -> readSimpleGeometry(input, inputSlice, type, length);
- case GEOMETRY_COLLECTION -> readGeometryCollection(input, inputSlice);
- case ENVELOPE -> createFromEsriGeometry(readEnvelope(input), false);
- };
- }
-
- private static OGCConcreteGeometryCollection readGeometryCollection(BasicSliceInput input, Slice inputSlice)
- {
- // GeometryCollection: geometryType|len-of-shape1|bytes-of-shape1|len-of-shape2|bytes-of-shape2...
- List geometries = new ArrayList<>();
- while (input.available() > 0) {
- int length = input.readInt() - 1;
- GeometrySerializationType type = GeometrySerializationType.getForCode(input.readByte());
- geometries.add(readGeometry(input, inputSlice, type, length));
- }
- return new OGCConcreteGeometryCollection(geometries, null);
- }
-
- private static OGCGeometry readSimpleGeometry(BasicSliceInput input, Slice inputSlice, GeometrySerializationType type, int length)
- {
- int currentPosition = toIntExact(input.position());
- ByteBuffer geometryBuffer = inputSlice.toByteBuffer(currentPosition, length).slice();
- input.setPosition(currentPosition + length);
- Geometry esriGeometry = OperatorImportFromESRIShape.local().execute(0, Unknown, geometryBuffer);
- return createFromEsriGeometry(esriGeometry, type.geometryType().isMultitype());
- }
-
- private static OGCGeometry createFromEsriGeometry(Geometry geometry, boolean multiType)
- {
- Geometry.Type type = geometry.getType();
- switch (type) {
- case Polygon: {
- if (!multiType && ((Polygon) geometry).getExteriorRingCount() <= 1) {
- return new OGCPolygon((Polygon) geometry, null);
- }
- return new OGCMultiPolygon((Polygon) geometry, null);
- }
- case Polyline: {
- if (!multiType && ((Polyline) geometry).getPathCount() <= 1) {
- return new OGCLineString((Polyline) geometry, 0, null);
- }
- return new OGCMultiLineString((Polyline) geometry, null);
- }
- case MultiPoint: {
- if (!multiType && ((MultiPoint) geometry).getPointCount() <= 1) {
- if (geometry.isEmpty()) {
- return new OGCPoint(new Point(), null);
- }
- return new OGCPoint(((MultiPoint) geometry).getPoint(0), null);
- }
- return new OGCMultiPoint((MultiPoint) geometry, null);
- }
- case Point: {
- if (!multiType) {
- return new OGCPoint((Point) geometry, null);
- }
- return new OGCMultiPoint((Point) geometry, null);
- }
- case Envelope: {
- Polygon polygon = new Polygon();
- polygon.addEnvelope((Envelope) geometry, false);
- return new OGCPolygon(polygon, null);
- }
- case Line:
- // TODO unsupported
- break;
- case Unknown:
- break;
- }
- throw new IllegalArgumentException("Unexpected geometry type: " + type);
- }
-
- private static OGCPoint readPoint(BasicSliceInput input)
- {
- double x = input.readDouble();
- double y = input.readDouble();
- Point point;
- if (isNaN(x) || isNaN(y)) {
- point = new Point();
- }
- else {
- point = new Point(x, y);
- }
- return new OGCPoint(point, null);
- }
-
- @Nullable
- public static Envelope deserializeEnvelope(Slice shape)
- {
- requireNonNull(shape, "shape is null");
- BasicSliceInput input = shape.getInput();
- verify(input.available() > 0);
-
- int length = input.available() - 1;
- GeometrySerializationType type = GeometrySerializationType.getForCode(input.readByte());
- return getEnvelope(input, type, length);
- }
-
- private static Envelope getEnvelope(BasicSliceInput input, GeometrySerializationType type, int length)
- {
- return switch (type) {
- case POINT -> getPointEnvelope(input);
- case MULTI_POINT, LINE_STRING, MULTI_LINE_STRING, POLYGON, MULTI_POLYGON -> getSimpleGeometryEnvelope(input, length);
- case GEOMETRY_COLLECTION -> getGeometryCollectionOverallEnvelope(input);
- case ENVELOPE -> readEnvelope(input);
- };
- }
-
- private static Envelope getGeometryCollectionOverallEnvelope(BasicSliceInput input)
- {
- Envelope overallEnvelope = new Envelope();
- while (input.available() > 0) {
- int length = input.readInt() - 1;
- GeometrySerializationType type = GeometrySerializationType.getForCode(input.readByte());
- Envelope envelope = getEnvelope(input, type, length);
- overallEnvelope = merge(overallEnvelope, envelope);
- }
- return overallEnvelope;
- }
-
- private static Envelope getSimpleGeometryEnvelope(BasicSliceInput input, int length)
- {
- // skip type injected by esri
- input.readInt();
-
- Envelope envelope = readEnvelope(input);
-
- int skipLength = length - (4 * Double.BYTES) - Integer.BYTES;
- verify(input.skip(skipLength) == skipLength);
-
- return envelope;
- }
-
- private static Envelope getPointEnvelope(BasicSliceInput input)
- {
- double x = input.readDouble();
- double y = input.readDouble();
- if (isNaN(x) || isNaN(y)) {
- return new Envelope();
- }
- return new Envelope(x, y, x, y);
- }
-
- private static Envelope readEnvelope(SliceInput input)
- {
- verify(input.available() > 0);
- double xMin = input.readDouble();
- double yMin = input.readDouble();
- double xMax = input.readDouble();
- double yMax = input.readDouble();
- if (isEsriNaN(xMin) || isEsriNaN(yMin) || isEsriNaN(xMax) || isEsriNaN(yMax)) {
- return new Envelope();
- }
- return new Envelope(xMin, yMin, xMax, yMax);
- }
-
- private static void writeEnvelopeCoordinates(DynamicSliceOutput output, Envelope envelope)
- {
- if (envelope.isEmpty()) {
- output.appendDouble(NaN);
- output.appendDouble(NaN);
- output.appendDouble(NaN);
- output.appendDouble(NaN);
- }
- else {
- output.appendDouble(envelope.getXMin());
- output.appendDouble(envelope.getYMin());
- output.appendDouble(envelope.getXMax());
- output.appendDouble(envelope.getYMax());
- }
- }
-
- @Nullable
- private static Envelope merge(@Nullable Envelope left, @Nullable Envelope right)
- {
- if (left == null) {
- return right;
- }
- if (right == null) {
- return left;
- }
- right.merge(left);
- return right;
- }
-}
diff --git a/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/GeometrySerializationType.java b/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/GeometrySerializationType.java
deleted file mode 100644
index e712200a4c3c..000000000000
--- a/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/GeometrySerializationType.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.geospatial.serde;
-
-import io.trino.geospatial.GeometryType;
-
-public enum GeometrySerializationType
-{
- POINT(0, GeometryType.POINT),
- MULTI_POINT(1, GeometryType.MULTI_POINT),
- LINE_STRING(2, GeometryType.LINE_STRING),
- MULTI_LINE_STRING(3, GeometryType.MULTI_LINE_STRING),
- POLYGON(4, GeometryType.POLYGON),
- MULTI_POLYGON(5, GeometryType.MULTI_POLYGON),
- GEOMETRY_COLLECTION(6, GeometryType.GEOMETRY_COLLECTION),
- ENVELOPE(7, GeometryType.POLYGON);
-
- private final int code;
- private final GeometryType geometryType;
-
- GeometrySerializationType(int code, GeometryType geometryType)
- {
- this.code = code;
- this.geometryType = geometryType;
- }
-
- public int code()
- {
- return code;
- }
-
- public GeometryType geometryType()
- {
- return geometryType;
- }
-
- public static GeometrySerializationType getForCode(int code)
- {
- return switch (code) {
- case 0 -> POINT;
- case 1 -> MULTI_POINT;
- case 2 -> LINE_STRING;
- case 3 -> MULTI_LINE_STRING;
- case 4 -> POLYGON;
- case 5 -> MULTI_POLYGON;
- case 6 -> GEOMETRY_COLLECTION;
- case 7 -> ENVELOPE;
- default -> throw new IllegalArgumentException("Invalid type code: " + code);
- };
- }
-}
diff --git a/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/JtsGeometrySerde.java b/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/JtsGeometrySerde.java
index f96bb73d02b5..23d2b7aea299 100644
--- a/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/JtsGeometrySerde.java
+++ b/lib/trino-geospatial-toolkit/src/main/java/io/trino/geospatial/serde/JtsGeometrySerde.java
@@ -13,495 +13,175 @@
*/
package io.trino.geospatial.serde;
-import io.airlift.slice.BasicSliceInput;
-import io.airlift.slice.DynamicSliceOutput;
import io.airlift.slice.Slice;
-import io.airlift.slice.SliceInput;
-import io.airlift.slice.SliceOutput;
-import org.locationtech.jts.algorithm.Orientation;
-import org.locationtech.jts.geom.Coordinate;
+import io.airlift.slice.Slices;
+import io.trino.geospatial.GeometryType;
+import io.trino.spi.TrinoException;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
-import org.locationtech.jts.geom.LineString;
-import org.locationtech.jts.geom.LinearRing;
-import org.locationtech.jts.geom.MultiPoint;
-import org.locationtech.jts.geom.Point;
-import org.locationtech.jts.geom.Polygon;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKBReader;
+import org.locationtech.jts.io.WKBWriter;
import static com.google.common.base.Verify.verify;
-import static com.google.common.base.Verify.verifyNotNull;
-import static com.google.common.collect.Iterables.getOnlyElement;
-import static io.airlift.slice.SizeOf.SIZE_OF_DOUBLE;
-import static io.trino.geospatial.GeometryUtils.translateToAVNaN;
-import static java.lang.Double.NaN;
-import static java.lang.Double.isNaN;
+import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
+import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
+/**
+ * Serializes JTS Geometry objects to/from EWKB (Extended Well-Known Binary) format.
+ * EWKB is the PostGIS extension that includes SRID in the binary format.
+ */
public final class JtsGeometrySerde
{
- // TODO: Are we sure this is thread safe?
private static final GeometryFactory GEOMETRY_FACTORY = new GeometryFactory();
+ // WKB type codes (2D)
+ private static final int WKB_POINT = 1;
+ private static final int WKB_LINE_STRING = 2;
+ private static final int WKB_POLYGON = 3;
+ private static final int WKB_MULTI_POINT = 4;
+ private static final int WKB_MULTI_LINE_STRING = 5;
+ private static final int WKB_MULTI_POLYGON = 6;
+ private static final int WKB_GEOMETRY_COLLECTION = 7;
+
private JtsGeometrySerde() {}
+ /**
+ * Deserialize WKB bytes to a JTS Geometry.
+ */
public static Geometry deserialize(Slice shape)
{
requireNonNull(shape, "shape is null");
- BasicSliceInput input = shape.getInput();
- verify(input.available() > 0);
- GeometrySerializationType type = GeometrySerializationType.getForCode(input.readByte());
- return readGeometry(input, type);
- }
-
- private static Geometry readGeometry(BasicSliceInput input, GeometrySerializationType type)
- {
- return switch (type) {
- case POINT -> readPoint(input);
- case MULTI_POINT -> readMultiPoint(input);
- case LINE_STRING -> readPolyline(input, false);
- case MULTI_LINE_STRING -> readPolyline(input, true);
- case POLYGON -> readPolygon(input, false);
- case MULTI_POLYGON -> readPolygon(input, true);
- case GEOMETRY_COLLECTION -> readGeometryCollection(input);
- case ENVELOPE -> readEnvelope(input);
- };
- }
-
- private static Point readPoint(SliceInput input)
- {
- Coordinate coordinates = readCoordinate(input);
- if (isNaN(coordinates.x) || isNaN(coordinates.y)) {
- return GEOMETRY_FACTORY.createPoint();
- }
- return GEOMETRY_FACTORY.createPoint(coordinates);
- }
-
- private static Geometry readMultiPoint(SliceInput input)
- {
- skipEsriType(input);
- skipEnvelope(input);
- int pointCount = input.readInt();
- Point[] points = new Point[pointCount];
- for (int i = 0; i < pointCount; i++) {
- points[i] = readPoint(input);
- }
- return GEOMETRY_FACTORY.createMultiPoint(points);
- }
-
- private static Geometry readPolyline(SliceInput input, boolean multitype)
- {
- skipEsriType(input);
- skipEnvelope(input);
- int partCount = input.readInt();
- if (partCount == 0) {
- if (multitype) {
- return GEOMETRY_FACTORY.createMultiLineString();
- }
- return GEOMETRY_FACTORY.createLineString();
- }
-
- int pointCount = input.readInt();
- int[] startIndexes = new int[partCount];
- for (int i = 0; i < partCount; i++) {
- startIndexes[i] = input.readInt();
- }
-
- int[] partLengths = new int[partCount];
- if (partCount > 1) {
- partLengths[0] = startIndexes[1];
- for (int i = 1; i < partCount - 1; i++) {
- partLengths[i] = startIndexes[i + 1] - startIndexes[i];
- }
- }
- partLengths[partCount - 1] = pointCount - startIndexes[partCount - 1];
-
- LineString[] lineStrings = new LineString[partCount];
-
- for (int i = 0; i < partCount; i++) {
- lineStrings[i] = GEOMETRY_FACTORY.createLineString(readCoordinates(input, partLengths[i]));
+ verify(shape.length() > 0, "shape is empty");
+ try {
+ return new WKBReader(GEOMETRY_FACTORY).read(shape.getBytes());
}
-
- if (multitype) {
- return GEOMETRY_FACTORY.createMultiLineString(lineStrings);
+ catch (ParseException e) {
+ throw new IllegalArgumentException("Invalid WKB", e);
}
- verify(lineStrings.length == 1);
- return lineStrings[0];
- }
-
- private static Geometry readPolygon(SliceInput input, boolean multitype)
- {
- skipEsriType(input);
- skipEnvelope(input);
- int partCount = input.readInt();
- if (partCount == 0) {
- if (multitype) {
- return GEOMETRY_FACTORY.createMultiPolygon();
- }
- return GEOMETRY_FACTORY.createPolygon();
- }
-
- int pointCount = input.readInt();
- int[] startIndexes = new int[partCount];
- for (int i = 0; i < partCount; i++) {
- startIndexes[i] = input.readInt();
- }
-
- int[] partLengths = new int[partCount];
- if (partCount > 1) {
- partLengths[0] = startIndexes[1];
- for (int i = 1; i < partCount - 1; i++) {
- partLengths[i] = startIndexes[i + 1] - startIndexes[i];
- }
- }
- partLengths[partCount - 1] = pointCount - startIndexes[partCount - 1];
-
- LinearRing shell = null;
- List holes = new ArrayList<>();
- List polygons = new ArrayList<>();
- for (int i = 0; i < partCount; i++) {
- Coordinate[] coordinates = readCoordinates(input, partLengths[i]);
- if (isClockwise(coordinates)) {
- // next polygon has started
- if (shell != null) {
- polygons.add(GEOMETRY_FACTORY.createPolygon(shell, holes.toArray(new LinearRing[0])));
- holes.clear();
- }
- else {
- verify(holes.isEmpty(), "shell is null but holes found");
- }
- shell = GEOMETRY_FACTORY.createLinearRing(coordinates);
- }
- else {
- verifyNotNull(shell, "shell is null but hole found");
- holes.add(GEOMETRY_FACTORY.createLinearRing(coordinates));
- }
- }
- polygons.add(GEOMETRY_FACTORY.createPolygon(shell, holes.toArray(new LinearRing[0])));
-
- if (multitype) {
- return GEOMETRY_FACTORY.createMultiPolygon(polygons.toArray(new Polygon[0]));
- }
- return getOnlyElement(polygons);
- }
-
- private static Geometry readGeometryCollection(BasicSliceInput input)
- {
- List geometries = new ArrayList<>();
- while (input.available() > 0) {
- // skip length
- input.readInt();
- GeometrySerializationType type = GeometrySerializationType.getForCode(input.readByte());
- geometries.add(readGeometry(input, type));
- }
- return GEOMETRY_FACTORY.createGeometryCollection(geometries.toArray(new Geometry[0]));
- }
-
- private static Geometry readEnvelope(SliceInput input)
- {
- verify(input.available() > 0);
- double xMin = input.readDouble();
- double yMin = input.readDouble();
- double xMax = input.readDouble();
- double yMax = input.readDouble();
-
- Coordinate[] coordinates = new Coordinate[5];
- coordinates[0] = new Coordinate(xMin, yMin);
- coordinates[1] = new Coordinate(xMin, yMax);
- coordinates[2] = new Coordinate(xMax, yMax);
- coordinates[3] = new Coordinate(xMax, yMin);
- coordinates[4] = coordinates[0];
- return GEOMETRY_FACTORY.createPolygon(coordinates);
- }
-
- private static void skipEsriType(SliceInput input)
- {
- input.readInt();
- }
-
- private static void skipEnvelope(SliceInput input)
- {
- requireNonNull(input, "input is null");
- int skipLength = 4 * SIZE_OF_DOUBLE;
- verify(input.skip(skipLength) == skipLength);
- }
-
- private static Coordinate readCoordinate(SliceInput input)
- {
- requireNonNull(input, "input is null");
- return new Coordinate(input.readDouble(), input.readDouble());
- }
-
- private static Coordinate[] readCoordinates(SliceInput input, int count)
- {
- requireNonNull(input, "input is null");
- verify(count > 0);
- Coordinate[] coordinates = new Coordinate[count];
- for (int i = 0; i < count; i++) {
- coordinates[i] = readCoordinate(input);
- }
- return coordinates;
}
/**
- * Serialize JTS {@link Geometry} shape into an ESRI shape
+ * Serialize a JTS Geometry to EWKB bytes (Extended WKB with SRID).
*/
public static Slice serialize(Geometry geometry)
{
requireNonNull(geometry, "geometry is null");
- DynamicSliceOutput output = new DynamicSliceOutput(100);
- writeGeometry(geometry, output);
- return output.slice();
- }
-
- private static void writeGeometry(Geometry geometry, DynamicSliceOutput output)
- {
- switch (geometry.getGeometryType()) {
- case "Point":
- writePoint((Point) geometry, output);
- return;
- case "MultiPoint":
- writeMultiPoint((MultiPoint) geometry, output);
- return;
- case "LineString":
- writePolyline(geometry, output, false);
- return;
- case "MultiLineString":
- writePolyline(geometry, output, true);
- return;
- case "Polygon":
- writePolygon(geometry, output, false);
- return;
- case "MultiPolygon":
- writePolygon(geometry, output, true);
- return;
- case "GeometryCollection":
- writeGeometryCollection(geometry, output);
- return;
- }
- throw new IllegalArgumentException("Unsupported geometry type : " + geometry.getGeometryType());
- }
-
- private static void writePoint(Point point, SliceOutput output)
- {
- output.writeByte(GeometrySerializationType.POINT.code());
- if (!point.isEmpty()) {
- writeCoordinate(point.getCoordinate(), output);
- }
- else {
- output.writeDouble(NaN);
- output.writeDouble(NaN);
- }
+ // WKBWriter(outputDimension = 2, includeSRID = true):
+ // emit 2D coordinates and embed the geometry's SRID (EWKB format)
+ byte[] bytes = new WKBWriter(2, true).write(geometry);
+ return Slices.wrappedBuffer(bytes);
}
- private static void writeMultiPoint(MultiPoint geometry, SliceOutput output)
- {
- output.writeByte(GeometrySerializationType.MULTI_POINT.code());
- output.writeInt(EsriShapeType.MULTI_POINT.code);
- writeEnvelope(geometry, output);
- output.writeInt(geometry.getNumPoints());
- for (Coordinate coordinate : geometry.getCoordinates()) {
- writeCoordinate(coordinate, output);
- }
- }
-
- private static void writePolyline(Geometry geometry, SliceOutput output, boolean multitype)
+ /**
+ * Serialize a JTS Envelope to WKB bytes as a Polygon; the envelope must be non-null and non-empty.
+ */
+ public static Slice serialize(Envelope envelope)
{
- int numParts;
- int numPoints = geometry.getNumPoints();
- if (multitype) {
- numParts = geometry.getNumGeometries();
- output.writeByte(GeometrySerializationType.MULTI_LINE_STRING.code());
- }
- else {
- numParts = numPoints > 0 ? 1 : 0;
- output.writeByte(GeometrySerializationType.LINE_STRING.code());
- }
-
- output.writeInt(EsriShapeType.POLYLINE.code);
-
- writeEnvelope(geometry, output);
-
- output.writeInt(numParts);
- output.writeInt(numPoints);
-
- int partIndex = 0;
- for (int i = 0; i < numParts; i++) {
- output.writeInt(partIndex);
- partIndex += geometry.getGeometryN(i).getNumPoints();
- }
-
- writeCoordinates(geometry.getCoordinates(), output);
+ requireNonNull(envelope, "envelope is null");
+ verify(!envelope.isNull(), "envelope is null/empty");
+ Geometry polygon = GEOMETRY_FACTORY.toGeometry(envelope);
+ return serialize(polygon);
}
- private static void writePolygon(Geometry geometry, SliceOutput output, boolean multitype)
+ /**
+ * Get the geometry type from WKB bytes without full deserialization.
+ */
+ public static GeometryType deserializeType(Slice shape)
{
- int numGeometries = geometry.getNumGeometries();
- int numParts = 0;
- int numPoints = geometry.getNumPoints();
- for (int i = 0; i < numGeometries; i++) {
- Polygon polygon = (Polygon) geometry.getGeometryN(i);
- if (polygon.getNumPoints() > 0) {
- numParts += polygon.getNumInteriorRing() + 1;
- }
- }
-
- if (multitype) {
- output.writeByte(GeometrySerializationType.MULTI_POLYGON.code());
+ requireNonNull(shape, "shape is null");
+ verify(shape.length() >= 5, "shape too short for WKB header");
+
+ // WKB format: [1 byte endianness] [4 bytes type]
+ // endianness: 0 = big endian (XDR), 1 = little endian (NDR)
+ byte endianness = shape.getByte(0);
+ verify(endianness == 0 || endianness == 1, "invalid WKB endianness: %s", endianness);
+ int wkbType;
+ if (endianness == 0) {
+ // Big endian - read bytes manually
+ wkbType = ((shape.getByte(1) & 0xFF) << 24) |
+ ((shape.getByte(2) & 0xFF) << 16) |
+ ((shape.getByte(3) & 0xFF) << 8) |
+ (shape.getByte(4) & 0xFF);
}
else {
- output.writeByte(GeometrySerializationType.POLYGON.code());
- }
-
- output.writeInt(EsriShapeType.POLYGON.code);
-
- writeEnvelope(geometry, output);
-
- output.writeInt(numParts);
- output.writeInt(numPoints);
-
- if (numParts == 0) {
- return;
- }
-
- int[] partIndexes = new int[numParts];
- boolean[] shellPart = new boolean[numParts];
-
- int currentPart = 0;
- int currentPoint = 0;
- for (int i = 0; i < numGeometries; i++) {
- Polygon polygon = (Polygon) geometry.getGeometryN(i);
-
- partIndexes[currentPart] = currentPoint;
- shellPart[currentPart] = true;
- currentPart++;
- currentPoint += polygon.getExteriorRing().getNumPoints();
-
- int holesCount = polygon.getNumInteriorRing();
- for (int holeIndex = 0; holeIndex < holesCount; holeIndex++) {
- partIndexes[currentPart] = currentPoint;
- shellPart[currentPart] = false;
- currentPart++;
- currentPoint += polygon.getInteriorRingN(holeIndex).getNumPoints();
- }
- }
-
- for (int partIndex : partIndexes) {
- output.writeInt(partIndex);
- }
-
- Coordinate[] coordinates = geometry.getCoordinates();
- canonicalizePolygonCoordinates(coordinates, partIndexes, shellPart);
- writeCoordinates(coordinates, output);
- }
-
- private static void writeGeometryCollection(Geometry collection, DynamicSliceOutput output)
- {
- output.appendByte(GeometrySerializationType.GEOMETRY_COLLECTION.code());
- for (int geometryIndex = 0; geometryIndex < collection.getNumGeometries(); geometryIndex++) {
- Geometry geometry = collection.getGeometryN(geometryIndex);
- int startPosition = output.size();
-
- // leave 4 bytes for the shape length
- output.appendInt(0);
- writeGeometry(geometry, output);
-
- int endPosition = output.size();
- int length = endPosition - startPosition - Integer.BYTES;
-
- output.getUnderlyingSlice().setInt(startPosition, length);
- }
+ // Little endian - read bytes manually
+ wkbType = (shape.getByte(1) & 0xFF) |
+ ((shape.getByte(2) & 0xFF) << 8) |
+ ((shape.getByte(3) & 0xFF) << 16) |
+ ((shape.getByte(4) & 0xFF) << 24);
+ }
+
+ // Mask off Z/M/SRID flags to get base type
+ // WKB type codes: 1=Point, 2=LineString, 3=Polygon, 4=MultiPoint, etc.
+ // Z adds 1000, M adds 2000, ZM adds 3000
+ // EWKB flag bits (Z = 0x80000000, M = 0x40000000, SRID = 0x20000000) are all cleared by the 0xFFFF mask
+ int baseType = wkbType & 0xFFFF;
+ if (baseType > 1000) {
+ baseType = baseType % 1000;
+ }
+
+ return switch (baseType) {
+ case WKB_POINT -> GeometryType.POINT;
+ case WKB_LINE_STRING -> GeometryType.LINE_STRING;
+ case WKB_POLYGON -> GeometryType.POLYGON;
+ case WKB_MULTI_POINT -> GeometryType.MULTI_POINT;
+ case WKB_MULTI_LINE_STRING -> GeometryType.MULTI_LINE_STRING;
+ case WKB_MULTI_POLYGON -> GeometryType.MULTI_POLYGON;
+ case WKB_GEOMETRY_COLLECTION -> GeometryType.GEOMETRY_COLLECTION;
+ default -> throw new IllegalArgumentException("Unknown WKB type: " + wkbType);
+ };
}
- private static void writeCoordinate(Coordinate coordinate, SliceOutput output)
+ /**
+ * Get the envelope (bounding box) of a geometry from WKB bytes.
+ * This requires parsing the full geometry.
+ */
+ public static Envelope deserializeEnvelope(Slice shape)
{
- output.writeDouble(translateToAVNaN(coordinate.x));
- output.writeDouble(translateToAVNaN(coordinate.y));
+ Geometry geometry = deserialize(shape);
+ return geometry.getEnvelopeInternal();
}
- private static void writeCoordinates(Coordinate[] coordinates, SliceOutput output)
+ /**
+ * Serialize geometry preserving SRID from source geometry.
+ */
+ public static Slice serializeWithSrid(Geometry result, Geometry source)
{
- for (Coordinate coordinate : coordinates) {
- writeCoordinate(coordinate, output);
- }
+ result.setSRID(source.getSRID());
+ return serialize(result);
}
- private static void writeEnvelope(Geometry geometry, SliceOutput output)
+ /**
+ * Validate that two geometries' SRIDs are compatible for a binary operation and return the SRID to use.
+ * Rules: SRID 0 (unspecified) matches anything; two different non-zero SRIDs raise INVALID_FUNCTION_ARGUMENT.
+ */
+ public static int validateAndGetSrid(Geometry left, Geometry right)
{
- if (geometry.isEmpty()) {
- for (int i = 0; i < 4; i++) {
- output.writeDouble(NaN);
- }
- return;
- }
-
- Envelope envelope = geometry.getEnvelopeInternal();
- output.writeDouble(envelope.getMinX());
- output.writeDouble(envelope.getMinY());
- output.writeDouble(envelope.getMaxX());
- output.writeDouble(envelope.getMaxY());
- }
+ int leftSrid = left.getSRID();
+ int rightSrid = right.getSRID();
- private static void canonicalizePolygonCoordinates(Coordinate[] coordinates, int[] partIndexes, boolean[] shellPart)
- {
- for (int part = 0; part < partIndexes.length - 1; part++) {
- canonicalizePolygonCoordinates(coordinates, partIndexes[part], partIndexes[part + 1], shellPart[part]);
+ if (leftSrid == 0) {
+ return rightSrid;
}
- if (partIndexes.length > 0) {
- canonicalizePolygonCoordinates(coordinates, partIndexes[partIndexes.length - 1], coordinates.length, shellPart[partIndexes.length - 1]);
+ if (rightSrid == 0) {
+ return leftSrid;
}
- }
-
- private static void canonicalizePolygonCoordinates(Coordinate[] coordinates, int start, int end, boolean isShell)
- {
- boolean isClockwise = isClockwise(coordinates, start, end);
-
- if ((isShell && !isClockwise) || (!isShell && isClockwise)) {
- // shell has to be counter clockwise
- reverse(coordinates, start, end);
- }
- }
-
- private static boolean isClockwise(Coordinate[] coordinates)
- {
- return !Orientation.isCCW(coordinates);
- }
-
- private static boolean isClockwise(Coordinate[] coordinates, int start, int end)
- {
- return isClockwise(Arrays.copyOfRange(coordinates, start, end));
- }
-
- private static void reverse(Coordinate[] coordinates, int start, int end)
- {
- verify(start <= end, "start must be less or equal than end");
- for (int i = start; i < start + ((end - start) / 2); i++) {
- Coordinate buffer = coordinates[i];
- coordinates[i] = coordinates[start + end - i - 1];
- coordinates[start + end - i - 1] = buffer;
+ if (leftSrid != rightSrid) {
+ throw new TrinoException(INVALID_FUNCTION_ARGUMENT,
+ format("SRID mismatch: %d vs %d", leftSrid, rightSrid));
}
+ return leftSrid;
}
/**
- * Shape type codes from ERSI's specification
- * https://www.esri.com/library/whitepapers/pdfs/shapefile.pdf
+ * Serialize binary operation result with validated SRID.
*/
- private enum EsriShapeType
+ public static Slice serializeBinaryOp(Geometry result, Geometry left, Geometry right)
{
- POINT(1),
- POLYLINE(3),
- POLYGON(5),
- MULTI_POINT(8);
-
- final int code;
-
- EsriShapeType(int code)
- {
- this.code = code;
- }
+ result.setSRID(validateAndGetSrid(left, right));
+ return serialize(result);
}
}
diff --git a/lib/trino-geospatial-toolkit/src/test/java/io/trino/geospatial/TestGeometryUtils.java b/lib/trino-geospatial-toolkit/src/test/java/io/trino/geospatial/TestGeometryUtils.java
index ede4c9f44cd0..3da614822db6 100644
--- a/lib/trino-geospatial-toolkit/src/test/java/io/trino/geospatial/TestGeometryUtils.java
+++ b/lib/trino-geospatial-toolkit/src/test/java/io/trino/geospatial/TestGeometryUtils.java
@@ -14,9 +14,12 @@
package io.trino.geospatial;
import org.junit.jupiter.api.Test;
+import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;
+import static io.trino.geospatial.GeometryUtils.contains;
+import static io.trino.geospatial.GeometryUtils.estimateMemorySize;
import static io.trino.geospatial.GeometryUtils.jsonFromJtsGeometry;
import static org.assertj.core.api.Assertions.assertThat;
@@ -31,4 +34,42 @@ void testJsonFromJtsGeometry()
.isNotNull()
.doesNotContain("crs");
}
+
+ @Test
+ void testEstimateMemorySize()
+ throws ParseException
+ {
+ Geometry point = new WKTReader().read("POINT (1 1)");
+ Geometry lineString = new WKTReader().read("LINESTRING (1 1, 2 2)");
+ Geometry geometryCollection = new WKTReader().read("GEOMETRYCOLLECTION (POINT (1 1), LINESTRING (1 1, 2 2))");
+
+ assertThat(estimateMemorySize(null)).isZero();
+ assertThat(estimateMemorySize(point)).isPositive();
+ assertThat(estimateMemorySize(geometryCollection))
+ .isGreaterThan(estimateMemorySize(point) + estimateMemorySize(lineString));
+ }
+
+ @Test
+ void testContainsUsesJtsSemanticsForMultiLineString()
+ throws ParseException
+ {
+ Geometry multiLineString = new WKTReader().read("MULTILINESTRING ((0 0, 1 0), (1 0, 2 0))");
+ Geometry multiPoint = new WKTReader().read("MULTIPOINT ((0.25 0), (1.75 0))");
+
+ assertThat(multiLineString.contains(multiPoint)).isTrue();
+ assertThat(multiLineString.getGeometryN(0).contains(multiPoint)).isFalse();
+ assertThat(multiLineString.getGeometryN(1).contains(multiPoint)).isFalse();
+ assertThat(contains(multiLineString, multiPoint)).isTrue();
+ }
+
+ @Test
+ void testContainsRecursesForGeometryCollection()
+ throws ParseException
+ {
+ Geometry geometryCollection = new WKTReader().read("GEOMETRYCOLLECTION (POINT (10 10), POLYGON ((0 0, 4 0, 4 4, 0 4, 0 0)))");
+ Geometry polygon = new WKTReader().read("POLYGON ((1 1, 3 1, 3 3, 1 3, 1 1))");
+
+ assertThat(geometryCollection.getGeometryN(1).contains(polygon)).isTrue();
+ assertThat(contains(geometryCollection, polygon)).isTrue();
+ }
}
diff --git a/lib/trino-geospatial-toolkit/src/test/java/io/trino/geospatial/serde/BenchmarkGeometrySerde.java b/lib/trino-geospatial-toolkit/src/test/java/io/trino/geospatial/serde/BenchmarkGeometrySerde.java
deleted file mode 100644
index 60093b6fd300..000000000000
--- a/lib/trino-geospatial-toolkit/src/test/java/io/trino/geospatial/serde/BenchmarkGeometrySerde.java
+++ /dev/null
@@ -1,386 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.geospatial.serde;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.google.common.base.Joiner;
-import io.airlift.slice.Slice;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Fork;
-import org.openjdk.jmh.annotations.Measurement;
-import org.openjdk.jmh.annotations.OutputTimeUnit;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.Warmup;
-import org.openjdk.jmh.runner.RunnerException;
-
-import static com.esri.core.geometry.ogc.OGCGeometry.fromText;
-import static io.trino.geospatial.serde.BenchmarkGeometrySerializationData.GEOMETRYCOLLECTION;
-import static io.trino.geospatial.serde.BenchmarkGeometrySerializationData.LINESTRING;
-import static io.trino.geospatial.serde.BenchmarkGeometrySerializationData.MULTILINESTRING;
-import static io.trino.geospatial.serde.BenchmarkGeometrySerializationData.MULTIPOINT;
-import static io.trino.geospatial.serde.BenchmarkGeometrySerializationData.MULTIPOLYGON;
-import static io.trino.geospatial.serde.BenchmarkGeometrySerializationData.POINT;
-import static io.trino.geospatial.serde.BenchmarkGeometrySerializationData.POLYGON;
-import static io.trino.geospatial.serde.BenchmarkGeometrySerializationData.readResource;
-import static io.trino.geospatial.serde.GeometrySerde.deserialize;
-import static io.trino.geospatial.serde.GeometrySerde.deserializeEnvelope;
-import static io.trino.geospatial.serde.GeometrySerde.serialize;
-import static io.trino.jmh.Benchmarks.benchmark;
-import static java.util.concurrent.TimeUnit.SECONDS;
-import static org.openjdk.jmh.annotations.Mode.Throughput;
-
-@State(Scope.Thread)
-@Fork(2)
-@Warmup(iterations = 3, time = 3, timeUnit = SECONDS)
-@Measurement(iterations = 5, time = 4, timeUnit = SECONDS)
-@OutputTimeUnit(SECONDS)
-@BenchmarkMode(Throughput)
-public class BenchmarkGeometrySerde
-{
- // POINT
- @Benchmark
- public Object serializePoint(BenchmarkData data)
- {
- return serialize(data.point);
- }
-
- @Benchmark
- public Object deserializePoint(BenchmarkData data)
- {
- return deserialize(data.pointSerialized);
- }
-
- @Benchmark
- public Object deserializePointEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.pointSerialized);
- }
-
- // MULTI POINT
- @Benchmark
- public Object serializeSimpleMultipoint(BenchmarkData data)
- {
- return serialize(data.simpleMultipoint);
- }
-
- @Benchmark
- public Object deserializeSimpleMultipoint(BenchmarkData data)
- {
- return deserialize(data.simpleMultipointSerialized);
- }
-
- @Benchmark
- public Object deserializeSimpleMultipointEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.simpleMultipointSerialized);
- }
-
- @Benchmark
- public Object serializeComplexMultipoint(BenchmarkData data)
- {
- return serialize(data.complexMultipoint);
- }
-
- @Benchmark
- public Object deserializeComplexMultipoint(BenchmarkData data)
- {
- return deserialize(data.complexMultipointSerialized);
- }
-
- @Benchmark
- public Object deserializeComplexMultipointEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.complexMultipointSerialized);
- }
-
- // LINE STRING
- @Benchmark
- public Object serializeSimpleLineString(BenchmarkData data)
- {
- return serialize(data.simpleLineString);
- }
-
- @Benchmark
- public Object deserializeSimpleLineString(BenchmarkData data)
- {
- return deserialize(data.simpleLineStringSerialized);
- }
-
- @Benchmark
- public Object deserializeSimpleLineStringEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.simpleLineStringSerialized);
- }
-
- @Benchmark
- public Object serializeComplexLineString(BenchmarkData data)
- {
- return serialize(data.complexLineString);
- }
-
- @Benchmark
- public Object deserializeComplexLineString(BenchmarkData data)
- {
- return deserialize(data.complexLineStringSerialized);
- }
-
- @Benchmark
- public Object deserializeComplexLineStringEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.complexLineStringSerialized);
- }
-
- // MULTILINE STRING
- @Benchmark
- public Object serializeSimpleMultiLineString(BenchmarkData data)
- {
- return serialize(data.simpleMultiLineString);
- }
-
- @Benchmark
- public Object deserializeSimpleMultiLineString(BenchmarkData data)
- {
- return deserialize(data.simpleMultiLineStringSerialized);
- }
-
- @Benchmark
- public Object deserializeSimpleMultiLineStringEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.simpleMultiLineStringSerialized);
- }
-
- @Benchmark
- public Object serializeComplexMultiLineString(BenchmarkData data)
- {
- return serialize(data.complexMultiLineString);
- }
-
- @Benchmark
- public Object deserializeComplexMultiLineString(BenchmarkData data)
- {
- return deserialize(data.complexMultiLineStringSerialized);
- }
-
- @Benchmark
- public Object deserializeComplexMultiLineStringEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.complexMultiLineStringSerialized);
- }
-
- // POLYGON
- @Benchmark
- public Object serializeSimplePolygon(BenchmarkData data)
- {
- return serialize(data.simplePolygon);
- }
-
- @Benchmark
- public Object deserializeSimplePolygon(BenchmarkData data)
- {
- return deserialize(data.simplePolygonSerialized);
- }
-
- @Benchmark
- public Object deserializeSimplePolygonEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.simplePolygonSerialized);
- }
-
- @Benchmark
- public Object serializeComplexPolygon(BenchmarkData data)
- {
- return serialize(data.complexPolygon);
- }
-
- @Benchmark
- public Object deserializeComplexPolygon(BenchmarkData data)
- {
- return deserialize(data.complexPolygonSerialized);
- }
-
- @Benchmark
- public Object deserializeComplexPolygonEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.complexPolygonSerialized);
- }
-
- // MULTI POLYGON
- @Benchmark
- public Object serializeSimpleMultiPolygon(BenchmarkData data)
- {
- return serialize(data.simpleMultiPolygon);
- }
-
- @Benchmark
- public Object deserializeSimpleMultiPolygon(BenchmarkData data)
- {
- return deserialize(data.simpleMultiPolygonSerialized);
- }
-
- @Benchmark
- public Object deserializeSimpleMultiPolygonEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.simpleMultiPolygonSerialized);
- }
-
- @Benchmark
- public Object serializeComplexMultiPolygon(BenchmarkData data)
- {
- return serialize(data.complexMultiPolygon);
- }
-
- @Benchmark
- public Object deserializeComplexMultiPolygon(BenchmarkData data)
- {
- return deserialize(data.complexMultiPolygonSerialized);
- }
-
- @Benchmark
- public Object deserializeComplexMultiPolygonEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.complexMultiPolygonSerialized);
- }
-
- // GEOMETRY COLLECTION
- @Benchmark
- public Object serializeSimpleGeometryCollection(BenchmarkData data)
- {
- return serialize(data.simpleGeometryCollection);
- }
-
- @Benchmark
- public Object deserializeSimpleGeometryCollection(BenchmarkData data)
- {
- return deserialize(data.simpleGeometryCollectionSerialized);
- }
-
- @Benchmark
- public Object deserializeSimpleGeometryCollectionEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.simpleGeometryCollectionSerialized);
- }
-
- @Benchmark
- public Object serializeComplexGeometryCollection(BenchmarkData data)
- {
- return serialize(data.complexGeometryCollection);
- }
-
- @Benchmark
- public Object deserializeComplexGeometryCollection(BenchmarkData data)
- {
- return deserialize(data.complexGeometryCollectionSerialized);
- }
-
- @Benchmark
- public Object deserializeComplexGeometryCollectionEnvelope(BenchmarkData data)
- {
- return deserializeEnvelope(data.complexGeometryCollectionSerialized);
- }
-
- @State(Scope.Thread)
- public static class BenchmarkData
- {
- // POINT
- private OGCGeometry point;
- private Slice pointSerialized;
-
- // MULTI POINT
- private OGCGeometry simpleMultipoint;
- private Slice simpleMultipointSerialized;
- private OGCGeometry complexMultipoint;
- private Slice complexMultipointSerialized;
-
- // LINE STRING
- private OGCGeometry simpleLineString;
- private Slice simpleLineStringSerialized;
- private OGCGeometry complexLineString;
- private Slice complexLineStringSerialized;
-
- // MULTILINE STRING
- private OGCGeometry simpleMultiLineString;
- private Slice simpleMultiLineStringSerialized;
- private OGCGeometry complexMultiLineString;
- private Slice complexMultiLineStringSerialized;
-
- // POLYGON
- private OGCGeometry simplePolygon;
- private Slice simplePolygonSerialized;
- private OGCGeometry complexPolygon;
- private Slice complexPolygonSerialized;
-
- // MULTI POLYGON
- private OGCGeometry simpleMultiPolygon;
- private Slice simpleMultiPolygonSerialized;
- private OGCGeometry complexMultiPolygon;
- private Slice complexMultiPolygonSerialized;
-
- // COLLECTION
- private OGCGeometry simpleGeometryCollection;
- private Slice simpleGeometryCollectionSerialized;
- private OGCGeometry complexGeometryCollection;
- private Slice complexGeometryCollectionSerialized;
-
- @Setup
- public void setup()
- {
- point = fromText(POINT);
- pointSerialized = serialize(point);
-
- simpleMultipoint = fromText(MULTIPOINT);
- simpleMultipointSerialized = serialize(simpleMultipoint);
- complexMultipoint = fromText(readResource("complex-multipoint.txt"));
- complexMultipointSerialized = serialize(complexMultipoint);
-
- simpleLineString = fromText(LINESTRING);
- simpleLineStringSerialized = serialize(simpleLineString);
- complexLineString = fromText(readResource("complex-linestring.txt"));
- complexLineStringSerialized = serialize(complexLineString);
-
- simpleMultiLineString = fromText(MULTILINESTRING);
- simpleMultiLineStringSerialized = serialize(simpleMultiLineString);
- complexMultiLineString = fromText(readResource("complex-multilinestring.txt"));
- complexMultiLineStringSerialized = serialize(complexMultiLineString);
-
- simplePolygon = fromText(POLYGON);
- simplePolygonSerialized = serialize(simplePolygon);
- complexPolygon = fromText(readResource("complex-polygon.txt"));
- complexPolygonSerialized = serialize(complexPolygon);
-
- simpleMultiPolygon = fromText(MULTIPOLYGON);
- simpleMultiPolygonSerialized = serialize(simpleMultiPolygon);
- complexMultiPolygon = fromText(readResource("complex-multipolygon.txt"));
- complexMultiPolygonSerialized = serialize(complexMultiPolygon);
-
- simpleGeometryCollection = fromText(GEOMETRYCOLLECTION);
- simpleGeometryCollectionSerialized = serialize(simpleGeometryCollection);
- complexGeometryCollection = fromText("GEOMETRYCOLLECTION (" + Joiner.on(", ").join(
- readResource("complex-multipoint.txt"),
- readResource("complex-linestring.txt"),
- readResource("complex-multilinestring.txt"),
- readResource("complex-polygon.txt"),
- readResource("complex-multipolygon.txt")) + ")");
- complexGeometryCollectionSerialized = serialize(complexGeometryCollection);
- }
- }
-
- static void main()
- throws RunnerException
- {
- benchmark(BenchmarkGeometrySerde.class).run();
- }
-}
diff --git a/lib/trino-geospatial-toolkit/src/test/java/io/trino/geospatial/serde/TestGeometrySerialization.java b/lib/trino-geospatial-toolkit/src/test/java/io/trino/geospatial/serde/TestGeometrySerialization.java
index 56e69a2f6504..ae2383c45d8c 100644
--- a/lib/trino-geospatial-toolkit/src/test/java/io/trino/geospatial/serde/TestGeometrySerialization.java
+++ b/lib/trino-geospatial-toolkit/src/test/java/io/trino/geospatial/serde/TestGeometrySerialization.java
@@ -13,28 +13,29 @@
*/
package io.trino.geospatial.serde;
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import com.google.common.base.VerifyException;
import io.airlift.slice.Slice;
+import io.airlift.slice.Slices;
+import io.trino.geospatial.GeometryType;
import org.junit.jupiter.api.Test;
+import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;
-import static com.esri.core.geometry.ogc.OGCGeometry.createFromEsriGeometry;
-import static io.trino.geospatial.serde.GeometrySerde.deserialize;
-import static io.trino.geospatial.serde.GeometrySerde.deserializeEnvelope;
-import static io.trino.geospatial.serde.GeometrySerde.deserializeType;
-import static io.trino.geospatial.serde.GeometrySerde.serialize;
-import static io.trino.geospatial.serde.GeometrySerializationType.ENVELOPE;
-import static io.trino.geospatial.serde.GeometrySerializationType.GEOMETRY_COLLECTION;
-import static io.trino.geospatial.serde.GeometrySerializationType.LINE_STRING;
-import static io.trino.geospatial.serde.GeometrySerializationType.MULTI_LINE_STRING;
-import static io.trino.geospatial.serde.GeometrySerializationType.MULTI_POINT;
-import static io.trino.geospatial.serde.GeometrySerializationType.MULTI_POLYGON;
-import static io.trino.geospatial.serde.GeometrySerializationType.POINT;
-import static io.trino.geospatial.serde.GeometrySerializationType.POLYGON;
+import static io.trino.geospatial.GeometryType.GEOMETRY_COLLECTION;
+import static io.trino.geospatial.GeometryType.LINE_STRING;
+import static io.trino.geospatial.GeometryType.MULTI_LINE_STRING;
+import static io.trino.geospatial.GeometryType.MULTI_POINT;
+import static io.trino.geospatial.GeometryType.MULTI_POLYGON;
+import static io.trino.geospatial.GeometryType.POINT;
+import static io.trino.geospatial.GeometryType.POLYGON;
+import static io.trino.geospatial.serde.JtsGeometrySerde.deserialize;
+import static io.trino.geospatial.serde.JtsGeometrySerde.deserializeEnvelope;
+import static io.trino.geospatial.serde.JtsGeometrySerde.deserializeType;
+import static io.trino.geospatial.serde.JtsGeometrySerde.serialize;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
public class TestGeometrySerialization
{
@@ -51,9 +52,9 @@ public void testPoint()
@Test
public void testMultiPoint()
{
- testSerialization("MULTIPOINT (0 0)");
- testSerialization("MULTIPOINT (0 0, 0 0)");
- testSerialization("MULTIPOINT (0 0, 1 1, 2 3)");
+ testSerialization("MULTIPOINT ((0 0))");
+ testSerialization("MULTIPOINT ((0 0), (0 0))");
+ testSerialization("MULTIPOINT ((0 0), (1 1), (2 3))");
testSerialization("MULTIPOINT EMPTY");
}
@@ -100,8 +101,8 @@ public void testMultiPolygon()
{
testSerialization("MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)))");
testSerialization("MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)), ((30 20, 45 40, 10 40, 30 20)))");
- testSerialization("MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)), ((15 5, 40 10, 10 20, 15 5))), ((0 0, 0 1, 1 1, 1 0.5, 1 0, 0 0), (0.25 0.25, 0.25 0.75, 0.75 0.75, 0.75 0.25))");
- testSerialization("MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)), ((0 0, 0 1, 1 1, 1 0, 0 0), (0.75 0.25, 0.75 0.75, 0.25 0.75, 0.25 0.25, 0.75 0.25)), ((15 5, 40 10, 10 20, 5 10, 15 5))), ((0 0, 0 1, 1 1, 1 0), (0.25 0.25, 0.25 0.75, 0.75 0.75, 0.75 0.25))");
+ testSerialization("MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)), ((15 5, 40 10, 10 20, 15 5)))");
+ testSerialization("MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)), ((0 0, 0 1, 1 1, 1 0, 0 0), (0.75 0.25, 0.75 0.75, 0.25 0.75, 0.25 0.25, 0.75 0.25)), ((15 5, 40 10, 10 20, 5 10, 15 5)))");
testSerialization("MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)), ((0 0, 0 1, 1 1, 1 0, 0 0), (0.25 0.25, 0.25 0.75, 0.75 0.75, 0.75 0.25, 0.25 0.25)))");
testSerialization("MULTIPOLYGON (" +
"((30 20, 45 40, 10 40, 30 20)), " +
@@ -132,33 +133,49 @@ public void testGeometryCollection()
testSerialization("GEOMETRYCOLLECTION (MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20))), GEOMETRYCOLLECTION (MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)))))");
}
+ @Test
+ public void testPointSridRoundTrip()
+ {
+ testSerializationWithSrid("POINT (1 2)", 4326);
+ }
+
+ @Test
+ public void testGeometryCollectionSridRoundTrip()
+ {
+ testSerializationWithSrid("GEOMETRYCOLLECTION (POINT (1 2), LINESTRING (0 0, 1 2, 3 4))", 3857);
+ }
+
@Test
public void testEnvelope()
{
- testEnvelopeSerialization(new Envelope(0, 0, 1, 1));
- testEnvelopeSerialization(new Envelope(1, 2, 3, 4));
- testEnvelopeSerialization(new Envelope(10101, -2.05, -3e5, 0));
+ testEnvelopeSerialization(new Envelope(0, 1, 0, 1));
+ testEnvelopeSerialization(new Envelope(1, 3, 2, 4));
+ testEnvelopeSerialization(new Envelope(-3e5, 10101, -2.05, 0));
}
private void testEnvelopeSerialization(Envelope envelope)
{
- assertThat(deserialize(serialize(envelope))).isEqualTo(createFromEsriGeometry(envelope, null));
- assertThat(deserializeEnvelope(serialize(envelope))).isEqualTo(envelope);
- assertThat(JtsGeometrySerde.serialize(JtsGeometrySerde.deserialize(serialize(envelope)))).isEqualTo(serialize(createFromEsriGeometry(envelope, null)));
+ Slice serialized = serialize(envelope);
+ Geometry deserialized = deserialize(serialized);
+
+ assertThat(deserialized.getGeometryType()).isEqualTo("Polygon");
+ assertThat(deserialized.getEnvelopeInternal()).isEqualTo(envelope);
+ assertThat(deserializeType(serialized)).isEqualTo(POLYGON);
+ assertThat(deserializeEnvelope(serialized)).isEqualTo(envelope);
}
@Test
public void testDeserializeEnvelope()
{
- assertDeserializeEnvelope("MULTIPOINT (20 20, 25 25)", new Envelope(20, 20, 25, 25));
- assertDeserializeEnvelope("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", new Envelope(1, 1, 5, 4));
- assertDeserializeEnvelope("POLYGON ((0 0, 0 4, 4 0))", new Envelope(0, 0, 4, 4));
- assertDeserializeEnvelope("MULTIPOLYGON (((0 0 , 0 2, 2 2, 2 0)), ((2 2, 2 4, 4 4, 4 2)))", new Envelope(0, 0, 4, 4));
- assertDeserializeEnvelope("GEOMETRYCOLLECTION (POINT (3 7), LINESTRING (4 6, 7 10))", new Envelope(3, 6, 7, 10));
+ assertDeserializeEnvelope("MULTIPOINT ((20 20), (25 25))", new Envelope(20, 25, 20, 25));
+ assertDeserializeEnvelope("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", new Envelope(1, 5, 1, 4));
+ assertDeserializeEnvelope("POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))", new Envelope(0, 4, 0, 4));
+ assertDeserializeEnvelope("MULTIPOLYGON (((0 0, 0 2, 2 2, 2 0, 0 0)), ((2 2, 2 4, 4 4, 4 2, 2 2)))", new Envelope(0, 4, 0, 4));
+ assertDeserializeEnvelope("GEOMETRYCOLLECTION (POINT (3 7), LINESTRING (4 6, 7 10))", new Envelope(3, 7, 6, 10));
assertDeserializeEnvelope("POLYGON EMPTY", new Envelope());
- assertDeserializeEnvelope("POINT (1 2)", new Envelope(1, 2, 1, 2));
+ assertDeserializeEnvelope("POINT (1 2)", new Envelope(1, 1, 2, 2));
assertDeserializeEnvelope("POINT EMPTY", new Envelope());
- assertDeserializeEnvelope("GEOMETRYCOLLECTION (GEOMETRYCOLLECTION (POINT (2 7), LINESTRING (4 6, 7 10)), POINT (3 7), LINESTRING (4 6, 7 10))", new Envelope(2, 6, 7, 10));
+ assertDeserializeEnvelope("GEOMETRYCOLLECTION (GEOMETRYCOLLECTION (POINT (2 7), LINESTRING (4 6, 7 10)), POINT (3 7), LINESTRING (4 6, 7 10))", new Envelope(2, 7, 6, 10));
}
@Test
@@ -166,54 +183,54 @@ public void testDeserializeType()
{
assertDeserializeType("POINT (1 2)", POINT);
assertDeserializeType("POINT EMPTY", POINT);
- assertDeserializeType("MULTIPOINT (20 20, 25 25)", MULTI_POINT);
+ assertDeserializeType("MULTIPOINT ((20 20), (25 25))", MULTI_POINT);
assertDeserializeType("MULTIPOINT EMPTY", MULTI_POINT);
- assertDeserializeType("LINESTRING (1 1, 5 1, 6 2))", LINE_STRING);
+ assertDeserializeType("LINESTRING (1 1, 5 1, 6 2)", LINE_STRING);
assertDeserializeType("LINESTRING EMPTY", LINE_STRING);
assertDeserializeType("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", MULTI_LINE_STRING);
assertDeserializeType("MULTILINESTRING EMPTY", MULTI_LINE_STRING);
- assertDeserializeType("POLYGON ((0 0, 0 4, 4 0))", POLYGON);
+ assertDeserializeType("POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))", POLYGON);
assertDeserializeType("POLYGON EMPTY", POLYGON);
- assertDeserializeType("MULTIPOLYGON (((0 0 , 0 2, 2 2, 2 0)), ((2 2, 2 4, 4 4, 4 2)))", MULTI_POLYGON);
+ assertDeserializeType("MULTIPOLYGON (((0 0, 0 2, 2 2, 2 0, 0 0)), ((2 2, 2 4, 4 4, 4 2, 2 2)))", MULTI_POLYGON);
assertDeserializeType("MULTIPOLYGON EMPTY", MULTI_POLYGON);
assertDeserializeType("GEOMETRYCOLLECTION (POINT (3 7), LINESTRING (4 6, 7 10))", GEOMETRY_COLLECTION);
assertDeserializeType("GEOMETRYCOLLECTION EMPTY", GEOMETRY_COLLECTION);
- assertThat(deserializeType(serialize(new Envelope(1, 2, 3, 4)))).isEqualTo(ENVELOPE);
+ assertThat(deserializeType(serialize(new Envelope(1, 3, 2, 4)))).isEqualTo(POLYGON);
}
- private static void testSerialization(String wkt)
+ @Test
+ public void testDeserializeTypeRejectsInvalidByteOrder()
{
- testEsriSerialization(wkt);
- testJtsSerialization(wkt);
+ assertThatThrownBy(() -> deserializeType(Slices.wrappedBuffer(new byte[] {2, 1, 0, 0, 0})))
+ .isInstanceOf(VerifyException.class)
+ .hasMessage("invalid WKB endianness: 2");
}
- private static void testEsriSerialization(String wkt)
+ private static void testSerialization(String wkt)
{
- OGCGeometry expected = OGCGeometry.fromText(wkt);
- OGCGeometry actual = deserialize(serialize(expected));
- assertGeometryEquals(actual, expected);
+ Geometry geometry = createJtsGeometry(wkt);
+ Slice serialized = serialize(geometry);
+ Geometry deserialized = deserialize(serialized);
+
+ assertThat(deserialized.norm()).isEqualTo(geometry.norm());
}
- private static void testJtsSerialization(String wkt)
+ private static void testSerializationWithSrid(String wkt, int srid)
{
- Geometry jtsGeometry = createJtsGeometry(wkt);
- OGCGeometry esriGeometry = OGCGeometry.fromText(wkt);
-
- Slice jtsSerialized = JtsGeometrySerde.serialize(jtsGeometry);
- Slice esriSerialized = GeometrySerde.serialize(esriGeometry);
- assertThat(jtsSerialized).isEqualTo(esriSerialized);
+ Geometry geometry = createJtsGeometry(wkt);
+ geometry.setSRID(srid);
- Geometry jtsDeserialized = JtsGeometrySerde.deserialize(jtsSerialized);
- assertGeometryEquals(jtsDeserialized, jtsGeometry);
+ Slice serialized = serialize(geometry);
+ Geometry deserialized = deserialize(serialized);
- OGCGeometry esriDeserialized = GeometrySerde.deserialize(esriSerialized);
- assertGeometryEquals(esriDeserialized, esriGeometry);
+ assertThat(deserialized.norm()).isEqualTo(geometry.norm());
+ assertThat(deserialized.getSRID()).isEqualTo(srid);
}
private static Slice geometryFromText(String wkt)
{
- return serialize(OGCGeometry.fromText(wkt));
+ return serialize(createJtsGeometry(wkt));
}
private static Geometry createJtsGeometry(String wkt)
@@ -226,36 +243,13 @@ private static Geometry createJtsGeometry(String wkt)
}
}
- private static void assertGeometryEquals(Geometry actual, Geometry expected)
- {
- assertThat(actual.norm()).isEqualTo(expected.norm());
- }
-
- private static void assertDeserializeEnvelope(String geometry, Envelope expectedEnvelope)
+ private static void assertDeserializeEnvelope(String wkt, Envelope expectedEnvelope)
{
- assertThat(deserializeEnvelope(geometryFromText(geometry))).isEqualTo(expectedEnvelope);
+ assertThat(deserializeEnvelope(geometryFromText(wkt))).isEqualTo(expectedEnvelope);
}
- private static void assertDeserializeType(String wkt, GeometrySerializationType expectedType)
+ private static void assertDeserializeType(String wkt, GeometryType expectedType)
{
assertThat(deserializeType(geometryFromText(wkt))).isEqualTo(expectedType);
}
-
- private static void assertGeometryEquals(OGCGeometry actual, OGCGeometry expected)
- {
- actual.setSpatialReference(null);
- expected.setSpatialReference(null);
- ensureEnvelopeLoaded(actual);
- ensureEnvelopeLoaded(expected);
- assertThat(actual).isEqualTo(expected);
- }
-
- /**
- * There is a weird bug in geometry comparison. If a geometry envelope is not loaded it may return
- * false for two empty line strings or multiline strings
- */
- private static void ensureEnvelopeLoaded(OGCGeometry geometry)
- {
- geometry.envelope();
- }
}
diff --git a/lib/trino-hive-formats/pom.xml b/lib/trino-hive-formats/pom.xml
index 903accfee371..9841858b82c1 100644
--- a/lib/trino-hive-formats/pom.xml
+++ b/lib/trino-hive-formats/pom.xml
@@ -18,11 +18,6 @@
-
- com.esri.geometry
- esri-geometry-api
-
-
com.fasterxml.jackson.core
jackson-core
@@ -90,6 +85,11 @@
trino-filesystem
+
+ io.trino
+ trino-geospatial-toolkit
+
+
io.trino
trino-plugin-toolkit
@@ -120,6 +120,11 @@
modernizer-maven-annotations
+
+ org.locationtech.jts
+ jts-core
+
+
org.jetbrains
annotations
diff --git a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/esri/EsriDeserializer.java b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/esri/EsriDeserializer.java
index a97b92ac7571..b0a08d6830bb 100644
--- a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/esri/EsriDeserializer.java
+++ b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/esri/EsriDeserializer.java
@@ -13,15 +13,12 @@
*/
package io.trino.hive.formats.esri;
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.GeometryEngine;
-import com.esri.core.geometry.MapGeometry;
-import com.esri.core.geometry.ogc.OGCGeometry;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.Slices;
+import io.trino.geospatial.serde.JtsGeometrySerde;
import io.trino.hive.formats.line.Column;
import io.trino.plugin.base.type.DecodedTimestamp;
import io.trino.spi.PageBuilder;
@@ -34,12 +31,10 @@
import io.trino.spi.type.Type;
import io.trino.spi.type.VarcharType;
import org.joda.time.DateTimeZone;
+import org.locationtech.jts.geom.Geometry;
import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.lang.invoke.VarHandle;
import java.math.BigDecimal;
-import java.nio.ByteOrder;
import java.sql.Date;
import java.sql.Timestamp;
import java.time.Instant;
@@ -85,7 +80,6 @@
public final class EsriDeserializer
{
- private static final VarHandle INT_HANDLE_BIG_ENDIAN = MethodHandles.byteArrayViewVarHandle(int[].class, ByteOrder.BIG_ENDIAN);
private static final String GEOMETRY_FIELD_NAME = "geometry";
private static final String ATTRIBUTES_FIELD_NAME = "attributes";
private static final DateTimeFormatter DATE_FORMATTER =
@@ -194,37 +188,13 @@ private void parseGeometry(JsonParser parser, PageBuilder pageBuilder)
return;
}
- MapGeometry mapGeometry = GeometryEngine.jsonToGeometry(parser);
- OGCGeometry ogcGeometry = OGCGeometry.createFromEsriGeometry(mapGeometry.getGeometry(), mapGeometry.getSpatialReference());
- Geometry geometry = ogcGeometry.getEsriGeometry();
+ Geometry geometry = EsriJsonParser.parseGeometry(parser);
if (geometry == null) {
throw new IllegalArgumentException("Could not parse geometry");
}
- byte[] shape = GeometryEngine.geometryToEsriShape(geometry);
- if (shape == null) {
- throw new IllegalArgumentException("Could not serialize geometry shape");
- }
-
- byte[] shapeHeader = new byte[4 + 1 + shape.length];
- // write the Spatial Reference System Identifier (a.k.a, the well-known ID)
- INT_HANDLE_BIG_ENDIAN.set(shapeHeader, 0, ogcGeometry.SRID());
- // write the geometry type
- OGCType ogcType = switch (ogcGeometry.geometryType()) {
- case "Point" -> OGCType.ST_POINT;
- case "LineString" -> OGCType.ST_LINESTRING;
- case "Polygon" -> OGCType.ST_POLYGON;
- case "MultiPoint" -> OGCType.ST_MULTIPOINT;
- case "MultiLineString" -> OGCType.ST_MULTILINESTRING;
- case "MultiPolygon" -> OGCType.ST_MULTIPOLYGON;
- case null, default -> OGCType.UNKNOWN;
- };
- shapeHeader[4] = ogcType.getIndex();
- // write the serialized shape
- System.arraycopy(shape, 0, shapeHeader, 5, shape.length);
-
- // write the shape to the page
- VARBINARY.writeSlice(getBlockBuilderForWrite(pageBuilder, geometryColumn), Slices.wrappedBuffer(shapeHeader));
+ // Serialize geometry to EWKB format
+ VARBINARY.writeSlice(getBlockBuilderForWrite(pageBuilder, geometryColumn), JtsGeometrySerde.serialize(geometry));
}
private void parseAttributes(JsonParser parser, PageBuilder pageBuilder)
diff --git a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/esri/EsriJsonParser.java b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/esri/EsriJsonParser.java
new file mode 100644
index 000000000000..228505d930a4
--- /dev/null
+++ b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/esri/EsriJsonParser.java
@@ -0,0 +1,325 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.hive.formats.esri;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.LinearRing;
+import org.locationtech.jts.geom.Polygon;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import static com.fasterxml.jackson.core.JsonToken.END_ARRAY;
+import static com.fasterxml.jackson.core.JsonToken.END_OBJECT;
+import static com.fasterxml.jackson.core.JsonToken.FIELD_NAME;
+import static com.fasterxml.jackson.core.JsonToken.START_ARRAY;
+import static com.fasterxml.jackson.core.JsonToken.VALUE_NUMBER_FLOAT;
+import static com.fasterxml.jackson.core.JsonToken.VALUE_NUMBER_INT;
+
+/**
+ * Parser for ESRI JSON geometry format.
+ *
+ * ESRI JSON format examples:
+ *
+ * - Point: {@code {"x": 10, "y": 20}}
+ * - MultiPoint: {@code {"points": [[x1,y1], [x2,y2], ...]}}
+ * - Polyline: {@code {"paths": [[[x1,y1], [x2,y2], ...], ...]}}
+ * - Polygon: {@code {"rings": [[[x1,y1], [x2,y2], ...], ...]}}
+ *
+ *
+ * @see ESRI Geometry Objects
+ */
+public final class EsriJsonParser
+{
+ private static final GeometryFactory GEOMETRY_FACTORY = new GeometryFactory();
+
+ private EsriJsonParser() {}
+
+ /**
+ * Parses an ESRI JSON geometry object from the parser.
+ * Parser must be positioned at the START_OBJECT token of the geometry.
+ * After parsing, the parser will be positioned at the END_OBJECT token.
+ */
+ public static Geometry parseGeometry(JsonParser parser)
+ throws IOException
+ {
+ if (parser.currentToken() != JsonToken.START_OBJECT) {
+ throw new IOException("Expected START_OBJECT, got " + parser.currentToken());
+ }
+
+ Double x = null;
+ Double y = null;
+ List paths = null;
+ List rings = null;
+ List points = null;
+ int srid = 0;
+
+ while (parser.nextToken() != END_OBJECT) {
+ if (parser.currentToken() != FIELD_NAME) {
+ throw new IOException("Expected field name, got " + parser.currentToken());
+ }
+
+ String fieldName = parser.currentName();
+ parser.nextToken();
+
+ switch (fieldName) {
+ case "x" -> x = parseDouble(parser);
+ case "y" -> y = parseDouble(parser);
+ case "paths" -> paths = parseCoordinateArrays(parser);
+ case "rings" -> rings = parseCoordinateArrays(parser);
+ case "points" -> points = parseCoordinateArray(parser);
+ case "spatialReference" -> srid = parseSpatialReference(parser);
+ default -> skipValue(parser);
+ }
+ }
+
+ Geometry geometry;
+ // Determine geometry type from the fields present
+ if (x != null && y != null && !x.isNaN() && !y.isNaN()) {
+ // Point
+ geometry = GEOMETRY_FACTORY.createPoint(new Coordinate(x, y));
+ }
+ else if (x != null || y != null) {
+ // Empty point
+ geometry = GEOMETRY_FACTORY.createPoint();
+ }
+ else if (points != null) {
+ // MultiPoint
+ if (points.isEmpty()) {
+ geometry = GEOMETRY_FACTORY.createMultiPoint();
+ }
+ else {
+ geometry = GEOMETRY_FACTORY.createMultiPointFromCoords(points.toArray(new Coordinate[0]));
+ }
+ }
+ else if (paths != null) {
+ // Polyline (LineString or MultiLineString)
+ if (paths.isEmpty()) {
+ geometry = GEOMETRY_FACTORY.createMultiLineString();
+ }
+ else if (paths.size() == 1) {
+ geometry = GEOMETRY_FACTORY.createLineString(paths.getFirst());
+ }
+ else {
+ geometry = GEOMETRY_FACTORY.createMultiLineString(
+ paths.stream()
+ .map(GEOMETRY_FACTORY::createLineString)
+ .toArray(LineString[]::new));
+ }
+ }
+ else if (rings != null) {
+ // Polygon (may contain holes)
+ if (rings.isEmpty()) {
+ geometry = GEOMETRY_FACTORY.createPolygon();
+ }
+ else {
+ geometry = createPolygonFromRings(rings);
+ }
+ }
+ else {
+ throw new IOException("Unknown geometry type: no recognized fields found");
+ }
+
+ geometry.setSRID(srid);
+ return geometry;
+ }
+
+ private static Double parseDouble(JsonParser parser)
+ throws IOException
+ {
+ JsonToken token = parser.currentToken();
+ if (token == JsonToken.VALUE_NULL) {
+ return null;
+ }
+ if (token == VALUE_NUMBER_FLOAT ||
+ token == VALUE_NUMBER_INT ||
+ token == JsonToken.VALUE_STRING ||
+ token == JsonToken.VALUE_TRUE ||
+ token == JsonToken.VALUE_FALSE) {
+ return parser.getValueAsDouble();
+ }
+ throw new IOException("Expected number, got " + token);
+ }
+
+ private static int parseSpatialReference(JsonParser parser)
+ throws IOException
+ {
+ if (parser.currentToken() == JsonToken.VALUE_NULL) {
+ return 0;
+ }
+ if (parser.currentToken() != JsonToken.START_OBJECT) {
+ throw new IOException("Expected START_OBJECT or VALUE_NULL for spatialReference, got " + parser.currentToken());
+ }
+
+ Integer wkid = null;
+ Integer latestWkid = null;
+ while (parser.nextToken() != END_OBJECT) {
+ if (parser.currentToken() != FIELD_NAME) {
+ throw new IOException("Expected field name, got " + parser.currentToken());
+ }
+
+ String fieldName = parser.currentName();
+ parser.nextToken();
+
+ switch (fieldName) {
+ case "wkid" -> wkid = parseInteger(parser);
+ case "latestWkid" -> latestWkid = parseInteger(parser);
+ default -> skipValue(parser);
+ }
+ }
+
+ if (latestWkid != null && latestWkid > 0) {
+ return latestWkid;
+ }
+ if (wkid != null && wkid > 0) {
+ return wkid;
+ }
+ return 0;
+ }
+
+ private static Integer parseInteger(JsonParser parser)
+ throws IOException
+ {
+ if (parser.currentToken() == VALUE_NUMBER_INT) {
+ return parser.getIntValue();
+ }
+ skipValue(parser);
+ return null;
+ }
+
+ private static List parseCoordinateArrays(JsonParser parser)
+ throws IOException
+ {
+ if (parser.currentToken() != START_ARRAY) {
+ throw new IOException("Expected START_ARRAY for paths/rings, got " + parser.currentToken());
+ }
+
+ List result = new ArrayList<>();
+ while (parser.nextToken() != END_ARRAY) {
+ if (parser.currentToken() == START_ARRAY) {
+ result.add(parseCoordinateArray(parser).toArray(new Coordinate[0]));
+ }
+ else {
+ throw new IOException("Expected START_ARRAY for path/ring, got " + parser.currentToken());
+ }
+ }
+ return result;
+ }
+
+ private static List parseCoordinateArray(JsonParser parser)
+ throws IOException
+ {
+ if (parser.currentToken() != START_ARRAY) {
+ throw new IOException("Expected START_ARRAY for coordinates, got " + parser.currentToken());
+ }
+
+ List result = new ArrayList<>();
+ while (parser.nextToken() != END_ARRAY) {
+ if (parser.currentToken() == START_ARRAY) {
+ result.add(parseCoordinate(parser));
+ }
+ else {
+ throw new IOException("Expected START_ARRAY for coordinate, got " + parser.currentToken());
+ }
+ }
+ return result;
+ }
+
+ private static Coordinate parseCoordinate(JsonParser parser)
+ throws IOException
+ {
+ // Coordinate is an array: [x, y] or [x, y, z] or [x, y, z, m]
+ if (parser.currentToken() != START_ARRAY) {
+ throw new IOException("Expected START_ARRAY for coordinate, got " + parser.currentToken());
+ }
+
+ // Read x
+ parser.nextToken();
+ double x = parser.getDoubleValue();
+
+ // Read y
+ parser.nextToken();
+ double y = parser.getDoubleValue();
+
+ // Skip any remaining values (z, m) and consume END_ARRAY
+ while (parser.nextToken() != END_ARRAY) {
+ // Skip z and m values
+ }
+
+ return new Coordinate(x, y);
+ }
+
+ private static Geometry createPolygonFromRings(List rings)
+ {
+ if (rings.size() == 1) {
+ LinearRing shell = GEOMETRY_FACTORY.createLinearRing(rings.getFirst());
+ return GEOMETRY_FACTORY.createPolygon(shell);
+ }
+
+ // Treat the first ring's orientation as the shell orientation for this geometry.
+ // A ring with the same orientation starts a new polygon, while the opposite
+ // orientation becomes a hole in the current polygon. This preserves Hive
+ // behavior for mixed and non-standard ring orderings.
+ boolean shellOrientation = isClockwise(rings.getFirst());
+ List polygons = new ArrayList<>();
+ LinearRing currentShell = GEOMETRY_FACTORY.createLinearRing(rings.getFirst());
+ List currentHoles = new ArrayList<>();
+
+ for (Coordinate[] ring : rings.subList(1, rings.size())) {
+ if (isClockwise(ring) == shellOrientation) {
+ polygons.add(GEOMETRY_FACTORY.createPolygon(currentShell, currentHoles.toArray(new LinearRing[0])));
+ currentHoles.clear();
+ currentShell = GEOMETRY_FACTORY.createLinearRing(ring);
+ }
+ else {
+ currentHoles.add(GEOMETRY_FACTORY.createLinearRing(ring));
+ }
+ }
+
+ polygons.add(GEOMETRY_FACTORY.createPolygon(currentShell, currentHoles.toArray(new LinearRing[0])));
+
+ if (polygons.size() == 1) {
+ return polygons.getFirst();
+ }
+ return GEOMETRY_FACTORY.createMultiPolygon(polygons.toArray(new Polygon[0]));
+ }
+
+ /**
+ * Determines if a ring is clockwise using the shoelace formula.
+ * Positive area = counter-clockwise, negative area = clockwise.
+ */
+ private static boolean isClockwise(Coordinate[] ring)
+ {
+ double sum = 0;
+ for (int i = 0; i < ring.length - 1; i++) {
+ sum += (ring[i + 1].x - ring[i].x) * (ring[i + 1].y + ring[i].y);
+ }
+ return sum < 0;
+ }
+
+ private static void skipValue(JsonParser parser)
+ throws IOException
+ {
+ JsonToken token = parser.currentToken();
+ if (token == START_ARRAY || token == JsonToken.START_OBJECT) {
+ parser.skipChildren();
+ }
+ }
+}
diff --git a/lib/trino-hive-formats/src/test/java/io/trino/hive/formats/esri/TestEsriDeserializer.java b/lib/trino-hive-formats/src/test/java/io/trino/hive/formats/esri/TestEsriDeserializer.java
index 7f156341b40a..0ab496cb23df 100644
--- a/lib/trino-hive-formats/src/test/java/io/trino/hive/formats/esri/TestEsriDeserializer.java
+++ b/lib/trino-hive-formats/src/test/java/io/trino/hive/formats/esri/TestEsriDeserializer.java
@@ -13,9 +13,6 @@
*/
package io.trino.hive.formats.esri;
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.GeometryEngine;
-import com.esri.core.geometry.Point;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;
@@ -27,6 +24,10 @@
import io.trino.spi.type.CharType;
import io.trino.spi.type.DecimalType;
import org.junit.jupiter.api.Test;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKBReader;
+import org.locationtech.jts.io.WKTReader;
import java.io.IOException;
import java.util.List;
@@ -104,7 +105,7 @@ public void testDeserializeSimpleFeature()
assertThat(DOUBLE.getDouble(page.getBlock(3), 0)).isEqualTo(123.45);
assertThat(DATE.getLong(page.getBlock(4), 0)).isEqualTo(20150);
assertThat(TIMESTAMP_MILLIS.getLong(page.getBlock(5), 0)).isEqualTo(1741034025839000L);
- assertGeometry(page, new Point(10, 20));
+ assertGeometry(page, "POINT (10 20)");
assertThat(INTEGER.getLong(page.getBlock(7), 0)).isEqualTo(42);
DecimalType decimalType = DecimalType.createDecimalType(10, 2);
@@ -502,7 +503,79 @@ public void testMissingAttributes()
""";
Page page = parse(json);
- assertGeometry(page, new Point(10, 20));
+ assertGeometry(page, "POINT (10 20)");
+ }
+
+ @Test
+ public void testDeserializePointWithCoercedStringCoordinates()
+ throws IOException
+ {
+ String json =
+ """
+ {
+ "geometry": {
+ "x": "not-a-number",
+ "y": 2
+ }
+ }
+ """;
+
+ Page page = parse(json);
+ assertGeometry(page, "POINT (0 2)");
+ }
+
+ @Test
+ public void testDeserializePointWithBooleanCoordinates()
+ throws IOException
+ {
+ String json =
+ """
+ {
+ "geometry": {
+ "x": true,
+ "y": false
+ }
+ }
+ """;
+
+ Page page = parse(json);
+ assertGeometry(page, "POINT (1 0)");
+ }
+
+ @Test
+ public void testDeserializePointWithNaNCoordinate()
+ throws IOException
+ {
+ String json =
+ """
+ {
+ "geometry": {
+ "x": "NaN",
+ "y": 2
+ }
+ }
+ """;
+
+ Page page = parse(json);
+ assertGeometry(page, "POINT EMPTY");
+ }
+
+ @Test
+ public void testDeserializePointWithArrayCoordinateFails()
+ {
+ String json =
+ """
+ {
+ "geometry": {
+ "x": [],
+ "y": 2
+ }
+ }
+ """;
+
+ assertThatThrownBy(() -> parse(json))
+ .isInstanceOf(IOException.class)
+ .hasMessage("Expected number, got START_ARRAY");
}
@Test
@@ -524,7 +597,111 @@ public void testDuplicateGeometry()
""";
Page page = parse(json);
- assertGeometry(page, new Point(10, 20));
+ assertGeometry(page, "POINT (10 20)");
+ }
+
+ @Test
+ public void testGeometrySpatialReferenceWkid()
+ throws IOException
+ {
+ String json =
+ """
+ {
+ "geometry": {
+ "x": 10,
+ "y": 20,
+ "spatialReference": {
+ "wkid": 4326
+ }
+ }
+ }
+ """;
+
+ Page page = parse(json);
+ assertGeometry(page, "POINT (10 20)", 4326);
+ }
+
+ @Test
+ public void testGeometrySpatialReferencePrefersLatestWkid()
+ throws IOException
+ {
+ String json =
+ """
+ {
+ "geometry": {
+ "x": 10,
+ "y": 20,
+ "spatialReference": {
+ "wkid": 102100,
+ "latestWkid": 3857
+ }
+ }
+ }
+ """;
+
+ Page page = parse(json);
+ assertGeometry(page, "POINT (10 20)", 3857);
+ }
+
+ @Test
+ public void testDeserializePolygonWithCounterClockwiseRings()
+ throws IOException
+ {
+ String json =
+ """
+ {
+ "geometry": {
+ "rings": [
+ [[0, 0], [10, 0], [10, 10], [0, 10], [0, 0]],
+ [[20, 0], [30, 0], [30, 10], [20, 10], [20, 0]]
+ ]
+ }
+ }
+ """;
+
+ Page page = parse(json);
+ assertGeometry(page, "MULTIPOLYGON (((0 0, 10 0, 10 10, 0 10, 0 0)), ((20 0, 30 0, 30 10, 20 10, 20 0)))");
+ }
+
+ @Test
+ public void testDeserializePolygonWithLeadingHoleOrientationRing()
+ throws IOException
+ {
+ String json =
+ """
+ {
+ "geometry": {
+ "rings": [
+ [[2, 2], [8, 2], [8, 8], [2, 8], [2, 2]],
+ [[0, 0], [0, 10], [10, 10], [10, 0], [0, 0]]
+ ]
+ }
+ }
+ """;
+
+ Page page = parse(json);
+ assertGeometry(page, "POLYGON ((2 2, 8 2, 8 8, 2 8, 2 2), (0 0, 0 10, 10 10, 10 0, 0 0))");
+ }
+
+ @Test
+ public void testDeserializePolygonWithMixedRingGroups()
+ throws IOException
+ {
+ String json =
+ """
+ {
+ "geometry": {
+ "rings": [
+ [[2, 2], [8, 2], [8, 8], [2, 8], [2, 2]],
+ [[20, 20], [30, 20], [30, 30], [20, 30], [20, 20]],
+ [[0, 0], [0, 10], [10, 10], [10, 0], [0, 0]]
+ ]
+ }
+ }
+ """;
+
+ Page page = parse(json);
+ assertGeometry(page, "MULTIPOLYGON (((2 2, 8 2, 8 8, 2 8, 2 2)), ((20 20, 30 20, 30 30, 20 30, 20 20), (0 0, 0 10, 10 10, 10 0, 0 0)))");
}
@Test
@@ -572,7 +749,7 @@ public void testNullAttributes()
""";
Page page = parse(json);
- assertGeometry(page, new Point(5, 7));
+ assertGeometry(page, "POINT (5 7)");
}
@Test
@@ -643,31 +820,29 @@ private static Page parse(String json, List columns)
return page;
}
- private static void assertGeometry(Page page, Geometry expected)
+ private static void assertGeometry(Page page, String expectedWkt)
{
- if (expected == null) {
+ assertGeometry(page, expectedWkt, 0);
+ }
+
+ private static void assertGeometry(Page page, String expectedWkt, int expectedSrid)
+ {
+ if (expectedWkt == null) {
assertThat(page.getBlock(6).isNull(0)).isTrue();
return;
}
assertThat(page.getBlock(6).isNull(0)).isFalse();
- byte[] actual = VARBINARY.getSlice(page.getBlock(6), 0).getBytes();
-
- byte[] expectedShape = GeometryEngine.geometryToEsriShape(expected);
- byte[] expectedBytes = new byte[4 + 1 + expectedShape.length];
-
- OGCType ogcType = switch (expected.getType()) {
- case Point -> OGCType.ST_POINT;
- case Line -> OGCType.ST_LINESTRING;
- case Polygon -> OGCType.ST_POLYGON;
- case MultiPoint -> OGCType.ST_MULTIPOINT;
- case Polyline -> OGCType.ST_MULTILINESTRING;
- default -> OGCType.UNKNOWN;
- };
- expectedBytes[4] = ogcType.getIndex();
- System.arraycopy(expectedShape, 0, expectedBytes, 5, expectedShape.length);
-
- assertThat(actual).isEqualTo(expectedBytes);
+ try {
+ byte[] actualWkb = VARBINARY.getSlice(page.getBlock(6), 0).getBytes();
+ Geometry actualGeometry = new WKBReader().read(actualWkb);
+ Geometry expectedGeometry = new WKTReader().read(expectedWkt);
+ assertThat(actualGeometry.equalsExact(expectedGeometry)).isTrue();
+ assertThat(actualGeometry.getSRID()).isEqualTo(expectedSrid);
+ }
+ catch (ParseException e) {
+ throw new RuntimeException(e);
+ }
}
}
diff --git a/plugin/trino-geospatial/pom.xml b/plugin/trino-geospatial/pom.xml
index df6734e45c91..b694cb272bcd 100644
--- a/plugin/trino-geospatial/pom.xml
+++ b/plugin/trino-geospatial/pom.xml
@@ -18,11 +18,6 @@
-
- com.esri.geometry
- esri-geometry-api
-
-
com.google.errorprone
error_prone_annotations
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/AbstractGeometryType.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/AbstractGeometryType.java
new file mode 100644
index 000000000000..890a4471a22a
--- /dev/null
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/AbstractGeometryType.java
@@ -0,0 +1,459 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.geospatial;
+
+import io.airlift.slice.Slice;
+import io.airlift.slice.XxHash64;
+import io.trino.geospatial.serde.JtsGeometrySerde;
+import io.trino.spi.block.Block;
+import io.trino.spi.block.BlockBuilder;
+import io.trino.spi.block.VariableWidthBlock;
+import io.trino.spi.block.VariableWidthBlockBuilder;
+import io.trino.spi.function.BlockIndex;
+import io.trino.spi.function.BlockPosition;
+import io.trino.spi.function.FlatFixed;
+import io.trino.spi.function.FlatFixedOffset;
+import io.trino.spi.function.FlatVariableOffset;
+import io.trino.spi.function.FlatVariableWidth;
+import io.trino.spi.function.ScalarOperator;
+import io.trino.spi.type.AbstractVariableWidthType;
+import io.trino.spi.type.TypeOperatorDeclaration;
+import io.trino.spi.type.TypeOperators;
+import io.trino.spi.type.TypeSignature;
+import org.locationtech.jts.geom.Geometry;
+
+import java.lang.invoke.MethodHandles;
+import java.lang.invoke.VarHandle;
+import java.nio.ByteOrder;
+import java.util.Arrays;
+
+import static io.airlift.slice.Slices.wrappedBuffer;
+import static io.trino.spi.function.OperatorType.EQUAL;
+import static io.trino.spi.function.OperatorType.READ_VALUE;
+import static io.trino.spi.function.OperatorType.XX_HASH_64;
+import static io.trino.spi.type.TypeOperatorDeclaration.extractOperatorDeclaration;
+import static java.lang.invoke.MethodHandles.lookup;
+
+/**
+ * Base class for geometry types (GeometryType and SphericalGeographyType).
+ * Uses JTS Geometry as the stack type while storing EWKB bytes in blocks.
+ */
+public abstract class AbstractGeometryType
+ extends AbstractVariableWidthType
+{
+ // Short strings are encoded with a negative length, so we have to encode the length in big-endian format
+ private static final VarHandle INT_BE_HANDLE = MethodHandles.byteArrayViewVarHandle(int[].class, ByteOrder.BIG_ENDIAN);
+ private static final int MAX_SHORT_FLAT_LENGTH = 3;
+
+ private static final TypeOperatorDeclaration TYPE_OPERATOR_DECLARATION = extractOperatorDeclaration(GeometryTypeOperators.class, lookup(), Geometry.class);
+
+ protected AbstractGeometryType(TypeSignature signature)
+ {
+ super(signature, Geometry.class);
+ }
+
+ @Override
+ public TypeOperatorDeclaration getTypeOperatorDeclaration(TypeOperators typeOperators)
+ {
+ return TYPE_OPERATOR_DECLARATION;
+ }
+
+ @Override
+ public boolean isComparable()
+ {
+ return true;
+ }
+
+ // Escape hatch for direct EWKB access (used by connectors, optimized functions)
+ @Override
+ public Slice getSlice(Block block, int position)
+ {
+ VariableWidthBlock valueBlock = (VariableWidthBlock) block.getUnderlyingValueBlock();
+ int valuePosition = block.getUnderlyingValuePosition(position);
+ return valueBlock.getSlice(valuePosition);
+ }
+
+ @Override
+ public void writeSlice(BlockBuilder blockBuilder, Slice value)
+ {
+ ((VariableWidthBlockBuilder) blockBuilder).writeEntry(value);
+ }
+
+ @Override
+ public void writeSlice(BlockBuilder blockBuilder, Slice value, int offset, int length)
+ {
+ ((VariableWidthBlockBuilder) blockBuilder).writeEntry(value, offset, length);
+ }
+
+ @Override
+ public Object getObject(Block block, int position)
+ {
+ return JtsGeometrySerde.deserialize(getSlice(block, position));
+ }
+
+ @Override
+ public void writeObject(BlockBuilder blockBuilder, Object value)
+ {
+ writeSlice(blockBuilder, JtsGeometrySerde.serialize((Geometry) value));
+ }
+
+ @Override
+ public Object getObjectValue(Block block, int position)
+ {
+ if (block.isNull(position)) {
+ return null;
+ }
+ try {
+ return JtsGeometrySerde.deserialize(getSlice(block, position)).toText();
+ }
+ catch (RuntimeException e) {
+ return "";
+ }
+ }
+
+ // Helper methods for flat memory operations
+ private static int readVariableWidthLength(byte[] fixedSizeSlice, int fixedSizeOffset)
+ {
+ int length = (int) INT_BE_HANDLE.get(fixedSizeSlice, fixedSizeOffset);
+ if (length < 0) {
+ int shortLength = fixedSizeSlice[fixedSizeOffset] & 0x7F;
+ if (shortLength > MAX_SHORT_FLAT_LENGTH) {
+ throw new IllegalArgumentException("Invalid short variable width length: " + shortLength);
+ }
+ return shortLength;
+ }
+ return length;
+ }
+
+ private static void writeFlatVariableLength(int length, byte[] fixedSizeSlice, int fixedSizeOffset)
+ {
+ if (length < 0) {
+ throw new IllegalArgumentException("Invalid variable width length: " + length);
+ }
+ if (length <= MAX_SHORT_FLAT_LENGTH) {
+ fixedSizeSlice[fixedSizeOffset] = (byte) (length | 0x80);
+ }
+ else {
+ INT_BE_HANDLE.set(fixedSizeSlice, fixedSizeOffset, length);
+ }
+ }
+
+ private static Slice readFlatToSlice(
+ byte[] fixedSizeSlice,
+ int fixedSizeOffset,
+ byte[] variableSizeSlice,
+ int variableSizeOffset)
+ {
+ int length = readVariableWidthLength(fixedSizeSlice, fixedSizeOffset);
+ byte[] bytes;
+ int offset;
+ if (length <= MAX_SHORT_FLAT_LENGTH) {
+ bytes = fixedSizeSlice;
+ offset = fixedSizeOffset + 1;
+ }
+ else {
+ bytes = variableSizeSlice;
+ offset = variableSizeOffset;
+ }
+ return wrappedBuffer(bytes, offset, length);
+ }
+
+ private static void writeFlatFromSlice(
+ Slice value,
+ byte[] fixedSizeSlice,
+ int fixedSizeOffset,
+ byte[] variableSizeSlice,
+ int variableSizeOffset)
+ {
+ int length = value.length();
+ writeFlatVariableLength(length, fixedSizeSlice, fixedSizeOffset);
+ byte[] bytes;
+ int offset;
+ if (length <= MAX_SHORT_FLAT_LENGTH) {
+ bytes = fixedSizeSlice;
+ offset = fixedSizeOffset + 1;
+ }
+ else {
+ bytes = variableSizeSlice;
+ offset = variableSizeOffset;
+ }
+ value.getBytes(0, bytes, offset, length);
+ }
+
+ /**
+ * Operators for geometry types.
+ * Strict binary equality is enforced to ensure consistency between Stack (Geometry)
+ * and Block (Slice) representations. Topological equality must be checked via ST_Equals.
+ */
+ // This is a copy of AbstractVariableWidthType operators adapted for Geometry stack type. The
+ // original implementation is inaccessible due to visibility restrictions.
+ private static class GeometryTypeOperators
+ {
+ @ScalarOperator(READ_VALUE)
+ private static Geometry readFlatToStack(
+ @FlatFixed byte[] fixedSizeSlice,
+ @FlatFixedOffset int fixedSizeOffset,
+ @FlatVariableWidth byte[] variableSizeSlice,
+ @FlatVariableOffset int variableSizeOffset)
+ {
+ Slice slice = readFlatToSlice(fixedSizeSlice, fixedSizeOffset, variableSizeSlice, variableSizeOffset);
+ return JtsGeometrySerde.deserialize(slice);
+ }
+
+ @ScalarOperator(READ_VALUE)
+ private static void readFlatToBlock(
+ @FlatFixed byte[] fixedSizeSlice,
+ @FlatFixedOffset int fixedSizeOffset,
+ @FlatVariableWidth byte[] variableSizeSlice,
+ @FlatVariableOffset int variableSizeOffset,
+ BlockBuilder blockBuilder)
+ {
+ int length = readVariableWidthLength(fixedSizeSlice, fixedSizeOffset);
+ byte[] bytes;
+ int offset;
+ if (length <= MAX_SHORT_FLAT_LENGTH) {
+ bytes = fixedSizeSlice;
+ offset = fixedSizeOffset + 1;
+ }
+ else {
+ bytes = variableSizeSlice;
+ offset = variableSizeOffset;
+ }
+ ((VariableWidthBlockBuilder) blockBuilder).writeEntry(bytes, offset, length);
+ }
+
+ @ScalarOperator(READ_VALUE)
+ private static void writeFlatFromStack(
+ Geometry value,
+ @FlatFixed byte[] fixedSizeSlice,
+ @FlatFixedOffset int fixedSizeOffset,
+ @FlatVariableWidth byte[] variableSizeSlice,
+ @FlatVariableOffset int variableSizeOffset)
+ {
+ Slice slice = JtsGeometrySerde.serialize(value);
+ writeFlatFromSlice(slice, fixedSizeSlice, fixedSizeOffset, variableSizeSlice, variableSizeOffset);
+ }
+
+ @ScalarOperator(READ_VALUE)
+ private static void writeFlatFromBlock(
+ @BlockPosition VariableWidthBlock block,
+ @BlockIndex int position,
+ @FlatFixed byte[] fixedSizeSlice,
+ @FlatFixedOffset int fixedSizeOffset,
+ @FlatVariableWidth byte[] variableSizeSlice,
+ @FlatVariableOffset int variableSizeOffset)
+ {
+ Slice rawSlice = block.getRawSlice();
+ int rawSliceOffset = block.getRawSliceOffset(position);
+ int length = block.getSliceLength(position);
+
+ writeFlatVariableLength(length, fixedSizeSlice, fixedSizeOffset);
+ byte[] bytes;
+ int offset;
+ if (length <= MAX_SHORT_FLAT_LENGTH) {
+ bytes = fixedSizeSlice;
+ offset = fixedSizeOffset + 1;
+ }
+ else {
+ bytes = variableSizeSlice;
+ offset = variableSizeOffset;
+ }
+ rawSlice.getBytes(rawSliceOffset, bytes, offset, length);
+ }
+
+ @ScalarOperator(EQUAL)
+ private static boolean equalOperator(Geometry left, Geometry right)
+ {
+ Slice leftSlice = JtsGeometrySerde.serialize(left);
+ Slice rightSlice = JtsGeometrySerde.serialize(right);
+ return leftSlice.equals(rightSlice);
+ }
+
+ @ScalarOperator(EQUAL)
+ private static boolean equalOperator(
+ @BlockPosition VariableWidthBlock leftBlock,
+ @BlockIndex int leftPosition,
+ @BlockPosition VariableWidthBlock rightBlock,
+ @BlockIndex int rightPosition)
+ {
+ Slice leftRawSlice = leftBlock.getRawSlice();
+ int leftRawSliceOffset = leftBlock.getRawSliceOffset(leftPosition);
+ int leftLength = leftBlock.getSliceLength(leftPosition);
+
+ Slice rightRawSlice = rightBlock.getRawSlice();
+ int rightRawSliceOffset = rightBlock.getRawSliceOffset(rightPosition);
+ int rightLength = rightBlock.getSliceLength(rightPosition);
+
+ return leftRawSlice.equals(leftRawSliceOffset, leftLength, rightRawSlice, rightRawSliceOffset, rightLength);
+ }
+
+ @ScalarOperator(EQUAL)
+ private static boolean equalOperator(
+ Geometry left,
+ @BlockPosition VariableWidthBlock rightBlock,
+ @BlockIndex int rightPosition)
+ {
+ Slice leftSlice = JtsGeometrySerde.serialize(left);
+ Slice rightRawSlice = rightBlock.getRawSlice();
+ int rightOffset = rightBlock.getRawSliceOffset(rightPosition);
+ int rightLength = rightBlock.getSliceLength(rightPosition);
+ return leftSlice.equals(0, leftSlice.length(), rightRawSlice, rightOffset, rightLength);
+ }
+
+ @ScalarOperator(EQUAL)
+ private static boolean equalOperator(
+ @BlockPosition VariableWidthBlock leftBlock,
+ @BlockIndex int leftPosition,
+ Geometry right)
+ {
+ return equalOperator(right, leftBlock, leftPosition);
+ }
+
+ @ScalarOperator(EQUAL)
+ private static boolean equalOperator(
+ @FlatFixed byte[] leftFixedSizeSlice,
+ @FlatFixedOffset int leftFixedSizeOffset,
+ @FlatVariableWidth byte[] leftVariableSizeSlice,
+ @FlatVariableOffset int leftVariableSizeOffset,
+ @FlatFixed byte[] rightFixedSizeSlice,
+ @FlatFixedOffset int rightFixedSizeOffset,
+ @FlatVariableWidth byte[] rightVariableSizeSlice,
+ @FlatVariableOffset int rightVariableSizeOffset)
+ {
+ int leftLength = readVariableWidthLength(leftFixedSizeSlice, leftFixedSizeOffset);
+ int rightLength = readVariableWidthLength(rightFixedSizeSlice, rightFixedSizeOffset);
+ if (leftLength != rightLength) {
+ return false;
+ }
+ if (leftLength <= MAX_SHORT_FLAT_LENGTH) {
+ return ((int) INT_BE_HANDLE.get(leftFixedSizeSlice, leftFixedSizeOffset)) ==
+ ((int) INT_BE_HANDLE.get(rightFixedSizeSlice, rightFixedSizeOffset));
+ }
+ return Arrays.equals(
+ leftVariableSizeSlice,
+ leftVariableSizeOffset,
+ leftVariableSizeOffset + leftLength,
+ rightVariableSizeSlice,
+ rightVariableSizeOffset,
+ rightVariableSizeOffset + rightLength);
+ }
+
+ @ScalarOperator(EQUAL)
+ private static boolean equalOperator(
+ @BlockPosition VariableWidthBlock leftBlock,
+ @BlockIndex int leftPosition,
+ @FlatFixed byte[] rightFixedSizeSlice,
+ @FlatFixedOffset int rightFixedSizeOffset,
+ @FlatVariableWidth byte[] rightVariableSizeSlice,
+ @FlatVariableOffset int rightVariableSizeOffset)
+ {
+ return equalOperator(
+ rightFixedSizeSlice,
+ rightFixedSizeOffset,
+ rightVariableSizeSlice,
+ rightVariableSizeOffset,
+ leftBlock,
+ leftPosition);
+ }
+
+ @ScalarOperator(EQUAL)
+ private static boolean equalOperator(
+ @FlatFixed byte[] leftFixedSizeSlice,
+ @FlatFixedOffset int leftFixedSizeOffset,
+ @FlatVariableWidth byte[] leftVariableSizeSlice,
+ @FlatVariableOffset int leftVariableSizeOffset,
+ @BlockPosition VariableWidthBlock rightBlock,
+ @BlockIndex int rightPosition)
+ {
+ int leftLength = readVariableWidthLength(leftFixedSizeSlice, leftFixedSizeOffset);
+
+ Slice rightRawSlice = rightBlock.getRawSlice();
+ int rightRawSliceOffset = rightBlock.getRawSliceOffset(rightPosition);
+ int rightLength = rightBlock.getSliceLength(rightPosition);
+
+ if (leftLength != rightLength) {
+ return false;
+ }
+
+ byte[] leftBytes;
+ int leftOffset;
+ if (leftLength <= MAX_SHORT_FLAT_LENGTH) {
+ leftBytes = leftFixedSizeSlice;
+ leftOffset = leftFixedSizeOffset + 1;
+ }
+ else {
+ leftBytes = leftVariableSizeSlice;
+ leftOffset = leftVariableSizeOffset;
+ }
+ return rightRawSlice.equals(rightRawSliceOffset, rightLength, wrappedBuffer(leftBytes, leftOffset, leftLength), 0, leftLength);
+ }
+
+ @ScalarOperator(EQUAL)
+ private static boolean equalOperator(
+ Geometry left,
+ @FlatFixed byte[] rightFixedSizeSlice,
+ @FlatFixedOffset int rightFixedSizeOffset,
+ @FlatVariableWidth byte[] rightVariableSizeSlice,
+ @FlatVariableOffset int rightVariableSizeOffset)
+ {
+ Slice leftSlice = JtsGeometrySerde.serialize(left);
+ Slice rightSlice = readFlatToSlice(rightFixedSizeSlice, rightFixedSizeOffset, rightVariableSizeSlice, rightVariableSizeOffset);
+ return leftSlice.equals(rightSlice);
+ }
+
+ @ScalarOperator(EQUAL)
+ private static boolean equalOperator(
+ @FlatFixed byte[] leftFixedSizeSlice,
+ @FlatFixedOffset int leftFixedSizeOffset,
+ @FlatVariableWidth byte[] leftVariableSizeSlice,
+ @FlatVariableOffset int leftVariableSizeOffset,
+ Geometry right)
+ {
+ return equalOperator(right, leftFixedSizeSlice, leftFixedSizeOffset, leftVariableSizeSlice, leftVariableSizeOffset);
+ }
+
+ @ScalarOperator(XX_HASH_64)
+ private static long xxHash64Operator(Geometry value)
+ {
+ return XxHash64.hash(JtsGeometrySerde.serialize(value));
+ }
+
+ @ScalarOperator(XX_HASH_64)
+ private static long xxHash64Operator(@BlockPosition VariableWidthBlock block, @BlockIndex int position)
+ {
+ return XxHash64.hash(block.getRawSlice(), block.getRawSliceOffset(position), block.getSliceLength(position));
+ }
+
+ @ScalarOperator(XX_HASH_64)
+ private static long xxHash64Operator(
+ @FlatFixed byte[] fixedSizeSlice,
+ @FlatFixedOffset int fixedSizeOffset,
+ @FlatVariableWidth byte[] variableSizeSlice,
+ @FlatVariableOffset int variableSizeOffset)
+ {
+ int length = readVariableWidthLength(fixedSizeSlice, fixedSizeOffset);
+ byte[] bytes;
+ int offset;
+ if (length <= MAX_SHORT_FLAT_LENGTH) {
+ bytes = fixedSizeSlice;
+ offset = fixedSizeOffset + 1;
+ }
+ else {
+ bytes = variableSizeSlice;
+ offset = variableSizeOffset;
+ }
+ return XxHash64.hash(wrappedBuffer(bytes, offset, length));
+ }
+ }
+}
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/BingTileFunctions.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/BingTileFunctions.java
index dfe64bfaee94..22c0255dcfce 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/BingTileFunctions.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/BingTileFunctions.java
@@ -13,10 +13,6 @@
*/
package io.trino.plugin.geospatial;
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.ogc.OGCGeometry;
import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.FormatMethod;
import io.airlift.slice.Slice;
@@ -30,17 +26,17 @@
import io.trino.spi.function.SqlType;
import io.trino.spi.type.RowType;
import io.trino.spi.type.StandardTypes;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Verify.verify;
import static io.airlift.slice.Slices.utf8Slice;
import static io.trino.geospatial.GeometryUtils.contains;
import static io.trino.geospatial.GeometryUtils.disjoint;
-import static io.trino.geospatial.GeometryUtils.getEnvelope;
-import static io.trino.geospatial.GeometryUtils.getPointCount;
import static io.trino.geospatial.GeometryUtils.isPointOrRectangle;
-import static io.trino.geospatial.serde.GeometrySerde.deserialize;
-import static io.trino.geospatial.serde.GeometrySerde.serialize;
import static io.trino.plugin.geospatial.BingTile.MAX_ZOOM_LEVEL;
import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
import static io.trino.spi.type.BigintType.BIGINT;
@@ -65,6 +61,7 @@
*/
public final class BingTileFunctions
{
+ private static final GeometryFactory GEOMETRY_FACTORY = new GeometryFactory();
private static final int TILE_PIXELS = 256;
private static final double MAX_LATITUDE = 85.05112878;
private static final double MIN_LATITUDE = -85.05112878;
@@ -267,7 +264,7 @@ public static Block bingTilesAround(
BIGINT.writeLong(blockBuilder, tile.encode());
}
else {
- Point bottomLeftCorner = tileXYToLatitudeLongitude(tile.getX(), tile.getY() + 1, tile.getZoomLevel());
+ Coordinate bottomLeftCorner = tileXYToLatitudeLongitude(tile.getX(), tile.getY() + 1, tile.getZoomLevel());
if (withinDistance(distanceToCenter, radiusInKm, bottomLeftCorner)) {
include = true;
BIGINT.writeLong(blockBuilder, tile.encode());
@@ -283,7 +280,7 @@ public static Block bingTilesAround(
BIGINT.writeLong(blockBuilder, tile.encode());
}
else {
- Point topLeftCorner = tileXYToLatitudeLongitude(tile.getX(), tile.getY(), tile.getZoomLevel());
+ Coordinate topLeftCorner = tileXYToLatitudeLongitude(tile.getX(), tile.getY(), tile.getZoomLevel());
if (withinDistance(distanceToCenter, radiusInKm, topLeftCorner)) {
include = true;
BIGINT.writeLong(blockBuilder, tile.encode());
@@ -301,7 +298,7 @@ public static Block bingTilesAround(
BIGINT.writeLong(blockBuilder, tile.encode());
}
else {
- Point bottomRightCorner = tileXYToLatitudeLongitude(tile.getX() + 1, tile.getY() + 1, tile.getZoomLevel());
+ Coordinate bottomRightCorner = tileXYToLatitudeLongitude(tile.getX() + 1, tile.getY() + 1, tile.getZoomLevel());
if (withinDistance(distanceToCenter, radiusInKm, bottomRightCorner)) {
include = true;
BIGINT.writeLong(blockBuilder, tile.encode());
@@ -317,7 +314,7 @@ public static Block bingTilesAround(
BIGINT.writeLong(blockBuilder, tile.encode());
}
else {
- Point topRightCorner = tileXYToLatitudeLongitude(tile.getX() + 1, tile.getY(), tile.getZoomLevel());
+ Coordinate topRightCorner = tileXYToLatitudeLongitude(tile.getX() + 1, tile.getY(), tile.getZoomLevel());
if (withinDistance(distanceToCenter, radiusInKm, topRightCorner)) {
include = true;
BIGINT.writeLong(blockBuilder, tile.encode());
@@ -332,43 +329,42 @@ public static Block bingTilesAround(
@Description("Given a Bing tile, returns the polygon representation of the tile")
@ScalarFunction("bing_tile_polygon")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice bingTilePolygon(@SqlType(StandardTypes.BING_TILE) long input)
+ public static Geometry bingTilePolygon(@SqlType(StandardTypes.BING_TILE) long input)
{
BingTile tile = BingTile.decode(input);
- return serialize(tileToEnvelope(tile));
+ return GEOMETRY_FACTORY.toGeometry(tileToEnvelope(tile));
}
@Description("Given a geometry and a zoom level, returns the minimum set of Bing tiles that fully covers that geometry")
@ScalarFunction("geometry_to_bing_tiles")
@SqlType("array(" + StandardTypes.BING_TILE + ")")
- public static Block geometryToBingTiles(@SqlType(StandardTypes.GEOMETRY) Slice input, @SqlType(StandardTypes.INTEGER) long zoomLevelInput)
+ public static Block geometryToBingTiles(@SqlType(StandardTypes.GEOMETRY) Geometry geometry, @SqlType(StandardTypes.INTEGER) long zoomLevelInput)
{
checkZoomLevel(zoomLevelInput);
int zoomLevel = toIntExact(zoomLevelInput);
- OGCGeometry ogcGeometry = deserialize(input);
- if (ogcGeometry.isEmpty()) {
+ if (geometry.isEmpty()) {
return EMPTY_TILE_ARRAY;
}
- Envelope envelope = getEnvelope(ogcGeometry);
+ Envelope envelope = geometry.getEnvelopeInternal();
- checkLatitude(envelope.getYMin(), LATITUDE_SPAN_OUT_OF_RANGE);
- checkLatitude(envelope.getYMax(), LATITUDE_SPAN_OUT_OF_RANGE);
- checkLongitude(envelope.getXMin(), LONGITUDE_SPAN_OUT_OF_RANGE);
- checkLongitude(envelope.getXMax(), LONGITUDE_SPAN_OUT_OF_RANGE);
+ checkLatitude(envelope.getMinY(), LATITUDE_SPAN_OUT_OF_RANGE);
+ checkLatitude(envelope.getMaxY(), LATITUDE_SPAN_OUT_OF_RANGE);
+ checkLongitude(envelope.getMinX(), LONGITUDE_SPAN_OUT_OF_RANGE);
+ checkLongitude(envelope.getMaxX(), LONGITUDE_SPAN_OUT_OF_RANGE);
- boolean pointOrRectangle = isPointOrRectangle(ogcGeometry, envelope);
+ boolean pointOrRectangle = isPointOrRectangle(geometry, envelope);
- BingTile leftUpperTile = latitudeLongitudeToTile(envelope.getYMax(), envelope.getXMin(), zoomLevel);
+ BingTile leftUpperTile = latitudeLongitudeToTile(envelope.getMaxY(), envelope.getMinX(), zoomLevel);
BingTile rightLowerTile = getTileCoveringLowerRightCorner(envelope, zoomLevel);
// XY coordinates start at (0,0) in the left upper corner and increase left to right and top to bottom
long tileCount = (long) (rightLowerTile.getX() - leftUpperTile.getX() + 1) * (rightLowerTile.getY() - leftUpperTile.getY() + 1);
- checkGeometryToBingTilesLimits(ogcGeometry, envelope, pointOrRectangle, tileCount, zoomLevel);
+ checkGeometryToBingTilesLimits(geometry, envelope, pointOrRectangle, tileCount, zoomLevel);
BlockBuilder blockBuilder = BIGINT.createFixedSizeBlockBuilder(toIntExact(tileCount));
if (pointOrRectangle || zoomLevel <= OPTIMIZED_TILING_MIN_ZOOM_LEVEL) {
@@ -378,7 +374,7 @@ public static Block geometryToBingTiles(@SqlType(StandardTypes.GEOMETRY) Slice i
for (int x = leftUpperTile.getX(); x <= rightLowerTile.getX(); x++) {
for (int y = leftUpperTile.getY(); y <= rightLowerTile.getY(); y++) {
BingTile tile = BingTile.fromCoordinates(x, y, zoomLevel);
- if (pointOrRectangle || !disjoint(tileToGeometry(tile), ogcGeometry)) {
+ if (pointOrRectangle || !disjoint(tileToGeometry(tile), geometry)) {
BIGINT.writeLong(blockBuilder, tile.encode());
}
}
@@ -394,7 +390,7 @@ public static Block geometryToBingTiles(@SqlType(StandardTypes.GEOMETRY) Slice i
// tile covered by the geometry.
BingTile[] tiles = getTilesInBetween(leftUpperTile, rightLowerTile, OPTIMIZED_TILING_MIN_ZOOM_LEVEL);
for (BingTile tile : tiles) {
- appendIntersectingSubtiles(ogcGeometry, zoomLevel, tile, blockBuilder);
+ appendIntersectingSubtiles(geometry, zoomLevel, tile, blockBuilder);
}
}
@@ -403,17 +399,17 @@ public static Block geometryToBingTiles(@SqlType(StandardTypes.GEOMETRY) Slice i
private static BingTile getTileCoveringLowerRightCorner(Envelope envelope, int zoomLevel)
{
- BingTile tile = latitudeLongitudeToTile(envelope.getYMin(), envelope.getXMax(), zoomLevel);
+ BingTile tile = latitudeLongitudeToTile(envelope.getMinY(), envelope.getMaxX(), zoomLevel);
// If the tile covering the lower right corner of the envelope overlaps the envelope only
// at the border then return a tile shifted to the left and/or top
int deltaX = 0;
int deltaY = 0;
- Point upperLeftCorner = tileXYToLatitudeLongitude(tile.getX(), tile.getY(), tile.getZoomLevel());
- if (upperLeftCorner.getX() == envelope.getXMax()) {
+ Coordinate upperLeftCorner = tileXYToLatitudeLongitude(tile.getX(), tile.getY(), tile.getZoomLevel());
+ if (upperLeftCorner.getX() == envelope.getMaxX()) {
deltaX = -1;
}
- if (upperLeftCorner.getY() == envelope.getYMin()) {
+ if (upperLeftCorner.getY() == envelope.getMinY()) {
deltaY = -1;
}
@@ -424,18 +420,18 @@ private static BingTile getTileCoveringLowerRightCorner(Envelope envelope, int z
return tile;
}
- private static void checkGeometryToBingTilesLimits(OGCGeometry ogcGeometry, Envelope envelope, boolean pointOrRectangle, long tileCount, int zoomLevel)
+ private static void checkGeometryToBingTilesLimits(Geometry geometry, Envelope envelope, boolean pointOrRectangle, long tileCount, int zoomLevel)
{
if (pointOrRectangle) {
checkCondition(tileCount <= 1_000_000, "The number of tiles covering input rectangle exceeds the limit of 1M. " +
"Number of tiles: %d. Rectangle: xMin=%.2f, yMin=%.2f, xMax=%.2f, yMax=%.2f. Zoom level: %d.",
- tileCount, envelope.getXMin(), envelope.getYMin(), envelope.getXMax(), envelope.getYMax(), zoomLevel);
+ tileCount, envelope.getMinX(), envelope.getMinY(), envelope.getMaxX(), envelope.getMaxY(), zoomLevel);
}
else {
checkCondition((int) tileCount == tileCount, "The zoom level is too high to compute a set of covering Bing tiles.");
long complexity = 0;
try {
- complexity = multiplyExact(tileCount, getPointCount(ogcGeometry));
+ complexity = multiplyExact(tileCount, geometry.getNumPoints());
}
catch (ArithmeticException e) {
checkCondition(false, "The zoom level is too high or the geometry is too complex to compute a set of covering Bing tiles. " +
@@ -526,7 +522,7 @@ private static BingTile[] getTilesInBetween(BingTile leftUpperTile, BingTile rig
* BlockBuilder.
*/
private static void appendIntersectingSubtiles(
- OGCGeometry ogcGeometry,
+ Geometry geometry,
int zoomLevel,
BingTile tile,
BlockBuilder blockBuilder)
@@ -536,13 +532,13 @@ private static void appendIntersectingSubtiles(
Geometry tileGeometry = tileToGeometry(tile);
if (tileZoomLevel == zoomLevel) {
- if (!disjoint(tileGeometry, ogcGeometry)) {
+ if (!disjoint(tileGeometry, geometry)) {
BIGINT.writeLong(blockBuilder, tile.encode());
}
return;
}
- if (contains(ogcGeometry, tileGeometry)) {
+ if (contains(geometry, tileGeometry)) {
int subTileCount = 1 << (zoomLevel - tileZoomLevel);
int minX = subTileCount * tile.getX();
int minY = subTileCount * tile.getY();
@@ -554,7 +550,7 @@ private static void appendIntersectingSubtiles(
return;
}
- if (disjoint(tileGeometry, ogcGeometry)) {
+ if (disjoint(tileGeometry, geometry)) {
return;
}
@@ -565,7 +561,7 @@ private static void appendIntersectingSubtiles(
for (int x = minX; x < minX + 2; x++) {
for (int y = minY; y < minY + 2; y++) {
appendIntersectingSubtiles(
- ogcGeometry,
+ geometry,
zoomLevel,
BingTile.fromCoordinates(x, y, nextZoomLevel),
blockBuilder);
@@ -573,7 +569,7 @@ private static void appendIntersectingSubtiles(
}
}
- private static Point tileXYToLatitudeLongitude(int tileX, int tileY, int zoomLevel)
+ private static Coordinate tileXYToLatitudeLongitude(int tileX, int tileY, int zoomLevel)
{
long mapSize = mapSize(zoomLevel);
double x = (clamp((long) tileX * TILE_PIXELS, 0, mapSize) / (double) mapSize) - 0.5;
@@ -581,7 +577,7 @@ private static Point tileXYToLatitudeLongitude(int tileX, int tileY, int zoomLev
double latitude = 90 - 360 * Math.atan(Math.exp(-y * 2 * Math.PI)) / Math.PI;
double longitude = 360 * x;
- return new Point(longitude, latitude);
+ return new Coordinate(longitude, latitude);
}
/**
@@ -633,16 +629,16 @@ private static int axisToCoordinates(double axis, long mapSize)
private static Envelope tileToEnvelope(BingTile tile)
{
- Point upperLeftCorner = tileXYToLatitudeLongitude(tile.getX(), tile.getY(), tile.getZoomLevel());
- Point lowerRightCorner = tileXYToLatitudeLongitude(tile.getX() + 1, tile.getY() + 1, tile.getZoomLevel());
- return new Envelope(upperLeftCorner.getX(), lowerRightCorner.getY(), lowerRightCorner.getX(), upperLeftCorner.getY());
+ Coordinate upperLeftCorner = tileXYToLatitudeLongitude(tile.getX(), tile.getY(), tile.getZoomLevel());
+ Coordinate lowerRightCorner = tileXYToLatitudeLongitude(tile.getX() + 1, tile.getY() + 1, tile.getZoomLevel());
+ // JTS Envelope constructor: (xMin, xMax, yMin, yMax)
+ return new Envelope(upperLeftCorner.getX(), lowerRightCorner.getX(), lowerRightCorner.getY(), upperLeftCorner.getY());
}
private static Geometry tileToGeometry(BingTile tile)
{
- Point upperLeftCorner = tileXYToLatitudeLongitude(tile.getX(), tile.getY(), tile.getZoomLevel());
- Point lowerRightCorner = tileXYToLatitudeLongitude(tile.getX() + 1, tile.getY() + 1, tile.getZoomLevel());
- return OGCGeometry.createFromEsriGeometry(new Envelope(upperLeftCorner.getX(), lowerRightCorner.getY(), lowerRightCorner.getX(), upperLeftCorner.getY()), null).getEsriGeometry();
+ Envelope envelope = tileToEnvelope(tile);
+ return GEOMETRY_FACTORY.toGeometry(envelope);
}
private static void checkZoomLevel(long zoomLevel)
@@ -672,7 +668,7 @@ private static void checkLongitude(double longitude, String errorMessage)
checkCondition(longitude >= MIN_LONGITUDE && longitude <= MAX_LONGITUDE, errorMessage);
}
- private static boolean withinDistance(GreatCircleDistanceToPoint distanceFunction, double maxDistance, Point point)
+ private static boolean withinDistance(GreatCircleDistanceToPoint distanceFunction, double maxDistance, Coordinate point)
{
return distanceFunction.distance(point.getY(), point.getX()) <= maxDistance;
}
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/EncodedPolylineFunctions.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/EncodedPolylineFunctions.java
index 33fbf65d13c0..cc8136997335 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/EncodedPolylineFunctions.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/EncodedPolylineFunctions.java
@@ -13,12 +13,6 @@
*/
package io.trino.plugin.geospatial;
-import com.esri.core.geometry.MultiPath;
-import com.esri.core.geometry.MultiVertexGeometry;
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.Polyline;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCLineString;
import com.google.common.base.Joiner;
import io.airlift.slice.DynamicSliceOutput;
import io.airlift.slice.Slice;
@@ -28,14 +22,19 @@
import io.trino.spi.function.ScalarFunction;
import io.trino.spi.function.SqlType;
import io.trino.spi.type.StandardTypes;
-
-import java.util.EnumSet;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.CoordinateSequence;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.impl.CoordinateArraySequence;
+
+import java.util.ArrayList;
+import java.util.List;
import java.util.Set;
import static io.trino.geospatial.GeometryType.LINE_STRING;
import static io.trino.geospatial.GeometryType.MULTI_POINT;
-import static io.trino.geospatial.serde.GeometrySerde.deserialize;
-import static io.trino.geospatial.serde.GeometrySerde.serialize;
import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
import static java.lang.String.format;
@@ -47,20 +46,21 @@
*/
public final class EncodedPolylineFunctions
{
+ private static final GeometryFactory GEOMETRY_FACTORY = new GeometryFactory();
+
private EncodedPolylineFunctions() {}
@Description("Decodes a polyline to a linestring")
@ScalarFunction("from_encoded_polyline")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice fromEncodedPolyline(@SqlType(StandardTypes.VARCHAR) Slice input)
+ public static Geometry fromEncodedPolyline(@SqlType(StandardTypes.VARCHAR) Slice input)
{
- return serialize(decodePolyline(input.toStringUtf8()));
+ return decodePolyline(input.toStringUtf8());
}
- private static OGCLineString decodePolyline(String polyline)
+ private static LineString decodePolyline(String polyline)
{
- MultiPath multipath = new Polyline();
- boolean isFirstPoint = true;
+ List coordinates = new ArrayList<>();
int index = 0;
int latitude = 0;
@@ -88,44 +88,42 @@ private static OGCLineString decodePolyline(String polyline)
while (bytes >= 0x1f);
longitude += (result & 1) != 0 ? ~(result >> 1) : (result >> 1);
- if (isFirstPoint) {
- multipath.startPath(longitude * 1e-5, latitude * 1e-5);
- isFirstPoint = false;
- }
- else {
- multipath.lineTo(longitude * 1e-5, latitude * 1e-5);
- }
+ coordinates.add(new Coordinate(longitude * 1e-5, latitude * 1e-5));
}
- return new OGCLineString(multipath, 0, null);
+ // JTS LineString requires 0 or >= 2 points, so a single point decodes to empty
+ if (coordinates.size() < 2) {
+ return GEOMETRY_FACTORY.createLineString();
+ }
+
+ CoordinateSequence sequence = new CoordinateArraySequence(coordinates.toArray(new Coordinate[0]));
+ return new LineString(sequence, GEOMETRY_FACTORY);
}
@Description("Encodes a linestring or multipoint geometry to a polyline")
@ScalarFunction("to_encoded_polyline")
@SqlType(StandardTypes.VARCHAR)
- public static Slice toEncodedPolyline(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Slice toEncodedPolyline(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
- validateType("encode_polyline", geometry, EnumSet.of(LINE_STRING, MULTI_POINT));
- GeometryType geometryType = GeometryType.getForEsriGeometryType(geometry.geometryType());
+ validateType("encode_polyline", geometry, Set.of(LINE_STRING, MULTI_POINT));
+ GeometryType geometryType = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
return switch (geometryType) {
- case LINE_STRING, MULTI_POINT -> encodePolyline((MultiVertexGeometry) geometry.getEsriGeometry());
+ case LINE_STRING, MULTI_POINT -> encodePolyline(geometry);
default -> throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Unexpected geometry type: " + geometryType);
};
}
- private static Slice encodePolyline(MultiVertexGeometry multiVertexGeometry)
+ private static Slice encodePolyline(Geometry geometry)
{
long lastLatitude = 0;
long lastLongitude = 0;
DynamicSliceOutput output = new DynamicSliceOutput(0);
- for (int i = 0; i < multiVertexGeometry.getPointCount(); i++) {
- Point point = multiVertexGeometry.getPoint(i);
-
- long latitude = Math.round(point.getY() * 1e5);
- long longitude = Math.round(point.getX() * 1e5);
+ Coordinate[] coordinates = geometry.getCoordinates();
+ for (Coordinate coordinate : coordinates) {
+ long latitude = Math.round(coordinate.getY() * 1e5);
+ long longitude = Math.round(coordinate.getX() * 1e5);
long latitudeDelta = latitude - lastLatitude;
long longitudeDelta = longitude - lastLongitude;
@@ -149,9 +147,9 @@ private static void encode(long value, DynamicSliceOutput output)
output.appendByte((byte) (value + 63));
}
- private static void validateType(String function, OGCGeometry geometry, Set validTypes)
+ private static void validateType(String function, Geometry geometry, Set validTypes)
{
- GeometryType type = GeometryType.getForEsriGeometryType(geometry.geometryType());
+ GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
if (!validTypes.contains(type)) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("%s only applies to %s. Input type is: %s", function, Joiner.on(" or ").join(validTypes), type));
}
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/GeoFunctions.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/GeoFunctions.java
index 3348c8befe6b..30916cf72446 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/GeoFunctions.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/GeoFunctions.java
@@ -13,39 +13,15 @@
*/
package io.trino.plugin.geospatial;
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.GeometryCursor;
-import com.esri.core.geometry.GeometryEngine;
-import com.esri.core.geometry.ListeningGeometryCursor;
-import com.esri.core.geometry.MultiPath;
-import com.esri.core.geometry.MultiPoint;
-import com.esri.core.geometry.MultiVertexGeometry;
-import com.esri.core.geometry.NonSimpleResult;
-import com.esri.core.geometry.NonSimpleResult.Reason;
-import com.esri.core.geometry.OperatorSimplifyOGC;
-import com.esri.core.geometry.OperatorUnion;
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.Polygon;
-import com.esri.core.geometry.Polyline;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.WktExportFlags;
-import com.esri.core.geometry.ogc.OGCConcreteGeometryCollection;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCGeometryCollection;
-import com.esri.core.geometry.ogc.OGCLineString;
-import com.esri.core.geometry.ogc.OGCPoint;
-import com.esri.core.geometry.ogc.OGCPolygon;
import com.google.common.base.Joiner;
import com.google.common.base.VerifyException;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import io.trino.geospatial.GeometryType;
import io.trino.geospatial.KdbTree;
import io.trino.geospatial.Rectangle;
-import io.trino.geospatial.serde.GeometrySerde;
-import io.trino.geospatial.serde.GeometrySerializationType;
+import io.trino.geospatial.serde.EsriShapeReader;
import io.trino.geospatial.serde.JtsGeometrySerde;
import io.trino.spi.TrinoException;
import io.trino.spi.block.Block;
@@ -59,43 +35,44 @@
import io.trino.spi.type.RowType;
import io.trino.spi.type.StandardTypes;
import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryCollection;
import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.LinearRing;
+import org.locationtech.jts.geom.MultiLineString;
+import org.locationtech.jts.geom.MultiPoint;
+import org.locationtech.jts.geom.MultiPolygon;
+import org.locationtech.jts.geom.Point;
+import org.locationtech.jts.geom.Polygon;
import org.locationtech.jts.io.ParseException;
-import org.locationtech.jts.io.WKBReader;
+import org.locationtech.jts.io.WKBWriter;
+import org.locationtech.jts.io.WKTReader;
+import org.locationtech.jts.io.WKTWriter;
import org.locationtech.jts.io.kml.KMLReader;
import org.locationtech.jts.linearref.LengthIndexedLine;
import org.locationtech.jts.operation.distance.DistanceOp;
+import org.locationtech.jts.operation.linemerge.LineMerger;
+import org.locationtech.jts.operation.overlayng.OverlayNG;
+import org.locationtech.jts.operation.overlayng.OverlayNGRobust;
+import org.locationtech.jts.operation.relateng.RelateNG;
+import org.locationtech.jts.operation.union.UnaryUnionOp;
+import org.locationtech.jts.operation.valid.IsValidOp;
+import org.locationtech.jts.operation.valid.TopologyValidationError;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.util.ArrayDeque;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
-import java.util.Deque;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
-import java.util.Objects;
import java.util.Set;
-import static com.esri.core.geometry.Geometry.Type;
-import static com.esri.core.geometry.GeometryEngine.geometryToWkt;
-import static com.esri.core.geometry.NonSimpleResult.Reason.Clustering;
-import static com.esri.core.geometry.NonSimpleResult.Reason.Cracking;
-import static com.esri.core.geometry.NonSimpleResult.Reason.CrossOver;
-import static com.esri.core.geometry.NonSimpleResult.Reason.DegenerateSegments;
-import static com.esri.core.geometry.NonSimpleResult.Reason.OGCDisconnectedInterior;
-import static com.esri.core.geometry.NonSimpleResult.Reason.OGCPolygonSelfTangency;
-import static com.esri.core.geometry.NonSimpleResult.Reason.OGCPolylineSelfTangency;
-import static com.esri.core.geometry.ogc.OGCGeometry.createFromEsriGeometry;
-import static com.google.common.base.Preconditions.checkArgument;
import static io.airlift.slice.Slices.utf8Slice;
-import static io.airlift.slice.Slices.wrappedHeapBuffer;
import static io.trino.geospatial.GeometryType.GEOMETRY_COLLECTION;
import static io.trino.geospatial.GeometryType.LINE_STRING;
import static io.trino.geospatial.GeometryType.MULTI_LINE_STRING;
@@ -103,14 +80,9 @@
import static io.trino.geospatial.GeometryType.MULTI_POLYGON;
import static io.trino.geospatial.GeometryType.POINT;
import static io.trino.geospatial.GeometryType.POLYGON;
-import static io.trino.geospatial.GeometryUtils.getPointCount;
import static io.trino.geospatial.GeometryUtils.jsonFromJtsGeometry;
import static io.trino.geospatial.GeometryUtils.jtsGeometryFromJson;
-import static io.trino.geospatial.serde.GeometrySerde.deserialize;
-import static io.trino.geospatial.serde.GeometrySerde.deserializeEnvelope;
-import static io.trino.geospatial.serde.GeometrySerde.deserializeType;
-import static io.trino.geospatial.serde.GeometrySerde.serialize;
-import static io.trino.geospatial.serde.JtsGeometrySerde.serialize;
+import static io.trino.geospatial.serde.JtsGeometrySerde.validateAndGetSrid;
import static io.trino.plugin.geospatial.GeometryType.GEOMETRY;
import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
import static io.trino.spi.block.RowValueBuilder.buildRowValue;
@@ -131,27 +103,15 @@
import static java.lang.Math.toIntExact;
import static java.lang.Math.toRadians;
import static java.lang.String.format;
-import static java.util.Arrays.setAll;
import static java.util.Objects.requireNonNull;
import static org.locationtech.jts.simplify.TopologyPreservingSimplifier.simplify;
public final class GeoFunctions
{
private static final Joiner OR_JOINER = Joiner.on(" or ");
- private static final Slice EMPTY_POLYGON = serialize(new OGCPolygon(new Polygon(), null));
- private static final Slice EMPTY_MULTIPOINT = serialize(createFromEsriGeometry(new MultiPoint(), null, true));
+ private static final GeometryFactory GEOMETRY_FACTORY = new GeometryFactory();
private static final double EARTH_RADIUS_KM = 6371.01;
private static final double EARTH_RADIUS_M = EARTH_RADIUS_KM * 1000.0;
- private static final Map NON_SIMPLE_REASONS = ImmutableMap.builder()
- .put(DegenerateSegments, "Degenerate segments")
- .put(Clustering, "Repeated points")
- .put(Cracking, "Intersecting or overlapping segments")
- .put(CrossOver, "Self-intersection")
- .put(OGCPolylineSelfTangency, "Self-tangency")
- .put(OGCPolygonSelfTangency, "Self-tangency")
- .put(OGCDisconnectedInterior, "Disconnected interior")
- .buildOrThrow();
- private static final int NUMBER_OF_DIMENSIONS = 3;
private static final Block EMPTY_ARRAY_OF_INTS = IntegerType.INTEGER.createFixedSizeBlockBuilder(0).build();
private static final float MIN_LATITUDE = -90;
@@ -161,18 +121,9 @@ public final class GeoFunctions
private static final int HADOOP_SHAPE_SIZE_WKID = 4;
private static final int HADOOP_SHAPE_SIZE_TYPE = 1;
- private static final int[] HADOOP_SHAPE_TYPES = {
- WktExportFlags.wktExportDefaults,
- WktExportFlags.wktExportPoint,
- WktExportFlags.wktExportLineString,
- WktExportFlags.wktExportPolygon,
- WktExportFlags.wktExportMultiPoint,
- WktExportFlags.wktExportMultiLineString,
- WktExportFlags.wktExportMultiPolygon
- };
-
- private static final EnumSet GEOMETRY_TYPES_FOR_SPHERICAL_GEOGRAPHY = EnumSet.of(
- Type.Point, Type.Polyline, Type.Polygon, Type.MultiPoint);
+
+ private static final Set VALID_SPHERICAL_GEOGRAPHY_LEAF_TYPES = Set.of(
+ "Point", "LineString", "LinearRing", "Polygon");
private static final EnumSet VALID_TYPES_FOR_ST_POINTS = EnumSet.of(
LINE_STRING, POLYGON, POINT, MULTI_POINT, MULTI_LINE_STRING, MULTI_POLYGON, GEOMETRY_COLLECTION);
@@ -182,20 +133,20 @@ private GeoFunctions() {}
@Description("Returns a Geometry type LineString object from Well-Known Text representation (WKT)")
@ScalarFunction("ST_LineFromText")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice parseLine(@SqlType(VARCHAR) Slice input)
+ public static Geometry parseLine(@SqlType(VARCHAR) Slice input)
{
- OGCGeometry geometry = geometryFromText(input);
+ Geometry geometry = geometryFromText(input);
validateType("ST_LineFromText", geometry, EnumSet.of(LINE_STRING));
- return serialize(geometry);
+ return geometry;
}
@Description("Returns a LineString from an array of points")
@ScalarFunction("ST_LineString")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stLineString(@SqlType("array(" + StandardTypes.GEOMETRY + ")") Block input)
+ public static Geometry stLineString(@SqlType("array(" + StandardTypes.GEOMETRY + ")") Block input)
{
- MultiPath multipath = new Polyline();
- OGCPoint previousPoint = null;
+ List coordinates = new ArrayList<>();
+ Coordinate previousCoordinate = null;
for (int i = 0; i < input.getPositionCount(); i++) {
Slice slice = GEOMETRY.getSlice(input, i);
@@ -203,139 +154,180 @@ public static Slice stLineString(@SqlType("array(" + StandardTypes.GEOMETRY + ")
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("Invalid input to ST_LineString: null point at index %s", i + 1));
}
- OGCGeometry geometry = deserialize(slice);
- if (!(geometry instanceof OGCPoint point)) {
- throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("ST_LineString takes only an array of valid points, %s was passed", geometry.geometryType()));
+ Geometry geometry = JtsGeometrySerde.deserialize(slice);
+ if (!(geometry instanceof Point point)) {
+ throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("ST_LineString takes only an array of valid points, %s was passed", geometry.getGeometryType()));
}
if (point.isEmpty()) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("Invalid input to ST_LineString: empty point at index %s", i + 1));
}
- if (previousPoint == null) {
- multipath.startPath(point.X(), point.Y());
+ Coordinate coordinate = point.getCoordinate();
+ if (coordinate.equals(previousCoordinate)) {
+ throw new TrinoException(INVALID_FUNCTION_ARGUMENT,
+ format("Invalid input to ST_LineString: consecutive duplicate points at index %s", i + 1));
}
- else {
- if (point.Equals(previousPoint)) {
- throw new TrinoException(INVALID_FUNCTION_ARGUMENT,
- format("Invalid input to ST_LineString: consecutive duplicate points at index %s", i + 1));
- }
- multipath.lineTo(point.X(), point.Y());
- }
- previousPoint = point;
+ coordinates.add(coordinate);
+ previousCoordinate = coordinate;
+ }
+ // A linestring needs 0 or >= 2 points; single point returns empty
+ if (coordinates.size() == 1) {
+ return GEOMETRY_FACTORY.createLineString();
}
- OGCLineString linestring = new OGCLineString(multipath, 0, null);
- return serialize(linestring);
+ return GEOMETRY_FACTORY.createLineString(coordinates.toArray(new Coordinate[0]));
}
@Description("Returns a Geometry type Point object with the given coordinate values")
@ScalarFunction("ST_Point")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stPoint(@SqlType(DOUBLE) double x, @SqlType(DOUBLE) double y)
+ public static Geometry stPoint(@SqlType(DOUBLE) double x, @SqlType(DOUBLE) double y)
{
- OGCGeometry geometry = createFromEsriGeometry(new Point(x, y), null);
- return serialize(geometry);
+ return GEOMETRY_FACTORY.createPoint(new Coordinate(x, y));
}
@SqlNullable
@Description("Returns a multi-point geometry formed from input points")
@ScalarFunction("ST_MultiPoint")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stMultiPoint(@SqlType("array(" + StandardTypes.GEOMETRY + ")") Block input)
+ public static Geometry stMultiPoint(@SqlType("array(" + StandardTypes.GEOMETRY + ")") Block input)
{
- MultiPoint multipoint = new MultiPoint();
+ List points = new ArrayList<>();
for (int i = 0; i < input.getPositionCount(); i++) {
if (input.isNull(i)) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("Invalid input to ST_MultiPoint: null at index %s", i + 1));
}
Slice slice = GEOMETRY.getSlice(input, i);
- OGCGeometry geometry = deserialize(slice);
- if (!(geometry instanceof OGCPoint point)) {
- throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("Invalid input to ST_MultiPoint: geometry is not a point: %s at index %s", geometry.geometryType(), i + 1));
+ Geometry geometry = JtsGeometrySerde.deserialize(slice);
+ if (!(geometry instanceof Point point)) {
+ throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("Invalid input to ST_MultiPoint: geometry is not a point: %s at index %s", geometry.getGeometryType(), i + 1));
}
if (point.isEmpty()) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("Invalid input to ST_MultiPoint: empty point at index %s", i + 1));
}
- multipoint.add(point.X(), point.Y());
+ points.add(point);
}
- if (multipoint.getPointCount() == 0) {
+ if (points.isEmpty()) {
return null;
}
- return serialize(createFromEsriGeometry(multipoint, null, true));
+ return GEOMETRY_FACTORY.createMultiPoint(points.toArray(new Point[0]));
}
@Description("Returns a Geometry type Polygon object from Well-Known Text representation (WKT)")
@ScalarFunction("ST_Polygon")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stPolygon(@SqlType(VARCHAR) Slice input)
+ public static Geometry stPolygon(@SqlType(VARCHAR) Slice input)
{
- OGCGeometry geometry = geometryFromText(input);
+ Geometry geometry = geometryFromText(input);
validateType("ST_Polygon", geometry, EnumSet.of(POLYGON));
- return serialize(geometry);
+ return geometry;
}
@Description("Returns the 2D Euclidean area of a geometry")
@ScalarFunction("ST_Area")
@SqlType(DOUBLE)
- public static double stArea(@SqlType(StandardTypes.GEOMETRY) Slice input)
- {
- OGCGeometry geometry = deserialize(input);
-
- // The Esri geometry library does not support area for geometry collections. We compute the area
- // of collections by summing the area of the individual components.
- GeometryType type = GeometryType.getForEsriGeometryType(geometry.geometryType());
- if (type == GeometryType.GEOMETRY_COLLECTION) {
- double area = 0.0;
- GeometryCursor cursor = geometry.getEsriGeometryCursor();
- while (true) {
- com.esri.core.geometry.Geometry esriGeometry = cursor.next();
- if (esriGeometry == null) {
- return area;
- }
-
- area += esriGeometry.calculateArea2D();
- }
- }
- return geometry.getEsriGeometry().calculateArea2D();
+ public static double stArea(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
+ {
+ return geometry.getArea();
}
@Description("Returns a Geometry type object from Well-Known Text representation (WKT)")
@ScalarFunction("ST_GeometryFromText")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stGeometryFromText(@SqlType(VARCHAR) Slice input)
+ public static Geometry stGeometryFromText(@SqlType(VARCHAR) Slice input)
{
- return serialize(geometryFromText(input));
+ return geometryFromText(input);
}
- @Description("Returns a Geometry type object from Well-Known Binary representation (WKB)")
+ @Description("Returns a Geometry type object from Well-Known Binary representation (WKB or EWKB)")
@ScalarFunction("ST_GeomFromBinary")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stGeomFromBinary(@SqlType(VARBINARY) Slice input)
+ public static Geometry stGeomFromBinary(@SqlType(VARBINARY) Slice input)
+ {
+ // Parse the WKB/EWKB format - WKBReader handles both WKB (SRID=0) and EWKB (preserves SRID)
+ try {
+ return JtsGeometrySerde.deserialize(input);
+ }
+ catch (IllegalArgumentException e) {
+ throw new TrinoException(INVALID_FUNCTION_ARGUMENT, e.getMessage(), e);
+ }
+ }
+
+ @Description("Returns the spatial reference identifier for the geometry")
+ @ScalarFunction("ST_SRID")
+ @SqlType(INTEGER)
+ public static long stSrid(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
+ {
+ return geometry.getSRID();
+ }
+
+ @Description("Sets the spatial reference identifier for the geometry")
+ @ScalarFunction("ST_SetSRID")
+ @SqlType(StandardTypes.GEOMETRY)
+ public static Geometry stSetSrid(@SqlType(StandardTypes.GEOMETRY) Geometry geometry, @SqlType(INTEGER) long srid)
+ {
+ geometry.setSRID(toIntExact(srid));
+ return geometry;
+ }
+
+ @Description("Returns the Extended Well-Known Binary (EWKB) representation of the geometry")
+ @ScalarFunction("ST_AsEWKB")
+ @SqlType(VARBINARY)
+ public static Slice stAsEwkb(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- return serialize(geomFromBinary(input));
+ return JtsGeometrySerde.serialize(geometry);
}
@Description("Returns a Geometry type object from OGC KML representation")
@ScalarFunction("ST_GeomFromKML")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stGeomFromKML(@SqlType(VARCHAR) Slice input)
+ public static Geometry stGeomFromKML(@SqlType(VARCHAR) Slice input)
{
- return serialize(geomFromKML(input));
+ return geomFromKML(input);
}
@Description("Returns a Geometry type object from Spatial Framework for Hadoop representation")
@ScalarFunction("geometry_from_hadoop_shape")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice geometryFromHadoopShape(@SqlType(VARBINARY) Slice input)
+ public static Geometry geometryFromHadoopShape(@SqlType(VARBINARY) Slice input)
{
requireNonNull(input, "input is null");
+ // Check minimum length (SRID + type + at least some shape data)
+ int minOffset = HADOOP_SHAPE_SIZE_WKID + HADOOP_SHAPE_SIZE_TYPE;
+ if (input.length() <= minOffset) {
+ throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Hadoop shape input is too short");
+ }
+
+ // Validate OGC type (valid types are 0-6)
+ byte hadoopShapeType = input.getByte(HADOOP_SHAPE_SIZE_WKID);
+ if (hadoopShapeType < 0 || hadoopShapeType > 6) {
+ throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Invalid Hadoop shape type: " + hadoopShapeType);
+ }
+
try {
- OGCGeometry geometry = OGCGeometry.fromEsriShape(getShapeByteBuffer(input));
- String wkt = geometryToWkt(geometry.getEsriGeometry(), getWktExportFlags(input));
- return serialize(OGCGeometry.fromText(wkt));
+ Slice shapeSlice = input.slice(minOffset, input.length() - minOffset);
+
+ // Peek at ESRI shape type to validate it matches the OGC type
+ int esriShapeType = shapeSlice.getInt(0); // peek at first 4 bytes
+ validateShapeTypeMatch(hadoopShapeType, esriShapeType);
+
+ Geometry geometry = EsriShapeReader.read(shapeSlice);
+
+ // For empty geometries, use the OGC type to determine the correct Multi type
+ // OGC types: 0=unknown, 1=point, 2=linestring, 3=polygon, 4=multipoint, 5=multilinestring, 6=multipolygon
+ if (geometry.isEmpty()) {
+ geometry = switch (hadoopShapeType) {
+ case 4 -> GEOMETRY_FACTORY.createMultiPoint();
+ case 5 -> GEOMETRY_FACTORY.createMultiLineString();
+ case 6 -> GEOMETRY_FACTORY.createMultiPolygon();
+ default -> geometry;
+ };
+ }
+
+ return geometry;
}
catch (IndexOutOfBoundsException | UnsupportedOperationException | IllegalArgumentException e) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Invalid Hadoop shape", e);
@@ -345,66 +337,91 @@ public static Slice geometryFromHadoopShape(@SqlType(VARBINARY) Slice input)
@Description("Converts a Geometry object to a SphericalGeography object")
@ScalarFunction("to_spherical_geography")
@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY)
- public static Slice toSphericalGeography(@SqlType(StandardTypes.GEOMETRY) Slice input)
- {
- // "every point in input is in range" <=> "the envelope of input is in range"
- Envelope envelope = deserializeEnvelope(input);
- if (!envelope.isEmpty()) {
- checkLatitude(envelope.getYMin());
- checkLatitude(envelope.getYMax());
- checkLongitude(envelope.getXMin());
- checkLongitude(envelope.getXMax());
- }
- OGCGeometry geometry = deserialize(input);
- if (geometry.is3D()) {
- throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Cannot convert 3D geometry to a spherical geography");
- }
-
- GeometryCursor cursor = geometry.getEsriGeometryCursor();
- while (true) {
- com.esri.core.geometry.Geometry subGeometry = cursor.next();
- if (subGeometry == null) {
- break;
- }
+ public static Geometry toSphericalGeography(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
+ {
+ Envelope envelope = geometry.getEnvelopeInternal();
+ if (!envelope.isNull()) {
+ checkLatitude(envelope.getMinY());
+ checkLatitude(envelope.getMaxY());
+ checkLongitude(envelope.getMinX());
+ checkLongitude(envelope.getMaxX());
+ }
- if (!GEOMETRY_TYPES_FOR_SPHERICAL_GEOGRAPHY.contains(subGeometry.getType())) {
- throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Cannot convert geometry of this type to spherical geography: " + subGeometry.getType());
+ // Check for 3D geometry
+ for (Coordinate coord : geometry.getCoordinates()) {
+ if (!isNaN(coord.getZ())) {
+ throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Cannot convert 3D geometry to a spherical geography");
}
}
- return input;
+ // Validate geometry types
+ validateSphericalGeographyTypes(geometry);
+
+ return geometry;
+ }
+
+ private static void validateSphericalGeographyTypes(Geometry geometry)
+ {
+ // For collections (including MultiPoint, MultiLineString, MultiPolygon), recursively check each component
+ if (geometry instanceof GeometryCollection gc) {
+ for (int i = 0; i < gc.getNumGeometries(); i++) {
+ validateSphericalGeographyTypes(gc.getGeometryN(i));
+ }
+ }
+ else {
+ // Leaf geometry types: Point, LineString, LinearRing, Polygon
+ String type = geometry.getGeometryType();
+ if (!VALID_SPHERICAL_GEOGRAPHY_LEAF_TYPES.contains(type)) {
+ throw new TrinoException(INVALID_FUNCTION_ARGUMENT,
+ "Cannot convert geometry of this type to spherical geography: " + type);
+ }
+ }
}
@Description("Converts a SphericalGeography object to a Geometry object.")
@ScalarFunction("to_geometry")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice toGeometry(@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY) Slice input)
+ public static Geometry toGeometry(@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY) Geometry geometry)
{
// Every SphericalGeography object is a valid geometry object
- return input;
+ return geometry;
}
@Description("Returns the Well-Known Text (WKT) representation of the geometry")
@ScalarFunction("ST_AsText")
@SqlType(VARCHAR)
- public static Slice stAsText(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Slice stAsText(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- return utf8Slice(deserialize(input).asText());
+ return utf8Slice(new WKTWriter().write(geometry));
+ }
+
+ @Description("Returns the Extended Well-Known Text (EWKT) representation of the geometry, including SRID")
+ @ScalarFunction("ST_AsEWKT")
+ @SqlType(VARCHAR)
+ public static Slice stAsEwkt(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
+ {
+ String wkt = new WKTWriter().write(geometry);
+ int srid = geometry.getSRID();
+ if (srid != 0) {
+ return utf8Slice("SRID=" + srid + ";" + wkt);
+ }
+ return utf8Slice(wkt);
}
@Description("Returns the Well-Known Binary (WKB) representation of the geometry")
@ScalarFunction("ST_AsBinary")
@SqlType(VARBINARY)
- public static Slice stAsBinary(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Slice stAsBinary(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- return wrappedHeapBuffer(deserialize(input).asBinary());
+ // Strip SRID for OGC WKB compatibility (external systems expect standard WKB)
+ return Slices.wrappedBuffer(new WKBWriter(2, false).write(geometry));
}
@SqlNullable
@Description("Returns the geometry that represents all points whose distance from the specified geometry is less than or equal to the specified distance")
@ScalarFunction("ST_Buffer")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stBuffer(@SqlType(StandardTypes.GEOMETRY) Slice input, @SqlType(DOUBLE) double distance)
+ public static Geometry stBuffer(@SqlType(StandardTypes.GEOMETRY) Geometry geometry, @SqlType(DOUBLE) double distance)
{
if (isNaN(distance)) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "distance is NaN");
@@ -415,81 +432,91 @@ public static Slice stBuffer(@SqlType(StandardTypes.GEOMETRY) Slice input, @SqlT
}
if (distance == 0) {
- return input;
+ return geometry;
}
- OGCGeometry geometry = deserialize(input);
if (geometry.isEmpty()) {
return null;
}
- return serialize(geometry.buffer(distance));
+ Geometry result = geometry.buffer(distance);
+ result.setSRID(geometry.getSRID());
+ return result;
}
@Description("Returns the Point value that is the mathematical centroid of a Geometry")
@ScalarFunction("ST_Centroid")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stCentroid(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Geometry stCentroid(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_Centroid", geometry, EnumSet.of(POINT, MULTI_POINT, LINE_STRING, MULTI_LINE_STRING, POLYGON, MULTI_POLYGON));
- GeometryType geometryType = GeometryType.getForEsriGeometryType(geometry.geometryType());
- if (geometryType == GeometryType.POINT) {
- return input;
+ GeometryType geometryType = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
+ if (geometryType == POINT) {
+ return geometry;
}
- int pointCount = ((MultiVertexGeometry) geometry.getEsriGeometry()).getPointCount();
- if (pointCount == 0) {
- return serialize(createFromEsriGeometry(new Point(), geometry.getEsriSpatialReference()));
+ if (geometry.isEmpty()) {
+ Point result = geometry.getFactory().createPoint();
+ result.setSRID(geometry.getSRID());
+ return result;
}
- return serialize(geometry.centroid());
+ Point result = geometry.getCentroid();
+ result.setSRID(geometry.getSRID());
+ return result;
}
@Description("Returns the minimum convex geometry that encloses all input geometries")
@ScalarFunction("ST_ConvexHull")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stConvexHull(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Geometry stConvexHull(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
if (geometry.isEmpty()) {
- return input;
+ return geometry;
}
- if (GeometryType.getForEsriGeometryType(geometry.geometryType()) == POINT) {
- return input;
+ if (GeometryType.getForJtsGeometryType(geometry.getGeometryType()) == POINT) {
+ return geometry;
}
- return serialize(geometry.convexHull());
+ Geometry result = geometry.convexHull();
+ result.setSRID(geometry.getSRID());
+ return result;
}
@Description("Return the coordinate dimension of the Geometry")
@ScalarFunction("ST_CoordDim")
@SqlType(TINYINT)
- public static long stCoordinateDimension(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static long stCoordinateDimension(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- return deserialize(input).coordinateDimension();
+ Coordinate[] coordinates = geometry.getCoordinates();
+ // Check if any coordinate has a valid Z value (non-NaN)
+ for (Coordinate coordinate : coordinates) {
+ if (!isNaN(coordinate.getZ())) {
+ return 3;
+ }
+ }
+ return 2;
}
@Description("Returns the inherent dimension of this Geometry object, which must be less than or equal to the coordinate dimension")
@ScalarFunction("ST_Dimension")
@SqlType(TINYINT)
- public static long stDimension(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static long stDimension(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- return deserialize(input).dimension();
+ return geometry.getDimension();
}
@SqlNullable
@Description("Returns TRUE if the LineString or Multi-LineString's start and end points are coincident")
@ScalarFunction("ST_IsClosed")
@SqlType(BOOLEAN)
- public static Boolean stIsClosed(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Boolean stIsClosed(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_IsClosed", geometry, EnumSet.of(LINE_STRING, MULTI_LINE_STRING));
- MultiPath lines = (MultiPath) geometry.getEsriGeometry();
- int pathCount = lines.getPathCount();
- for (int i = 0; i < pathCount; i++) {
- Point start = lines.getPoint(lines.getPathStart(i));
- Point end = lines.getPoint(lines.getPathEnd(i) - 1);
- if (!end.equals(start)) {
+ if (geometry instanceof LineString lineString) {
+ return lineString.isClosed();
+ }
+ MultiLineString multiLineString = (MultiLineString) geometry;
+ for (int i = 0; i < multiLineString.getNumGeometries(); i++) {
+ if (!((LineString) multiLineString.getGeometryN(i)).isClosed()) {
return false;
}
}
@@ -500,114 +527,87 @@ public static Boolean stIsClosed(@SqlType(StandardTypes.GEOMETRY) Slice input)
@Description("Returns TRUE if this Geometry is an empty geometrycollection, polygon, point etc")
@ScalarFunction("ST_IsEmpty")
@SqlType(BOOLEAN)
- public static Boolean stIsEmpty(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Boolean stIsEmpty(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- return deserializeEnvelope(input).isEmpty();
+ return geometry.isEmpty();
}
@Description("Returns TRUE if this Geometry has no anomalous geometric points, such as self intersection or self tangency")
@ScalarFunction("ST_IsSimple")
@SqlType(BOOLEAN)
- public static boolean stIsSimple(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static boolean stIsSimple(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
return geometry.isEmpty() || geometry.isSimple();
}
@Description("Returns true if the input geometry is well formed")
@ScalarFunction("ST_IsValid")
@SqlType(BOOLEAN)
- public static boolean stIsValid(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static boolean stIsValid(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- GeometryCursor cursor = deserialize(input).getEsriGeometryCursor();
- while (true) {
- com.esri.core.geometry.Geometry geometry = cursor.next();
- if (geometry == null) {
- return true;
- }
-
- if (!OperatorSimplifyOGC.local().isSimpleOGC(geometry, null, true, null, null)) {
- return false;
- }
- }
+ return new IsValidOp(geometry).isValid();
}
@Description("Returns the reason for why the input geometry is not valid. Returns null if the input is valid.")
@ScalarFunction("geometry_invalid_reason")
@SqlType(VARCHAR)
@SqlNullable
- public static Slice invalidReason(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Slice invalidReason(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- GeometryCursor cursor = deserialize(input).getEsriGeometryCursor();
- NonSimpleResult result = new NonSimpleResult();
- while (true) {
- com.esri.core.geometry.Geometry geometry = cursor.next();
- if (geometry == null) {
- return null;
- }
-
- if (!OperatorSimplifyOGC.local().isSimpleOGC(geometry, null, true, result, null)) {
- String reasonText = NON_SIMPLE_REASONS.getOrDefault(result.m_reason, result.m_reason.name());
-
- if (!(geometry instanceof MultiVertexGeometry multiVertexGeometry)) {
- return utf8Slice(reasonText);
- }
-
- if (result.m_vertexIndex1 >= 0 && result.m_vertexIndex2 >= 0) {
- Point point1 = multiVertexGeometry.getPoint(result.m_vertexIndex1);
- Point point2 = multiVertexGeometry.getPoint(result.m_vertexIndex2);
- return utf8Slice(format("%s at or near (%s %s) and (%s %s)", reasonText, point1.getX(), point1.getY(), point2.getX(), point2.getY()));
- }
+ IsValidOp validOp = new IsValidOp(geometry);
+ if (validOp.isValid()) {
+ return null;
+ }
- if (result.m_vertexIndex1 >= 0) {
- Point point = multiVertexGeometry.getPoint(result.m_vertexIndex1);
- return utf8Slice(format("%s at or near (%s %s)", reasonText, point.getX(), point.getY()));
- }
+ TopologyValidationError error = validOp.getValidationError();
+ if (error == null) {
+ return null;
+ }
- return utf8Slice(reasonText);
- }
+ Coordinate coordinate = error.getCoordinate();
+ if (coordinate != null) {
+ return utf8Slice(format("%s at or near (%s %s)", error.getMessage(), coordinate.getX(), coordinate.getY()));
}
+ return utf8Slice(error.getMessage());
}
@Description("Returns the length of a LineString or Multi-LineString using Euclidean measurement on a 2D plane (based on spatial ref) in projected units")
@ScalarFunction("ST_Length")
@SqlType(DOUBLE)
- public static double stLength(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static double stLength(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_Length", geometry, EnumSet.of(LINE_STRING, MULTI_LINE_STRING));
- return geometry.getEsriGeometry().calculateLength2D();
+ return geometry.getLength();
}
@SqlNullable
@Description("Returns the great-circle length in meters of a linestring or multi-linestring on Earth's surface")
@ScalarFunction("ST_Length")
@SqlType(DOUBLE)
- public static Double stSphericalLength(@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY) Slice input)
+ public static Double stSphericalLength(@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
if (geometry.isEmpty()) {
return null;
}
validateSphericalType("ST_Length", geometry, EnumSet.of(LINE_STRING, MULTI_LINE_STRING));
- MultiPath lineString = (MultiPath) geometry.getEsriGeometry();
double sum = 0;
- // sum up paths on (multi)linestring
- for (int path = 0; path < lineString.getPathCount(); path++) {
- if (lineString.getPathSize(path) < 2) {
+ // Handle both LineString and MultiLineString
+ int numGeometries = geometry.getNumGeometries();
+ for (int g = 0; g < numGeometries; g++) {
+ LineString lineString = (LineString) geometry.getGeometryN(g);
+ Coordinate[] coordinates = lineString.getCoordinates();
+ if (coordinates.length < 2) {
continue;
}
- // sum up distances between adjacent points on this path
- int pathStart = lineString.getPathStart(path);
- Point previous = lineString.getPoint(pathStart);
- for (int i = pathStart + 1; i < lineString.getPathEnd(path); i++) {
- Point next = lineString.getPoint(i);
+ // sum up distances between adjacent points on this linestring
+ for (int i = 1; i < coordinates.length; i++) {
+ Coordinate previous = coordinates[i - 1];
+ Coordinate next = coordinates[i];
sum += greatCircleDistance(previous.getY(), previous.getX(), next.getY(), next.getX());
- previous = next;
}
}
@@ -618,22 +618,19 @@ public static Double stSphericalLength(@SqlType(StandardTypes.SPHERICAL_GEOGRAPH
@Description("Returns a float between 0 and 1 representing the location of the closest point on the LineString to the given Point, as a fraction of total 2d line length.")
@ScalarFunction("line_locate_point")
@SqlType(DOUBLE)
- public static Double lineLocatePoint(@SqlType(StandardTypes.GEOMETRY) Slice lineSlice, @SqlType(StandardTypes.GEOMETRY) Slice pointSlice)
+ public static Double lineLocatePoint(@SqlType(StandardTypes.GEOMETRY) Geometry line, @SqlType(StandardTypes.GEOMETRY) Geometry point)
{
- Geometry line = JtsGeometrySerde.deserialize(lineSlice);
- Geometry point = JtsGeometrySerde.deserialize(pointSlice);
-
if (line.isEmpty() || point.isEmpty()) {
return null;
}
GeometryType lineType = GeometryType.getForJtsGeometryType(line.getGeometryType());
- if (lineType != GeometryType.LINE_STRING && lineType != GeometryType.MULTI_LINE_STRING) {
+ if (lineType != LINE_STRING && lineType != MULTI_LINE_STRING) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("First argument to line_locate_point must be a LineString or a MultiLineString. Got: %s", line.getGeometryType()));
}
GeometryType pointType = GeometryType.getForJtsGeometryType(point.getGeometryType());
- if (pointType != GeometryType.POINT) {
+ if (pointType != POINT) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("Second argument to line_locate_point must be a Point. Got: %s", point.getGeometryType()));
}
@@ -644,17 +641,16 @@ public static Double lineLocatePoint(@SqlType(StandardTypes.GEOMETRY) Slice line
@Description("Returns a Point interpolated along a LineString at the fraction given.")
@ScalarFunction("line_interpolate_point")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice lineInterpolatePoint(
- @SqlType(StandardTypes.GEOMETRY) Slice input,
- @SqlType(StandardTypes.DOUBLE) double distanceFraction)
+ public static Geometry lineInterpolatePoint(
+ @SqlType(StandardTypes.GEOMETRY) Geometry geometry,
+ @SqlType(DOUBLE) double distanceFraction)
{
- OGCGeometry geometry = deserialize(input);
if (geometry.isEmpty()) {
return null;
}
List<Point> interpolatedPoints = interpolatePoints(geometry, distanceFraction, false);
- return serialize(createFromEsriGeometry(interpolatedPoints.get(0), null));
+ return interpolatedPoints.getFirst();
}
@SqlNullable
@@ -662,10 +658,9 @@ public static Slice lineInterpolatePoint(
@ScalarFunction("line_interpolate_points")
@SqlType("array(" + StandardTypes.GEOMETRY + ")")
public static Block lineInterpolatePoints(
- @SqlType(StandardTypes.GEOMETRY) Slice input,
- @SqlType(StandardTypes.DOUBLE) double fractionStep)
+ @SqlType(StandardTypes.GEOMETRY) Geometry geometry,
+ @SqlType(DOUBLE) double fractionStep)
{
- OGCGeometry geometry = deserialize(input);
if (geometry.isEmpty()) {
return null;
}
@@ -673,54 +668,37 @@ public static Block lineInterpolatePoints(
List<Point> interpolatedPoints = interpolatePoints(geometry, fractionStep, true);
BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, interpolatedPoints.size());
for (Point point : interpolatedPoints) {
- GEOMETRY.writeSlice(blockBuilder, serialize(createFromEsriGeometry(point, null)));
+ GEOMETRY.writeObject(blockBuilder, point);
}
return blockBuilder.build();
}
- private static List<Point> interpolatePoints(OGCGeometry geometry, double fractionStep, boolean repeated)
+ private static List<Point> interpolatePoints(Geometry geometry, double fractionStep, boolean repeated)
{
validateType("line_interpolate_point", geometry, EnumSet.of(LINE_STRING));
if (fractionStep < 0 || fractionStep > 1) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "fraction must be between 0 and 1");
}
- MultiPath path = (MultiPath) geometry.getEsriGeometry();
+ LineString lineString = (LineString) geometry;
+ LengthIndexedLine indexedLine = new LengthIndexedLine(lineString);
+ double lineLength = lineString.getLength();
if (fractionStep == 0) {
- return Collections.singletonList(path.getPoint(0));
+ return Collections.singletonList(lineString.getStartPoint());
}
if (fractionStep == 1) {
- return Collections.singletonList(path.getPoint(path.getPointCount() - 1));
+ return Collections.singletonList(lineString.getEndPoint());
}
int pointCount = repeated ? (int) Math.floor(1 / fractionStep) : 1;
List<Point> interpolatedPoints = new ArrayList<>(pointCount);
- double lineStringLength = path.calculateLength2D();
- Point previous = path.getPoint(0);
- double fractionConsumed = 0.0;
- double fractionIncrement = fractionStep;
-
- for (int i = 1; i < path.getPointCount() && interpolatedPoints.size() < pointCount; i++) {
- Point current = path.getPoint(i);
- double segmentLengthFraction = GeometryEngine.distance(previous, current, null) / lineStringLength;
-
- while (fractionStep < fractionConsumed + segmentLengthFraction && interpolatedPoints.size() < pointCount) {
- double segmentFraction = (fractionStep - fractionConsumed) / segmentLengthFraction;
- Point point = new Point();
- point.setX(previous.getX() + (current.getX() - previous.getX()) * segmentFraction);
- point.setY(previous.getY() + (current.getY() - previous.getY()) * segmentFraction);
- interpolatedPoints.add(point);
- fractionStep += fractionIncrement;
- }
-
- fractionConsumed += segmentLengthFraction;
- previous = current;
- }
-
- if (interpolatedPoints.size() < pointCount) {
- interpolatedPoints.add(path.getPoint(path.getPointCount() - 1));
+ double currentFraction = fractionStep;
+ while (interpolatedPoints.size() < pointCount) {
+ Coordinate coord = indexedLine.extractPoint(currentFraction * lineLength);
+ interpolatedPoints.add(GEOMETRY_FACTORY.createPoint(coord));
+ currentFraction += fractionStep;
}
return interpolatedPoints;
@@ -730,84 +708,82 @@ private static List interpolatePoints(OGCGeometry geometry, double fracti
@Description("Returns X maxima of a bounding box of a Geometry")
@ScalarFunction("ST_XMax")
@SqlType(DOUBLE)
- public static Double stXMax(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Double stXMax(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- Envelope envelope = deserializeEnvelope(input);
- if (envelope.isEmpty()) {
+ Envelope envelope = geometry.getEnvelopeInternal();
+ if (envelope.isNull()) {
return null;
}
- return envelope.getXMax();
+ return envelope.getMaxX();
}
@SqlNullable
@Description("Returns Y maxima of a bounding box of a Geometry")
@ScalarFunction("ST_YMax")
@SqlType(DOUBLE)
- public static Double stYMax(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Double stYMax(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- Envelope envelope = deserializeEnvelope(input);
- if (envelope.isEmpty()) {
+ Envelope envelope = geometry.getEnvelopeInternal();
+ if (envelope.isNull()) {
return null;
}
- return envelope.getYMax();
+ return envelope.getMaxY();
}
@SqlNullable
@Description("Returns X minima of a bounding box of a Geometry")
@ScalarFunction("ST_XMin")
@SqlType(DOUBLE)
- public static Double stXMin(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Double stXMin(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- Envelope envelope = deserializeEnvelope(input);
- if (envelope.isEmpty()) {
+ Envelope envelope = geometry.getEnvelopeInternal();
+ if (envelope.isNull()) {
return null;
}
- return envelope.getXMin();
+ return envelope.getMinX();
}
@SqlNullable
@Description("Returns Y minima of a bounding box of a Geometry")
@ScalarFunction("ST_YMin")
@SqlType(DOUBLE)
- public static Double stYMin(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Double stYMin(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- Envelope envelope = deserializeEnvelope(input);
- if (envelope.isEmpty()) {
+ Envelope envelope = geometry.getEnvelopeInternal();
+ if (envelope.isNull()) {
return null;
}
- return envelope.getYMin();
+ return envelope.getMinY();
}
@SqlNullable
@Description("Returns the cardinality of the collection of interior rings of a polygon")
@ScalarFunction("ST_NumInteriorRing")
@SqlType(BIGINT)
- public static Long stNumInteriorRings(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Long stNumInteriorRings(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_NumInteriorRing", geometry, EnumSet.of(POLYGON));
if (geometry.isEmpty()) {
return null;
}
- return Long.valueOf(((OGCPolygon) geometry).numInteriorRing());
+ return (long) ((Polygon) geometry).getNumInteriorRing();
}
@SqlNullable
@Description("Returns an array of interior rings of a polygon")
@ScalarFunction("ST_InteriorRings")
@SqlType("array(" + StandardTypes.GEOMETRY + ")")
- public static Block stInteriorRings(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Block stInteriorRings(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_InteriorRings", geometry, EnumSet.of(POLYGON));
if (geometry.isEmpty()) {
return null;
}
- OGCPolygon polygon = (OGCPolygon) geometry;
- BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, polygon.numInteriorRing());
- for (int i = 0; i < polygon.numInteriorRing(); i++) {
- GEOMETRY.writeSlice(blockBuilder, serialize(polygon.interiorRingN(i)));
+ Polygon polygon = (Polygon) geometry;
+ BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, polygon.getNumInteriorRing());
+ for (int i = 0; i < polygon.getNumInteriorRing(); i++) {
+ GEOMETRY.writeObject(blockBuilder, polygon.getInteriorRingN(i));
}
return blockBuilder.build();
}
@@ -815,144 +791,244 @@ public static Block stInteriorRings(@SqlType(StandardTypes.GEOMETRY) Slice input
@Description("Returns the cardinality of the geometry collection")
@ScalarFunction("ST_NumGeometries")
@SqlType(INTEGER)
- public static long stNumGeometries(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static long stNumGeometries(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
if (geometry.isEmpty()) {
return 0;
}
- GeometryType type = GeometryType.getForEsriGeometryType(geometry.geometryType());
+ GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
if (!type.isMultitype()) {
return 1;
}
- return ((OGCGeometryCollection) geometry).numGeometries();
+ return geometry.getNumGeometries();
}
@Description("Returns a geometry that represents the point set union of the input geometries.")
@ScalarFunction("ST_Union")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stUnion(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Geometry stUnion(@SqlType(StandardTypes.GEOMETRY) Geometry left, @SqlType(StandardTypes.GEOMETRY) Geometry right)
{
- return stUnion(ImmutableList.of(left, right));
+ return stUnionGeometries(ImmutableList.of(left, right));
}
@Description("Returns a geometry that represents the point set union of the input geometries.")
@ScalarFunction("geometry_union")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice geometryUnion(@SqlType("array(" + StandardTypes.GEOMETRY + ")") Block input)
+ public static Geometry geometryUnion(@SqlType("array(" + StandardTypes.GEOMETRY + ")") Block input)
{
- return stUnion(getGeometrySlicesFromBlock(input));
+ return stUnionGeometries(getGeometriesFromBlock(input));
}
- private static Slice stUnion(Iterable<Slice> slices)
+ private static Geometry stUnionGeometries(Iterable<Geometry> inputGeometries)
{
- // The current state of Esri/geometry-api-java does not allow support for multiple dimensions being
- // fed to the union operator without dropping the lower dimensions:
- // https://github.com/Esri/geometry-api-java/issues/199
- // When operating over a collection of geometries, it is more efficient to reuse the same operator
- // for the entire operation. Therefore, split the inputs and operators by dimension, and then union
- // each dimension's result at the end.
- ListeningGeometryCursor[] cursorsByDimension = new ListeningGeometryCursor[NUMBER_OF_DIMENSIONS];
- GeometryCursor[] operatorsByDimension = new GeometryCursor[NUMBER_OF_DIMENSIONS];
+ List<Geometry> geometries = new ArrayList<>();
+ int expectedSrid = 0;
+ for (Geometry geometry : inputGeometries) {
+ // Ignore null inputs
+ if (geometry == null) {
+ continue;
+ }
+ // Validate and track SRID
+ int srid = geometry.getSRID();
+ if (expectedSrid == 0) {
+ expectedSrid = srid;
+ }
+ else if (srid != 0 && srid != expectedSrid) {
+ throw new TrinoException(INVALID_FUNCTION_ARGUMENT,
+ format("SRID mismatch: %d vs %d", expectedSrid, srid));
+ }
+ if (!geometry.isEmpty()) {
+ // Flatten geometry collections to get individual geometries
+ flattenGeometry(geometry, geometries);
+ }
+ }
- setAll(cursorsByDimension, i -> new ListeningGeometryCursor());
- setAll(operatorsByDimension, i -> OperatorUnion.local().execute(cursorsByDimension[i], null, null));
+ if (geometries.isEmpty()) {
+ // Return empty geometry collection instead of null for empty inputs
+ return GEOMETRY_FACTORY.createGeometryCollection();
+ }
- Iterator<Slice> slicesIterator = slices.iterator();
- if (!slicesIterator.hasNext()) {
- return null;
+ // JTS UnaryUnionOp handles mixed dimensions properly
+ Geometry result = UnaryUnionOp.union(geometries);
+
+ // Post-process to match ESRI behavior:
+ // 1. Merge connected line segments
+ // 2. Reduce homogeneous geometry collections to Multi* types
+ result = postProcessUnion(result);
+
+ result.setSRID(expectedSrid);
+ return result;
+ }
+
+ /**
+ * Post-processes union result to match ESRI behavior:
+ * 1. Merge connected line segments
+ * 2. Reduce homogeneous geometry collections to Multi* types
+ */
+ private static Geometry postProcessUnion(Geometry geometry)
+ {
+ // Handle MultiLineString specially - merge connected lines
+ if (geometry instanceof MultiLineString mls) {
+ LineMerger lineMerger = new LineMerger();
+ lineMerger.add(mls);
+ @SuppressWarnings("unchecked")
+ Collection<LineString> merged = lineMerger.getMergedLineStrings();
+ if (merged.size() == 1) {
+ return merged.iterator().next();
+ }
+ return GEOMETRY_FACTORY.createMultiLineString(merged.toArray(new LineString[0]));
}
- while (slicesIterator.hasNext()) {
- Slice slice = slicesIterator.next();
- // Ignore null inputs
- if (slice.getInput().available() == 0) {
- continue;
+
+ if (!(geometry instanceof GeometryCollection gc) ||
+ geometry instanceof MultiPoint ||
+ geometry instanceof MultiPolygon) {
+ return geometry;
+ }
+
+ List<Point> points = new ArrayList<>();
+ List<LineString> lineStrings = new ArrayList<>();
+ List<Polygon> polygons = new ArrayList<>();
+ List<Geometry> others = new ArrayList<>();
+
+ for (int i = 0; i < gc.getNumGeometries(); i++) {
+ Geometry g = gc.getGeometryN(i);
+ if (g instanceof Point p) {
+ points.add(p);
+ }
+ else if (g instanceof LineString ls) {
+ lineStrings.add(ls);
+ }
+ else if (g instanceof Polygon p) {
+ polygons.add(p);
+ }
+ else if (g instanceof MultiLineString mls) {
+ for (int j = 0; j < mls.getNumGeometries(); j++) {
+ lineStrings.add((LineString) mls.getGeometryN(j));
+ }
+ }
+ else {
+ others.add(g);
}
+ }
- for (OGCGeometry geometry : flattenCollection(deserialize(slice))) {
- int dimension = geometry.dimension();
- cursorsByDimension[dimension].tick(geometry.getEsriGeometry());
- operatorsByDimension[dimension].tock();
+ List<Geometry> result = new ArrayList<>();
+
+ // Merge line strings and add to result
+ if (!lineStrings.isEmpty()) {
+ LineMerger lineMerger = new LineMerger();
+ lineStrings.forEach(lineMerger::add);
+ @SuppressWarnings("unchecked")
+ Collection<LineString> merged = lineMerger.getMergedLineStrings();
+ if (merged.size() == 1) {
+ result.add(merged.iterator().next());
+ }
+ else if (merged.size() > 1) {
+ result.add(GEOMETRY_FACTORY.createMultiLineString(merged.toArray(new LineString[0])));
}
}
- List<OGCGeometry> outputs = new ArrayList<>();
- for (GeometryCursor operator : operatorsByDimension) {
- OGCGeometry unionedGeometry = createFromEsriGeometry(operator.next(), null);
- if (unionedGeometry != null) {
- outputs.add(unionedGeometry);
+ // Reduce points to MultiPoint
+ if (!points.isEmpty()) {
+ if (points.size() == 1) {
+ result.add(points.get(0));
+ }
+ else {
+ result.add(GEOMETRY_FACTORY.createMultiPoint(points.toArray(new Point[0])));
}
}
- if (outputs.size() == 1) {
- return serialize(outputs.get(0));
+ // Reduce polygons to MultiPolygon
+ if (!polygons.isEmpty()) {
+ if (polygons.size() == 1) {
+ result.add(polygons.get(0));
+ }
+ else {
+ result.add(GEOMETRY_FACTORY.createMultiPolygon(polygons.toArray(new Polygon[0])));
+ }
+ }
+
+ // Add any other geometry types
+ result.addAll(others);
+
+ if (result.size() == 1) {
+ return result.get(0);
+ }
+ return GEOMETRY_FACTORY.createGeometryCollection(result.toArray(new Geometry[0]));
+ }
+
+ private static void flattenGeometry(Geometry geometry, List<Geometry> result)
+ {
+ if (geometry instanceof GeometryCollection gc) {
+ for (int i = 0; i < gc.getNumGeometries(); i++) {
+ flattenGeometry(gc.getGeometryN(i), result);
+ }
+ }
+ else if (!geometry.isEmpty()) {
+ result.add(geometry);
}
- return serialize(new OGCConcreteGeometryCollection(outputs, null).flattenAndRemoveOverlaps().reduceFromMulti());
}
@SqlNullable
@Description("Returns the geometry element at the specified index (indices started with 1)")
@ScalarFunction("ST_GeometryN")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stGeometryN(@SqlType(StandardTypes.GEOMETRY) Slice input, @SqlType(INTEGER) long index)
+ public static Geometry stGeometryN(@SqlType(StandardTypes.GEOMETRY) Geometry geometry, @SqlType(INTEGER) long index)
{
- OGCGeometry geometry = deserialize(input);
if (geometry.isEmpty()) {
return null;
}
- GeometryType type = GeometryType.getForEsriGeometryType(geometry.geometryType());
+ GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
if (!type.isMultitype()) {
if (index == 1) {
- return input;
+ return geometry;
}
return null;
}
- OGCGeometryCollection geometryCollection = ((OGCGeometryCollection) geometry);
- if (index < 1 || index > geometryCollection.numGeometries()) {
+ if (index < 1 || index > geometry.getNumGeometries()) {
return null;
}
- OGCGeometry ogcGeometry = geometryCollection.geometryN((int) index - 1);
- return serialize(ogcGeometry);
+ Geometry result = geometry.getGeometryN((int) index - 1);
+ result.setSRID(geometry.getSRID());
+ return result;
}
@SqlNullable
@Description("Returns the vertex of a linestring at the specified index (indices started with 1) ")
@ScalarFunction("ST_PointN")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stPointN(@SqlType(StandardTypes.GEOMETRY) Slice input, @SqlType(INTEGER) long index)
+ public static Geometry stPointN(@SqlType(StandardTypes.GEOMETRY) Geometry geometry, @SqlType(INTEGER) long index)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_PointN", geometry, EnumSet.of(LINE_STRING));
- OGCLineString linestring = (OGCLineString) geometry;
- if (index < 1 || index > linestring.numPoints()) {
+ LineString linestring = (LineString) geometry;
+ if (index < 1 || index > linestring.getNumPoints()) {
return null;
}
- return serialize(linestring.pointN(toIntExact(index) - 1));
+ Point result = linestring.getPointN(toIntExact(index) - 1);
+ result.setSRID(geometry.getSRID());
+ return result;
}
@SqlNullable
@Description("Returns an array of geometries in the specified collection")
@ScalarFunction("ST_Geometries")
@SqlType("array(" + StandardTypes.GEOMETRY + ")")
- public static Block stGeometries(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Block stGeometries(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
if (geometry.isEmpty()) {
return null;
}
- GeometryType type = GeometryType.getForEsriGeometryType(geometry.geometryType());
+ GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
if (!type.isMultitype()) {
BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, 1);
- GEOMETRY.writeSlice(blockBuilder, serialize(geometry));
+ GEOMETRY.writeObject(blockBuilder, geometry);
return blockBuilder.build();
}
- OGCGeometryCollection collection = (OGCGeometryCollection) geometry;
- BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, collection.numGeometries());
- for (int i = 0; i < collection.numGeometries(); i++) {
- GEOMETRY.writeSlice(blockBuilder, serialize(collection.geometryN(i)));
+ BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, geometry.getNumGeometries());
+ for (int i = 0; i < geometry.getNumGeometries(); i++) {
+ GEOMETRY.writeObject(blockBuilder, geometry.getGeometryN(i));
}
return blockBuilder.build();
}
@@ -961,58 +1037,55 @@ public static Block stGeometries(@SqlType(StandardTypes.GEOMETRY) Slice input)
@Description("Returns the interior ring element at the specified index (indices start at 1)")
@ScalarFunction("ST_InteriorRingN")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stInteriorRingN(@SqlType(StandardTypes.GEOMETRY) Slice input, @SqlType(INTEGER) long index)
+ public static Geometry stInteriorRingN(@SqlType(StandardTypes.GEOMETRY) Geometry geometry, @SqlType(INTEGER) long index)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_InteriorRingN", geometry, EnumSet.of(POLYGON));
- OGCPolygon polygon = (OGCPolygon) geometry;
- if (index < 1 || index > polygon.numInteriorRing()) {
+ Polygon polygon = (Polygon) geometry;
+ if (index < 1 || index > polygon.getNumInteriorRing()) {
return null;
}
- OGCGeometry interiorRing = polygon.interiorRingN(toIntExact(index) - 1);
- return serialize(interiorRing);
+ LinearRing ring = polygon.getInteriorRingN(toIntExact(index) - 1);
+ ring.setSRID(geometry.getSRID());
+ return linearRingToLineString(ring);
}
@Description("Returns the number of points in a Geometry")
@ScalarFunction("ST_NumPoints")
@SqlType(BIGINT)
- public static long stNumPoints(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static long stNumPoints(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- return getPointCount(deserialize(input));
+ return geometry.getNumPoints();
}
@SqlNullable
@Description("Returns TRUE if and only if the line is closed and simple")
@ScalarFunction("ST_IsRing")
@SqlType(BOOLEAN)
- public static Boolean stIsRing(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Boolean stIsRing(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_IsRing", geometry, EnumSet.of(LINE_STRING));
- OGCLineString line = (OGCLineString) geometry;
- return line.isClosed() && line.isSimple();
+ return ((LineString) geometry).isRing();
}
@SqlNullable
@Description("Returns the first point of a LINESTRING geometry as a Point")
@ScalarFunction("ST_StartPoint")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stStartPoint(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Geometry stStartPoint(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_StartPoint", geometry, EnumSet.of(LINE_STRING));
if (geometry.isEmpty()) {
return null;
}
- MultiPath lines = (MultiPath) geometry.getEsriGeometry();
- SpatialReference reference = geometry.getEsriSpatialReference();
- return serialize(createFromEsriGeometry(lines.getPoint(0), reference));
+ Point result = ((LineString) geometry).getStartPoint();
+ result.setSRID(geometry.getSRID());
+ return result;
}
@Description("Returns a \"simplified\" version of the given geometry")
@ScalarFunction("simplify_geometry")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice simplifyGeometry(@SqlType(StandardTypes.GEOMETRY) Slice input, @SqlType(DOUBLE) double distanceTolerance)
+ public static Geometry simplifyGeometry(@SqlType(StandardTypes.GEOMETRY) Geometry geometry, @SqlType(DOUBLE) double distanceTolerance)
{
if (isNaN(distanceTolerance)) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "distanceTolerance is NaN");
@@ -1023,35 +1096,35 @@ public static Slice simplifyGeometry(@SqlType(StandardTypes.GEOMETRY) Slice inpu
}
if (distanceTolerance == 0) {
- return input;
+ return geometry;
}
- return JtsGeometrySerde.serialize(simplify(JtsGeometrySerde.deserialize(input), distanceTolerance));
+ Geometry result = simplify(geometry, distanceTolerance);
+ result.setSRID(geometry.getSRID());
+ return result;
}
@SqlNullable
@Description("Returns the last point of a LINESTRING geometry as a Point")
@ScalarFunction("ST_EndPoint")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stEndPoint(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Geometry stEndPoint(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_EndPoint", geometry, EnumSet.of(LINE_STRING));
if (geometry.isEmpty()) {
return null;
}
- MultiPath lines = (MultiPath) geometry.getEsriGeometry();
- SpatialReference reference = geometry.getEsriSpatialReference();
- return serialize(createFromEsriGeometry(lines.getPoint(lines.getPointCount() - 1), reference));
+ Point result = ((LineString) geometry).getEndPoint();
+ result.setSRID(geometry.getSRID());
+ return result;
}
@SqlNullable
@Description("Returns an array of points in a geometry")
@ScalarFunction("ST_Points")
@SqlType("array(" + StandardTypes.GEOMETRY + ")")
- public static Block stPoints(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Block stPoints(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- Geometry geometry = JtsGeometrySerde.deserialize(input);
validateType("ST_Points", geometry, VALID_TYPES_FOR_ST_POINTS);
if (geometry.isEmpty()) {
return null;
@@ -1067,10 +1140,10 @@ public static Block stPoints(@SqlType(StandardTypes.GEOMETRY) Slice input)
private static void buildPointsBlock(Geometry geometry, BlockBuilder blockBuilder)
{
GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
- if (type == GeometryType.POINT) {
- GEOMETRY.writeSlice(blockBuilder, JtsGeometrySerde.serialize(geometry));
+ if (type == POINT) {
+ GEOMETRY.writeObject(blockBuilder, geometry);
}
- else if (type == GeometryType.GEOMETRY_COLLECTION) {
+ else if (type == GEOMETRY_COLLECTION) {
GeometryCollection collection = (GeometryCollection) geometry;
for (int i = 0; i < collection.getNumGeometries(); i++) {
Geometry entry = collection.getGeometryN(i);
@@ -1081,7 +1154,7 @@ else if (type == GeometryType.GEOMETRY_COLLECTION) {
GeometryFactory geometryFactory = geometry.getFactory();
Coordinate[] vertices = geometry.getCoordinates();
for (Coordinate coordinate : vertices) {
- GEOMETRY.writeSlice(blockBuilder, JtsGeometrySerde.serialize(geometryFactory.createPoint(coordinate)));
+ GEOMETRY.writeObject(blockBuilder, geometryFactory.createPoint(coordinate));
}
}
}
@@ -1090,93 +1163,92 @@ else if (type == GeometryType.GEOMETRY_COLLECTION) {
@Description("Return the X coordinate of the point")
@ScalarFunction("ST_X")
@SqlType(DOUBLE)
- public static Double stX(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Double stX(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_X", geometry, EnumSet.of(POINT));
if (geometry.isEmpty()) {
return null;
}
- return ((OGCPoint) geometry).X();
+ return ((Point) geometry).getX();
}
@SqlNullable
@Description("Return the Y coordinate of the point")
@ScalarFunction("ST_Y")
@SqlType(DOUBLE)
- public static Double stY(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Double stY(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_Y", geometry, EnumSet.of(POINT));
if (geometry.isEmpty()) {
return null;
}
- return ((OGCPoint) geometry).Y();
+ return ((Point) geometry).getY();
}
@Description("Returns the closure of the combinatorial boundary of this Geometry")
@ScalarFunction("ST_Boundary")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stBoundary(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Geometry stBoundary(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
- if (geometry.isEmpty() && GeometryType.getForEsriGeometryType(geometry.geometryType()) == LINE_STRING) {
- // OCGGeometry#boundary crashes with NPE for LINESTRING EMPTY
- return EMPTY_MULTIPOINT;
+ Geometry result = geometry.getBoundary();
+ result.setSRID(geometry.getSRID());
+ // WKB format has no LinearRing type, convert to LineString for consistency
+ if (result instanceof LinearRing ring) {
+ return linearRingToLineString(ring);
}
- return serialize(geometry.boundary());
+ return result;
}
@Description("Returns the bounding rectangular polygon of a Geometry")
@ScalarFunction("ST_Envelope")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stEnvelope(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Geometry stEnvelope(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- Envelope envelope = deserializeEnvelope(input);
- if (envelope.isEmpty()) {
- return EMPTY_POLYGON;
+ Envelope envelope = geometry.getEnvelopeInternal();
+ if (envelope.isNull()) {
+ return GEOMETRY_FACTORY.createPolygon();
}
- return serialize(envelope);
+ Geometry envelopeGeometry = geometry.getFactory().toGeometry(envelope);
+ envelopeGeometry.setSRID(geometry.getSRID());
+ return envelopeGeometry;
}
@SqlNullable
@Description("Returns the lower left and upper right corners of bounding rectangular polygon of a Geometry")
@ScalarFunction("ST_EnvelopeAsPts")
@SqlType("array(" + StandardTypes.GEOMETRY + ")")
- public static Block stEnvelopeAsPts(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Block stEnvelopeAsPts(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- Envelope envelope = deserializeEnvelope(input);
- if (envelope.isEmpty()) {
+ Envelope envelope = geometry.getEnvelopeInternal();
+ if (envelope.isNull()) {
return null;
}
BlockBuilder blockBuilder = GEOMETRY.createBlockBuilder(null, 2);
- Point lowerLeftCorner = new Point(envelope.getXMin(), envelope.getYMin());
- Point upperRightCorner = new Point(envelope.getXMax(), envelope.getYMax());
- GEOMETRY.writeSlice(blockBuilder, serialize(createFromEsriGeometry(lowerLeftCorner, null, false)));
- GEOMETRY.writeSlice(blockBuilder, serialize(createFromEsriGeometry(upperRightCorner, null, false)));
+ Point lowerLeftCorner = GEOMETRY_FACTORY.createPoint(new Coordinate(envelope.getMinX(), envelope.getMinY()));
+ Point upperRightCorner = GEOMETRY_FACTORY.createPoint(new Coordinate(envelope.getMaxX(), envelope.getMaxY()));
+ GEOMETRY.writeObject(blockBuilder, lowerLeftCorner);
+ GEOMETRY.writeObject(blockBuilder, upperRightCorner);
return blockBuilder.build();
}
@Description("Returns the Geometry value that represents the point set difference of two geometries")
@ScalarFunction("ST_Difference")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stDifference(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Geometry stDifference(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
{
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
- return serialize(leftGeometry.difference(rightGeometry));
+ // Use OverlayNGRobust for better handling of edge cases and invalid geometries
+ Geometry result = OverlayNGRobust.overlay(leftGeometry, rightGeometry, OverlayNG.DIFFERENCE);
+ result.setSRID(validateAndGetSrid(leftGeometry, rightGeometry));
+ return result;
}
@SqlNullable
@Description("Returns the 2-dimensional cartesian minimum distance (based on spatial ref) between two geometries in projected units")
@ScalarFunction("ST_Distance")
@SqlType(DOUBLE)
- public static Double stDistance(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Double stDistance(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
{
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
+ validateAndGetSrid(leftGeometry, rightGeometry);
return leftGeometry.isEmpty() || rightGeometry.isEmpty() ? null : leftGeometry.distance(rightGeometry);
}
@@ -1184,10 +1256,9 @@ public static Double stDistance(@SqlType(StandardTypes.GEOMETRY) Slice left, @Sq
@Description("Return the closest points on the two geometries")
@ScalarFunction("geometry_nearest_points")
@SqlType("row(" + StandardTypes.GEOMETRY + "," + StandardTypes.GEOMETRY + ")")
- public static SqlRow geometryNearestPoints(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static SqlRow geometryNearestPoints(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
{
- Geometry leftGeometry = JtsGeometrySerde.deserialize(left);
- Geometry rightGeometry = JtsGeometrySerde.deserialize(right);
+ int srid = validateAndGetSrid(leftGeometry, rightGeometry);
if (leftGeometry.isEmpty() || rightGeometry.isEmpty()) {
return null;
}
@@ -1197,8 +1268,12 @@ public static SqlRow geometryNearestPoints(@SqlType(StandardTypes.GEOMETRY) Slic
Coordinate[] nearestCoordinates = DistanceOp.nearestPoints(leftGeometry, rightGeometry);
return buildRowValue(rowType, fieldBuilders -> {
- GEOMETRY.writeSlice(fieldBuilders.get(0), serialize(geometryFactory.createPoint(nearestCoordinates[0])));
- GEOMETRY.writeSlice(fieldBuilders.get(1), serialize(geometryFactory.createPoint(nearestCoordinates[1])));
+ Point point0 = geometryFactory.createPoint(nearestCoordinates[0]);
+ point0.setSRID(srid);
+ GEOMETRY.writeObject(fieldBuilders.get(0), point0);
+ Point point1 = geometryFactory.createPoint(nearestCoordinates[1]);
+ point1.setSRID(srid);
+ GEOMETRY.writeObject(fieldBuilders.get(1), point1);
});
}
@@ -1206,246 +1281,224 @@ public static SqlRow geometryNearestPoints(@SqlType(StandardTypes.GEOMETRY) Slic
@Description("Returns a line string representing the exterior ring of the POLYGON")
@ScalarFunction("ST_ExteriorRing")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stExteriorRing(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Geometry stExteriorRing(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
validateType("ST_ExteriorRing", geometry, EnumSet.of(POLYGON));
if (geometry.isEmpty()) {
return null;
}
- return serialize(((OGCPolygon) geometry).exteriorRing());
+ LinearRing ring = ((Polygon) geometry).getExteriorRing();
+ ring.setSRID(geometry.getSRID());
+ return linearRingToLineString(ring);
}
@Description("Returns the Geometry value that represents the point set intersection of two Geometries")
@ScalarFunction("ST_Intersection")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stIntersection(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Geometry stIntersection(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
{
- if (deserializeType(left) == GeometrySerializationType.ENVELOPE && deserializeType(right) == GeometrySerializationType.ENVELOPE) {
- Envelope leftEnvelope = deserializeEnvelope(left);
- Envelope rightEnvelope = deserializeEnvelope(right);
-
- // Envelope#intersect updates leftEnvelope to the intersection of the two envelopes
- if (!leftEnvelope.intersect(rightEnvelope)) {
- return EMPTY_POLYGON;
- }
-
- Envelope intersection = leftEnvelope;
- if (intersection.getXMin() == intersection.getXMax()) {
- if (intersection.getYMin() == intersection.getYMax()) {
- return serialize(createFromEsriGeometry(new Point(intersection.getXMin(), intersection.getXMax()), null));
- }
- return serialize(createFromEsriGeometry(new Polyline(new Point(intersection.getXMin(), intersection.getYMin()), new Point(intersection.getXMin(), intersection.getYMax())), null));
- }
-
- if (intersection.getYMin() == intersection.getYMax()) {
- return serialize(createFromEsriGeometry(new Polyline(new Point(intersection.getXMin(), intersection.getYMin()), new Point(intersection.getXMax(), intersection.getYMin())), null));
- }
-
- return serialize(intersection);
- }
-
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
- return serialize(leftGeometry.intersection(rightGeometry));
+ Geometry result = OverlayNGRobust.overlay(leftGeometry, rightGeometry, OverlayNG.INTERSECTION);
+ result.setSRID(validateAndGetSrid(leftGeometry, rightGeometry));
+ return result;
}
@Description("Returns the Geometry value that represents the point set symmetric difference of two Geometries")
@ScalarFunction("ST_SymDifference")
@SqlType(StandardTypes.GEOMETRY)
- public static Slice stSymmetricDifference(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Geometry stSymmetricDifference(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
+ {
+ // Use OverlayNGRobust for better handling of edge cases and invalid geometries
+ Geometry result = OverlayNGRobust.overlay(leftGeometry, rightGeometry, OverlayNG.SYMDIFFERENCE);
+ result.setSRID(validateAndGetSrid(leftGeometry, rightGeometry));
+ return result;
+ }
+
+ /**
+ * Convert LinearRing to LineString.
+ * WKB format has no LinearRing type, so rings are always serialized as LineString.
+ * This helper ensures consistent behavior when returning ring geometries.
+ */
+ private static LineString linearRingToLineString(LinearRing ring)
{
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
- return serialize(leftGeometry.symDifference(rightGeometry));
+ LineString lineString = GEOMETRY_FACTORY.createLineString(ring.getCoordinateSequence());
+ lineString.setSRID(ring.getSRID());
+ return lineString;
}
@SqlNullable
@Description("Returns TRUE if and only if no points of right lie in the exterior of left, and at least one point of the interior of left lies in the interior of right")
@ScalarFunction("ST_Contains")
@SqlType(BOOLEAN)
- public static Boolean stContains(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Boolean stContains(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
{
- if (!envelopes(left, right, Envelope::contains)) {
+ validateAndGetSrid(leftGeometry, rightGeometry);
+ if (!leftGeometry.getEnvelopeInternal().contains(rightGeometry.getEnvelopeInternal())) {
return false;
}
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
- return leftGeometry.contains(rightGeometry);
+ // Use RelateNG for better handling of edge cases and invalid geometries
+ return RelateNG.relate(leftGeometry, rightGeometry).isContains();
}
@SqlNullable
@Description("Returns TRUE if the supplied geometries have some, but not all, interior points in common")
@ScalarFunction("ST_Crosses")
@SqlType(BOOLEAN)
- public static Boolean stCrosses(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Boolean stCrosses(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
{
- if (!envelopes(left, right, Envelope::intersect)) {
+ validateAndGetSrid(leftGeometry, rightGeometry);
+ if (!leftGeometry.getEnvelopeInternal().intersects(rightGeometry.getEnvelopeInternal())) {
return false;
}
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
- return leftGeometry.crosses(rightGeometry);
+ // Use RelateNG for better handling of edge cases and invalid geometries
+ return RelateNG.relate(leftGeometry, rightGeometry).isCrosses(leftGeometry.getDimension(), rightGeometry.getDimension());
}
@SqlNullable
@Description("Returns TRUE if the Geometries do not spatially intersect - if they do not share any space together")
@ScalarFunction("ST_Disjoint")
@SqlType(BOOLEAN)
- public static Boolean stDisjoint(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Boolean stDisjoint(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
{
- if (!envelopes(left, right, Envelope::intersect)) {
+ validateAndGetSrid(leftGeometry, rightGeometry);
+ if (!leftGeometry.getEnvelopeInternal().intersects(rightGeometry.getEnvelopeInternal())) {
return true;
}
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
- return leftGeometry.disjoint(rightGeometry);
+ // Use RelateNG for better handling of edge cases and invalid geometries
+ return RelateNG.relate(leftGeometry, rightGeometry).isDisjoint();
}
@SqlNullable
@Description("Returns TRUE if the given geometries represent the same geometry")
@ScalarFunction("ST_Equals")
@SqlType(BOOLEAN)
- public static Boolean stEquals(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Boolean stEquals(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
{
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
- return leftGeometry.Equals(rightGeometry);
+ validateAndGetSrid(leftGeometry, rightGeometry);
+ // Use RelateNG for better handling of edge cases and invalid geometries
+ return RelateNG.relate(leftGeometry, rightGeometry).isEquals(leftGeometry.getDimension(), rightGeometry.getDimension());
}
@SqlNullable
@Description("Returns TRUE if the Geometries spatially intersect in 2D - (share any portion of space) and FALSE if they don't (they are Disjoint)")
@ScalarFunction("ST_Intersects")
@SqlType(BOOLEAN)
- public static Boolean stIntersects(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Boolean stIntersects(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
{
- if (!envelopes(left, right, Envelope::intersect)) {
+ validateAndGetSrid(leftGeometry, rightGeometry);
+ if (!leftGeometry.getEnvelopeInternal().intersects(rightGeometry.getEnvelopeInternal())) {
return false;
}
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
- return leftGeometry.intersects(rightGeometry);
+ // Use RelateNG for better handling of edge cases and invalid geometries
+ return RelateNG.relate(leftGeometry, rightGeometry).isIntersects();
}
@SqlNullable
@Description("Returns TRUE if the Geometries share space, are of the same dimension, but are not completely contained by each other")
@ScalarFunction("ST_Overlaps")
@SqlType(BOOLEAN)
- public static Boolean stOverlaps(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Boolean stOverlaps(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
{
- if (!envelopes(left, right, Envelope::intersect)) {
+ validateAndGetSrid(leftGeometry, rightGeometry);
+ if (!leftGeometry.getEnvelopeInternal().intersects(rightGeometry.getEnvelopeInternal())) {
return false;
}
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
- return leftGeometry.overlaps(rightGeometry);
+ // Use RelateNG for better handling of edge cases and invalid geometries
+ return RelateNG.relate(leftGeometry, rightGeometry).isOverlaps(leftGeometry.getDimension(), rightGeometry.getDimension());
}
@SqlNullable
@Description("Returns TRUE if this Geometry is spatially related to another Geometry")
@ScalarFunction("ST_Relate")
@SqlType(BOOLEAN)
- public static Boolean stRelate(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right, @SqlType(VARCHAR) Slice relation)
+ public static Boolean stRelate(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry, @SqlType(VARCHAR) Slice relation)
{
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
- return leftGeometry.relate(rightGeometry, relation.toStringUtf8());
+ validateAndGetSrid(leftGeometry, rightGeometry);
+ // Use RelateNG for better handling of edge cases and invalid geometries
+ return RelateNG.relate(leftGeometry, rightGeometry, relation.toStringUtf8());
}
@SqlNullable
@Description("Returns TRUE if the geometries have at least one point in common, but their interiors do not intersect")
@ScalarFunction("ST_Touches")
@SqlType(BOOLEAN)
- public static Boolean stTouches(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Boolean stTouches(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
{
- if (!envelopes(left, right, Envelope::intersect)) {
+ validateAndGetSrid(leftGeometry, rightGeometry);
+ if (!leftGeometry.getEnvelopeInternal().intersects(rightGeometry.getEnvelopeInternal())) {
return false;
}
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
- return leftGeometry.touches(rightGeometry);
+ // Use RelateNG for better handling of edge cases and invalid geometries
+ return RelateNG.relate(leftGeometry, rightGeometry).isTouches(leftGeometry.getDimension(), rightGeometry.getDimension());
}
- @SuppressWarnings("ArgumentSelectionDefectChecker")
@SqlNullable
@Description("Returns TRUE if the geometry A is completely inside geometry B")
@ScalarFunction("ST_Within")
@SqlType(BOOLEAN)
- public static Boolean stWithin(@SqlType(StandardTypes.GEOMETRY) Slice left, @SqlType(StandardTypes.GEOMETRY) Slice right)
+ public static Boolean stWithin(@SqlType(StandardTypes.GEOMETRY) Geometry leftGeometry, @SqlType(StandardTypes.GEOMETRY) Geometry rightGeometry)
{
- if (!envelopes(right, left, Envelope::contains)) {
+ validateAndGetSrid(leftGeometry, rightGeometry);
+ if (!rightGeometry.getEnvelopeInternal().contains(leftGeometry.getEnvelopeInternal())) {
return false;
}
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
- verifySameSpatialReference(leftGeometry, rightGeometry);
- return leftGeometry.within(rightGeometry);
+ // Use RelateNG for better handling of edge cases and invalid geometries
+ return RelateNG.relate(leftGeometry, rightGeometry).isWithin();
}
@Description("Returns the type of the geometry")
@ScalarFunction("ST_GeometryType")
@SqlType(VARCHAR)
- public static Slice stGeometryType(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Slice stGeometryType(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- return GeometrySerde.getGeometryType(input).standardName();
+ return GeometryType.getForJtsGeometryType(geometry.getGeometryType()).standardName();
}
@ScalarFunction
@SqlNullable
@Description("Returns an array of spatial partition IDs for a given geometry")
@SqlType("array(integer)")
- public static Block spatialPartitions(@SqlType(StandardTypes.KDB_TREE) Object kdbTree, @SqlType(StandardTypes.GEOMETRY) Slice geometry)
+ public static Block spatialPartitions(@SqlType(StandardTypes.KDB_TREE) Object kdbTree, @SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- Envelope envelope = deserializeEnvelope(geometry);
- if (envelope.isEmpty()) {
+ Envelope envelope = geometry.getEnvelopeInternal();
+ if (envelope.isNull()) {
// Empty geometry
return null;
}
- return spatialPartitions((KdbTree) kdbTree, new Rectangle(envelope.getXMin(), envelope.getYMin(), envelope.getXMax(), envelope.getYMax()));
+ return spatialPartitions((KdbTree) kdbTree, new Rectangle(envelope.getMinX(), envelope.getMinY(), envelope.getMaxX(), envelope.getMaxY()));
}
@ScalarFunction("from_geojson_geometry")
@Description("Returns a spherical geography from a GeoJSON string")
@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY)
- public static Slice fromGeoJsonGeometry(@SqlType(VARCHAR) Slice input)
+ public static Geometry fromGeoJsonGeometry(@SqlType(VARCHAR) Slice input)
{
- return serialize(jtsGeometryFromJson(input.toStringUtf8()));
+ return jtsGeometryFromJson(input.toStringUtf8());
}
@SqlNullable
@ScalarFunction("to_geojson_geometry")
@Description("Returns GeoJSON string based on the input spherical geography")
@SqlType(VARCHAR)
- public static Slice geographyToGeoJson(@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY) Slice input)
+ public static Slice geographyToGeoJson(@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY) Geometry geometry)
{
- return Slices.utf8Slice(jsonFromJtsGeometry(JtsGeometrySerde.deserialize(input)));
+ return utf8Slice(jsonFromJtsGeometry(geometry));
}
@SqlNullable
@ScalarFunction("to_geojson_geometry")
@Description("Returns GeoJSON string based on the input geometry")
@SqlType(VARCHAR)
- public static Slice geometryToGeoJson(@SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static Slice geometryToGeoJson(@SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- return Slices.utf8Slice(jsonFromJtsGeometry(JtsGeometrySerde.deserialize(input)));
+ return utf8Slice(jsonFromJtsGeometry(geometry));
}
@ScalarFunction
@SqlNullable
@Description("Returns an array of spatial partition IDs for a geometry representing a set of points within specified distance from the input geometry")
@SqlType("array(integer)")
- public static Block spatialPartitions(@SqlType(StandardTypes.KDB_TREE) Object kdbTree, @SqlType(StandardTypes.GEOMETRY) Slice geometry, @SqlType(DOUBLE) double distance)
+ public static Block spatialPartitions(@SqlType(StandardTypes.KDB_TREE) Object kdbTree, @SqlType(StandardTypes.GEOMETRY) Geometry geometry, @SqlType(DOUBLE) double distance)
{
if (isNaN(distance)) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "distance is NaN");
@@ -1459,12 +1512,12 @@ public static Block spatialPartitions(@SqlType(StandardTypes.KDB_TREE) Object kd
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "distance is negative");
}
- Envelope envelope = deserializeEnvelope(geometry);
- if (envelope.isEmpty()) {
+ Envelope envelope = geometry.getEnvelopeInternal();
+ if (envelope.isNull()) {
return null;
}
- Rectangle expandedEnvelope2D = new Rectangle(envelope.getXMin() - distance, envelope.getYMin() - distance, envelope.getXMax() + distance, envelope.getYMax() + distance);
+ Rectangle expandedEnvelope2D = new Rectangle(envelope.getMinX() - distance, envelope.getMinY() - distance, envelope.getMaxX() + distance, envelope.getMaxY() + distance);
return spatialPartitions((KdbTree) kdbTree, expandedEnvelope2D);
}
@@ -1532,39 +1585,25 @@ public static double greatCircleDistance(
private static void checkLatitude(double latitude)
{
- if (Double.isNaN(latitude) || Double.isInfinite(latitude) || latitude < MIN_LATITUDE || latitude > MAX_LATITUDE) {
+ if (isNaN(latitude) || isInfinite(latitude) || latitude < MIN_LATITUDE || latitude > MAX_LATITUDE) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Latitude must be between -90 and 90");
}
}
private static void checkLongitude(double longitude)
{
- if (Double.isNaN(longitude) || Double.isInfinite(longitude) || longitude < MIN_LONGITUDE || longitude > MAX_LONGITUDE) {
+ if (isNaN(longitude) || isInfinite(longitude) || longitude < MIN_LONGITUDE || longitude > MAX_LONGITUDE) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Longitude must be between -180 and 180");
}
}
- private static OGCGeometry geometryFromText(Slice input)
+ private static Geometry geometryFromText(Slice input)
{
- OGCGeometry geometry;
try {
- geometry = OGCGeometry.fromText(input.toStringUtf8());
- }
- catch (IllegalArgumentException e) {
- throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Invalid WKT: " + input.toStringUtf8(), e);
- }
- geometry.setSpatialReference(null);
- return geometry;
- }
-
- private static Geometry geomFromBinary(Slice input)
- {
- requireNonNull(input, "input is null");
- try {
- return new WKBReader().read(input.getBytes());
+ return new WKTReader().read(input.toStringUtf8());
}
catch (ParseException e) {
- throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Invalid WKB", e);
+ throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Invalid WKT: " + input.toStringUtf8(), e);
}
}
@@ -1578,32 +1617,6 @@ private static Geometry geomFromKML(Slice input)
}
}
- private static ByteBuffer getShapeByteBuffer(Slice input)
- {
- int offset = HADOOP_SHAPE_SIZE_WKID + HADOOP_SHAPE_SIZE_TYPE;
- if (input.length() <= offset) {
- throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Hadoop shape input is too short");
- }
- return input.toByteBuffer(offset, input.length() - offset).slice().order(ByteOrder.LITTLE_ENDIAN);
- }
-
- private static int getWktExportFlags(Slice input)
- {
- byte hadoopShapeType = input.getByte(HADOOP_SHAPE_SIZE_WKID);
- if (hadoopShapeType < 0 || hadoopShapeType >= HADOOP_SHAPE_TYPES.length) {
- throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Invalid Hadoop shape type: " + hadoopShapeType);
- }
- return HADOOP_SHAPE_TYPES[hadoopShapeType];
- }
-
- private static void validateType(String function, OGCGeometry geometry, Set validTypes)
- {
- GeometryType type = GeometryType.getForEsriGeometryType(geometry.geometryType());
- if (!validTypes.contains(type)) {
- throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("%s only applies to %s. Input type is: %s", function, OR_JOINER.join(validTypes), type));
- }
- }
-
private static void validateType(String function, Geometry geometry, Set validTypes)
{
GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
@@ -1612,34 +1625,37 @@ private static void validateType(String function, Geometry geometry, Set esriShapeType; // Unknown - accept any
+ case 1 -> 1; // Point -> Point
+ case 2 -> 3; // LineString -> Polyline
+ case 3 -> 5; // Polygon -> Polygon
+ case 4 -> 8; // MultiPoint -> MultiPoint
+ case 5 -> 3; // MultiLineString -> Polyline
+ case 6 -> 5; // MultiPolygon -> Polygon
+ default -> -1; // Invalid
+ };
- private static boolean envelopes(Slice left, Slice right, EnvelopesPredicate predicate)
- {
- Envelope leftEnvelope = deserializeEnvelope(left);
- Envelope rightEnvelope = deserializeEnvelope(right);
- if (leftEnvelope.isEmpty() || rightEnvelope.isEmpty()) {
- return false;
+ // Allow null shape (0) for any type as it represents empty geometry
+ if (esriShapeType != 0 && esriShapeType != expectedEsriType) {
+ throw new IllegalArgumentException("ESRI shape type " + esriShapeType + " does not match OGC type " + ogcType);
}
- return predicate.apply(leftEnvelope, rightEnvelope);
- }
-
- private interface EnvelopesPredicate
- {
- boolean apply(Envelope left, Envelope right);
}
@SqlNullable
@Description("Returns the great-circle distance in meters between two SphericalGeography points.")
@ScalarFunction("ST_Distance")
@SqlType(DOUBLE)
- public static Double stSphericalDistance(@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY) Slice left, @SqlType(StandardTypes.SPHERICAL_GEOGRAPHY) Slice right)
+ public static Double stSphericalDistance(@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY) Geometry leftGeometry, @SqlType(StandardTypes.SPHERICAL_GEOGRAPHY) Geometry rightGeometry)
{
- OGCGeometry leftGeometry = deserialize(left);
- OGCGeometry rightGeometry = deserialize(right);
if (leftGeometry.isEmpty() || rightGeometry.isEmpty()) {
return null;
}
@@ -1647,16 +1663,16 @@ public static Double stSphericalDistance(@SqlType(StandardTypes.SPHERICAL_GEOGRA
// TODO: support more SphericalGeography types.
validateSphericalType("ST_Distance", leftGeometry, EnumSet.of(POINT));
validateSphericalType("ST_Distance", rightGeometry, EnumSet.of(POINT));
- Point leftPoint = (Point) leftGeometry.getEsriGeometry();
- Point rightPoint = (Point) rightGeometry.getEsriGeometry();
+ Point leftPoint = (Point) leftGeometry;
+ Point rightPoint = (Point) rightGeometry;
// greatCircleDistance returns distance in KM.
return greatCircleDistance(leftPoint.getY(), leftPoint.getX(), rightPoint.getY(), rightPoint.getX()) * 1000;
}
- private static void validateSphericalType(String function, OGCGeometry geometry, Set validTypes)
+ private static void validateSphericalType(String function, Geometry geometry, Set validTypes)
{
- GeometryType type = GeometryType.getForEsriGeometryType(geometry.geometryType());
+ GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
if (!validTypes.contains(type)) {
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("When applied to SphericalGeography inputs, %s only supports %s. Input type is: %s", function, OR_JOINER.join(validTypes), type));
}
@@ -1666,17 +1682,14 @@ private static void validateSphericalType(String function, OGCGeometry geometry,
@Description("Returns the area of a geometry on the Earth's surface using spherical model")
@ScalarFunction("ST_Area")
@SqlType(DOUBLE)
- public static Double stSphericalArea(@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY) Slice input)
+ public static Double stSphericalArea(@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY) Geometry geometry)
{
- OGCGeometry geometry = deserialize(input);
if (geometry.isEmpty()) {
return null;
}
validateSphericalType("ST_Area", geometry, EnumSet.of(POLYGON, MULTI_POLYGON));
- Polygon polygon = (Polygon) geometry.getEsriGeometry();
-
// See https://www.movable-type.co.uk/scripts/latlong.html
// and http://osgeo-org.1560.x6.nabble.com/Area-of-a-spherical-polygon-td3841625.html
// and https://www.element84.com/blog/determining-if-a-spherical-polygon-contains-a-pole
@@ -1684,10 +1697,18 @@ public static Double stSphericalArea(@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY)
double sphericalExcess = 0.0;
- int numPaths = polygon.getPathCount();
- for (int i = 0; i < numPaths; i++) {
- double sign = polygon.isExteriorRing(i) ? 1.0 : -1.0;
- sphericalExcess += sign * Math.abs(computeSphericalExcess(polygon, polygon.getPathStart(i), polygon.getPathEnd(i)));
+ // Handle both Polygon and MultiPolygon
+ int numPolygons = geometry.getNumGeometries();
+ for (int p = 0; p < numPolygons; p++) {
+ Polygon polygon = (Polygon) geometry.getGeometryN(p);
+
+ // Exterior ring (positive contribution)
+ sphericalExcess += Math.abs(computeSphericalExcess(polygon.getExteriorRing().getCoordinates()));
+
+ // Interior rings (negative contribution - holes)
+ for (int i = 0; i < polygon.getNumInteriorRing(); i++) {
+ sphericalExcess -= Math.abs(computeSphericalExcess(polygon.getInteriorRingN(i).getCoordinates()));
+ }
}
// Math.abs is required here because for Polygons with a 2D area of 0
@@ -1695,10 +1716,13 @@ public static Double stSphericalArea(@SqlType(StandardTypes.SPHERICAL_GEOGRAPHY)
return Math.abs(sphericalExcess * EARTH_RADIUS_M * EARTH_RADIUS_M);
}
- private static double computeSphericalExcess(Polygon polygon, int start, int end)
+ private static double computeSphericalExcess(Coordinate[] coordinates)
{
+ int end = coordinates.length;
+ int start = 0;
+
// Our calculations rely on not processing the same point twice
- if (polygon.getPoint(end - 1).equals(polygon.getPoint(start))) {
+ if (coordinates[end - 1].equals(coordinates[start])) {
end = end - 1;
}
@@ -1707,23 +1731,22 @@ private static double computeSphericalExcess(Polygon polygon, int start, int end
throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "Polygon is not valid: a loop contains less then 3 vertices.");
}
- Point point = new Point();
// Initialize the calculator with the last point
- polygon.getPoint(end - 1, point);
+ Coordinate lastPoint = coordinates[end - 1];
double sphericalExcess = 0;
double courseDelta = 0;
boolean firstPoint = true;
double firstInitialBearing = 0;
double previousFinalBearing = 0;
- double previousPhi = toRadians(point.getY());
- double previousCos = Math.cos(previousPhi);
- double previousSin = Math.sin(previousPhi);
+ double previousPhi = toRadians(lastPoint.getY());
+ double previousCos = cos(previousPhi);
+ double previousSin = sin(previousPhi);
double previousTan = Math.tan(previousPhi / 2);
- double previousLongitude = toRadians(point.getX());
+ double previousLongitude = toRadians(lastPoint.getX());
for (int i = start; i < end; i++) {
- polygon.getPoint(i, point);
+ Coordinate point = coordinates[i];
double phi = toRadians(point.getY());
double tan = Math.tan(phi / 2);
double longitude = toRadians(point.getX());
@@ -1735,22 +1758,22 @@ private static double computeSphericalExcess(Polygon polygon, int start, int end
}
double deltaLongitude = longitude - previousLongitude;
- sphericalExcess += 2 * Math.atan2(Math.tan(deltaLongitude / 2) * (previousTan + tan), 1 + previousTan * tan);
+ sphericalExcess += 2 * atan2(Math.tan(deltaLongitude / 2) * (previousTan + tan), 1 + previousTan * tan);
- double cos = Math.cos(phi);
- double sin = Math.sin(phi);
- double sinOfDeltaLongitude = Math.sin(deltaLongitude);
- double cosOfDeltaLongitude = Math.cos(deltaLongitude);
+ double cos = cos(phi);
+ double sin = sin(phi);
+ double sinOfDeltaLongitude = sin(deltaLongitude);
+ double cosOfDeltaLongitude = cos(deltaLongitude);
// Initial bearing from previous to current
double y = sinOfDeltaLongitude * cos;
double x = previousCos * sin - previousSin * cos * cosOfDeltaLongitude;
- double initialBearing = (Math.atan2(y, x) + 2 * Math.PI) % (2 * Math.PI);
+ double initialBearing = (atan2(y, x) + 2 * PI) % (2 * PI);
// Final bearing from previous to current = opposite of bearing from current to previous
double finalY = -sinOfDeltaLongitude * previousCos;
double finalX = previousSin * cos - previousCos * sin * cosOfDeltaLongitude;
- double finalBearing = (Math.atan2(finalY, finalX) + PI) % (2 * Math.PI);
+ double finalBearing = (atan2(finalY, finalX) + PI) % (2 * PI);
// When processing our first point we don't yet have a previousFinalBearing
if (firstPoint) {
@@ -1760,10 +1783,10 @@ private static double computeSphericalExcess(Polygon polygon, int start, int end
firstPoint = false;
}
else {
- courseDelta += (initialBearing - previousFinalBearing + 3 * Math.PI) % (2 * Math.PI) - PI;
+ courseDelta += (initialBearing - previousFinalBearing + 3 * PI) % (2 * PI) - PI;
}
- courseDelta += (finalBearing - initialBearing + 3 * Math.PI) % (2 * Math.PI) - PI;
+ courseDelta += (finalBearing - initialBearing + 3 * PI) % (2 * PI) - PI;
previousFinalBearing = finalBearing;
previousCos = cos;
@@ -1774,18 +1797,18 @@ private static double computeSphericalExcess(Polygon polygon, int start, int end
}
// Now that we have our last final bearing, we can calculate the remaining course delta
- courseDelta += (firstInitialBearing - previousFinalBearing + 3 * Math.PI) % (2 * Math.PI) - PI;
+ courseDelta += (firstInitialBearing - previousFinalBearing + 3 * PI) % (2 * PI) - PI;
// The courseDelta should be 2Pi or - 2Pi, unless a pole is enclosed (and then it should be ~ 0)
// In which case we need to correct the spherical excess by 2Pi
if (Math.abs(courseDelta) < PI / 4) {
- sphericalExcess = Math.abs(sphericalExcess) - 2 * Math.PI;
+ sphericalExcess = Math.abs(sphericalExcess) - 2 * PI;
}
return sphericalExcess;
}
- private static Iterable<Slice> getGeometrySlicesFromBlock(Block block)
+ private static Iterable<Geometry> getGeometriesFromBlock(Block block)
{
requireNonNull(block, "block is null");
return () -> new Iterator<>()
@@ -1799,62 +1822,17 @@ public boolean hasNext()
}
@Override
- public Slice next()
+ public Geometry next()
{
if (!hasNext()) {
- throw new NoSuchElementException("Slices have been consumed");
+ throw new NoSuchElementException("Geometries have been consumed");
}
- return GEOMETRY.getSlice(block, iteratorPosition++);
- }
- };
- }
-
- private static Iterable<OGCGeometry> flattenCollection(OGCGeometry geometry)
- {
- if (geometry == null) {
- return ImmutableList.of();
- }
- if (!(geometry instanceof OGCConcreteGeometryCollection geometryCollection)) {
- return ImmutableList.of(geometry);
- }
- if (geometryCollection.numGeometries() == 0) {
- return ImmutableList.of();
- }
- return () -> new GeometryCollectionIterator(geometry);
- }
-
- private static class GeometryCollectionIterator
- implements Iterator<OGCGeometry>
- {
- private final Deque<OGCGeometry> geometriesDeque = new ArrayDeque<>();
-
- GeometryCollectionIterator(OGCGeometry geometries)
- {
- geometriesDeque.push(requireNonNull(geometries, "geometries is null"));
- }
-
- @Override
- public boolean hasNext()
- {
- if (geometriesDeque.isEmpty()) {
- return false;
- }
- while (geometriesDeque.peek() instanceof OGCConcreteGeometryCollection) {
- OGCGeometryCollection collection = (OGCGeometryCollection) geometriesDeque.pop();
- for (int i = 0; i < collection.numGeometries(); i++) {
- geometriesDeque.push(collection.geometryN(i));
+ if (block.isNull(iteratorPosition)) {
+ iteratorPosition++;
+ return null;
}
+ return (Geometry) GEOMETRY.getObject(block, iteratorPosition++);
}
- return !geometriesDeque.isEmpty();
- }
-
- @Override
- public OGCGeometry next()
- {
- if (!hasNext()) {
- throw new NoSuchElementException("Geometries have been consumed");
- }
- return geometriesDeque.pop();
- }
+ };
}
}
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/GeometryType.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/GeometryType.java
index 43d663a929e6..05e0eed78408 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/GeometryType.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/GeometryType.java
@@ -13,51 +13,17 @@
*/
package io.trino.plugin.geospatial;
-import io.airlift.slice.Slice;
-import io.airlift.slice.XxHash64;
-import io.trino.spi.block.Block;
-import io.trino.spi.block.BlockBuilder;
-import io.trino.spi.block.VariableWidthBlock;
-import io.trino.spi.block.VariableWidthBlockBuilder;
-import io.trino.spi.function.IsNull;
-import io.trino.spi.function.ScalarOperator;
-import io.trino.spi.type.AbstractVariableWidthType;
-import io.trino.spi.type.TypeOperatorDeclaration;
-import io.trino.spi.type.TypeOperators;
import io.trino.spi.type.TypeSignature;
-import static io.trino.geospatial.serde.GeometrySerde.deserialize;
-import static io.trino.spi.function.OperatorType.EQUAL;
-import static io.trino.spi.function.OperatorType.HASH_CODE;
-import static io.trino.spi.function.OperatorType.IDENTICAL;
-import static io.trino.spi.function.OperatorType.XX_HASH_64;
-
public class GeometryType
- extends AbstractVariableWidthType
+ extends AbstractGeometryType
{
public static final String NAME = "Geometry";
public static final GeometryType GEOMETRY = new GeometryType();
- private static final TypeOperatorDeclaration TYPE_OPERATOR_DECLARATION =
- TypeOperatorDeclaration.builder(Slice.class)
- .addOperators(DEFAULT_READ_OPERATORS)
- .addOperators(DEFAULT_COMPARABLE_OPERATORS)
- .build();
-
private GeometryType()
{
- super(new TypeSignature(NAME), Slice.class);
- }
-
- protected GeometryType(TypeSignature signature)
- {
- super(signature, Slice.class);
- }
-
- @Override
- public TypeOperatorDeclaration getTypeOperatorDeclaration(TypeOperators typeOperators)
- {
- return TYPE_OPERATOR_DECLARATION;
+ super(new TypeSignature(NAME));
}
@Override
@@ -65,79 +31,4 @@ public String getDisplayName()
{
return NAME;
}
-
- @Override
- public boolean isComparable()
- {
- return true;
- }
-
- @Override
- public Slice getSlice(Block block, int position)
- {
- VariableWidthBlock valueBlock = (VariableWidthBlock) block.getUnderlyingValueBlock();
- int valuePosition = block.getUnderlyingValuePosition(position);
- return valueBlock.getSlice(valuePosition);
- }
-
- @Override
- public void writeSlice(BlockBuilder blockBuilder, Slice value)
- {
- if (value == null) {
- blockBuilder.appendNull();
- return;
- }
- writeSlice(blockBuilder, value, 0, value.length());
- }
-
- @Override
- public void writeSlice(BlockBuilder blockBuilder, Slice value, int offset, int length)
- {
- if (value == null) {
- blockBuilder.appendNull();
- return;
- }
- ((VariableWidthBlockBuilder) blockBuilder).writeEntry(value, offset, length);
- }
-
- @Override
- public Object getObjectValue(Block block, int position)
- {
- if (block.isNull(position)) {
- return null;
- }
- try {
- return deserialize(getSlice(block, position)).asText();
- }
- catch (Exception e) {
- return "";
- }
- }
-
- @ScalarOperator(HASH_CODE)
- private static long hashCodeOperator(Slice value)
- {
- return value.hashCode();
- }
-
- @ScalarOperator(XX_HASH_64)
- private static long xxHash64Operator(Slice value)
- {
- return XxHash64.hash(value);
- }
-
- @ScalarOperator(EQUAL)
- private static boolean equalOperator(Slice left, Slice right)
- {
- return left.equals(right);
- }
-
- @ScalarOperator(IDENTICAL)
- private static boolean identical(Slice left, @IsNull boolean leftNull, Slice right, @IsNull boolean rightNull)
- {
- if (leftNull || rightNull) {
- return leftNull == rightNull;
- }
- return left.equals(right);
- }
}
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SpatialPartitioningAggregateFunction.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SpatialPartitioningAggregateFunction.java
index c123ccd8349e..fac1d97d8391 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SpatialPartitioningAggregateFunction.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SpatialPartitioningAggregateFunction.java
@@ -13,13 +13,13 @@
*/
package io.trino.plugin.geospatial;
-import io.airlift.slice.Slice;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.function.AggregationFunction;
import io.trino.spi.function.InputFunction;
import io.trino.spi.function.OutputFunction;
import io.trino.spi.function.SqlType;
import io.trino.spi.type.StandardTypes;
+import org.locationtech.jts.geom.Geometry;
import static io.trino.plugin.geospatial.SpatialPartitioningAggregateFunction.NAME;
@@ -31,7 +31,7 @@ public final class SpatialPartitioningAggregateFunction
private SpatialPartitioningAggregateFunction() {}
@InputFunction
- public static void input(SpatialPartitioningState state, @SqlType(StandardTypes.GEOMETRY) Slice slice)
+ public static void input(SpatialPartitioningState state, @SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
throw new UnsupportedOperationException("spatial_partitioning(geometry) aggregate function should be re-written into spatial_partitioning(geometry, partitionCount)");
}
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SpatialPartitioningInternalAggregateFunction.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SpatialPartitioningInternalAggregateFunction.java
index 28dabb5a2660..4f215616b8c2 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SpatialPartitioningInternalAggregateFunction.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SpatialPartitioningInternalAggregateFunction.java
@@ -13,8 +13,6 @@
*/
package io.trino.plugin.geospatial;
-import com.esri.core.geometry.Envelope;
-import io.airlift.slice.Slice;
import io.trino.geospatial.KdbTreeUtils;
import io.trino.geospatial.Rectangle;
import io.trino.spi.block.BlockBuilder;
@@ -23,13 +21,14 @@
import io.trino.spi.function.OutputFunction;
import io.trino.spi.function.SqlType;
import io.trino.spi.type.StandardTypes;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import static io.trino.geospatial.KdbTree.buildKdbTree;
-import static io.trino.geospatial.serde.GeometrySerde.deserializeEnvelope;
import static io.trino.plugin.geospatial.SpatialPartitioningAggregateFunction.NAME;
import static io.trino.spi.type.StandardTypes.INTEGER;
import static io.trino.spi.type.VarcharType.VARCHAR;
@@ -43,14 +42,14 @@ public final class SpatialPartitioningInternalAggregateFunction
private SpatialPartitioningInternalAggregateFunction() {}
@InputFunction
- public static void input(SpatialPartitioningState state, @SqlType(StandardTypes.GEOMETRY) Slice slice, @SqlType(INTEGER) long partitionCount)
+ public static void input(SpatialPartitioningState state, @SqlType(StandardTypes.GEOMETRY) Geometry geometry, @SqlType(INTEGER) long partitionCount)
{
- Envelope envelope = deserializeEnvelope(slice);
- if (envelope.isEmpty()) {
+ Envelope envelope = geometry.getEnvelopeInternal();
+ if (envelope.isNull()) {
return;
}
- Rectangle extent = new Rectangle(envelope.getXMin(), envelope.getYMin(), envelope.getXMax(), envelope.getYMax());
+ Rectangle extent = new Rectangle(envelope.getMinX(), envelope.getMinY(), envelope.getMaxX(), envelope.getMaxY());
if (state.getCount() == 0) {
state.setPartitionCount(toIntExact(partitionCount));
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SpatialPartitioningStateFactory.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SpatialPartitioningStateFactory.java
index 19443750a6fc..6e7ee50da5af 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SpatialPartitioningStateFactory.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SpatialPartitioningStateFactory.java
@@ -13,7 +13,6 @@
*/
package io.trino.plugin.geospatial;
-import com.esri.core.geometry.Envelope;
import io.trino.array.IntBigArray;
import io.trino.array.LongBigArray;
import io.trino.array.ObjectBigArray;
@@ -46,7 +45,8 @@ public static final class GroupedSpatialPartitioningState
implements GroupedAccumulatorState, SpatialPartitioningState
{
private static final int INSTANCE_SIZE = instanceSize(GroupedSpatialPartitioningState.class);
- private static final int ENVELOPE_SIZE = toIntExact(new Envelope(1, 2, 3, 4).estimateMemorySize());
+ // Rectangle stores 4 doubles (xMin, yMin, xMax, yMax) plus object header
+ private static final int ENVELOPE_SIZE = toIntExact(instanceSize(Rectangle.class));
private long groupId;
private final IntBigArray partitionCounts = new IntBigArray();
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SphericalGeographyType.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SphericalGeographyType.java
index 004abae868f8..db0994037f2b 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SphericalGeographyType.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/SphericalGeographyType.java
@@ -13,45 +13,17 @@
*/
package io.trino.plugin.geospatial;
-import io.airlift.slice.Slice;
-import io.trino.spi.block.Block;
-import io.trino.spi.block.BlockBuilder;
-import io.trino.spi.block.VariableWidthBlock;
-import io.trino.spi.block.VariableWidthBlockBuilder;
-import io.trino.spi.type.AbstractVariableWidthType;
import io.trino.spi.type.TypeSignature;
-import static io.trino.geospatial.serde.GeometrySerde.deserialize;
-
public class SphericalGeographyType
- extends AbstractVariableWidthType
+ extends AbstractGeometryType
{
public static final String NAME = "SphericalGeography";
public static final SphericalGeographyType SPHERICAL_GEOGRAPHY = new SphericalGeographyType();
private SphericalGeographyType()
{
- super(new TypeSignature(NAME), Slice.class);
- }
-
- @Override
- public Slice getSlice(Block block, int position)
- {
- VariableWidthBlock valueBlock = (VariableWidthBlock) block.getUnderlyingValueBlock();
- int valuePosition = block.getUnderlyingValuePosition(position);
- return valueBlock.getSlice(valuePosition);
- }
-
- @Override
- public void writeSlice(BlockBuilder blockBuilder, Slice value)
- {
- writeSlice(blockBuilder, value, 0, value.length());
- }
-
- @Override
- public void writeSlice(BlockBuilder blockBuilder, Slice value, int offset, int length)
- {
- ((VariableWidthBlockBuilder) blockBuilder).writeEntry(value, offset, length);
+ super(new TypeSignature(NAME));
}
@Override
@@ -59,18 +31,4 @@ public String getDisplayName()
{
return NAME;
}
-
- @Override
- public Object getObjectValue(Block block, int position)
- {
- if (block.isNull(position)) {
- return null;
- }
- try {
- return deserialize(getSlice(block, position)).asText();
- }
- catch (Exception e) {
- return "";
- }
- }
}
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/ConvexHullAggregation.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/ConvexHullAggregation.java
index cf5168491e27..905db07b91a0 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/ConvexHullAggregation.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/ConvexHullAggregation.java
@@ -13,12 +13,6 @@
*/
package io.trino.plugin.geospatial.aggregation;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.google.common.base.Joiner;
-import io.airlift.slice.Slice;
-import io.trino.geospatial.GeometryType;
-import io.trino.geospatial.serde.GeometrySerde;
-import io.trino.spi.TrinoException;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.function.AggregationFunction;
import io.trino.spi.function.AggregationState;
@@ -28,12 +22,11 @@
import io.trino.spi.function.OutputFunction;
import io.trino.spi.function.SqlType;
import io.trino.spi.type.StandardTypes;
+import org.locationtech.jts.geom.Geometry;
-import java.util.Set;
-
+import static io.trino.geospatial.GeometryUtils.safeUnion;
+import static io.trino.geospatial.serde.JtsGeometrySerde.validateAndGetSrid;
import static io.trino.plugin.geospatial.GeometryType.GEOMETRY;
-import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
-import static java.lang.String.format;
/**
* Aggregate form of ST_ConvexHull, which takes a set of geometries and computes the convex hull
@@ -43,20 +36,27 @@
@AggregationFunction("convex_hull_agg")
public final class ConvexHullAggregation
{
- private static final Joiner OR_JOINER = Joiner.on(" or ");
-
private ConvexHullAggregation() {}
@InputFunction
public static void input(@AggregationState GeometryState state,
- @SqlType(StandardTypes.GEOMETRY) Slice input)
+ @SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = GeometrySerde.deserialize(input);
if (state.getGeometry() == null) {
- state.setGeometry(geometry.convexHull());
+ Geometry result = geometry.convexHull();
+ result.setSRID(geometry.getSRID());
+ state.setGeometry(result);
}
- else if (!geometry.isEmpty()) {
- state.setGeometry(state.getGeometry().union(geometry).convexHull());
+ else {
+ int srid = validateAndGetSrid(state.getGeometry(), geometry);
+ if (!geometry.isEmpty()) {
+ Geometry result = safeUnion(state.getGeometry(), geometry).convexHull();
+ result.setSRID(srid);
+ state.setGeometry(result);
+ }
+ else {
+ updateGeometrySrid(state, srid);
+ }
}
}
@@ -67,9 +67,29 @@ public static void combine(@AggregationState GeometryState state,
if (state.getGeometry() == null) {
state.setGeometry(otherState.getGeometry());
}
- else if (otherState.getGeometry() != null && !otherState.getGeometry().isEmpty()) {
- state.setGeometry(state.getGeometry().union(otherState.getGeometry()).convexHull());
+ else if (otherState.getGeometry() != null) {
+ int srid = validateAndGetSrid(state.getGeometry(), otherState.getGeometry());
+ if (!otherState.getGeometry().isEmpty()) {
+ Geometry result = safeUnion(state.getGeometry(), otherState.getGeometry()).convexHull();
+ result.setSRID(srid);
+ state.setGeometry(result);
+ }
+ else {
+ updateGeometrySrid(state, srid);
+ }
+ }
+ }
+
+ private static void updateGeometrySrid(GeometryState state, int srid)
+ {
+ Geometry geometry = state.getGeometry();
+ if (geometry.getSRID() == srid) {
+ return;
}
+
+ Geometry result = geometry.copy();
+ result.setSRID(srid);
+ state.setGeometry(result);
}
@OutputFunction(StandardTypes.GEOMETRY)
@@ -79,15 +99,7 @@ public static void output(@AggregationState GeometryState state, BlockBuilder ou
out.appendNull();
}
else {
- GEOMETRY.writeSlice(out, GeometrySerde.serialize(state.getGeometry()));
- }
- }
-
- private static void validateType(String function, OGCGeometry geometry, Set<GeometryType> validTypes)
- {
- GeometryType type = GeometryType.getForEsriGeometryType(geometry.geometryType());
- if (!validTypes.contains(type)) {
- throw new TrinoException(INVALID_FUNCTION_ARGUMENT, format("%s only applies to %s. Input type is: %s", function, OR_JOINER.join(validTypes), type));
+ GEOMETRY.writeObject(out, state.getGeometry());
}
}
}
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryState.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryState.java
index f9d854dca006..08941165a679 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryState.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryState.java
@@ -13,15 +13,15 @@
*/
package io.trino.plugin.geospatial.aggregation;
-import com.esri.core.geometry.ogc.OGCGeometry;
import io.trino.spi.function.AccumulatorState;
import io.trino.spi.function.AccumulatorStateMetadata;
+import org.locationtech.jts.geom.Geometry;
@AccumulatorStateMetadata(stateSerializerClass = GeometryStateSerializer.class, stateFactoryClass = GeometryStateFactory.class)
public interface GeometryState
extends AccumulatorState
{
- OGCGeometry getGeometry();
+ Geometry getGeometry();
- void setGeometry(OGCGeometry geometry);
+ void setGeometry(Geometry geometry);
}
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryStateFactory.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryStateFactory.java
index e802babd5602..8d8c9426cb9c 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryStateFactory.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryStateFactory.java
@@ -13,18 +13,16 @@
*/
package io.trino.plugin.geospatial.aggregation;
-import com.esri.core.geometry.ogc.OGCGeometry;
import io.trino.array.ObjectBigArray;
import io.trino.spi.function.AccumulatorStateFactory;
import io.trino.spi.function.GroupedAccumulatorState;
+import org.locationtech.jts.geom.Geometry;
-import static io.airlift.slice.SizeOf.instanceSize;
+import static io.trino.geospatial.GeometryUtils.estimateMemorySize;
public class GeometryStateFactory
implements AccumulatorStateFactory<GeometryState>
{
- private static final long OGC_GEOMETRY_BASE_INSTANCE_SIZE = instanceSize(OGCGeometry.class);
-
@Override
public GeometryState createSingleState()
{
@@ -40,23 +38,23 @@ public GeometryState createGroupedState()
public static class GroupedGeometryState
implements GeometryState, GroupedAccumulatorState
{
- private final ObjectBigArray<OGCGeometry> geometries = new ObjectBigArray<>();
+ private final ObjectBigArray<Geometry> geometries = new ObjectBigArray<>();
private int groupId;
private long size;
@Override
- public OGCGeometry getGeometry()
+ public Geometry getGeometry()
{
return geometries.get(groupId);
}
@Override
- public void setGeometry(OGCGeometry geometry)
+ public void setGeometry(Geometry geometry)
{
- OGCGeometry previousValue = this.geometries.getAndSet(groupId, geometry);
- size -= getGeometryMemorySize(previousValue);
- size += getGeometryMemorySize(geometry);
+ Geometry previousValue = this.geometries.getAndSet(groupId, geometry);
+ size -= estimateMemorySize(previousValue);
+ size += estimateMemorySize(geometry);
}
@Override
@@ -78,35 +76,19 @@ public final void setGroupId(int groupId)
}
}
- // Do a best-effort attempt to estimate the memory size
- private static long getGeometryMemorySize(OGCGeometry geometry)
- {
- if (geometry == null) {
- return 0;
- }
- // Due to the following issue:
- // https://github.com/Esri/geometry-api-java/issues/192
- // We must check if the geometry is empty before calculating its size. Once the issue is resolved
- // and we bring the fix into our codebase, we can remove this check.
- if (geometry.isEmpty()) {
- return OGC_GEOMETRY_BASE_INSTANCE_SIZE;
- }
- return geometry.estimateMemorySize();
- }
-
public static class SingleGeometryState
implements GeometryState
{
- private OGCGeometry geometry;
+ private Geometry geometry;
@Override
- public OGCGeometry getGeometry()
+ public Geometry getGeometry()
{
return geometry;
}
@Override
- public void setGeometry(OGCGeometry geometry)
+ public void setGeometry(Geometry geometry)
{
this.geometry = geometry;
}
@@ -114,7 +96,7 @@ public void setGeometry(OGCGeometry geometry)
@Override
public long getEstimatedSize()
{
- return getGeometryMemorySize(geometry);
+ return estimateMemorySize(geometry);
}
}
}
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryStateSerializer.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryStateSerializer.java
index 3a6a80b63a85..bc65298fa3f5 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryStateSerializer.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryStateSerializer.java
@@ -13,7 +13,7 @@
*/
package io.trino.plugin.geospatial.aggregation;
-import io.trino.geospatial.serde.GeometrySerde;
+import io.trino.geospatial.serde.JtsGeometrySerde;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.function.AccumulatorStateSerializer;
@@ -37,13 +37,13 @@ public void serialize(GeometryState state, BlockBuilder out)
out.appendNull();
}
else {
- GEOMETRY.writeSlice(out, GeometrySerde.serialize(state.getGeometry()));
+ GEOMETRY.writeSlice(out, JtsGeometrySerde.serialize(state.getGeometry()));
}
}
@Override
public void deserialize(Block block, int index, GeometryState state)
{
- state.setGeometry(GeometrySerde.deserialize(GEOMETRY.getSlice(block, index)));
+ state.setGeometry(JtsGeometrySerde.deserialize(GEOMETRY.getSlice(block, index)));
}
}
diff --git a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryUnionAgg.java b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryUnionAgg.java
index b8cb060b5be2..5c86ac87e23d 100644
--- a/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryUnionAgg.java
+++ b/plugin/trino-geospatial/src/main/java/io/trino/plugin/geospatial/aggregation/GeometryUnionAgg.java
@@ -13,9 +13,6 @@
*/
package io.trino.plugin.geospatial.aggregation;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import io.airlift.slice.Slice;
-import io.trino.geospatial.serde.GeometrySerde;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.function.AggregationFunction;
import io.trino.spi.function.AggregationState;
@@ -25,7 +22,10 @@
import io.trino.spi.function.OutputFunction;
import io.trino.spi.function.SqlType;
import io.trino.spi.type.StandardTypes;
+import org.locationtech.jts.geom.Geometry;
+import static io.trino.geospatial.GeometryUtils.safeUnion;
+import static io.trino.geospatial.serde.JtsGeometrySerde.validateAndGetSrid;
import static io.trino.plugin.geospatial.GeometryType.GEOMETRY;
/**
@@ -39,14 +39,21 @@ public final class GeometryUnionAgg
private GeometryUnionAgg() {}
@InputFunction
- public static void input(@AggregationState GeometryState state, @SqlType(StandardTypes.GEOMETRY) Slice input)
+ public static void input(@AggregationState GeometryState state, @SqlType(StandardTypes.GEOMETRY) Geometry geometry)
{
- OGCGeometry geometry = GeometrySerde.deserialize(input);
if (state.getGeometry() == null) {
state.setGeometry(geometry);
}
- else if (!geometry.isEmpty()) {
- state.setGeometry(state.getGeometry().union(geometry));
+ else {
+ int srid = validateAndGetSrid(state.getGeometry(), geometry);
+ if (!geometry.isEmpty()) {
+ Geometry result = safeUnion(state.getGeometry(), geometry);
+ result.setSRID(srid);
+ state.setGeometry(result);
+ }
+ else {
+ updateGeometrySrid(state, srid);
+ }
}
}
@@ -56,9 +63,29 @@ public static void combine(@AggregationState GeometryState state, @AggregationSt
if (state.getGeometry() == null) {
state.setGeometry(otherState.getGeometry());
}
- else if (otherState.getGeometry() != null && !otherState.getGeometry().isEmpty()) {
- state.setGeometry(state.getGeometry().union(otherState.getGeometry()));
+ else if (otherState.getGeometry() != null) {
+ int srid = validateAndGetSrid(state.getGeometry(), otherState.getGeometry());
+ if (!otherState.getGeometry().isEmpty()) {
+ Geometry result = safeUnion(state.getGeometry(), otherState.getGeometry());
+ result.setSRID(srid);
+ state.setGeometry(result);
+ }
+ else {
+ updateGeometrySrid(state, srid);
+ }
+ }
+ }
+
+ private static void updateGeometrySrid(GeometryState state, int srid)
+ {
+ Geometry geometry = state.getGeometry();
+ if (geometry.getSRID() == srid) {
+ return;
}
+
+ Geometry result = geometry.copy();
+ result.setSRID(srid);
+ state.setGeometry(result);
}
@OutputFunction(StandardTypes.GEOMETRY)
@@ -68,7 +95,7 @@ public static void output(@AggregationState GeometryState state, BlockBuilder ou
out.appendNull();
}
else {
- GEOMETRY.writeSlice(out, GeometrySerde.serialize(state.getGeometry()));
+ GEOMETRY.writeObject(out, state.getGeometry());
}
}
}
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkEnvelopeIntersection.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkEnvelopeIntersection.java
index 059dc9370f6f..0197b98954ee 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkEnvelopeIntersection.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkEnvelopeIntersection.java
@@ -13,8 +13,8 @@
*/
package io.trino.plugin.geospatial;
-import io.airlift.slice.Slice;
import org.junit.jupiter.api.Test;
+import org.locationtech.jts.geom.Geometry;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
@@ -30,7 +30,6 @@
import java.util.concurrent.TimeUnit;
import static io.airlift.slice.Slices.utf8Slice;
-import static io.trino.geospatial.serde.GeometrySerde.deserialize;
import static io.trino.jmh.Benchmarks.benchmark;
import static io.trino.plugin.geospatial.GeoFunctions.stEnvelope;
import static io.trino.plugin.geospatial.GeoFunctions.stGeometryFromText;
@@ -46,13 +45,13 @@
public class BenchmarkEnvelopeIntersection
{
@Benchmark
- public Slice envelopes(BenchmarkData data)
+ public Geometry envelopes(BenchmarkData data)
{
return stIntersection(data.envelope, data.otherEnvelope);
}
@Benchmark
- public Slice geometries(BenchmarkData data)
+ public Geometry geometries(BenchmarkData data)
{
return stIntersection(data.geometry, data.otherGeometry);
}
@@ -60,11 +59,11 @@ public Slice geometries(BenchmarkData data)
@State(Scope.Thread)
public static class BenchmarkData
{
- private Slice envelope;
- private Slice otherEnvelope;
+ private Geometry envelope;
+ private Geometry otherEnvelope;
- private Slice geometry;
- private Slice otherGeometry;
+ private Geometry geometry;
+ private Geometry otherGeometry;
@Setup
public void setup()
@@ -82,7 +81,7 @@ public void validate()
BenchmarkData data = new BenchmarkData();
data.setup();
BenchmarkEnvelopeIntersection benchmark = new BenchmarkEnvelopeIntersection();
- assertThat(deserialize(benchmark.envelopes(data))).isEqualTo(deserialize(benchmark.geometries(data)));
+ assertThat(benchmark.envelopes(data).equalsTopo(benchmark.geometries(data))).isTrue();
}
static void main()
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkGeometryToBingTiles.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkGeometryToBingTiles.java
index 40ac40814957..9624fbb9a330 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkGeometryToBingTiles.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkGeometryToBingTiles.java
@@ -13,8 +13,8 @@
*/
package io.trino.plugin.geospatial;
-import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
+import org.locationtech.jts.geom.Geometry;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
@@ -58,8 +58,8 @@ public Object envelopeToBingTiles(BenchmarkData data)
@State(Scope.Thread)
public static class BenchmarkData
{
- private Slice geometry;
- private Slice envelope;
+ private Geometry geometry;
+ private Geometry envelope;
private int zoomLevel;
@Setup
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTArea.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTArea.java
index 35a70f24f271..5a283203aaca 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTArea.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTArea.java
@@ -13,8 +13,8 @@
*/
package io.trino.plugin.geospatial;
-import io.airlift.slice.Slice;
import org.junit.jupiter.api.Test;
+import org.locationtech.jts.geom.Geometry;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
@@ -74,10 +74,10 @@ public Object stArea500k(BenchmarkData data)
@State(Scope.Thread)
public static class BenchmarkData
{
- private Slice geometry;
- private Slice geometry500k;
- private Slice geography;
- private Slice geography500k;
+ private Geometry geometry;
+ private Geometry geometry500k;
+ private Geometry geography;
+ private Geometry geography500k;
@Setup
public void setup()
@@ -109,7 +109,7 @@ public void verify()
assertThat(Math.round(1000 * (Double) benchmark.stSphericalArea(data) / 3.659E8)).isEqualTo(1000);
assertThat(Math.round(1000 * (Double) benchmark.stSphericalArea500k(data) / 38842273735.0)).isEqualTo(1000);
- assertThat(benchmark.stArea(data)).isEqualTo(0.05033099592771004);
+ assertThat(benchmark.stArea(data)).isEqualTo(0.05033099592771002);
assertThat(Math.round(1000 * (Double) benchmark.stArea500k(data) / Math.PI)).isEqualTo(1000);
}
@@ -123,7 +123,8 @@ private static String createPolygon(int vertexCount)
return Math.cos(angle) + " " + Math.sin(angle);
})
.collect(Collectors.joining(",")));
- builder.append("))");
+ // Close the polygon ring by repeating the first vertex
+ builder.append(", 1.0 0.0))");
return builder.toString();
}
}
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTContains.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTContains.java
index cdf949654694..aff78d314feb 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTContains.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTContains.java
@@ -13,10 +13,10 @@
*/
package io.trino.plugin.geospatial;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Point;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
@@ -32,8 +32,9 @@
import java.io.IOException;
import java.util.concurrent.TimeUnit;
-import static io.trino.geospatial.serde.GeometrySerde.deserialize;
-import static io.trino.geospatial.serde.GeometrySerde.deserializeEnvelope;
+import static io.trino.geospatial.serde.JtsGeometrySerde.deserialize;
+import static io.trino.geospatial.serde.JtsGeometrySerde.deserializeEnvelope;
+import static io.trino.geospatial.serde.JtsGeometrySerde.serialize;
import static io.trino.jmh.Benchmarks.benchmark;
import static io.trino.plugin.geospatial.GeometryBenchmarkUtils.loadPolygon;
@@ -66,13 +67,13 @@ public Object stContainsOuterPointNotInEnvelopeSimpleGeometry(BenchmarkData data
@Benchmark
public Object deserializeSimpleGeometry(BenchmarkData data)
{
- return deserialize(data.simpleGeometry);
+ return deserialize(data.simpleGeometrySerialized);
}
@Benchmark
public Object deserializeEnvelopeSimpleGeometry(BenchmarkData data)
{
- return deserializeEnvelope(data.simpleGeometry);
+ return deserializeEnvelope(data.simpleGeometrySerialized);
}
@Benchmark
@@ -84,7 +85,7 @@ public Object stContainsInnerPoint(BenchmarkData data)
@Benchmark
public Object stContainsInnerPointDeserialized(BenchmarkData data)
{
- return data.ogcGeometry.contains(data.innerOgcPoint);
+ return data.jtsGeometry.contains(data.innerJtsPoint);
}
@Benchmark
@@ -96,7 +97,7 @@ public Object stContainsOuterPointInEnvelope(BenchmarkData data)
@Benchmark
public Object stContainsOuterPointInEnvelopeDeserialized(BenchmarkData data)
{
- return data.ogcGeometry.contains(data.outerOgcPointInEnvelope);
+ return data.jtsGeometry.contains(data.outerJtsPointInEnvelope);
}
@Benchmark
@@ -108,48 +109,52 @@ public Object stContainsOuterPointNotInEnvelope(BenchmarkData data)
@Benchmark
public Object stContainsOuterPointNotInEnvelopeDeserialized(BenchmarkData data)
{
- return data.ogcGeometry.contains(data.outerOgcPointNotInEnvelope);
+ return data.jtsGeometry.contains(data.outerJtsPointNotInEnvelope);
}
@Benchmark
public Object benchmarkDeserialize(BenchmarkData data)
{
- return deserialize(data.geometry);
+ return deserialize(data.geometrySerialized);
}
@Benchmark
public Object benchmarkDeserializeEnvelope(BenchmarkData data)
{
- return deserializeEnvelope(data.geometry);
+ return deserializeEnvelope(data.geometrySerialized);
}
@State(Scope.Thread)
public static class BenchmarkData
{
- private Slice geometry;
- private Slice simpleGeometry;
- private Slice innerPoint;
- private Slice outerPointInEnvelope;
- private Slice outerPointNotInEnvelope;
- private OGCGeometry ogcGeometry;
- private OGCPoint innerOgcPoint;
- private OGCPoint outerOgcPointInEnvelope;
- private OGCPoint outerOgcPointNotInEnvelope;
+ private Geometry geometry;
+ private Slice geometrySerialized;
+ private Geometry simpleGeometry;
+ private Slice simpleGeometrySerialized;
+ private Geometry innerPoint;
+ private Geometry outerPointInEnvelope;
+ private Geometry outerPointNotInEnvelope;
+ private Geometry jtsGeometry;
+ private Point innerJtsPoint;
+ private Point outerJtsPointInEnvelope;
+ private Point outerJtsPointNotInEnvelope;
@Setup
public void setup()
throws IOException
{
geometry = GeoFunctions.stGeometryFromText(Slices.utf8Slice(loadPolygon("large_polygon.txt")));
- simpleGeometry = GeoFunctions.stGeometryFromText(Slices.utf8Slice("POLYGON ((16.5 54, 16.5 54.1, 16.8 54.1, 16.8 54))"));
+ geometrySerialized = serialize(geometry);
+ simpleGeometry = GeoFunctions.stGeometryFromText(Slices.utf8Slice("POLYGON ((16.5 54, 16.5 54.1, 16.8 54.1, 16.8 54, 16.5 54))"));
+ simpleGeometrySerialized = serialize(simpleGeometry);
innerPoint = GeoFunctions.stPoint(16.6, 54.0167);
outerPointInEnvelope = GeoFunctions.stPoint(16.6667, 54.05);
outerPointNotInEnvelope = GeoFunctions.stPoint(16.6333, 54.2);
- ogcGeometry = deserialize(geometry);
- innerOgcPoint = (OGCPoint) deserialize(innerPoint);
- outerOgcPointInEnvelope = (OGCPoint) deserialize(outerPointInEnvelope);
- outerOgcPointNotInEnvelope = (OGCPoint) deserialize(outerPointNotInEnvelope);
+ jtsGeometry = geometry;
+ innerJtsPoint = (Point) innerPoint;
+ outerJtsPointInEnvelope = (Point) outerPointInEnvelope;
+ outerJtsPointNotInEnvelope = (Point) outerPointNotInEnvelope;
}
}
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTEnvelope.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTEnvelope.java
index 075eb4f87e16..c3bd8e060db1 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTEnvelope.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTEnvelope.java
@@ -13,8 +13,8 @@
*/
package io.trino.plugin.geospatial;
-import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
+import org.locationtech.jts.geom.Geometry;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
@@ -43,13 +43,13 @@
public class BenchmarkSTEnvelope
{
@Benchmark
- public Slice simpleGeometry(BenchmarkData data)
+ public Geometry simpleGeometry(BenchmarkData data)
{
return GeoFunctions.stEnvelope(data.simpleGeometry);
}
@Benchmark
- public Slice complexGeometry(BenchmarkData data)
+ public Geometry complexGeometry(BenchmarkData data)
{
return GeoFunctions.stEnvelope(data.complexGeometry);
}
@@ -57,15 +57,15 @@ public Slice complexGeometry(BenchmarkData data)
@State(Scope.Thread)
public static class BenchmarkData
{
- private Slice complexGeometry;
- private Slice simpleGeometry;
+ private Geometry complexGeometry;
+ private Geometry simpleGeometry;
@Setup
public void setup()
throws IOException
{
complexGeometry = GeoFunctions.stGeometryFromText(Slices.utf8Slice(loadPolygon("large_polygon.txt")));
- simpleGeometry = GeoFunctions.stGeometryFromText(Slices.utf8Slice("POLYGON ((1 1, 4 1, 1 4))"));
+ simpleGeometry = GeoFunctions.stGeometryFromText(Slices.utf8Slice("POLYGON ((1 1, 4 1, 1 4, 1 1))"));
}
}
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTIntersects.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTIntersects.java
index 5105c748b6c8..1ebd05e857f1 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTIntersects.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTIntersects.java
@@ -13,8 +13,8 @@
*/
package io.trino.plugin.geospatial;
-import io.airlift.slice.Slice;
import org.junit.jupiter.api.Test;
+import org.locationtech.jts.geom.Geometry;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
@@ -107,18 +107,18 @@ public void validateBenchmarkData()
@State(Scope.Thread)
public static class BenchmarkData
{
- private Slice simpleGeometry;
- private Slice geometry;
- private Slice innerLine;
- private Slice crossingLine;
- private Slice outerLineInEnvelope;
- private Slice outerLineNotInEnvelope;
+ private Geometry simpleGeometry;
+ private Geometry geometry;
+ private Geometry innerLine;
+ private Geometry crossingLine;
+ private Geometry outerLineInEnvelope;
+ private Geometry outerLineNotInEnvelope;
@Setup
public void setup()
throws IOException
{
- simpleGeometry = stGeometryFromText(utf8Slice("POLYGON ((16.5 54, 16.5 54.1, 16.51 54.1, 16.8 54))"));
+ simpleGeometry = stGeometryFromText(utf8Slice("POLYGON ((16.5 54, 16.5 54.1, 16.51 54.1, 16.8 54, 16.5 54))"));
geometry = stGeometryFromText(utf8Slice(loadPolygon("large_polygon.txt")));
innerLine = stGeometryFromText(utf8Slice("LINESTRING (16.6 54.0167, 16.6 54.017)"));
crossingLine = stGeometryFromText(utf8Slice("LINESTRING (16.6 53, 16.6 56)"));
@@ -132,9 +132,9 @@ public void validate()
validate(geometry);
}
- public void validate(Slice geometry)
+ public void validate(Geometry geometry)
{
- Slice envelope = stEnvelope(geometry);
+ Geometry envelope = stEnvelope(geometry);
// innerLine
verify(stIntersects(geometry, innerLine));
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTXMin.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTXMin.java
index 5363731da15a..3a82729aeecb 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTXMin.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/BenchmarkSTXMin.java
@@ -13,7 +13,7 @@
*/
package io.trino.plugin.geospatial;
-import io.airlift.slice.Slice;
+import org.locationtech.jts.geom.Geometry;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
@@ -59,15 +59,15 @@ public double complexGeometry(BenchmarkData data)
@State(Scope.Thread)
public static class BenchmarkData
{
- private Slice complexGeometry;
- private Slice simpleGeometry;
+ private Geometry complexGeometry;
+ private Geometry simpleGeometry;
@Setup
public void setup()
throws IOException
{
complexGeometry = stGeometryFromText(utf8Slice(loadPolygon("large_polygon.txt")));
- simpleGeometry = stGeometryFromText(utf8Slice("POLYGON ((1 1, 4 1, 1 4))"));
+ simpleGeometry = stGeometryFromText(utf8Slice("POLYGON ((1 1, 4 1, 1 4, 1 1))"));
}
}
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/GeoTestUtils.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/GeoTestUtils.java
new file mode 100644
index 000000000000..83726c084a56
--- /dev/null
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/GeoTestUtils.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.geospatial;
+
+import io.trino.sql.query.QueryAssertions;
+
+import java.util.List;
+
+import static io.airlift.slice.Slices.utf8Slice;
+import static io.trino.plugin.geospatial.GeoFunctions.stEquals;
+import static io.trino.plugin.geospatial.GeoFunctions.stGeometryFromText;
+import static io.trino.plugin.geospatial.GeoFunctions.stGeometryType;
+import static io.trino.plugin.geospatial.GeoFunctions.stIsEmpty;
+import static org.assertj.core.api.Assertions.assertThat;
+
+/**
+ * Shared test utility methods for geospatial tests.
+ * These methods use ST_Equals for geometric comparison, which is insensitive
+ * to vertex ordering and starting point differences between geometry libraries.
+ */
+public final class GeoTestUtils
+{
+ private GeoTestUtils() {}
+
+ /**
+ * Check if two WKT strings represent spatially equal geometries.
+ */
+ public static boolean spatiallyEquals(String wkt1, String wkt2)
+ {
+ var geom1 = stGeometryFromText(utf8Slice(wkt1));
+ var geom2 = stGeometryFromText(utf8Slice(wkt2));
+
+ if (stIsEmpty(geom1) && stIsEmpty(geom2)) {
+ return stGeometryType(geom1).equals(stGeometryType(geom2));
+ }
+ return stEquals(geom1, geom2);
+ }
+
+ /**
+ * Assert that an expression evaluates to a geometry spatially equal to the expected WKT.
+ */
+ public static void assertSpatialEquals(QueryAssertions assertions, String actualExpression, String expectedWkt)
+ {
+ // Evaluate the actual expression to its WKT representation
+ String actualWkt = (String) assertions.expression("ST_AsText(%s)".formatted(actualExpression))
+ .evaluate()
+ .value();
+
+ assertThat(actualWkt)
+ .withFailMessage("Actual geometry expression evaluated to NULL")
+ .isNotNull();
+
+ var expectedGeometry = stGeometryFromText(utf8Slice(expectedWkt));
+ var actualGeometry = stGeometryFromText(utf8Slice(actualWkt));
+ if (stIsEmpty(expectedGeometry)) {
+ assertThat(stIsEmpty(actualGeometry))
+ .withFailMessage("Expected empty geometry, but got: %s", actualWkt)
+ .isTrue();
+ assertThat(stGeometryType(actualGeometry))
+ .withFailMessage("Expected empty geometry type %s, but got %s (%s)", stGeometryType(expectedGeometry), stGeometryType(actualGeometry), actualWkt)
+ .isEqualTo(stGeometryType(expectedGeometry));
+ return;
+ }
+
+ assertThat(stEquals(expectedGeometry, actualGeometry))
+ .withFailMessage("Geometry mismatch!\nExpected: %s\nActual: %s", expectedWkt, actualWkt)
+ .isTrue();
+ }
+
+ public static void assertSpatialArrayEquals(QueryAssertions assertions, String actualExpression, String... expectedWkts)
+ {
+ // Evaluate the actual expression to a list of WKT strings
+ @SuppressWarnings("unchecked")
+ List<String> actualWkts = (List<String>) assertions.expression(
+ "transform(%s, g -> ST_AsText(g))".formatted(actualExpression))
+ .evaluate()
+ .value();
+
+ assertThat(actualWkts)
+ .describedAs("Mismatch in array size for expression: %s", actualExpression)
+ .isNotNull()
+ .hasSize(expectedWkts.length);
+
+ for (int i = 0; i < expectedWkts.length; i++) {
+ String actual = actualWkts.get(i);
+ String expected = expectedWkts[i];
+
+ if (expected == null) {
+ assertThat(actual)
+ .withFailMessage("Expected NULL geometry at array index %d, but got: %s", i, actual)
+ .isNull();
+ continue;
+ }
+
+ assertThat(actual)
+ .withFailMessage("Actual geometry at array index %d evaluated to NULL", i)
+ .isNotNull();
+
+ var actGeom = stGeometryFromText(utf8Slice(actual));
+ var expGeom = stGeometryFromText(utf8Slice(expected));
+
+ if (stIsEmpty(expGeom)) {
+ assertThat(stIsEmpty(actGeom))
+ .withFailMessage("Expected empty geometry at array index %d, but got: %s", i, actual)
+ .isTrue();
+ assertThat(stGeometryType(actGeom))
+ .withFailMessage("Expected empty geometry type %s at array index %d, but got %s (%s)", stGeometryType(expGeom), i, stGeometryType(actGeom), actual)
+ .isEqualTo(stGeometryType(expGeom));
+ continue;
+ }
+
+ assertThat(stEquals(actGeom, expGeom))
+ .withFailMessage("Geometry mismatch at array index %d.\nExpected: %s\nActual: %s", i, expected, actual)
+ .isTrue();
+ }
+ }
+}
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestBingTileFunctions.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestBingTileFunctions.java
index 6cae222876ab..dd9c6d206651 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestBingTileFunctions.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestBingTileFunctions.java
@@ -35,6 +35,7 @@
import static io.trino.operator.scalar.ApplyFunction.APPLY_FUNCTION;
import static io.trino.plugin.geospatial.BingTile.fromCoordinates;
import static io.trino.plugin.geospatial.BingTileType.BING_TILE;
+import static io.trino.plugin.geospatial.GeoTestUtils.assertSpatialEquals;
import static io.trino.spi.function.OperatorType.EQUAL;
import static io.trino.spi.function.OperatorType.IDENTICAL;
import static io.trino.spi.type.TinyintType.TINYINT;
@@ -447,13 +448,12 @@ public void testBingTileZoomLevel()
@Test
public void testBingTilePolygon()
{
- assertThat(assertions.function("ST_AsText", "bing_tile_polygon(bing_tile('123030123010121'))"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((59.996337890625 30.11662158281937, 60.00732421875 30.11662158281937, 60.00732421875 30.12612436422458, 59.996337890625 30.12612436422458, 59.996337890625 30.11662158281937))");
+ assertSpatialEquals(assertions, "bing_tile_polygon(bing_tile('123030123010121'))",
+ "POLYGON ((59.996337890625 30.11662158281937, 60.00732421875 30.11662158281937, 60.00732421875 30.12612436422458, 59.996337890625 30.12612436422458, 59.996337890625 30.11662158281937))");
assertThat(assertions.function("ST_AsText", "ST_Centroid(bing_tile_polygon(bing_tile('123030123010121')))"))
.hasType(VARCHAR)
- .isEqualTo("POINT (60.0018310546875 30.121372973521975)");
+ .isEqualTo("POINT (60.0018310546875 30.12137297352197)");
// Check bottom right corner of a stack of tiles at different zoom levels
assertThat(assertions.function("ST_AsText", "apply(bing_tile_polygon(bing_tile(1, 1, 1)), g -> ST_Point(ST_XMax(g), ST_YMin(g)))"))
@@ -568,8 +568,8 @@ public void testGeometryToBingTiles()
assertGeometryToBingTiles("POINT (60 30.12)", 15, ImmutableList.of("123030123010121"));
assertGeometryToBingTiles("POINT (60 30.12)", 16, ImmutableList.of("1230301230101212"));
- assertGeometryToBingTiles("POLYGON ((0 0, 0 10, 10 10, 10 0))", 6, ImmutableList.of("122220", "122222", "122221", "122223"));
- assertGeometryToBingTiles("POLYGON ((0 0, 0 10, 10 10))", 6, ImmutableList.of("122220", "122222", "122221"));
+ assertGeometryToBingTiles("POLYGON ((0 0, 0 10, 10 10, 10 0, 0 0))", 6, ImmutableList.of("122220", "122222", "122221", "122223"));
+ assertGeometryToBingTiles("POLYGON ((0 0, 0 10, 10 10, 0 0))", 6, ImmutableList.of("122220", "122222", "122221"));
assertGeometryToBingTiles("POLYGON ((10 10, -10 10, -20 -15, 10 10))", 3, ImmutableList.of("033", "211", "122"));
assertGeometryToBingTiles("POLYGON ((10 10, -10 10, -20 -15, 10 10))", 6, ImmutableList.of("211102", "211120", "033321", "033323", "211101", "211103", "211121", "033330", "033332", "211110", "211112", "033331", "033333", "211111", "122220", "122222", "122221"));
@@ -609,7 +609,7 @@ public void testGeometryToBingTiles()
.hasMessage("Longitude span for the geometry must be in [-180.00, 180.00] range");
assertTrinoExceptionThrownBy(() -> assertions.expression("geometry_to_bing_tiles(geometry, zoom)")
- .binding("geometry", "ST_GeometryFromText('POLYGON ((1000 10, -10 10, -20 -15))')")
+ .binding("geometry", "ST_GeometryFromText('POLYGON ((1000 10, -10 10, -20 -15, 1000 10))')")
.binding("zoom", Integer.toString(10))
.evaluate())
.hasMessage("Longitude span for the geometry must be in [-180.00, 180.00] range");
@@ -622,7 +622,7 @@ public void testGeometryToBingTiles()
.hasMessage("Latitude span for the geometry must be in [-85.05, 85.05] range");
assertTrinoExceptionThrownBy(() -> assertions.expression("geometry_to_bing_tiles(geometry, zoom)")
- .binding("geometry", "ST_GeometryFromText('POLYGON ((10 1000, -10 10, -20 -15))')")
+ .binding("geometry", "ST_GeometryFromText('POLYGON ((10 1000, -10 10, -20 -15, 10 1000))')")
.binding("zoom", Integer.toString(10))
.evaluate())
.hasMessage("Latitude span for the geometry must be in [-85.05, 85.05] range");
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestEncodedPolylineFunctions.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestEncodedPolylineFunctions.java
index 805c07ff6a95..e0303331cdc9 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestEncodedPolylineFunctions.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestEncodedPolylineFunctions.java
@@ -62,6 +62,10 @@ public void testFromEncodedPolyline()
assertThat(assertions.function("from_encoded_polyline", "'_p~iF~ps|U_ulLnnqC_mqNvxq`@'"))
.hasType(GEOMETRY)
.matches("ST_GeometryFromText('LINESTRING (-120.2 38.5, -120.95 40.7, -126.45300000000002 43.252)')");
+
+ assertThat(assertions.expression("from_encoded_polyline(to_encoded_polyline(ST_GeometryFromText('MULTIPOINT (-120.2 38.5)')))"))
+ .hasType(GEOMETRY)
+ .matches("ST_GeometryFromText('LINESTRING EMPTY')");
}
@Test
@@ -98,7 +102,7 @@ public void testToEncodedPolyline()
assertTrinoExceptionThrownBy(assertions.expression("to_encoded_polyline(ST_GeometryFromText('POINT (-120.2 38.5)'))")::evaluate)
.hasErrorCode(INVALID_FUNCTION_ARGUMENT);
- assertTrinoExceptionThrownBy(assertions.expression("to_encoded_polyline(ST_GeometryFromText('MULTILINESTRING ((-122.39174 37.77701))'))")::evaluate)
+ assertTrinoExceptionThrownBy(assertions.expression("to_encoded_polyline(ST_GeometryFromText('MULTILINESTRING ((-122.39174 37.77701, -122.39174 37.77701))'))")::evaluate)
.hasErrorCode(INVALID_FUNCTION_ARGUMENT);
}
}
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestGeoFunctions.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestGeoFunctions.java
index 8e37a6ff249e..46950d3000da 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestGeoFunctions.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestGeoFunctions.java
@@ -13,13 +13,10 @@
*/
package io.trino.plugin.geospatial;
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
import com.google.common.collect.ImmutableList;
import io.trino.geospatial.KdbTreeUtils;
import io.trino.geospatial.Rectangle;
-import io.trino.geospatial.serde.GeometrySerde;
+import io.trino.geospatial.serde.JtsGeometrySerde;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.type.ArrayType;
@@ -30,18 +27,26 @@
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.parallel.Execution;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Point;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKTReader;
import java.util.Arrays;
import java.util.List;
+import java.util.Locale;
import java.util.stream.Collectors;
-import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.trino.geospatial.KdbTree.buildKdbTree;
import static io.trino.plugin.geospatial.GeoFunctions.stCentroid;
+import static io.trino.plugin.geospatial.GeoTestUtils.assertSpatialArrayEquals;
+import static io.trino.plugin.geospatial.GeoTestUtils.assertSpatialEquals;
import static io.trino.plugin.geospatial.GeometryType.GEOMETRY;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.IntegerType.INTEGER;
+import static io.trino.spi.type.VarbinaryType.VARBINARY;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static io.trino.testing.assertions.TrinoExceptionAssert.assertTrinoExceptionThrownBy;
import static org.assertj.core.api.Assertions.assertThat;
@@ -133,7 +138,7 @@ private void assertSpatialPartitions(String kdbTreeJson, String wkt, double dist
public void testGeometryGetObjectValue()
{
BlockBuilder builder = GEOMETRY.createBlockBuilder(null, 1);
- GEOMETRY.writeSlice(builder, GeoFunctions.stPoint(1.2, 3.4));
+ GEOMETRY.writeSlice(builder, JtsGeometrySerde.serialize(GeoFunctions.stPoint(1.2, 3.4)));
Block block = builder.build();
assertThat("POINT (1.2 3.4)").isEqualTo(GEOMETRY.getObjectValue(block, 0));
@@ -165,20 +170,20 @@ public void testSTLineFromText()
assertTrinoExceptionThrownBy(assertions.function("ST_AsText", "ST_LineFromText('MULTILINESTRING EMPTY')")::evaluate)
.hasMessage("ST_LineFromText only applies to LINE_STRING. Input type is: MULTI_LINE_STRING");
- assertTrinoExceptionThrownBy(assertions.function("ST_AsText", "ST_LineFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')")::evaluate)
+ assertTrinoExceptionThrownBy(assertions.function("ST_AsText", "ST_LineFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')")::evaluate)
.hasMessage("ST_LineFromText only applies to LINE_STRING. Input type is: POLYGON");
}
@Test
public void testSTPolygon()
{
- assertThat(assertions.function("ST_AsText", "ST_Polygon('POLYGON EMPTY')"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON EMPTY");
+ assertSpatialEquals(assertions,
+ "ST_Polygon('POLYGON EMPTY')",
+ "POLYGON EMPTY");
- assertThat(assertions.function("ST_AsText", "ST_Polygon('POLYGON ((1 1, 1 4, 4 4, 4 1))')"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((1 1, 4 1, 4 4, 1 4, 1 1))");
+ assertSpatialEquals(assertions,
+ "ST_Polygon('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')",
+ "POLYGON ((1 1, 4 1, 4 4, 1 4, 1 1))");
assertTrinoExceptionThrownBy(assertions.function("ST_AsText", "ST_Polygon('LINESTRING (1 1, 2 2, 1 3)')")::evaluate)
.hasMessage("ST_Polygon only applies to POLYGON. Input type is: LINE_STRING");
@@ -187,7 +192,7 @@ public void testSTPolygon()
@Test
public void testSTArea()
{
- assertArea("POLYGON ((2 2, 2 6, 6 6, 6 2))", 16.0);
+ assertArea("POLYGON ((2 2, 2 6, 6 6, 6 2, 2 2))", 16.0);
assertArea("POLYGON EMPTY", 0.0);
assertArea("LINESTRING (1 4, 2 5)", 0.0);
assertArea("LINESTRING EMPTY", 0.0);
@@ -215,35 +220,34 @@ private void assertArea(String wkt, double expectedArea)
@Test
public void testSTBuffer()
{
- assertThat(assertions.function("ST_AsText", "ST_Buffer(ST_Point(0, 0), 0.5)"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((0.5 0, 0.4989294616193014 0.03270156461507146, 0.49572243068690486 0.0652630961100257, 0.4903926402016149 0.09754516100806403, 0.4829629131445338 0.12940952255126026, 0.47346506474755257 0.16071973265158065, 0.46193976625564315 0.19134171618254472, 0.4484363707663439 0.22114434510950046, 0.43301270189221913 0.2499999999999998, 0.41573480615127245 0.2777851165098009, 0.39667667014561747 0.30438071450436016, 0.3759199037394886 0.32967290755003426, 0.3535533905932737 0.3535533905932736, 0.32967290755003437 0.3759199037394886, 0.3043807145043603 0.39667667014561747, 0.2777851165098011 0.4157348061512725, 0.24999999999999997 0.43301270189221924, 0.22114434510950062 0.4484363707663441, 0.19134171618254486 0.4619397662556433, 0.16071973265158077 0.4734650647475528, 0.12940952255126037 0.48296291314453416, 0.09754516100806412 0.4903926402016152, 0.06526309611002579 0.4957224306869052, 0.03270156461507153 0.49892946161930174, 0 0.5, -0.03270156461507146 0.4989294616193014, -0.0652630961100257 0.49572243068690486, -0.09754516100806403 0.4903926402016149, -0.12940952255126026 0.4829629131445338, -0.16071973265158065 0.47346506474755257, -0.19134171618254472 0.46193976625564315, -0.22114434510950046 0.4484363707663439, -0.2499999999999998 0.43301270189221913, -0.2777851165098009 0.41573480615127245, -0.30438071450436016 0.39667667014561747, -0.32967290755003426 0.3759199037394886, -0.3535533905932736 0.3535533905932737, -0.3759199037394886 0.32967290755003437, -0.39667667014561747 0.3043807145043603, -0.4157348061512725 0.2777851165098011, -0.43301270189221924 0.24999999999999997, -0.4484363707663441 0.22114434510950062, -0.4619397662556433 0.19134171618254486, -0.4734650647475528 0.16071973265158077, -0.48296291314453416 0.12940952255126037, -0.4903926402016152 0.09754516100806412, -0.4957224306869052 0.06526309611002579, -0.49892946161930174 0.03270156461507153, -0.5 0, -0.4989294616193014 -0.03270156461507146, -0.49572243068690486 
-0.0652630961100257, -0.4903926402016149 -0.09754516100806403, -0.4829629131445338 -0.12940952255126026, -0.47346506474755257 -0.16071973265158065, -0.46193976625564315 -0.19134171618254472, -0.4484363707663439 -0.22114434510950046, -0.43301270189221913 -0.2499999999999998, -0.41573480615127245 -0.2777851165098009, -0.39667667014561747 -0.30438071450436016, -0.3759199037394886 -0.32967290755003426, -0.3535533905932737 -0.3535533905932736, -0.32967290755003437 -0.3759199037394886, -0.3043807145043603 -0.39667667014561747, -0.2777851165098011 -0.4157348061512725, -0.24999999999999997 -0.43301270189221924, -0.22114434510950062 -0.4484363707663441, -0.19134171618254486 -0.4619397662556433, -0.16071973265158077 -0.4734650647475528, -0.12940952255126037 -0.48296291314453416, -0.09754516100806412 -0.4903926402016152, -0.06526309611002579 -0.4957224306869052, -0.03270156461507153 -0.49892946161930174, 0 -0.5, 0.03270156461507146 -0.4989294616193014, 0.0652630961100257 -0.49572243068690486, 0.09754516100806403 -0.4903926402016149, 0.12940952255126026 -0.4829629131445338, 0.16071973265158065 -0.47346506474755257, 0.19134171618254472 -0.46193976625564315, 0.22114434510950046 -0.4484363707663439, 0.2499999999999998 -0.43301270189221913, 0.2777851165098009 -0.41573480615127245, 0.30438071450436016 -0.39667667014561747, 0.32967290755003426 -0.3759199037394886, 0.3535533905932736 -0.3535533905932737, 0.3759199037394886 -0.32967290755003437, 0.39667667014561747 -0.3043807145043603, 0.4157348061512725 -0.2777851165098011, 0.43301270189221924 -0.24999999999999997, 0.4484363707663441 -0.22114434510950062, 0.4619397662556433 -0.19134171618254486, 0.4734650647475528 -0.16071973265158077, 0.48296291314453416 -0.12940952255126037, 0.4903926402016152 -0.09754516100806412, 0.4957224306869052 -0.06526309611002579, 0.49892946161930174 -0.03270156461507153, 0.5 0))");
+ // ST_Buffer involves trigonometric calculations that produce slightly different
+ // floating-point results across CPU architectures (ARM vs x86). Instead of checking
+ // exact coordinates, we verify the area (which is stable across architectures).
- assertThat(assertions.function("ST_AsText", "ST_Buffer(ST_LineFromText('LINESTRING (0 0, 1 1, 2 0.5)'), 0.2)"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((0 -0.19999999999999996, 0.013080625846028537 -0.19957178464772052, 0.02610523844401036 -0.19828897227476194, 0.03901806440322564 -0.19615705608064593, 0.05176380902050415 -0.1931851652578136, 0.06428789306063232 -0.18938602589902098, 0.07653668647301792 -0.18477590650225728, 0.0884577380438003 -0.17937454830653754, 0.09999999999999987 -0.17320508075688767, 0.11111404660392044 -0.166293922460509, 0.12175228580174413 -0.15867066805824703, 0.13186916302001372 -0.15036796149579545, 0.14142135623730945 -0.14142135623730945, 1.0394906098164265 0.7566478973418078, 1.9105572809000084 0.32111456180001685, 1.9115422619561997 0.32062545169346235, 1.923463313526982 0.31522409349774266, 1.9357121069393677 0.3106139741009789, 1.9482361909794959 0.3068148347421863, 1.9609819355967744 0.3038429439193539, 1.9738947615559896 0.30171102772523795, 1.9869193741539715 0.30042821535227926, 2 0.3, 2.0130806258460288 0.3004282153522794, 2.02610523844401 0.30171102772523806, 2.0390180644032254 0.30384294391935407, 2.051763809020504 0.30681483474218646, 2.0642878930606323 0.31061397410097896, 2.076536686473018 0.3152240934977427, 2.0884577380438003 0.32062545169346246, 2.1 0.3267949192431123, 2.1111140466039204 0.333706077539491, 2.121752285801744 0.34132933194175297, 2.1318691630200135 0.34963203850420455, 2.1414213562373092 0.35857864376269055, 2.1503679614957956 0.3681308369799863, 2.158670668058247 0.37824771419825587, 2.166293922460509 0.38888595339607956, 2.1732050807568877 0.4, 2.1793745483065377 0.41154226195619975, 2.1847759065022574 0.4234633135269821, 2.189386025899021 0.4357121069393677, 2.193185165257814 0.44823619097949585, 2.1961570560806463 0.46098193559677436, 2.1982889722747623 0.4738947615559897, 2.1995717846477207 0.4869193741539714, 2.2 0.5, 2.1995717846477207 0.5130806258460285, 2.198288972274762 0.5261052384440102, 2.196157056080646 0.5390180644032256, 2.1931851652578134 0.5517638090205041, 2.189386025899021 0.5642878930606323, 
2.1847759065022574 0.5765366864730179, 2.1793745483065377 0.5884577380438002, 2.1732050807568877 0.5999999999999999, 2.166293922460509 0.6111140466039204, 2.158670668058247 0.6217522858017441, 2.1503679614957956 0.6318691630200137, 2.1414213562373097 0.6414213562373094, 2.131869163020014 0.6503679614957955, 2.121752285801744 0.658670668058247, 2.1111140466039204 0.666293922460509, 2.1 0.6732050807568877, 2.0894427190999916 0.6788854381999831, 1.0894427190999916 1.1788854381999831, 1.0884577380438003 1.1793745483065377, 1.076536686473018 1.1847759065022574, 1.0642878930606323 1.189386025899021, 1.0517638090205041 1.1931851652578138, 1.0390180644032256 1.196157056080646, 1.0261052384440104 1.198288972274762, 1.0130806258460288 1.1995717846477207, 1 1.2, 0.9869193741539715 1.1995717846477205, 0.9738947615559896 1.1982889722747618, 0.9609819355967744 1.1961570560806458, 0.9482361909794959 1.1931851652578136, 0.9357121069393677 1.189386025899021, 0.9234633135269821 1.1847759065022574, 0.9115422619561997 1.1793745483065377, 0.9000000000000001 1.1732050807568877, 0.8888859533960796 1.166293922460509, 0.8782477141982559 1.158670668058247, 0.8681308369799863 1.1503679614957956, 0.8585786437626906 1.1414213562373094, -0.14142135623730967 0.1414213562373095, -0.15036796149579557 0.13186916302001372, -0.1586706680582468 0.12175228580174413, -0.1662939224605089 0.11111404660392044, -0.17320508075688767 0.09999999999999998, -0.17937454830653765 0.08845773804380025, -0.1847759065022574 0.07653668647301792, -0.18938602589902098 0.06428789306063232, -0.19318516525781382 0.05176380902050415, -0.19615705608064626 0.03901806440322564, -0.19828897227476228 0.026105238444010304, -0.19957178464772074 0.013080625846028593, -0.20000000000000018 0, -0.19957178464772074 -0.013080625846028537, -0.19828897227476183 -0.026105238444010248, -0.19615705608064582 -0.03901806440322564, -0.19318516525781337 -0.05176380902050415, -0.18938602589902098 -0.06428789306063232, -0.1847759065022574 
-0.07653668647301792, -0.17937454830653765 -0.0884577380438002, -0.17320508075688767 -0.09999999999999987, -0.1662939224605089 -0.11111404660392044, -0.1586706680582468 -0.12175228580174413, -0.15036796149579557 -0.13186916302001372, -0.14142135623730967 -0.14142135623730945, -0.13186916302001395 -0.15036796149579545, -0.12175228580174391 -0.15867066805824703, -0.11111404660392044 -0.166293922460509, -0.10000000000000009 -0.17320508075688767, -0.0884577380438003 -0.17937454830653765, -0.07653668647301792 -0.1847759065022574, -0.06428789306063232 -0.1893860258990211, -0.05176380902050415 -0.1931851652578137, -0.03901806440322586 -0.19615705608064604, -0.026105238444010137 -0.19828897227476205, -0.01308062584602876 -0.19957178464772074, 0 -0.19999999999999996))");
+ // Point buffer: area should be approximately pi * r^2 = pi * 0.25 ≈ 0.785
+ assertThat((Double) assertions.expression("ST_Area(ST_Buffer(ST_Point(0, 0), 0.5))")
+ .evaluate().value())
+ .isCloseTo(0.785, within(0.01));
- assertThat(assertions.function("ST_AsText", "ST_Buffer(ST_GeometryFromText('POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))'), 1.2)"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((-1.2 0, -1.1974307078863233 -0.0784837550761715, -1.1897338336485717 -0.15663143066406168, -1.1769423364838756 -0.23410838641935366, -1.1591109915468811 -0.3105828541230246, -1.1363161553941261 -0.38572735836379357, -1.1086554390135435 -0.4592201188381073, -1.0762472898392252 -0.530746428262801, -1.0392304845413258 -0.5999999999999995, -0.9977635347630538 -0.6666842796235222, -0.9520240083494819 -0.7305137148104643, -0.9022077689747725 -0.7912149781200822, -0.8485281374238568 -0.8485281374238567, -0.7912149781200825 -0.9022077689747725, -0.7305137148104647 -0.9520240083494819, -0.6666842796235226 -0.997763534763054, -0.5999999999999999 -1.039230484541326, -0.5307464282628015 -1.0762472898392257, -0.45922011883810765 -1.108655439013544, -0.38572735836379385 -1.1363161553941266, -0.3105828541230249 -1.159110991546882, -0.2341083864193539 -1.1769423364838765, -0.15663143066406188 -1.1897338336485723, -0.07848375507617167 -1.1974307078863242, 0 -1.2, 5 -1.2, 5.078483755076172 -1.1974307078863233, 5.156631430664062 -1.1897338336485717, 5.234108386419353 -1.1769423364838756, 5.310582854123025 -1.1591109915468811, 5.385727358363794 -1.1363161553941261, 5.4592201188381075 -1.1086554390135435, 5.530746428262801 -1.0762472898392252, 5.6 -1.0392304845413258, 5.666684279623523 -0.9977635347630538, 5.730513714810464 -0.9520240083494819, 5.791214978120082 -0.9022077689747725, 5.848528137423857 -0.8485281374238568, 5.9022077689747725 -0.7912149781200825, 5.952024008349482 -0.7305137148104647, 5.997763534763054 -0.6666842796235226, 6.039230484541326 -0.5999999999999999, 6.076247289839226 -0.5307464282628015, 6.108655439013544 -0.45922011883810765, 6.136316155394127 -0.38572735836379385, 6.159110991546882 -0.3105828541230249, 6.176942336483877 -0.2341083864193539, 6.189733833648573 -0.15663143066406188, 6.197430707886324 -0.07848375507617167, 6.2 0, 6.2 5, 6.1974307078863236 5.078483755076172, 6.189733833648572 5.156631430664062, 6.176942336483876 
5.234108386419353, 6.159110991546881 5.310582854123025, 6.136316155394126 5.385727358363794, 6.1086554390135435 5.4592201188381075, 6.076247289839225 5.530746428262801, 6.039230484541326 5.6, 5.997763534763054 5.666684279623523, 5.952024008349482 5.730513714810464, 5.9022077689747725 5.791214978120082, 5.848528137423857 5.848528137423857, 5.791214978120083 5.9022077689747725, 5.730513714810464 5.952024008349482, 5.666684279623523 5.997763534763054, 5.6 6.039230484541326, 5.530746428262802 6.076247289839226, 5.4592201188381075 6.108655439013544, 5.385727358363794 6.136316155394127, 5.310582854123025 6.159110991546882, 5.234108386419354 6.176942336483877, 5.156631430664062 6.189733833648573, 5.078483755076172 6.197430707886324, 5 6.2, 0 6.2, -0.0784837550761715 6.1974307078863236, -0.15663143066406168 6.189733833648572, -0.23410838641935366 6.176942336483876, -0.3105828541230246 6.159110991546881, -0.38572735836379357 6.136316155394126, -0.4592201188381073 6.1086554390135435, -0.530746428262801 6.076247289839225, -0.5999999999999995 6.039230484541326, -0.6666842796235222 5.997763534763054, -0.7305137148104643 5.952024008349482, -0.7912149781200822 5.9022077689747725, -0.8485281374238567 5.848528137423857, -0.9022077689747725 5.791214978120083, -0.9520240083494819 5.730513714810464, -0.997763534763054 5.666684279623523, -1.039230484541326 5.6, -1.0762472898392257 5.530746428262802, -1.108655439013544 5.4592201188381075, -1.1363161553941266 5.385727358363794, -1.159110991546882 5.310582854123025, -1.1769423364838765 5.234108386419354, -1.1897338336485723 5.156631430664062, -1.1974307078863242 5.078483755076172, -1.2 5, -1.2 0))");
+ // LineString buffer: verify approximate area
+ assertThat((Double) assertions.expression("ST_Area(ST_Buffer(ST_LineFromText('LINESTRING (0 0, 1 1, 2 0.5)'), 0.2))")
+ .evaluate().value())
+ .isCloseTo(1.13, within(0.05));
- // zero distance
- assertThat(assertions.function("ST_AsText", "ST_Buffer(ST_Point(0, 0), 0)"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (0 0)");
-
- assertThat(assertions.function("ST_AsText", "ST_Buffer(ST_LineFromText('LINESTRING (0 0, 1 1, 2 0.5)'), 0)"))
- .hasType(VARCHAR)
- .isEqualTo("LINESTRING (0 0, 1 1, 2 0.5)");
+ // Polygon buffer: 5x5 square buffered by 1.2 gives 25 + 4*5*1.2 + pi*1.2^2 ≈ 53.5
+ assertThat((Double) assertions.expression("ST_Area(ST_Buffer(ST_GeometryFromText('POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))'), 1.2))")
+ .evaluate().value())
+ .isCloseTo(53.5, within(0.5));
- assertThat(assertions.function("ST_AsText", "ST_Buffer(ST_GeometryFromText('POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))'), 0)"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((0 0, 5 0, 5 5, 0 5, 0 0))");
+ // zero distance
+ assertSpatialEquals(assertions, "ST_Buffer(ST_Point(0, 0), 0)", "POINT (0 0)");
+ assertSpatialEquals(assertions, "ST_Buffer(ST_LineFromText('LINESTRING (0 0, 1 1, 2 0.5)'), 0)", "LINESTRING (0 0, 1 1, 2 0.5)");
+ assertSpatialEquals(assertions, "ST_Buffer(ST_GeometryFromText('POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))'), 0)", "POLYGON ((0 0, 5 0, 5 5, 0 5, 0 0))");
- // geometry collection
- assertThat(assertions.function("ST_AsText", "ST_Buffer(ST_Intersection(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'), ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))')), 0.2)"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOLYGON (((5 0.8, 5.013080625846029 0.8004282153522794, 5.026105238444011 0.801711027725238, 5.039018064403225 0.803842943919354, 5.051763809020504 0.8068148347421864, 5.064287893060633 0.8106139741009789, 5.076536686473018 0.8152240934977427, 5.0884577380438 0.8206254516934623, 5.1 0.8267949192431123, 5.11111404660392 0.833706077539491, 5.121752285801744 0.841329331941753, 5.1318691630200135 0.8496320385042045, 5.141421356237309 0.8585786437626906, 5.150367961495795 0.8681308369799863, 5.158670668058247 0.8782477141982559, 5.166293922460509 0.8888859533960796, 5.173205080756888 0.9, 5.179374548306538 0.9115422619561997, 5.184775906502257 0.9234633135269821, 5.189386025899021 0.9357121069393677, 5.193185165257813 0.9482361909794959, 5.196157056080646 0.9609819355967744, 5.198288972274762 0.9738947615559896, 5.199571784647721 0.9869193741539714, 5.2 1, 5.199571784647721 1.0130806258460288, 5.198288972274762 1.0261052384440104, 5.196157056080646 1.0390180644032256, 5.193185165257813 1.0517638090205041, 5.189386025899021 1.0642878930606323, 5.184775906502257 1.076536686473018, 5.179374548306537 1.0884577380438003, 5.173205080756888 1.1, 5.166293922460509 1.1111140466039204, 5.158670668058247 1.1217522858017441, 5.150367961495795 1.1318691630200137, 5.141421356237309 1.1414213562373094, 5.1318691630200135 1.1503679614957956, 5.121752285801744 1.158670668058247, 5.11111404660392 1.1662939224605091, 5.1 1.1732050807568877, 5.0884577380438 1.1793745483065377, 5.076536686473018 1.1847759065022574, 5.064287893060632 1.1893860258990212, 5.051763809020504 1.1931851652578138, 5.039018064403225 1.196157056080646, 5.026105238444011 1.198288972274762, 5.013080625846029 1.1995717846477207, 5 1.2, 4.986919374153971 1.1995717846477207, 4.973894761555989 1.198288972274762, 4.960981935596775 1.196157056080646, 4.948236190979496 1.1931851652578136, 4.935712106939367 1.189386025899021, 4.923463313526982 1.1847759065022574, 4.9115422619562 1.1793745483065377, 4.9 
1.1732050807568877, 4.88888595339608 1.166293922460509, 4.878247714198256 1.158670668058247, 4.8681308369799865 1.1503679614957956, 4.858578643762691 1.1414213562373094, 4.849632038504205 1.1318691630200137, 4.841329331941753 1.1217522858017441, 4.833706077539491 1.1111140466039204, 4.826794919243112 1.1, 4.820625451693462 1.0884577380438003, 4.815224093497743 1.076536686473018, 4.810613974100979 1.0642878930606323, 4.806814834742187 1.0517638090205041, 4.803842943919354 1.0390180644032256, 4.801711027725238 1.0261052384440104, 4.800428215352279 1.0130806258460285, 4.8 1, 4.800428215352279 0.9869193741539714, 4.801711027725238 0.9738947615559896, 4.803842943919354 0.9609819355967743, 4.806814834742187 0.9482361909794959, 4.810613974100979 0.9357121069393677, 4.815224093497743 0.923463313526982, 4.820625451693463 0.9115422619561997, 4.826794919243112 0.8999999999999999, 4.833706077539491 0.8888859533960796, 4.841329331941753 0.8782477141982559, 4.849632038504205 0.8681308369799862, 4.858578643762691 0.8585786437626904, 4.8681308369799865 0.8496320385042044, 4.878247714198256 0.841329331941753, 4.88888595339608 0.8337060775394909, 4.9 0.8267949192431122, 4.9115422619562 0.8206254516934623, 4.923463313526982 0.8152240934977426, 4.935712106939368 0.8106139741009788, 4.948236190979496 0.8068148347421863, 4.960981935596775 0.8038429439193538, 4.973894761555989 0.801711027725238, 4.986919374153971 0.8004282153522793, 5 0.8)), ((3 3.8, 4 3.8, 4.013080625846029 3.8004282153522793, 4.026105238444011 3.801711027725238, 4.039018064403225 3.803842943919354, 4.051763809020504 3.8068148347421866, 4.064287893060632 3.810613974100979, 4.076536686473018 3.8152240934977426, 4.0884577380438 3.8206254516934623, 4.1 3.8267949192431123, 4.11111404660392 3.833706077539491, 4.121752285801744 3.841329331941753, 4.1318691630200135 3.8496320385042044, 4.141421356237309 3.8585786437626903, 4.150367961495795 3.868130836979986, 4.158670668058247 3.878247714198256, 4.166293922460509 
3.8888859533960796, 4.173205080756888 3.9, 4.179374548306537 3.9115422619561997, 4.184775906502257 3.923463313526982, 4.189386025899021 3.9357121069393677, 4.193185165257813 3.948236190979496, 4.196157056080646 3.960981935596774, 4.198288972274762 3.97389476155599, 4.199571784647721 3.9869193741539712, 4.2 4, 4.199571784647721 4.013080625846029, 4.198288972274762 4.026105238444011, 4.196157056080646 4.039018064403225, 4.193185165257813 4.051763809020504, 4.189386025899021 4.064287893060632, 4.184775906502257 4.076536686473018, 4.179374548306537 4.0884577380438, 4.173205080756888 4.1, 4.166293922460509 4.11111404660392, 4.158670668058247 4.121752285801744, 4.150367961495795 4.1318691630200135, 4.141421356237309 4.141421356237309, 4.1318691630200135 4.150367961495795, 4.121752285801744 4.158670668058247, 4.11111404660392 4.166293922460509, 4.1 4.173205080756888, 4.0884577380438 4.179374548306537, 4.076536686473018 4.184775906502257, 4.064287893060632 4.189386025899021, 4.051763809020504 4.193185165257813, 4.039018064403225 4.196157056080646, 4.026105238444011 4.198288972274762, 4.013080625846029 4.199571784647721, 4 4.2, 3 4.2, 2.9869193741539712 4.199571784647721, 2.9738947615559894 4.198288972274762, 2.9609819355967746 4.196157056080646, 2.948236190979496 4.193185165257813, 2.9357121069393677 4.189386025899021, 2.923463313526982 4.184775906502257, 2.9115422619561997 4.179374548306537, 2.9000000000000004 4.173205080756888, 2.8888859533960796 4.166293922460509, 2.878247714198256 4.158670668058247, 2.8681308369799865 4.150367961495795, 2.8585786437626908 4.141421356237309, 2.8496320385042044 4.1318691630200135, 2.841329331941753 4.121752285801744, 2.833706077539491 4.11111404660392, 2.8267949192431123 4.1, 2.8206254516934623 4.0884577380438, 2.8152240934977426 4.076536686473018, 2.8106139741009786 4.064287893060632, 2.8068148347421866 4.051763809020504, 2.8038429439193537 4.039018064403225, 2.801711027725238 4.026105238444011, 2.8004282153522793 4.013080625846029, 2.8 
4, 2.8004282153522793 3.9869193741539712, 2.801711027725238 3.97389476155599, 2.8038429439193537 3.9609819355967746, 2.8068148347421866 3.948236190979496, 2.810613974100979 3.9357121069393677, 2.8152240934977426 3.923463313526982, 2.8206254516934623 3.9115422619561997, 2.8267949192431123 3.9, 2.833706077539491 3.8888859533960796, 2.841329331941753 3.878247714198256, 2.8496320385042044 3.8681308369799865, 2.8585786437626908 3.8585786437626908, 2.8681308369799865 3.8496320385042044, 2.878247714198256 3.841329331941753, 2.8888859533960796 3.833706077539491, 2.9 3.8267949192431123, 2.9115422619561997 3.8206254516934623, 2.923463313526982 3.8152240934977426, 2.9357121069393677 3.810613974100979, 2.948236190979496 3.806814834742186, 2.9609819355967746 3.8038429439193537, 2.9738947615559894 3.8017110277252377, 2.9869193741539712 3.8004282153522793, 3 3.8)))");
+ // geometry collection buffer: verify area is positive (the intersection is a point at (5 1) plus an overlap segment from (3 4) to (4 4))
+ assertThat((Double) assertions.expression("ST_Area(ST_Buffer(ST_Intersection(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'), ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))')), 0.2))")
+ .evaluate().value())
+ .isGreaterThan(0.0);
// empty geometry
assertThat(assertions.function("ST_Buffer", "ST_GeometryFromText('POINT EMPTY')", "1"))
@@ -257,92 +261,92 @@ public void testSTBuffer()
.hasMessage("distance is negative");
// infinity() and nan() distance
- assertThat(assertions.function("ST_AsText", "ST_Buffer(ST_Point(0, 0), infinity())"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOLYGON EMPTY");
+ assertSpatialEquals(assertions, "ST_Buffer(ST_Point(0, 0), infinity())", "POLYGON EMPTY");
assertTrinoExceptionThrownBy(assertions.function("ST_Buffer", "ST_Point(0, 0)", "nan()")::evaluate)
.hasMessage("distance is NaN");
- // For small polygons, there was a bug in ESRI that throw an NPE. This
- // was fixed (https://github.com/Esri/geometry-api-java/pull/243) to
- // return an empty geometry instead. Ideally, these would return
- // something approximately like `ST_Buffer(ST_Centroid(geometry))`.
+ // JTS correctly returns non-empty buffers for small polygons (unlike ESRI which returned empty)
assertThat(assertions.function("ST_IsEmpty", "ST_Buffer(ST_Buffer(ST_Point(177.50102959662, 64.726807421691), 0.0000000001), 0.00005)"))
.hasType(BOOLEAN)
- .isEqualTo(true);
+ .isEqualTo(false);
assertThat(assertions.function("ST_IsEmpty", "ST_Buffer(ST_GeometryFromText(" +
"'POLYGON ((177.0 64.0, 177.0000000001 64.0, 177.0000000001 64.0000000001, 177.0 64.0000000001, 177.0 64.0))'), 0.01)"))
.hasType(BOOLEAN)
- .isEqualTo(true);
+ .isEqualTo(false);
}
@Test
public void testSTCentroid()
{
- assertThat(assertions.function("ST_AsText", "ST_Centroid(ST_GeometryFromText('LINESTRING EMPTY'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT EMPTY");
+ assertSpatialEquals(assertions,
+ "ST_Centroid(ST_GeometryFromText('LINESTRING EMPTY'))",
+ "POINT EMPTY");
- assertThat(assertions.function("ST_AsText", "ST_Centroid(ST_GeometryFromText('POINT (3 5)'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (3 5)");
+ assertSpatialEquals(assertions,
+ "ST_Centroid(ST_GeometryFromText('POINT (3 5)'))",
+ "POINT (3 5)");
- assertThat(assertions.function("ST_AsText", "ST_Centroid(ST_GeometryFromText('MULTIPOINT (1 2, 2 4, 3 6, 4 8)'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (2.5 5)");
+ assertSpatialEquals(assertions,
+ "ST_Centroid(ST_GeometryFromText('MULTIPOINT (1 2, 2 4, 3 6, 4 8)'))",
+ "POINT (2.5 5)");
- assertThat(assertions.function("ST_AsText", "ST_Centroid(ST_GeometryFromText('LINESTRING (1 1, 2 2, 3 3)'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (2 2)");
+ assertSpatialEquals(assertions,
+ "ST_Centroid(ST_GeometryFromText('LINESTRING (1 1, 2 2, 3 3)'))",
+ "POINT (2 2)");
- assertThat(assertions.function("ST_AsText", "ST_Centroid(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (3 2)");
+ assertSpatialEquals(assertions,
+ "ST_Centroid(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'))",
+ "POINT (3 2)");
- assertThat(assertions.function("ST_AsText", "ST_Centroid(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (2.5 2.5)");
+ assertSpatialEquals(assertions,
+ "ST_Centroid(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))'))",
+ "POINT (2.5 2.5)");
- assertThat(assertions.function("ST_AsText", "ST_Centroid(ST_GeometryFromText('POLYGON ((1 1, 5 1, 3 4))'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (3 2)");
+ assertSpatialEquals(assertions,
+ "ST_Centroid(ST_GeometryFromText('POLYGON ((1 1, 5 1, 3 4, 1 1))'))",
+ "POINT (3 2)");
- assertThat(assertions.function("ST_AsText", "ST_Centroid(ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((2 4, 2 6, 6 6, 6 4)))'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (3.3333333333333335 4)");
+ assertSpatialEquals(assertions,
+ "ST_Centroid(ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))'))",
+ "POINT (3.3333333333333335 4)");
- assertThat(assertions.function("ST_AsText", "ST_Centroid(ST_GeometryFromText('POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0), (1 1, 1 2, 2 2, 2 1, 1 1))'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (2.5416666666666665 2.5416666666666665)");
+ assertSpatialEquals(assertions,
+ "ST_Centroid(ST_GeometryFromText('POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0), (1 1, 1 2, 2 2, 2 1, 1 1))'))",
+ "POINT (2.5416666666666665 2.5416666666666665)");
- assertApproximateCentroid("MULTIPOLYGON (((4.903234300000006 52.08474289999999, 4.903234265193165 52.084742934806826, 4.903234299999999 52.08474289999999, 4.903234300000006 52.08474289999999)))", new Point(4.9032343, 52.0847429), 1e-7);
+ assertApproximateCentroid("MULTIPOLYGON (((4.903234300000006 52.08474289999999, 4.903234265193165 52.084742934806826, 4.903234299999999 52.08474289999999, 4.903234300000006 52.08474289999999)))", new Coordinate(4.9032343, 52.0847429), 1e-7);
// Numerical stability tests
assertApproximateCentroid(
"MULTIPOLYGON (((153.492818 -28.13729, 153.492821 -28.137291, 153.492816 -28.137289, 153.492818 -28.13729)))",
- new Point(153.49282, -28.13729), 1e-5);
+ new Coordinate(153.49282, -28.13729), 1e-5);
assertApproximateCentroid(
"MULTIPOLYGON (((153.112475 -28.360526, 153.1124759 -28.360527, 153.1124759 -28.360526, 153.112475 -28.360526)))",
- new Point(153.112475, -28.360526), 1e-5);
+ new Coordinate(153.112475, -28.360526), 1e-5);
assertApproximateCentroid(
"POLYGON ((4.903234300000006 52.08474289999999, 4.903234265193165 52.084742934806826, 4.903234299999999 52.08474289999999, 4.903234300000006 52.08474289999999))",
- new Point(4.9032343, 52.0847429), 1e-6);
+ new Coordinate(4.9032343, 52.0847429), 1e-6);
assertApproximateCentroid(
"MULTIPOLYGON (((4.903234300000006 52.08474289999999, 4.903234265193165 52.084742934806826, 4.903234299999999 52.08474289999999, 4.903234300000006 52.08474289999999)))",
- new Point(4.9032343, 52.0847429), 1e-6);
+ new Coordinate(4.9032343, 52.0847429), 1e-6);
assertApproximateCentroid(
"POLYGON ((-81.0387349 29.20822, -81.039974 29.210597, -81.0410331 29.2101579, -81.0404758 29.2090879, -81.0404618 29.2090609, -81.040433 29.209005, -81.0404269 29.208993, -81.0404161 29.2089729, -81.0398001 29.20779, -81.0387349 29.20822), (-81.0404229 29.208986, -81.04042 29.2089809, -81.0404269 29.208993, -81.0404229 29.208986))",
- new Point(-81.039885, 29.209191), 1e-6);
+ new Coordinate(-81.039885, 29.209191), 1e-6);
}
- private void assertApproximateCentroid(String wkt, Point expectedCentroid, double epsilon)
+ private void assertApproximateCentroid(String wkt, Coordinate expectedCentroid, double epsilon)
{
- OGCPoint actualCentroid = (OGCPoint) GeometrySerde.deserialize(
- stCentroid(GeometrySerde.serialize(OGCGeometry.fromText(wkt))));
- assertThat(expectedCentroid.getX()).isCloseTo(actualCentroid.X(), within(epsilon));
- assertThat(expectedCentroid.getY()).isCloseTo(actualCentroid.Y(), within(epsilon));
+ try {
+ Geometry geometry = stCentroid(new WKTReader().read(wkt));
+ Point actualCentroid = (Point) geometry;
+ assertThat(expectedCentroid.getX()).isCloseTo(actualCentroid.getX(), within(epsilon));
+ assertThat(expectedCentroid.getY()).isCloseTo(actualCentroid.getY(), within(epsilon));
+ }
+ catch (ParseException e) {
+ throw new RuntimeException(e);
+ }
}
@Test
@@ -369,47 +373,47 @@ public void testSTConvexHull()
// non-convex geometry
assertConvexHull("LINESTRING (1 1, 1 9, 2 2, 1 1, 4 0)", "POLYGON ((1 1, 4 0, 1 9, 1 1))");
- assertConvexHull("POLYGON ((0 0, 0 3, 4 4, 1 1, 3 0))", "POLYGON ((0 0, 3 0, 4 4, 0 3, 0 0))");
+ assertConvexHull("POLYGON ((0 0, 0 3, 4 4, 1 1, 3 0, 0 0))", "POLYGON ((0 0, 3 0, 4 4, 0 3, 0 0))");
// all points are on the same line
assertConvexHull("LINESTRING (20 20, 30 30)", "LINESTRING (20 20, 30 30)");
assertConvexHull("MULTILINESTRING ((0 0, 3 3), (1 1, 2 2), (2 2, 4 4), (5 5, 8 8))", "LINESTRING (0 0, 8 8)");
assertConvexHull("MULTIPOINT (0 1, 1 2, 2 3, 3 4, 4 5, 5 6)", "LINESTRING (0 1, 5 6)");
- assertConvexHull("GEOMETRYCOLLECTION (POINT (0 0), LINESTRING (1 1, 4 4, 2 2), POINT (10 10), POLYGON ((5 5, 7 7)), POINT (2 2), LINESTRING (6 6, 9 9), POLYGON ((1 1)))", "LINESTRING (0 0, 10 10)");
+ assertConvexHull("GEOMETRYCOLLECTION (POINT (0 0), LINESTRING (1 1, 4 4, 2 2), POINT (10 10), LINESTRING (5 5, 7 7), POINT (2 2), LINESTRING (6 6, 9 9), POINT (1 1))", "LINESTRING (0 0, 10 10)");
assertConvexHull("GEOMETRYCOLLECTION (GEOMETRYCOLLECTION (POINT (2 2), POINT (1 1)), POINT (3 3))", "LINESTRING (3 3, 1 1)");
// not all points are on the same line
assertConvexHull("MULTILINESTRING ((1 1, 5 1, 6 6), (2 4, 4 0), (2 -4, 4 4), (3 -2, 4 -3))", "POLYGON ((1 1, 2 -4, 4 -3, 5 1, 6 6, 2 4, 1 1))");
assertConvexHull("MULTIPOINT (0 2, 1 0, 3 0, 4 0, 4 2, 2 2, 2 4)", "POLYGON ((0 2, 1 0, 4 0, 4 2, 2 4, 0 2))");
- assertConvexHull("MULTIPOLYGON (((0 3, 2 0, 3 6), (2 1, 2 3, 5 3, 5 1), (1 7, 2 4, 4 2, 5 6, 3 8)))", "POLYGON ((0 3, 2 0, 5 1, 5 6, 3 8, 1 7, 0 3))");
- assertConvexHull("GEOMETRYCOLLECTION (POINT (2 3), LINESTRING (2 8, 7 10), POINT (8 10), POLYGON ((4 4, 4 8, 9 8, 6 6, 6 4, 8 3, 6 1)), POINT (4 2), LINESTRING (3 6, 5 5), POLYGON ((7 5, 7 6, 8 6, 8 5)))", "POLYGON ((2 3, 6 1, 8 3, 9 8, 8 10, 7 10, 2 8, 2 3))");
- assertConvexHull("GEOMETRYCOLLECTION (GEOMETRYCOLLECTION (POINT (2 3), LINESTRING (2 8, 7 10), GEOMETRYCOLLECTION (POINT (8 10))), POLYGON ((4 4, 4 8, 9 8, 6 6, 6 4, 8 3, 6 1)), POINT (4 2), LINESTRING (3 6, 5 5), POLYGON ((7 5, 7 6, 8 6, 8 5)))", "POLYGON ((2 3, 6 1, 8 3, 9 8, 8 10, 7 10, 2 8, 2 3))");
+ assertConvexHull("MULTIPOLYGON (((0 3, 2 0, 3 6, 0 3), (2 1, 2 3, 5 3, 5 1, 2 1), (1 7, 2 4, 4 2, 5 6, 3 8, 1 7)))", "POLYGON ((0 3, 2 0, 5 1, 5 6, 3 8, 1 7, 0 3))");
+ assertConvexHull("GEOMETRYCOLLECTION (POINT (2 3), LINESTRING (2 8, 7 10), POINT (8 10), POLYGON ((4 4, 4 8, 9 8, 6 6, 6 4, 8 3, 6 1, 4 4)), POINT (4 2), LINESTRING (3 6, 5 5), POLYGON ((7 5, 7 6, 8 6, 8 5, 7 5)))", "POLYGON ((2 3, 6 1, 8 3, 9 8, 8 10, 7 10, 2 8, 2 3))");
+ assertConvexHull("GEOMETRYCOLLECTION (GEOMETRYCOLLECTION (POINT (2 3), LINESTRING (2 8, 7 10), GEOMETRYCOLLECTION (POINT (8 10))), POLYGON ((4 4, 4 8, 9 8, 6 6, 6 4, 8 3, 6 1, 4 4)), POINT (4 2), LINESTRING (3 6, 5 5), POLYGON ((7 5, 7 6, 8 6, 8 5, 7 5)))", "POLYGON ((2 3, 6 1, 8 3, 9 8, 8 10, 7 10, 2 8, 2 3))");
// single-element multi-geometries and geometry collections
assertConvexHull("MULTILINESTRING ((1 1, 5 1, 6 6))", "POLYGON ((1 1, 5 1, 6 6, 1 1))");
assertConvexHull("MULTILINESTRING ((1 1, 5 1, 1 4, 5 4))", "POLYGON ((1 1, 5 1, 5 4, 1 4, 1 1))");
assertConvexHull("MULTIPOINT (0 2)", "POINT (0 2)");
- assertConvexHull("MULTIPOLYGON (((0 3, 2 0, 3 6)))", "POLYGON ((0 3, 2 0, 3 6, 0 3))");
- assertConvexHull("MULTIPOLYGON (((0 0, 4 0, 4 4, 0 4, 2 2)))", "POLYGON ((0 0, 4 0, 4 4, 0 4, 0 0))");
+ assertConvexHull("MULTIPOLYGON (((0 3, 2 0, 3 6, 0 3)))", "POLYGON ((0 3, 2 0, 3 6, 0 3))");
+ assertConvexHull("MULTIPOLYGON (((0 0, 4 0, 4 4, 0 4, 2 2, 0 0)))", "POLYGON ((0 0, 4 0, 4 4, 0 4, 0 0))");
assertConvexHull("GEOMETRYCOLLECTION (POINT (2 3))", "POINT (2 3)");
assertConvexHull("GEOMETRYCOLLECTION (LINESTRING (1 1, 5 1, 6 6))", "POLYGON ((1 1, 5 1, 6 6, 1 1))");
assertConvexHull("GEOMETRYCOLLECTION (LINESTRING (1 1, 5 1, 1 4, 5 4))", "POLYGON ((1 1, 5 1, 5 4, 1 4, 1 1))");
- assertConvexHull("GEOMETRYCOLLECTION (POLYGON ((0 3, 2 0, 3 6)))", "POLYGON ((0 3, 2 0, 3 6, 0 3))");
- assertConvexHull("GEOMETRYCOLLECTION (POLYGON ((0 0, 4 0, 4 4, 0 4, 2 2)))", "POLYGON ((0 0, 4 0, 4 4, 0 4, 0 0))");
+ assertConvexHull("GEOMETRYCOLLECTION (POLYGON ((0 3, 2 0, 3 6, 0 3)))", "POLYGON ((0 3, 2 0, 3 6, 0 3))");
+ assertConvexHull("GEOMETRYCOLLECTION (POLYGON ((0 0, 4 0, 4 4, 0 4, 2 2, 0 0)))", "POLYGON ((0 0, 4 0, 4 4, 0 4, 0 0))");
}
private void assertConvexHull(String inputWKT, String expectWKT)
{
- assertThat(assertions.expression("ST_AsText(ST_ConvexHull(geometry))")
- .binding("geometry", "ST_GeometryFromText('%s')".formatted(inputWKT)))
- .hasType(VARCHAR)
- .isEqualTo(expectWKT);
+ assertSpatialEquals(
+ assertions,
+ "ST_ConvexHull(ST_GeometryFromText('%s'))".formatted(inputWKT),
+ expectWKT);
}
@Test
public void testSTCoordDim()
{
- assertThat(assertions.function("ST_CoordDim", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')"))
+ assertThat(assertions.function("ST_CoordDim", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')"))
.isEqualTo((byte) 2);
assertThat(assertions.function("ST_CoordDim", "ST_GeometryFromText('POLYGON EMPTY')"))
@@ -428,7 +432,7 @@ public void testSTDimension()
assertThat(assertions.function("ST_Dimension", "ST_GeometryFromText('POLYGON EMPTY')"))
.isEqualTo((byte) 2);
- assertThat(assertions.function("ST_Dimension", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')"))
+ assertThat(assertions.function("ST_Dimension", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')"))
.isEqualTo((byte) 2);
assertThat(assertions.function("ST_Dimension", "ST_GeometryFromText('LINESTRING EMPTY')"))
@@ -447,7 +451,7 @@ public void testSTIsClosed()
assertThat(assertions.function("ST_IsClosed", "ST_GeometryFromText('LINESTRING (1 1, 2 2, 1 3)')"))
.isEqualTo(false);
- assertTrinoExceptionThrownBy(assertions.function("ST_IsClosed", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')")::evaluate)
+ assertTrinoExceptionThrownBy(assertions.function("ST_IsClosed", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')")::evaluate)
.hasMessage("ST_IsClosed only applies to LINE_STRING or MULTI_LINE_STRING. Input type is: POLYGON");
}
@@ -485,8 +489,8 @@ public void testSTIsSimple()
assertSimpleGeometry("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))");
assertNotSimpleGeometry("MULTILINESTRING ((1 1, 5 1), (2 4, 4 0))");
assertSimpleGeometry("POLYGON EMPTY");
- assertSimpleGeometry("POLYGON ((2 0, 2 1, 3 1))");
- assertSimpleGeometry("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((2 4, 2 6, 6 6, 6 4)))");
+ assertSimpleGeometry("POLYGON ((2 0, 2 1, 3 1, 2 0))");
+ assertSimpleGeometry("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))");
}
@Test
@@ -495,16 +499,16 @@ public void testSimplifyGeometry()
// Eliminate unnecessary points on the same line.
assertThat(assertions.function("ST_AsText", "simplify_geometry(ST_GeometryFromText('POLYGON ((1 0, 2 1, 3 1, 3 1, 4 1, 1 0))'), 1.5)"))
.hasType(VARCHAR)
- .isEqualTo("POLYGON ((1 0, 4 1, 2 1, 1 0))");
+ .isEqualTo("POLYGON ((1 0, 2 1, 4 1, 1 0))");
// Use distanceTolerance to control fidelity.
assertThat(assertions.function("ST_AsText", "simplify_geometry(ST_GeometryFromText('POLYGON ((1 0, 1 1, 2 1, 2 3, 3 3, 3 1, 4 1, 4 0, 1 0))'), 1.0)"))
.hasType(VARCHAR)
- .isEqualTo("POLYGON ((1 0, 4 0, 3 3, 2 3, 1 0))");
+ .isEqualTo("POLYGON ((1 0, 2 3, 3 3, 4 0, 1 0))");
assertThat(assertions.function("ST_AsText", "simplify_geometry(ST_GeometryFromText('POLYGON ((1 0, 1 1, 2 1, 2 3, 3 3, 3 1, 4 1, 4 0, 1 0))'), 0.5)"))
.hasType(VARCHAR)
- .isEqualTo("POLYGON ((1 0, 4 0, 4 1, 3 1, 3 3, 2 3, 2 1, 1 1, 1 0))");
+ .isEqualTo("POLYGON ((1 0, 1 1, 2 1, 2 3, 3 3, 3 1, 4 1, 4 0, 1 0))");
// Negative distance tolerance is invalid.
assertTrinoExceptionThrownBy(assertions.function("ST_AsText", "simplify_geometry(ST_GeometryFromText('POLYGON ((1 0, 1 1, 2 1, 2 3, 3 3, 3 1, 4 1, 4 0, 1 0))'), -0.5)")::evaluate)
@@ -529,22 +533,24 @@ public void testSTIsValid()
assertValidGeometry("LINESTRING (0 0, 1 2, 3 4)");
assertValidGeometry("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))");
assertValidGeometry("POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))");
- assertValidGeometry("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((2 4, 2 6, 6 6, 6 4)))");
+ assertValidGeometry("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))");
assertValidGeometry("GEOMETRYCOLLECTION (POINT (1 2), LINESTRING (0 0, 1 2, 3 4), POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0)))");
+ // Note: MULTIPOINT and LINESTRING with duplicate points are valid per OGC spec
+ // JTS correctly treats these as valid (ESRI previously considered them invalid)
+ assertValidGeometry("MULTIPOINT ((0 0), (0 1), (1 1), (0 1))");
+ assertValidGeometry("LINESTRING (0 0, 0 1, 0 1, 1 1, 1 0, 0 0)");
+ assertValidGeometry("LINESTRING (0 0, -1 0.5, 0 1, 1 1, 1 0, 0 1, 0 0)");
+
// invalid geometries
- assertInvalidGeometry("MULTIPOINT ((0 0), (0 1), (1 1), (0 1))", "Repeated points at or near (0.0 1.0) and (0.0 1.0)");
- assertInvalidGeometry("LINESTRING (0 0, 0 1, 0 1, 1 1, 1 0, 0 0)", "Degenerate segments at or near (0.0 1.0)");
- assertInvalidGeometry("LINESTRING (0 0, -1 0.5, 0 1, 1 1, 1 0, 0 1, 0 0)", "Self-tangency at or near (0.0 1.0) and (0.0 1.0)");
- assertInvalidGeometry("POLYGON ((0 0, 1 1, 0 1, 1 0, 0 0))", "Intersecting or overlapping segments at or near (1.0 0.0) and (1.0 1.0)");
- assertInvalidGeometry("POLYGON ((0 0, 0 1, 0 1, 1 1, 1 0, 0 0), (2 2, 2 3, 3 3, 3 2, 2 2))", "Degenerate segments at or near (0.0 1.0)");
- assertInvalidGeometry("POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0), (2 2, 2 3, 3 3, 3 2, 2 2))", "RingOrientation");
- assertInvalidGeometry("POLYGON ((0 0, 0 1, 2 1, 1 1, 1 0, 0 0))", "Intersecting or overlapping segments at or near (0.0 1.0) and (2.0 1.0)");
- assertInvalidGeometry("POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0), (0 1, 1 1, 0.5 0.5, 0 1))", "Self-intersection at or near (0.0 1.0) and (1.0 1.0)");
- assertInvalidGeometry("POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0), (0 0, 0.5 0.7, 1 1, 0.5 0.4, 0 0))", "Disconnected interior at or near (0.0 1.0)");
- assertInvalidGeometry("POLYGON ((0 0, -1 0.5, 0 1, 1 1, 1 0, 0 1, 0 0))", "Self-tangency at or near (0.0 1.0) and (0.0 1.0)");
- assertInvalidGeometry("MULTIPOLYGON (((0 0, 0 1, 1 1, 1 0, 0 0)), ((0.5 0.5, 0.5 2, 2 2, 2 0.5, 0.5 0.5)))", "Intersecting or overlapping segments at or near (0.0 1.0) and (0.5 0.5)");
- assertInvalidGeometry("GEOMETRYCOLLECTION (POINT (1 2), POLYGON ((0 0, 0 1, 2 1, 1 1, 1 0, 0 0)))", "Intersecting or overlapping segments at or near (0.0 1.0) and (2.0 1.0)");
+ assertInvalidGeometry("POLYGON ((0 0, 1 1, 0 1, 1 0, 0 0))", "Self-intersection");
+ assertInvalidGeometry("POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0), (2 2, 2 3, 3 3, 3 2, 2 2))", "Hole lies outside shell");
+ assertInvalidGeometry("POLYGON ((0 0, 0 1, 2 1, 1 1, 1 0, 0 0))", "Self-intersection");
+ assertInvalidGeometry("POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0), (0 1, 1 1, 0.5 0.5, 0 1))", "Self-intersection");
+ assertInvalidGeometry("POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0), (0 0, 0.5 0.7, 1 1, 0.5 0.4, 0 0))", "Interior is disconnected");
+ assertInvalidGeometry("POLYGON ((0 0, -1 0.5, 0 1, 1 1, 1 0, 0 1, 0 0))", "Self-intersection");
+ assertInvalidGeometry("MULTIPOLYGON (((0 0, 0 1, 1 1, 1 0, 0 0)), ((0.5 0.5, 0.5 2, 2 2, 2 0.5, 0.5 0.5)))", "Self-intersection");
+ assertInvalidGeometry("GEOMETRYCOLLECTION (POINT (1 2), POLYGON ((0 0, 0 1, 2 1, 1 1, 1 0, 0 0)))", "Self-intersection");
// corner cases
assertThat(assertions.function("ST_IsValid", "ST_GeometryFromText(null)"))
@@ -563,14 +569,14 @@ private void assertValidGeometry(String wkt)
.isNull(VARCHAR);
}
- private void assertInvalidGeometry(String wkt, String reason)
+ private void assertInvalidGeometry(String wkt, String reasonContains)
{
assertThat(assertions.function("ST_IsValid", "ST_GeometryFromText('%s')".formatted(wkt)))
.isEqualTo(false);
assertThat(assertions.function("geometry_invalid_reason", "ST_GeometryFromText('%s')".formatted(wkt)))
.hasType(VARCHAR)
- .isEqualTo(reason);
+ .satisfies(result -> assertThat(((String) result).toLowerCase(Locale.ROOT)).contains(reasonContains.toLowerCase(Locale.ROOT)));
}
@Test
@@ -585,7 +591,7 @@ public void testSTLength()
assertThat(assertions.function("ST_Length", "ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))')"))
.isEqualTo(6.0);
- assertTrinoExceptionThrownBy(assertions.function("ST_Length", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')")::evaluate)
+ assertTrinoExceptionThrownBy(assertions.function("ST_Length", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')")::evaluate)
.hasMessage("ST_Length only applies to LINE_STRING or MULTI_LINE_STRING. Input type is: POLYGON");
}
@@ -595,10 +601,8 @@ public void testSTLengthSphericalGeography()
// Empty linestring returns null
assertSTLengthSphericalGeography("LINESTRING EMPTY", null);
- // Linestring with one point has length 0
- assertSTLengthSphericalGeography("LINESTRING (0 0)", 0.0);
-
// Linestring with only one distinct point has length 0
+ assertSTLengthSphericalGeography("LINESTRING (0 0, 0 0)", 0.0);
assertSTLengthSphericalGeography("LINESTRING (0 0, 0 0, 0 0)", 0.0);
double length = 4350866.6362;
@@ -613,7 +617,7 @@ public void testSTLengthSphericalGeography()
assertSTLengthSphericalGeography("LINESTRING (0.0 90.0, 0.0 -90.0, 0.0 90.0)", 4.003e7);
// Empty multi-linestring returns null
- assertSTLengthSphericalGeography("MULTILINESTRING (EMPTY)", null);
+ assertSTLengthSphericalGeography("MULTILINESTRING EMPTY", null);
// Multi-linestring with one path is equivalent to a single linestring
assertSTLengthSphericalGeography("MULTILINESTRING ((-71.05 42.36, -87.62 41.87, -122.41 37.77))", length);
@@ -679,10 +683,10 @@ public void testLineLocatePoint()
assertThat(assertions.function("line_locate_point", "ST_GeometryFromText('LINESTRING (0 0, 0 1, 2 1)')", "ST_GeometryFromText('POINT EMPTY')"))
.isNull(DOUBLE);
- assertTrinoExceptionThrownBy(assertions.function("line_locate_point", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')", "ST_Point(0.4, 1)")::evaluate)
+ assertTrinoExceptionThrownBy(assertions.function("line_locate_point", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')", "ST_Point(0.4, 1)")::evaluate)
.hasMessage("First argument to line_locate_point must be a LineString or a MultiLineString. Got: Polygon");
- assertTrinoExceptionThrownBy(assertions.function("line_locate_point", "ST_GeometryFromText('LINESTRING (0 0, 0 1, 2 1)')", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')")::evaluate)
+ assertTrinoExceptionThrownBy(assertions.function("line_locate_point", "ST_GeometryFromText('LINESTRING (0 0, 0 1, 2 1)')", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')")::evaluate)
.hasMessage("Second argument to line_locate_point must be a Point. Got: Polygon");
}
@@ -695,7 +699,7 @@ public void testLineInterpolatePoint()
assertLineInterpolatePoint("LINESTRING (0 0, 1 1, 10 10)", 0.0, "POINT (0 0)");
assertLineInterpolatePoint("LINESTRING (0 0, 1 1, 10 10)", 0.1, "POINT (1 1)");
assertLineInterpolatePoint("LINESTRING (0 0, 1 1, 10 10)", 0.05, "POINT (0.5 0.5)");
- assertLineInterpolatePoint("LINESTRING (0 0, 1 1, 10 10)", 0.4, "POINT (4.000000000000001 4.000000000000001)");
+ assertLineInterpolatePoint("LINESTRING (0 0, 1 1, 10 10)", 0.4, "POINT (4 4)");
assertLineInterpolatePoint("LINESTRING (0 0, 1 1, 10 10)", 1.0, "POINT (10 10)");
assertLineInterpolatePoint("LINESTRING (0 0, 1 1)", 0.0, "POINT (0 0)");
@@ -727,9 +731,9 @@ public void testLineInterpolatePoints()
.isNull(new ArrayType(GEOMETRY));
assertLineInterpolatePoints("LINESTRING (0 0, 1 1, 10 10)", 0.0, "0 0");
- assertLineInterpolatePoints("LINESTRING (0 0, 1 1, 10 10)", 0.4, "4.000000000000001 4.000000000000001", "8 8");
- assertLineInterpolatePoints("LINESTRING (0 0, 1 1, 10 10)", 0.3, "3 3", "6 6", "9 9");
- assertLineInterpolatePoints("LINESTRING (0 0, 1 1, 10 10)", 0.5, "5.000000000000001 5.000000000000001", "10 10");
+ assertLineInterpolatePoints("LINESTRING (0 0, 1 1, 10 10)", 0.4, "4 4", "8 8");
+ assertLineInterpolatePoints("LINESTRING (0 0, 1 1, 10 10)", 0.3, "3 3", "5.999999999999999 5.999999999999999", "9 9");
+ assertLineInterpolatePoints("LINESTRING (0 0, 1 1, 10 10)", 0.5, "5 5", "10 10");
assertLineInterpolatePoints("LINESTRING (0 0, 1 1, 10 10)", 1, "10 10");
assertTrinoExceptionThrownBy(assertions.function("line_interpolate_points", "ST_GeometryFromText('LINESTRING (0 0, 1 0, 1 9)')", "-0.5")::evaluate)
@@ -744,20 +748,18 @@ public void testLineInterpolatePoints()
private void assertLineInterpolatePoint(String wkt, double fraction, String expectedPoint)
{
- assertThat(assertions.expression("ST_AsText(line_interpolate_point(geometry, fraction))")
- .binding("geometry", "ST_GeometryFromText('%s')".formatted(wkt))
- .binding("fraction", Double.toString(fraction)))
- .hasType(VARCHAR)
- .isEqualTo(expectedPoint);
+ assertSpatialEquals(assertions,
+ "line_interpolate_point(ST_GeometryFromText('%s'), %s)".formatted(wkt, fraction),
+ expectedPoint);
}
- private void assertLineInterpolatePoints(String wkt, double fraction, String... expected)
+ private void assertLineInterpolatePoints(String wkt, double fraction, String... expectedCoords)
{
- assertThat(assertions.expression("transform(line_interpolate_points(geometry, fraction), x -> ST_AsText(x))")
- .binding("geometry", "ST_GeometryFromText('%s')".formatted(wkt))
- .binding("fraction", Double.toString(fraction)))
- .hasType(new ArrayType(VARCHAR))
- .isEqualTo(Arrays.stream(expected).map(s -> "POINT (" + s + ")").collect(toImmutableList()));
+ assertSpatialArrayEquals(assertions,
+ "line_interpolate_points(ST_GeometryFromText('%s'), %s)".formatted(wkt, fraction),
+ Arrays.stream(expectedCoords)
+ .map(s -> "POINT (" + s + ")")
+ .toArray(String[]::new));
}
@Test
@@ -787,16 +789,16 @@ public void testSTMax()
assertThat(assertions.function("ST_YMax", "ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))')"))
.isEqualTo(4.0);
- assertThat(assertions.function("ST_XMax", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1))')"))
+ assertThat(assertions.function("ST_XMax", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1, 2 0))')"))
.isEqualTo(3.0);
- assertThat(assertions.function("ST_YMax", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1))')"))
+ assertThat(assertions.function("ST_YMax", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1, 2 0))')"))
.isEqualTo(1.0);
- assertThat(assertions.function("ST_XMax", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((2 4, 2 6, 6 6, 6 4)))')"))
+ assertThat(assertions.function("ST_XMax", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))')"))
.isEqualTo(6.0);
- assertThat(assertions.function("ST_YMax", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((2 4, 2 6, 6 10, 6 4)))')"))
+ assertThat(assertions.function("ST_YMax", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 10, 6 4, 2 4)))')"))
.isEqualTo(10.0);
assertThat(assertions.function("ST_XMax", "ST_GeometryFromText('POLYGON EMPTY')"))
@@ -845,16 +847,16 @@ public void testSTMin()
assertThat(assertions.function("ST_YMin", "ST_GeometryFromText('MULTILINESTRING ((1 2, 5 3), (2 4, 4 4))')"))
.isEqualTo(2.0);
- assertThat(assertions.function("ST_XMin", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1))')"))
+ assertThat(assertions.function("ST_XMin", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1, 2 0))')"))
.isEqualTo(2.0);
- assertThat(assertions.function("ST_YMin", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1))')"))
+ assertThat(assertions.function("ST_YMin", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1, 2 0))')"))
.isEqualTo(0.0);
- assertThat(assertions.function("ST_XMin", "ST_GeometryFromText('MULTIPOLYGON (((1 10, 1 3, 3 3, 3 10)), ((2 4, 2 6, 6 6, 6 4)))')"))
+ assertThat(assertions.function("ST_XMin", "ST_GeometryFromText('MULTIPOLYGON (((1 10, 1 3, 3 3, 3 10, 1 10)), ((2 4, 2 6, 6 6, 6 4, 2 4)))')"))
.isEqualTo(1.0);
- assertThat(assertions.function("ST_YMin", "ST_GeometryFromText('MULTIPOLYGON (((1 10, 1 3, 3 3, 3 10)), ((2 4, 2 6, 6 10, 6 4)))')"))
+ assertThat(assertions.function("ST_YMin", "ST_GeometryFromText('MULTIPOLYGON (((1 10, 1 3, 3 3, 3 10, 1 10)), ((2 4, 2 6, 6 10, 6 4, 2 4)))')"))
.isEqualTo(3.0);
assertThat(assertions.function("ST_XMin", "ST_GeometryFromText('POLYGON EMPTY')"))
@@ -904,8 +906,8 @@ public void testSTNumPoints()
assertNumPoints("MULTIPOINT (1 2, 2 4, 3 6, 4 8)", 4);
assertNumPoints("LINESTRING (8 4, 5 7)", 2);
assertNumPoints("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", 4);
- assertNumPoints("POLYGON ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))", 6);
- assertNumPoints("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((2 4, 2 6, 6 6, 6 4)))", 8);
+ assertNumPoints("POLYGON ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))", 8);
+ assertNumPoints("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))", 10);
assertNumPoints("GEOMETRYCOLLECTION (POINT (1 2), LINESTRING (8 4, 5 7), POLYGON EMPTY)", 3);
}
@@ -924,25 +926,25 @@ public void testSTIsRing()
assertThat(assertions.function("ST_IsRing", "ST_GeometryFromText('LINESTRING (0 0, 1 1, 0 2, 0 0)')"))
.isEqualTo(true);
- assertTrinoExceptionThrownBy(assertions.function("ST_IsRing", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1))')")::evaluate)
+ assertTrinoExceptionThrownBy(assertions.function("ST_IsRing", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1, 2 0))')")::evaluate)
.hasMessage("ST_IsRing only applies to LINE_STRING. Input type is: POLYGON");
}
@Test
public void testSTStartEndPoint()
{
- assertThat(assertions.function("ST_AsText", "ST_StartPoint(ST_GeometryFromText('LINESTRING (8 4, 4 8, 5 6)'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (8 4)");
+ assertSpatialEquals(assertions,
+ "ST_StartPoint(ST_GeometryFromText('LINESTRING (8 4, 4 8, 5 6)'))",
+ "POINT (8 4)");
- assertThat(assertions.function("ST_AsText", "ST_EndPoint(ST_GeometryFromText('LINESTRING (8 4, 4 8, 5 6)'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (5 6)");
+ assertSpatialEquals(assertions,
+ "ST_EndPoint(ST_GeometryFromText('LINESTRING (8 4, 4 8, 5 6)'))",
+ "POINT (5 6)");
- assertTrinoExceptionThrownBy(assertions.function("ST_AsText", "ST_StartPoint(ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1))'))")::evaluate)
+ assertTrinoExceptionThrownBy(assertions.function("ST_AsText", "ST_StartPoint(ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1, 2 0))'))")::evaluate)
.hasMessage("ST_StartPoint only applies to LINE_STRING. Input type is: POLYGON");
- assertTrinoExceptionThrownBy(assertions.function("ST_AsText", "ST_EndPoint(ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1))'))")::evaluate)
+ assertTrinoExceptionThrownBy(assertions.function("ST_AsText", "ST_EndPoint(ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1, 2 0))'))")::evaluate)
.hasMessage("ST_EndPoint only applies to LINE_STRING. Input type is: POLYGON");
}
@@ -960,8 +962,8 @@ public void testSTPoints()
assertThat(assertions.function("ST_Points", "ST_GeometryFromText('POLYGON EMPTY')"))
.isNull(new ArrayType(GEOMETRY));
- assertSTPoints("POLYGON ((8 4, 3 9, 5 6, 8 4))", "8 4", "5 6", "3 9", "8 4");
- assertSTPoints("POLYGON ((8 4, 3 9, 5 6, 7 2, 8 4))", "8 4", "7 2", "5 6", "3 9", "8 4");
+ assertSTPoints("POLYGON ((8 4, 3 9, 5 6, 8 4))", "8 4", "3 9", "5 6", "8 4");
+ assertSTPoints("POLYGON ((8 4, 3 9, 5 6, 7 2, 8 4))", "8 4", "3 9", "5 6", "7 2", "8 4");
assertThat(assertions.function("ST_Points", "ST_GeometryFromText('POINT EMPTY')"))
.isNull(new ArrayType(GEOMETRY));
@@ -986,7 +988,7 @@ public void testSTPoints()
.isNull(new ArrayType(GEOMETRY));
assertSTPoints("MULTIPOLYGON (((0 0, 4 0, 4 4, 0 4, 0 0), (1 1, 2 1, 2 2, 1 2, 1 1)), ((-1 -1, -1 -2, -2 -2, -2 -1, -1 -1)))",
- "0 0", "0 4", "4 4", "4 0", "0 0",
+ "0 0", "4 0", "4 4", "0 4", "0 0",
"1 1", "2 1", "2 2", "1 2", "1 1",
"-1 -1", "-1 -2", "-2 -2", "-2 -1", "-1 -1");
@@ -1007,16 +1009,15 @@ public void testSTPoints()
" (( 5 4, 5 8, 6 7, 5 4 ))",
" )",
")");
- assertSTPoints(geometryCollection, "0 1", "0 3", "3 4", "2 0", "0 2", "2 3", "2 0", "3 0", "3 3", "6 3", "6 0", "3 0",
- "5 1", "5 2", "4 2", "5 1", "0 5", "0 8", "4 8", "4 5", "0 5", "1 6", "3 6", "2 7", "1 6", "5 4", "5 8", "6 7", "5 4");
+ assertSTPoints(geometryCollection, "0 1", "0 3", "3 4", "2 0", "2 3", "0 2", "2 0", "3 0", "3 3", "6 3", "6 0", "3 0",
+ "5 1", "4 2", "5 2", "5 1", "0 5", "0 8", "4 8", "4 5", "0 5", "1 6", "3 6", "2 7", "1 6", "5 4", "5 8", "6 7", "5 4");
}
private void assertSTPoints(String wkt, String... expected)
{
- assertThat(assertions.expression("transform(ST_Points(geometry), x -> ST_AsText(x))")
- .binding("geometry", "ST_GeometryFromText('%s')".formatted(wkt)))
- .hasType(new ArrayType(VARCHAR))
- .isEqualTo(Arrays.stream(expected).map(s -> "POINT (" + s + ")").collect(toImmutableList()));
+ assertSpatialArrayEquals(assertions,
+ "ST_Points(ST_GeometryFromText('%s'))".formatted(wkt),
+ Arrays.stream(expected).map(s -> "POINT (" + s + ")").toArray(String[]::new));
}
@Test
@@ -1031,132 +1032,146 @@ public void testSTXY()
assertThat(assertions.function("ST_Y", "ST_GeometryFromText('POINT (1 2)')"))
.isEqualTo(2.0);
- assertTrinoExceptionThrownBy(assertions.function("ST_Y", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1))')")::evaluate)
+ assertTrinoExceptionThrownBy(assertions.function("ST_Y", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1, 2 0))')")::evaluate)
.hasMessage("ST_Y only applies to POINT. Input type is: POLYGON");
}
@Test
public void testSTBoundary()
{
- assertThat(assertions.function("ST_AsText", "ST_Boundary(ST_GeometryFromText('POINT (1 2)'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOINT EMPTY");
+ assertSpatialEquals(assertions,
+ "ST_Boundary(ST_GeometryFromText('POINT (1 2)'))",
+ "GEOMETRYCOLLECTION EMPTY");
- assertThat(assertions.function("ST_AsText", "ST_Boundary(ST_GeometryFromText('MULTIPOINT (1 2, 2 4, 3 6, 4 8)'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOINT EMPTY");
+ assertSpatialEquals(assertions,
+ "ST_Boundary(ST_GeometryFromText('MULTIPOINT (1 2, 2 4, 3 6, 4 8)'))",
+ "GEOMETRYCOLLECTION EMPTY");
- assertThat(assertions.function("ST_AsText", "ST_Boundary(ST_GeometryFromText('LINESTRING EMPTY'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOINT EMPTY");
+ assertSpatialEquals(assertions,
+ "ST_Boundary(ST_GeometryFromText('LINESTRING EMPTY'))",
+ "MULTIPOINT EMPTY");
- assertThat(assertions.function("ST_AsText", "ST_Boundary(ST_GeometryFromText('LINESTRING (8 4, 5 7)'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOINT ((8 4), (5 7))");
+ assertSpatialEquals(assertions,
+ "ST_Boundary(ST_GeometryFromText('LINESTRING (8 4, 5 7)'))",
+ "MULTIPOINT ((8 4), (5 7))");
- assertThat(assertions.function("ST_AsText", "ST_Boundary(ST_GeometryFromText('LINESTRING (100 150,50 60, 70 80, 160 170)'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOINT ((100 150), (160 170))");
+ assertSpatialEquals(assertions,
+ "ST_Boundary(ST_GeometryFromText('LINESTRING (100 150,50 60, 70 80, 160 170)'))",
+ "MULTIPOINT ((100 150), (160 170))");
- assertThat(assertions.function("ST_AsText", "ST_Boundary(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOINT ((1 1), (5 1), (2 4), (4 4))");
+ assertSpatialEquals(assertions,
+ "ST_Boundary(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'))",
+ "MULTIPOINT ((1 1), (5 1), (2 4), (4 4))");
- assertThat(assertions.function("ST_AsText", "ST_Boundary(ST_GeometryFromText('POLYGON ((1 1, 4 1, 1 4))'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTILINESTRING ((1 1, 4 1, 1 4, 1 1))");
+ assertSpatialEquals(assertions,
+ "ST_Boundary(ST_GeometryFromText('POLYGON ((1 1, 4 1, 1 4, 1 1))'))",
+ "LINESTRING (1 1, 4 1, 1 4, 1 1)");
- assertThat(assertions.function("ST_AsText", "ST_Boundary(ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTILINESTRING ((1 1, 3 1, 3 3, 1 3, 1 1), (0 0, 2 0, 2 2, 0 2, 0 0))");
+ assertSpatialEquals(assertions,
+ "ST_Boundary(ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((0 0, 0 2, 2 2, 2 0, 0 0)))'))",
+ "MULTILINESTRING ((1 1, 3 1, 3 3, 1 3, 1 1), (0 0, 2 0, 2 2, 0 2, 0 0))");
}
@Test
public void testSTEnvelope()
{
- assertThat(assertions.function("ST_AsText", "ST_Envelope(ST_GeometryFromText('MULTIPOINT (1 2, 2 4, 3 6, 4 8)'))"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((1 2, 4 2, 4 8, 1 8, 1 2))");
+ assertSpatialEquals(assertions,
+ "ST_Envelope(ST_GeometryFromText('MULTIPOINT (1 2, 2 4, 3 6, 4 8)'))",
+ "POLYGON ((1 2, 4 2, 4 8, 1 8, 1 2))");
- assertThat(assertions.function("ST_AsText", "ST_Envelope(ST_GeometryFromText('LINESTRING EMPTY'))"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON EMPTY");
+ assertSpatialEquals(assertions,
+ "ST_Envelope(ST_GeometryFromText('LINESTRING EMPTY'))",
+ "POLYGON EMPTY");
- assertThat(assertions.function("ST_AsText", "ST_Envelope(ST_GeometryFromText('LINESTRING (1 1, 2 2, 1 3)'))"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((1 1, 2 1, 2 3, 1 3, 1 1))");
+ assertSpatialEquals(assertions,
+ "ST_Envelope(ST_GeometryFromText('LINESTRING (1 1, 2 2, 1 3)'))",
+ "POLYGON ((1 1, 2 1, 2 3, 1 3, 1 1))");
- assertThat(assertions.function("ST_AsText", "ST_Envelope(ST_GeometryFromText('LINESTRING (8 4, 5 7)'))"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((5 4, 8 4, 8 7, 5 7, 5 4))");
+ assertSpatialEquals(assertions,
+ "ST_Envelope(ST_GeometryFromText('LINESTRING (8 4, 5 7)'))",
+ "POLYGON ((5 4, 8 4, 8 7, 5 7, 5 4))");
- assertThat(assertions.function("ST_AsText", "ST_Envelope(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'))"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((1 1, 5 1, 5 4, 1 4, 1 1))");
+ assertSpatialEquals(assertions,
+ "ST_Envelope(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'))",
+ "POLYGON ((1 1, 5 1, 5 4, 1 4, 1 1))");
- assertThat(assertions.function("ST_AsText", "ST_Envelope(ST_GeometryFromText('POLYGON ((1 1, 4 1, 1 4))'))"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((1 1, 4 1, 4 4, 1 4, 1 1))");
+ assertSpatialEquals(assertions,
+ "ST_Envelope(ST_GeometryFromText('POLYGON ((1 1, 4 1, 1 4, 1 1))'))",
+ "POLYGON ((1 1, 4 1, 4 4, 1 4, 1 1))");
- assertThat(assertions.function("ST_AsText", "ST_Envelope(ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))'))"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((0 0, 3 0, 3 3, 0 3, 0 0))");
+ assertSpatialEquals(assertions,
+ "ST_Envelope(ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((0 0, 0 2, 2 2, 2 0, 0 0)))'))",
+ "POLYGON ((0 0, 3 0, 3 3, 0 3, 0 0))");
- assertThat(assertions.function("ST_AsText", "ST_Envelope(ST_GeometryFromText('GEOMETRYCOLLECTION (POINT (5 1), LINESTRING (3 4, 4 4))'))"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((3 1, 5 1, 5 4, 3 4, 3 1))");
+ assertSpatialEquals(assertions,
+ "ST_Envelope(ST_GeometryFromText('GEOMETRYCOLLECTION (POINT (5 1), LINESTRING (3 4, 4 4))'))",
+ "POLYGON ((3 1, 5 1, 5 4, 3 4, 3 1))");
}
@Test
public void testSTEnvelopeAsPts()
{
- assertEnvelopeAsPts("MULTIPOINT (1 2, 2 4, 3 6, 4 8)", new Point(1, 2), new Point(4, 8));
+ assertEnvelopeAsPts("MULTIPOINT ((1 2), (2 4), (3 6), (4 8))", new Coordinate(1, 2), new Coordinate(4, 8));
assertThat(assertions.function("ST_EnvelopeAsPts", "ST_GeometryFromText('LINESTRING EMPTY')"))
.isNull(new ArrayType(GEOMETRY));
- assertEnvelopeAsPts("LINESTRING (1 1, 2 2, 1 3)", new Point(1, 1), new Point(2, 3));
- assertEnvelopeAsPts("LINESTRING (8 4, 5 7)", new Point(5, 4), new Point(8, 7));
- assertEnvelopeAsPts("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", new Point(1, 1), new Point(5, 4));
- assertEnvelopeAsPts("POLYGON ((1 1, 4 1, 1 4))", new Point(1, 1), new Point(4, 4));
- assertEnvelopeAsPts("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))", new Point(0, 0), new Point(3, 3));
- assertEnvelopeAsPts("GEOMETRYCOLLECTION (POINT (5 1), LINESTRING (3 4, 4 4))", new Point(3, 1), new Point(5, 4));
- assertEnvelopeAsPts("POINT (1 2)", new Point(1, 2), new Point(1, 2));
+ assertEnvelopeAsPts("LINESTRING (1 1, 2 2, 1 3)", new Coordinate(1, 1), new Coordinate(2, 3));
+ assertEnvelopeAsPts("LINESTRING (8 4, 5 7)", new Coordinate(5, 4), new Coordinate(8, 7));
+ assertEnvelopeAsPts("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", new Coordinate(1, 1), new Coordinate(5, 4));
+ assertEnvelopeAsPts("POLYGON ((1 1, 4 1, 1 4, 1 1))", new Coordinate(1, 1), new Coordinate(4, 4));
+ assertEnvelopeAsPts("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((0 0, 0 2, 2 2, 2 0, 0 0)))", new Coordinate(0, 0), new Coordinate(3, 3));
+ assertEnvelopeAsPts("GEOMETRYCOLLECTION (POINT (5 1), LINESTRING (3 4, 4 4))", new Coordinate(3, 1), new Coordinate(5, 4));
+ assertEnvelopeAsPts("POINT (1 2)", new Coordinate(1, 2), new Coordinate(1, 2));
}
- private void assertEnvelopeAsPts(String wkt, Point lowerLeftCorner, Point upperRightCorner)
+ private void assertEnvelopeAsPts(String wkt, Coordinate lowerLeftCorner, Coordinate upperRightCorner)
{
- assertThat(assertions.expression("transform(ST_EnvelopeAsPts(geometry), x -> ST_AsText(x))")
- .binding("geometry", "ST_GeometryFromText('%s')".formatted(wkt)))
- .hasType(new ArrayType(VARCHAR))
- .isEqualTo(ImmutableList.of(new OGCPoint(lowerLeftCorner, null).asText(), new OGCPoint(upperRightCorner, null).asText()));
+ assertSpatialArrayEquals(assertions,
+ "ST_EnvelopeAsPts(ST_GeometryFromText('%s'))".formatted(wkt),
+ "POINT (" + formatCoordinate(lowerLeftCorner.getX()) + " " + formatCoordinate(lowerLeftCorner.getY()) + ")",
+ "POINT (" + formatCoordinate(upperRightCorner.getX()) + " " + formatCoordinate(upperRightCorner.getY()) + ")");
+ }
+
+ private static String formatCoordinate(double value)
+ {
+ // JTS WKTWriter outputs integers without decimal point
+ if (value == Math.floor(value) && !Double.isInfinite(value)) {
+ return String.valueOf((long) value);
+ }
+ return String.valueOf(value);
}
@Test
public void testSTDifference()
{
- assertThat(assertions.function("ST_AsText", "ST_Difference(ST_GeometryFromText('POINT (50 100)'), ST_GeometryFromText('POINT (150 150)'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (50 100)");
+ assertSpatialEquals(assertions,
+ "ST_Difference(ST_GeometryFromText('POINT (50 100)'), ST_GeometryFromText('POINT (150 150)'))",
+ "POINT (50 100)");
- assertThat(assertions.function("ST_AsText", "ST_Difference(ST_GeometryFromText('MULTIPOINT (50 100, 50 200)'), ST_GeometryFromText('POINT (50 100)'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (50 200)");
+ assertSpatialEquals(assertions,
+ "ST_Difference(ST_GeometryFromText('MULTIPOINT (50 100, 50 200)'), ST_GeometryFromText('POINT (50 100)'))",
+ "POINT (50 200)");
- assertThat(assertions.function("ST_AsText", "ST_Difference(ST_GeometryFromText('LINESTRING (50 100, 50 200)'), ST_GeometryFromText('LINESTRING (50 50, 50 150)'))"))
- .hasType(VARCHAR)
- .isEqualTo("LINESTRING (50 150, 50 200)");
+ assertSpatialEquals(assertions,
+ "ST_Difference(ST_GeometryFromText('LINESTRING (50 100, 50 200)'), ST_GeometryFromText('LINESTRING (50 50, 50 150)'))",
+ "LINESTRING (50 150, 50 200)");
- assertThat(assertions.function("ST_AsText", "ST_Difference(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'), ST_GeometryFromText('MULTILINESTRING ((2 1, 4 1), (3 3, 7 3))'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTILINESTRING ((1 1, 2 1), (4 1, 5 1), (2 4, 4 4))");
+ assertSpatialEquals(assertions,
+ "ST_Difference(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'), ST_GeometryFromText('MULTILINESTRING ((2 1, 4 1), (3 3, 7 3))'))",
+ "MULTILINESTRING ((1 1, 2 1), (4 1, 5 1), (2 4, 4 4))");
- assertThat(assertions.function("ST_AsText", "ST_Difference(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))'), ST_GeometryFromText('POLYGON ((2 2, 2 5, 5 5, 5 2))'))"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((1 1, 4 1, 4 2, 2 2, 2 4, 1 4, 1 1))");
+ assertSpatialEquals(assertions,
+ "ST_Difference(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))'), ST_GeometryFromText('POLYGON ((2 2, 2 5, 5 5, 5 2, 2 2))'))",
+ "POLYGON ((1 1, 4 1, 4 2, 2 2, 2 4, 1 4, 1 1))");
- assertThat(assertions.function("ST_AsText", "ST_Difference(ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))'), ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3, 0 1))'))"))
- .hasType(VARCHAR)
- .isEqualTo("POLYGON ((1 1, 0 1, 0 0, 2 0, 2 1, 1 1))");
+ assertSpatialEquals(
+ assertions,
+ """
+ ST_Difference(
+ ST_Union(ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))'), ST_GeometryFromText('POLYGON ((0 0, 0 2, 2 2, 2 0, 0 0))')),
+ ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3, 0 1))')
+ )""",
+ "POLYGON ((1 1, 0 1, 0 0, 2 0, 2 1, 1 1))");
}
@Test
@@ -1180,10 +1195,10 @@ public void testSTDistance()
assertThat(assertions.function("ST_Distance", "ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))')", "ST_GeometryFromText('LINESTRING (10 20, 20 50)')"))
.isEqualTo(17.08800749063506);
- assertThat(assertions.function("ST_Distance", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1))')", "ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4))')"))
+ assertThat(assertions.function("ST_Distance", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))')", "ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4, 4 4))')"))
.isEqualTo(1.4142135623730951);
- assertThat(assertions.function("ST_Distance", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))')", "ST_GeometryFromText('POLYGON ((10 100, 30 10))')"))
+ assertThat(assertions.function("ST_Distance", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((0 0, 0 2, 2 2, 2 0, 0 0)))')", "ST_GeometryFromText('POLYGON ((10 100, 30 10, 10 100))')"))
.isEqualTo(27.892651361962706);
assertThat(assertions.function("ST_Distance", "ST_GeometryFromText('POINT EMPTY')", "ST_Point(150, 150)"))
@@ -1204,10 +1219,10 @@ public void testSTDistance()
assertThat(assertions.function("ST_Distance", "ST_GeometryFromText('MULTILINESTRING EMPTY')", "ST_GeometryFromText('LINESTRING (10 20, 20 50)')"))
.isNull(DOUBLE);
- assertThat(assertions.function("ST_Distance", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1))')", "ST_GeometryFromText('POLYGON EMPTY')"))
+ assertThat(assertions.function("ST_Distance", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))')", "ST_GeometryFromText('POLYGON EMPTY')"))
.isNull(DOUBLE);
- assertThat(assertions.function("ST_Distance", "ST_GeometryFromText('MULTIPOLYGON EMPTY')", "ST_GeometryFromText('POLYGON ((10 100, 30 10))')"))
+ assertThat(assertions.function("ST_Distance", "ST_GeometryFromText('MULTIPOLYGON EMPTY')", "ST_GeometryFromText('POLYGON ((10 100, 30 10, 10 100))')"))
.isNull(DOUBLE);
}
@@ -1251,13 +1266,13 @@ public void testSTExteriorRing()
assertThat(assertions.function("ST_AsText", "ST_ExteriorRing(ST_GeometryFromText('POLYGON EMPTY'))"))
.isNull(VARCHAR);
- assertThat(assertions.function("ST_AsText", "ST_ExteriorRing(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 1))'))"))
- .hasType(VARCHAR)
- .isEqualTo("LINESTRING (1 1, 4 1, 1 4, 1 1)");
+ assertSpatialEquals(assertions,
+ "ST_ExteriorRing(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 1, 1 1))'))",
+ "LINESTRING (1 1, 4 1, 1 4, 1 1)");
- assertThat(assertions.function("ST_AsText", "ST_ExteriorRing(ST_GeometryFromText('POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0), (1 1, 1 2, 2 2, 2 1, 1 1))'))"))
- .hasType(VARCHAR)
- .isEqualTo("LINESTRING (0 0, 5 0, 5 5, 0 5, 0 0)");
+ assertSpatialEquals(assertions,
+ "ST_ExteriorRing(ST_GeometryFromText('POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0), (1 1, 1 2, 2 2, 2 1, 1 1))'))",
+ "LINESTRING (0 0, 5 0, 5 5, 0 5, 0 0)");
assertTrinoExceptionThrownBy(assertions.function("ST_AsText", "ST_ExteriorRing(ST_GeometryFromText('LINESTRING (1 1, 2 2, 1 3)'))")::evaluate)
.hasMessage("ST_ExteriorRing only applies to POLYGON. Input type is: LINE_STRING");
@@ -1269,37 +1284,43 @@ public void testSTExteriorRing()
@Test
public void testSTIntersection()
{
- assertThat(assertions.function("ST_AsText", "ST_Intersection(ST_GeometryFromText('POINT (50 100)'), ST_GeometryFromText('POINT (150 150)'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOLYGON EMPTY");
+ assertSpatialEquals(assertions,
+ "ST_Intersection(ST_GeometryFromText('POINT (50 100)'), ST_GeometryFromText('POINT (150 150)'))",
+ "POINT EMPTY");
- assertThat(assertions.function("ST_AsText", "ST_Intersection(ST_GeometryFromText('MULTIPOINT (50 100, 50 200)'), ST_GeometryFromText('Point (50 100)'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (50 100)");
+ assertSpatialEquals(assertions,
+ "ST_Intersection(ST_GeometryFromText('MULTIPOINT (50 100, 50 200)'), ST_GeometryFromText('Point (50 100)'))",
+ "POINT (50 100)");
- assertThat(assertions.function("ST_AsText", "ST_Intersection(ST_GeometryFromText('LINESTRING (50 100, 50 200)'), ST_GeometryFromText('LINESTRING (20 150, 100 150)'))"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (50 150)");
+ assertSpatialEquals(assertions,
+ "ST_Intersection(ST_GeometryFromText('LINESTRING (50 100, 50 200)'), ST_GeometryFromText('LINESTRING (20 150, 100 150)'))",
+ "POINT (50 150)");
- assertThat(assertions.function("ST_AsText", "ST_Intersection(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'), ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))'))"))
- .hasType(VARCHAR)
- .isEqualTo("GEOMETRYCOLLECTION (POINT (5 1), LINESTRING (3 4, 4 4))");
+ assertSpatialEquals(assertions,
+ "ST_Intersection(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'), ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))'))",
+ "GEOMETRYCOLLECTION (POINT (5 1), LINESTRING (3 4, 4 4))");
- assertThat(assertions.function("ST_AsText", "ST_Intersection(ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1))'), ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4))'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOLYGON EMPTY");
+ assertSpatialEquals(assertions,
+ "ST_Intersection(ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))'), ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4, 4 4))'))",
+ "POLYGON EMPTY");
- assertThat(assertions.function("ST_AsText", "ST_Intersection(ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))'), ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3))'))"))
- .hasType(VARCHAR)
- .isEqualTo("GEOMETRYCOLLECTION (LINESTRING (1 1, 2 1), MULTIPOLYGON (((0 1, 1 1, 1 2, 0 2, 0 1)), ((2 1, 3 1, 3 3, 1 3, 1 2, 2 2, 2 1))))");
+ assertSpatialEquals(assertions,
+ "ST_Intersection(" +
+ " ST_Union(" +
+ " ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))'), " +
+ " ST_GeometryFromText('POLYGON ((0 0, 0 2, 2 2, 2 0, 0 0))')" +
+ " ), " +
+ " ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3, 0 1))')" +
+ ")",
+ "POLYGON ((0 2, 1 2, 1 3, 3 3, 3 1, 2 1, 0 1, 0 2))");
- assertThat(assertions.function("ST_AsText", "ST_Intersection(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))'), ST_GeometryFromText('LINESTRING (2 0, 2 3)'))"))
- .hasType(VARCHAR)
- .isEqualTo("LINESTRING (2 1, 2 3)");
+ assertSpatialEquals(assertions,
+ "ST_Intersection(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))'), ST_GeometryFromText('LINESTRING (2 0, 2 3)'))",
+ "LINESTRING (2 1, 2 3)");
- assertThat(assertions.function("ST_AsText", "ST_Intersection(ST_GeometryFromText('POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))'), ST_GeometryFromText('LINESTRING (0 0, 1 -1, 1 2)'))"))
- .hasType(VARCHAR)
- .isEqualTo("GEOMETRYCOLLECTION (POINT (0 0), LINESTRING (1 0, 1 1))");
+ assertSpatialEquals(assertions,
+ "ST_Intersection(ST_GeometryFromText('POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))'), ST_GeometryFromText('LINESTRING (0 0, 1 -1, 1 2)'))",
+ "GEOMETRYCOLLECTION (POINT (0 0), LINESTRING (1 0, 1 1))");
// test intersection of envelopes
assertEnvelopeIntersection("POLYGON ((0 0, 5 0, 5 5, 0 5, 0 0))", "POLYGON ((0 0, 5 0, 5 5, 0 5, 0 0))", "POLYGON ((0 0, 5 0, 5 5, 0 5, 0 0))");
@@ -1312,41 +1333,39 @@ public void testSTIntersection()
assertEnvelopeIntersection("POLYGON ((0 0, 5 0, 5 5, 0 5, 0 0))", "POLYGON ((-1 -1, 0 -1, 0 0, -1 0, -1 -1))", "POINT (0 0)");
}
- private void assertEnvelopeIntersection(String envelope, String otherEnvelope, String intersection)
+ private void assertEnvelopeIntersection(String envelope, String otherEnvelope, String expectedWkt)
{
- assertThat(assertions.expression("ST_AsText(ST_Intersection(ST_Envelope(a), ST_Envelope(b)))")
- .binding("a", "ST_GeometryFromText('%s')".formatted(envelope))
- .binding("b", "ST_GeometryFromText('%s')".formatted(otherEnvelope)))
- .hasType(VARCHAR)
- .isEqualTo(intersection);
+ String expression = "ST_Intersection(ST_Envelope(ST_GeometryFromText('%s')), ST_Envelope(ST_GeometryFromText('%s')))"
+ .formatted(envelope, otherEnvelope);
+ assertSpatialEquals(assertions, expression, expectedWkt);
}
@Test
public void testSTSymmetricDifference()
{
- assertThat(assertions.function("ST_AsText", "ST_SymDifference(ST_GeometryFromText('POINT (50 100)'), ST_GeometryFromText('POINT (50 150)'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOINT ((50 100), (50 150))");
+ assertSpatialEquals(assertions,
+ "ST_SymDifference(ST_GeometryFromText('POINT (50 100)'), ST_GeometryFromText('POINT (50 150)'))",
+ "MULTIPOINT ((50 100), (50 150))");
- assertThat(assertions.function("ST_AsText", "ST_SymDifference(ST_GeometryFromText('MULTIPOINT (50 100, 60 200)'), ST_GeometryFromText('MULTIPOINT (60 200, 70 150)'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOINT ((50 100), (70 150))");
+ assertSpatialEquals(assertions,
+ "ST_SymDifference(ST_GeometryFromText('MULTIPOINT (50 100, 60 200)'), ST_GeometryFromText('MULTIPOINT (60 200, 70 150)'))",
+ "MULTIPOINT ((50 100), (70 150))");
- assertThat(assertions.function("ST_AsText", "ST_SymDifference(ST_GeometryFromText('LINESTRING (50 100, 50 200)'), ST_GeometryFromText('LINESTRING (50 50, 50 150)'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTILINESTRING ((50 50, 50 100), (50 150, 50 200))");
+ assertSpatialEquals(assertions,
+ "ST_SymDifference(ST_GeometryFromText('LINESTRING (50 100, 50 200)'), ST_GeometryFromText('LINESTRING (50 50, 50 150)'))",
+ "MULTILINESTRING ((50 50, 50 100), (50 150, 50 200))");
- assertThat(assertions.function("ST_AsText", "ST_SymDifference(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'), ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTILINESTRING ((5 0, 5 1), (1 1, 5 1), (5 1, 5 4), (2 4, 3 4), (4 4, 5 4), (5 4, 6 4))");
+ assertSpatialEquals(assertions,
+ "ST_SymDifference(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'), ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))'))",
+ "MULTILINESTRING ((5 0, 5 1), (1 1, 5 1), (5 1, 5 4), (2 4, 3 4), (4 4, 5 4), (5 4, 6 4))");
- assertThat(assertions.function("ST_AsText", "ST_SymDifference(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))'), ST_GeometryFromText('POLYGON ((2 2, 2 5, 5 5, 5 2))'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOLYGON (((1 1, 4 1, 4 2, 2 2, 2 4, 1 4, 1 1)), ((4 2, 5 2, 5 5, 2 5, 2 4, 4 4, 4 2)))");
+ assertSpatialEquals(assertions,
+ "ST_SymDifference(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))'), ST_GeometryFromText('POLYGON ((2 2, 2 5, 5 5, 5 2, 2 2))'))",
+ "MULTIPOLYGON (((1 1, 4 1, 4 2, 2 2, 2 4, 1 4, 1 1)), ((4 2, 5 2, 5 5, 2 5, 2 4, 4 4, 4 2)))");
- assertThat(assertions.function("ST_AsText", "ST_SymDifference(ST_GeometryFromText('MULTIPOLYGON (((0 0 , 0 2, 2 2, 2 0)), ((2 2, 2 4, 4 4, 4 2)))'), ST_GeometryFromText('POLYGON ((0 0, 0 3, 3 3, 3 0))'))"))
- .hasType(VARCHAR)
- .isEqualTo("MULTIPOLYGON (((2 0, 3 0, 3 2, 2 2, 2 0)), ((0 2, 2 2, 2 3, 0 3, 0 2)), ((3 2, 4 2, 4 4, 2 4, 2 3, 3 3, 3 2)))");
+ assertSpatialEquals(assertions,
+ "ST_SymDifference(ST_GeometryFromText('MULTIPOLYGON (((0 0 , 0 2, 2 2, 2 0, 0 0)), ((2 2, 2 4, 4 4, 4 2, 2 2)))'), ST_GeometryFromText('POLYGON ((0 0, 0 3, 3 3, 3 0, 0 0))'))",
+ "MULTIPOLYGON (((2 0, 3 0, 3 2, 2 2, 2 0)), ((0 2, 2 2, 2 3, 0 3, 0 2)), ((3 2, 4 2, 4 4, 2 4, 2 3, 3 3, 3 2)))");
}
@Test
@@ -1373,19 +1392,19 @@ public void testStContains()
assertThat(assertions.function("ST_Contains", "ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))')", "ST_GeometryFromText('MULTILINESTRING ((3 4, 4 4), (2 1, 6 1))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Contains", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0))')", "ST_GeometryFromText('POLYGON ((1 1, 1 2, 2 2, 2 1))')"))
+ assertThat(assertions.function("ST_Contains", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))')", "ST_GeometryFromText('POLYGON ((1 1, 1 2, 2 2, 2 1, 1 1))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Contains", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0))')", "ST_GeometryFromText('POLYGON ((-1 -1, -1 2, 2 2, 2 -1))')"))
+ assertThat(assertions.function("ST_Contains", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))')", "ST_GeometryFromText('POLYGON ((-1 -1, -1 2, 2 2, 2 -1, -1 -1))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Contains", "ST_GeometryFromText('MULTIPOLYGON (((0 0 , 0 2, 2 2, 2 0)), ((2 2, 2 4, 4 4, 4 2)))')", "ST_GeometryFromText('POLYGON ((2 2, 2 3, 3 3, 3 2))')"))
+ assertThat(assertions.function("ST_Contains", "ST_GeometryFromText('MULTIPOLYGON (((0 0 , 0 2, 2 2, 2 0, 0 0)), ((2 2, 2 4, 4 4, 4 2, 2 2)))')", "ST_GeometryFromText('POLYGON ((2 2, 2 3, 3 3, 3 2, 2 2))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Contains", "ST_GeometryFromText('LINESTRING (20 20, 30 30)')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0))')"))
+ assertThat(assertions.function("ST_Contains", "ST_GeometryFromText('LINESTRING (20 20, 30 30)')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Contains", "ST_GeometryFromText('LINESTRING EMPTY')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0))')"))
+ assertThat(assertions.function("ST_Contains", "ST_GeometryFromText('LINESTRING EMPTY')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))')"))
.isEqualTo(false);
assertThat(assertions.function("ST_Contains", "ST_GeometryFromText('LINESTRING (20 20, 30 30)')", "ST_GeometryFromText('POLYGON EMPTY')"))
@@ -1407,22 +1426,22 @@ public void testSTCrosses()
assertThat(assertions.function("ST_Crosses", "ST_GeometryFromText('LINESTRING(0 0, 1 1)')", "ST_GeometryFromText('LINESTRING (1 0, 0 1)')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Crosses", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')", "ST_GeometryFromText('POLYGON ((2 2, 2 5, 5 5, 5 2))')"))
+ assertThat(assertions.function("ST_Crosses", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')", "ST_GeometryFromText('POLYGON ((2 2, 2 5, 5 5, 5 2, 2 2))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Crosses", "ST_GeometryFromText('MULTIPOLYGON (((0 0 , 0 2, 2 2, 2 0)), ((2 2, 2 4, 4 4, 4 2)))')", "ST_GeometryFromText('POLYGON ((2 2, 2 3, 3 3, 3 2))')"))
+ assertThat(assertions.function("ST_Crosses", "ST_GeometryFromText('MULTIPOLYGON (((0 0 , 0 2, 2 2, 2 0, 0 0)), ((2 2, 2 4, 4 4, 4 2, 2 2)))')", "ST_GeometryFromText('POLYGON ((2 2, 2 3, 3 3, 3 2, 2 2))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Crosses", "ST_GeometryFromText('LINESTRING (-2 -2, 6 6)')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0))')"))
+ assertThat(assertions.function("ST_Crosses", "ST_GeometryFromText('LINESTRING (-2 -2, 6 6)')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))')"))
.isEqualTo(true);
assertThat(assertions.function("ST_Crosses", "ST_GeometryFromText('POINT (20 20)')", "ST_GeometryFromText('POINT (20 20)')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Crosses", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0))')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0))')"))
+ assertThat(assertions.function("ST_Crosses", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Crosses", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0))')", "ST_GeometryFromText('LINESTRING (0 0, 0 4, 4 4, 4 0)')"))
+ assertThat(assertions.function("ST_Crosses", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))')", "ST_GeometryFromText('LINESTRING (0 0, 0 4, 4 4, 4 0)')"))
.isEqualTo(false);
}
@@ -1450,10 +1469,10 @@ public void testSTDisjoint()
assertThat(assertions.function("ST_Disjoint", "ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))')", "ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Disjoint", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1))')", "ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4))')"))
+ assertThat(assertions.function("ST_Disjoint", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))')", "ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4, 4 4))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Disjoint", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))')", "ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3))')"))
+ assertThat(assertions.function("ST_Disjoint", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((0 0, 0 2, 2 2, 2 0, 0 0)))')", "ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3, 0 1))')"))
.isEqualTo(false);
}
@@ -1475,10 +1494,10 @@ public void testSTEquals()
assertThat(assertions.function("ST_Equals", "ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))')", "ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Equals", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1))')", "ST_GeometryFromText('POLYGON ((3 3, 3 1, 1 1, 1 3))')"))
+ assertThat(assertions.function("ST_Equals", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))')", "ST_GeometryFromText('POLYGON ((3 3, 3 1, 1 1, 1 3, 3 3))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Equals", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))')", "ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3))')"))
+ assertThat(assertions.function("ST_Equals", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((0 0, 0 2, 2 2, 2 0, 0 0)))')", "ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3, 0 1))')"))
.isEqualTo(false);
}
@@ -1500,19 +1519,19 @@ public void testSTIntersects()
assertThat(assertions.function("ST_Intersects", "ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))')", "ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Intersects", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1))')", "ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4))')"))
+ assertThat(assertions.function("ST_Intersects", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))')", "ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4, 4 4))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Intersects", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))')", "ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3))')"))
+ assertThat(assertions.function("ST_Intersects", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((0 0, 0 2, 2 2, 2 0, 0 0)))')", "ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3, 0 1))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Intersects", "ST_GeometryFromText('POLYGON ((16.5 54, 16.5 54.1, 16.51 54.1, 16.8 54))')", "ST_GeometryFromText('LINESTRING (16.6 53, 16.6 56)')"))
+ assertThat(assertions.function("ST_Intersects", "ST_GeometryFromText('POLYGON ((16.5 54, 16.5 54.1, 16.51 54.1, 16.8 54, 16.5 54))')", "ST_GeometryFromText('LINESTRING (16.6 53, 16.6 56)')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Intersects", "ST_GeometryFromText('POLYGON ((16.5 54, 16.5 54.1, 16.51 54.1, 16.8 54))')", "ST_GeometryFromText('LINESTRING (16.6667 54.05, 16.8667 54.05)')"))
+ assertThat(assertions.function("ST_Intersects", "ST_GeometryFromText('POLYGON ((16.5 54, 16.5 54.1, 16.51 54.1, 16.8 54, 16.5 54))')", "ST_GeometryFromText('LINESTRING (16.6667 54.05, 16.8667 54.05)')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Intersects", "ST_GeometryFromText('POLYGON ((16.5 54, 16.5 54.1, 16.51 54.1, 16.8 54))')", "ST_GeometryFromText('LINESTRING (16.6667 54.25, 16.8667 54.25)')"))
+ assertThat(assertions.function("ST_Intersects", "ST_GeometryFromText('POLYGON ((16.5 54, 16.5 54.1, 16.51 54.1, 16.8 54, 16.5 54))')", "ST_GeometryFromText('LINESTRING (16.6667 54.25, 16.8667 54.25)')"))
.isEqualTo(false);
}
@@ -1534,19 +1553,19 @@ public void testSTOverlaps()
assertThat(assertions.function("ST_Overlaps", "ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))')", "ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Overlaps", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')", "ST_GeometryFromText('POLYGON ((3 3, 3 5, 5 5, 5 3))')"))
+ assertThat(assertions.function("ST_Overlaps", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')", "ST_GeometryFromText('POLYGON ((3 3, 3 5, 5 5, 5 3, 3 3))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Overlaps", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')"))
+ assertThat(assertions.function("ST_Overlaps", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Overlaps", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')", "ST_GeometryFromText('LINESTRING (1 1, 4 4)')"))
+ assertThat(assertions.function("ST_Overlaps", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')", "ST_GeometryFromText('LINESTRING (1 1, 4 4)')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Overlaps", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1))')", "ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4))')"))
+ assertThat(assertions.function("ST_Overlaps", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))')", "ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4, 4 4))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Overlaps", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))')", "ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3))')"))
+ assertThat(assertions.function("ST_Overlaps", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((0 0, 0 2, 2 2, 2 0, 0 0)))')", "ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3, 0 1))')"))
.isEqualTo(true);
}
@@ -1556,10 +1575,10 @@ public void testSTRelate()
assertThat(assertions.function("ST_Relate", "ST_GeometryFromText('LINESTRING (0 0, 3 3)')", "ST_GeometryFromText('LINESTRING (1 1, 4 1)')", "'****T****'"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Relate", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1))')", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')", "'****T****'"))
+ assertThat(assertions.function("ST_Relate", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1, 2 0))')", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')", "'****T****'"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Relate", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1))')", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')", "'T********'"))
+ assertThat(assertions.function("ST_Relate", "ST_GeometryFromText('POLYGON ((2 0, 2 1, 3 1, 2 0))')", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')", "'T********'"))
.isEqualTo(false);
}
@@ -1578,19 +1597,19 @@ public void testSTTouches()
assertThat(assertions.function("ST_Touches", "ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))')", "ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Touches", "ST_GeometryFromText('POINT (1 2)')", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')"))
+ assertThat(assertions.function("ST_Touches", "ST_GeometryFromText('POINT (1 2)')", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Touches", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1))')", "ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4))')"))
+ assertThat(assertions.function("ST_Touches", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))')", "ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4, 4 4))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Touches", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1))')", "ST_GeometryFromText('LINESTRING (0 0, 1 1)')"))
+ assertThat(assertions.function("ST_Touches", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))')", "ST_GeometryFromText('LINESTRING (0 0, 1 1)')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Touches", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1))')", "ST_GeometryFromText('POLYGON ((3 3, 3 5, 5 5, 5 3))')"))
+ assertThat(assertions.function("ST_Touches", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))')", "ST_GeometryFromText('POLYGON ((3 3, 3 5, 5 5, 5 3, 3 3))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Touches", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))')", "ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3))')"))
+ assertThat(assertions.function("ST_Touches", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((0 0, 0 2, 2 2, 2 0, 0 0)))')", "ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3, 0 1))')"))
.isEqualTo(false);
}
@@ -1609,19 +1628,19 @@ public void testSTWithin()
assertThat(assertions.function("ST_Within", "ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))')", "ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Within", "ST_GeometryFromText('POINT (3 2)')", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')"))
+ assertThat(assertions.function("ST_Within", "ST_GeometryFromText('POINT (3 2)')", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Within", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1))')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0))')"))
+ assertThat(assertions.function("ST_Within", "ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Within", "ST_GeometryFromText('LINESTRING (1 1, 3 3)')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0))')"))
+ assertThat(assertions.function("ST_Within", "ST_GeometryFromText('LINESTRING (1 1, 3 3)')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))')"))
.isEqualTo(true);
- assertThat(assertions.function("ST_Within", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))')", "ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3))')"))
+ assertThat(assertions.function("ST_Within", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((0 0, 0 2, 2 2, 2 0, 0 0)))')", "ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3, 0 1))')"))
.isEqualTo(false);
- assertThat(assertions.function("ST_Within", "ST_GeometryFromText('POLYGON ((1 1, 1 5, 5 5, 5 1))')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0))')"))
+ assertThat(assertions.function("ST_Within", "ST_GeometryFromText('POLYGON ((1 1, 1 5, 5 5, 5 1, 1 1))')", "ST_GeometryFromText('POLYGON ((0 0, 0 4, 4 4, 4 0, 0 0))')"))
.isEqualTo(false);
}
@@ -1697,7 +1716,7 @@ public void testSTInteriorRings()
assertInvalidInteriorRings("LINESTRING EMPTY", "LINE_STRING");
assertInvalidInteriorRings("MULTIPOINT (30 20, 60 70)", "MULTI_POINT");
assertInvalidInteriorRings("MULTILINESTRING ((1 10, 100 1000), (2 2, 1 0, 5 6))", "MULTI_LINE_STRING");
- assertInvalidInteriorRings("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((0 0, 0 2, 2 2, 2 0)))", "MULTI_POLYGON");
+ assertInvalidInteriorRings("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((0 0, 0 2, 2 2, 2 0, 0 0)))", "MULTI_POLYGON");
assertInvalidInteriorRings("GEOMETRYCOLLECTION (POINT (1 1), POINT (2 3), LINESTRING (5 8, 13 21))", "GEOMETRY_COLLECTION");
assertThat(assertions.function("ST_InteriorRings", "ST_GeometryFromText('POLYGON EMPTY')"))
@@ -1711,10 +1730,11 @@ public void testSTInteriorRings()
private void assertInteriorRings(String wkt, String... expected)
{
- assertThat(assertions.expression("transform(ST_InteriorRings(geometry), x -> ST_AsText(x))")
- .binding("geometry", "ST_GeometryFromText('%s')".formatted(wkt)))
- .hasType(new ArrayType(VARCHAR))
- .isEqualTo(ImmutableList.copyOf(expected));
+ for (int i = 0; i < expected.length; i++) {
+ // Build an ST_InteriorRingN expression for this ring; ST_InteriorRingN uses 1-based indexing, hence i + 1
+ String actualExpression = "ST_InteriorRingN(ST_GeometryFromText('%s'), %s)".formatted(wkt, i + 1);
+ assertSpatialEquals(assertions, actualExpression, expected[i]);
+ }
}
private void assertInvalidInteriorRings(String wkt, String geometryType)
@@ -1740,7 +1760,7 @@ public void testSTNumGeometries()
assertSTNumGeometries("POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))", 1);
assertSTNumGeometries("MULTIPOINT (1 2, 2 4, 3 6, 4 8)", 4);
assertSTNumGeometries("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", 2);
- assertSTNumGeometries("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((2 4, 2 6, 6 6, 6 4)))", 2);
+ assertSTNumGeometries("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))", 2);
assertSTNumGeometries("GEOMETRYCOLLECTION(POINT(2 3), LINESTRING (2 3, 3 4))", 2);
}
@@ -1798,7 +1818,7 @@ public void testSTUnion()
assertUnion("LINESTRING (20 20, 30 30)", "POINT (25 25)", "LINESTRING (20 20, 25 25, 30 30)");
assertUnion("LINESTRING (20 20, 30 30)", "LINESTRING (25 25, 27 27)", "LINESTRING (20 20, 25 25, 27 27, 30 30)");
assertUnion("POLYGON ((0 0, 4 0, 4 4, 0 4, 0 0))", "POLYGON ((1 1, 1 2, 2 2, 2 1, 1 1))", "POLYGON ((0 0, 4 0, 4 4, 0 4, 0 0))");
- assertUnion("MULTIPOLYGON (((0 0 , 0 2, 2 2, 2 0)), ((2 2, 2 4, 4 4, 4 2)))", "POLYGON ((2 2, 2 3, 3 3, 3 2))", "MULTIPOLYGON (((2 2, 3 2, 4 2, 4 4, 2 4, 2 3, 2 2)), ((0 0, 2 0, 2 2, 0 2, 0 0)))");
+ assertUnion("MULTIPOLYGON (((0 0 , 0 2, 2 2, 2 0, 0 0)), ((2 2, 2 4, 4 4, 4 2, 2 2)))", "POLYGON ((2 2, 2 3, 3 3, 3 2, 2 2))", "MULTIPOLYGON (((2 2, 3 2, 4 2, 4 4, 2 4, 2 3, 2 2)), ((0 0, 2 0, 2 2, 0 2, 0 0)))");
assertUnion("GEOMETRYCOLLECTION (POLYGON ((0 0, 4 0, 4 4, 0 4, 0 0)), MULTIPOINT ((20 20), (25 25)))", "GEOMETRYCOLLECTION (POLYGON ((1 1, 1 2, 2 2, 2 1, 1 1)), POINT (25 25))", "GEOMETRYCOLLECTION (MULTIPOINT ((20 20), (25 25)), POLYGON ((0 0, 4 0, 4 4, 0 4, 0 0)))");
// overlap union
@@ -1811,17 +1831,14 @@ public void testSTUnion()
private void assertUnion(String leftWkt, String rightWkt, String expectWkt)
{
- assertThat(assertions.expression("ST_AsText(ST_Union(a, b))")
- .binding("a", "ST_GeometryFromText('%s')".formatted(leftWkt))
- .binding("b", "ST_GeometryFromText('%s')".formatted(rightWkt)))
- .hasType(VARCHAR)
- .isEqualTo(expectWkt);
+ assertSpatialEquals(assertions,
+ "ST_Union(ST_GeometryFromText('%s'), ST_GeometryFromText('%s'))".formatted(leftWkt, rightWkt),
+ expectWkt);
- assertThat(assertions.expression("ST_AsText(ST_Union(a, b))")
- .binding("a", "ST_GeometryFromText('%s')".formatted(rightWkt))
- .binding("b", "ST_GeometryFromText('%s')".formatted(leftWkt)))
- .hasType(VARCHAR)
- .isEqualTo(expectWkt);
+ // ST_Union should be symmetric; the result must be spatially equal even if vertex order varies.
+ assertSpatialEquals(assertions,
+ "ST_Union(ST_GeometryFromText('%s'), ST_GeometryFromText('%s'))".formatted(rightWkt, leftWkt),
+ expectWkt);
}
@Test
@@ -1860,9 +1877,9 @@ public void testSTGeometryN()
assertSTGeometryN("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", -1, null);
assertSTGeometryN("MULTIPOLYGON (((1 1, 3 1, 3 3, 1 3, 1 1)), ((2 4, 6 4, 6 6, 2 6, 2 4)))", 1, "POLYGON ((1 1, 3 1, 3 3, 1 3, 1 1))");
assertSTGeometryN("MULTIPOLYGON (((1 1, 3 1, 3 3, 1 3, 1 1)), ((2 4, 6 4, 6 6, 2 6, 2 4)))", 2, "POLYGON ((2 4, 6 4, 6 6, 2 6, 2 4))");
- assertSTGeometryN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((2 4, 2 6, 6 6, 6 4)))", 0, null);
- assertSTGeometryN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((2 4, 2 6, 6 6, 6 4)))", 3, null);
- assertSTGeometryN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((2 4, 2 6, 6 6, 6 4)))", -1, null);
+ assertSTGeometryN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))", 0, null);
+ assertSTGeometryN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))", 3, null);
+ assertSTGeometryN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))", -1, null);
assertSTGeometryN("GEOMETRYCOLLECTION(POINT(2 3), LINESTRING (2 3, 3 4))", 1, "POINT (2 3)");
assertSTGeometryN("GEOMETRYCOLLECTION(POINT(2 3), LINESTRING (2 3, 3 4))", 2, "LINESTRING (2 3, 3 4)");
assertSTGeometryN("GEOMETRYCOLLECTION(POINT(2 3), LINESTRING (2 3, 3 4))", 3, null);
@@ -1872,11 +1889,16 @@ public void testSTGeometryN()
private void assertSTGeometryN(String wkt, int index, String expected)
{
- assertThat(assertions.expression("ST_AsText(ST_GeometryN(geometry, index))")
- .binding("geometry", "ST_GeometryFromText('%s')".formatted(wkt))
- .binding("index", Integer.toString(index)))
- .hasType(VARCHAR)
- .isEqualTo(expected);
+ if (expected == null) {
+ assertThat(assertions.expression("ST_GeometryN(geometry, index)")
+ .binding("geometry", "ST_GeometryFromText('%s')".formatted(wkt))
+ .binding("index", Integer.toString(index)))
+ .isNull(GEOMETRY);
+ return;
+ }
+ assertSpatialEquals(assertions,
+ "ST_GeometryN(ST_GeometryFromText('%s'), %d)".formatted(wkt, index),
+ expected);
}
@Test
@@ -2007,27 +2029,32 @@ private void assertInvalidMultiPoint(String errorMessage, String... pointWkts)
@Test
public void testSTPointN()
{
- assertPointN("LINESTRING(1 2, 3 4, 5 6, 7 8)", 1, "POINT (1 2)");
- assertPointN("LINESTRING(1 2, 3 4, 5 6, 7 8)", 3, "POINT (5 6)");
- assertPointN("LINESTRING(1 2, 3 4, 5 6, 7 8)", 10, null);
- assertPointN("LINESTRING(1 2, 3 4, 5 6, 7 8)", 0, null);
- assertPointN("LINESTRING(1 2, 3 4, 5 6, 7 8)", -1, null);
+ assertPointN("LINESTRING(1 2, 3 4, 5 6, 7 8, 1 2)", 1, "POINT (1 2)");
+ assertPointN("LINESTRING(1 2, 3 4, 5 6, 7 8, 1 2)", 3, "POINT (5 6)");
+ assertPointN("LINESTRING(1 2, 3 4, 5 6, 7 8, 1 2)", 10, null);
+ assertPointN("LINESTRING(1 2, 3 4, 5 6, 7 8, 1 2)", 0, null);
+ assertPointN("LINESTRING(1 2, 3 4, 5 6, 7 8, 1 2)", -1, null);
assertInvalidPointN("POINT (1 2)", "POINT");
assertInvalidPointN("MULTIPOINT (1 1, 2 2)", "MULTI_POINT");
assertInvalidPointN("MULTILINESTRING ((1 1, 2 2), (3 3, 4 4))", "MULTI_LINE_STRING");
assertInvalidPointN("POLYGON ((0 0, 1 0, 1 1, 0 1, 0 0))", "POLYGON");
- assertInvalidPointN("MULTIPOLYGON (((1 1, 1 4, 4 4, 4 1)), ((1 1, 1 4, 4 4, 4 1)))", "MULTI_POLYGON");
+ assertInvalidPointN("MULTIPOLYGON (((1 1, 1 4, 4 4, 4 1, 1 1)), ((1 1, 1 4, 4 4, 4 1, 1 1)))", "MULTI_POLYGON");
assertInvalidPointN("GEOMETRYCOLLECTION(POINT(4 6),LINESTRING(4 6, 7 10))", "GEOMETRY_COLLECTION");
}
private void assertPointN(String wkt, int index, String expected)
{
- assertThat(assertions.expression("ST_AsText(ST_PointN(geometry, index))")
- .binding("geometry", "ST_GeometryFromText('%s')".formatted(wkt))
- .binding("index", Integer.toString(index)))
- .hasType(VARCHAR)
- .isEqualTo(expected);
+ if (expected == null) {
+ assertThat(assertions.expression("ST_PointN(geometry, index)")
+ .binding("geometry", "ST_GeometryFromText('%s')".formatted(wkt))
+ .binding("index", Integer.toString(index)))
+ .isNull(GEOMETRY);
+ return;
+ }
+ assertSpatialEquals(assertions,
+ "ST_PointN(ST_GeometryFromText('%s'), %d)".formatted(wkt, index),
+ expected);
}
private void assertInvalidPointN(String wkt, String type)
@@ -2056,20 +2083,19 @@ public void testSTGeometries()
private void assertSTGeometries(String wkt, String... expected)
{
- assertThat(assertions.expression("transform(ST_Geometries(geometry), x -> ST_AsText(x))")
- .binding("geometry", "ST_GeometryFromText('%s')".formatted(wkt)))
- .hasType(new ArrayType(VARCHAR))
- .isEqualTo(ImmutableList.copyOf(expected));
+ assertSpatialArrayEquals(assertions,
+ "ST_Geometries(ST_GeometryFromText('%s'))".formatted(wkt),
+ expected);
}
@Test
public void testSTInteriorRingN()
{
assertInvalidInteriorRingN("POINT EMPTY", 0, "POINT");
- assertInvalidInteriorRingN("LINESTRING (1 2, 2 3, 3 4)", 1, "LINE_STRING");
- assertInvalidInteriorRingN("MULTIPOINT (1 1, 2 3, 5 8)", -1, "MULTI_POINT");
+ assertInvalidInteriorRingN("LINESTRING (1 2, 2 3, 3 4, 1 2)", 1, "LINE_STRING");
+ assertInvalidInteriorRingN("MULTIPOINT (1 1, 2 3, 5 8, 1 1)", -1, "MULTI_POINT");
assertInvalidInteriorRingN("MULTILINESTRING ((2 4, 4 2), (3 5, 5 3))", 0, "MULTI_LINE_STRING");
- assertInvalidInteriorRingN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1)), ((2 4, 2 6, 6 6, 6 4)))", 2, "MULTI_POLYGON");
+ assertInvalidInteriorRingN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))", 2, "MULTI_POLYGON");
assertInvalidInteriorRingN("GEOMETRYCOLLECTION (POINT (2 2), POINT (10 20))", 1, "GEOMETRY_COLLECTION");
assertInteriorRingN("POLYGON ((0 0, 1 0, 1 1, 0 1, 0 0))", 1, null);
@@ -2082,11 +2108,14 @@ public void testSTInteriorRingN()
private void assertInteriorRingN(String wkt, int index, String expected)
{
- assertThat(assertions.expression("ST_AsText(ST_InteriorRingN(geometry, index))")
- .binding("geometry", "ST_GeometryFromText('%s')".formatted(wkt))
- .binding("index", Integer.toString(index)))
- .hasType(VARCHAR)
- .isEqualTo(expected);
+ String expression = "ST_InteriorRingN(ST_GeometryFromText('%s'), %d)".formatted(wkt, index);
+ if (expected == null) {
+ assertThat(assertions.expression(expression))
+ .isNull(GEOMETRY);
+ }
+ else {
+ assertSpatialEquals(assertions, expression, expected);
+ }
}
private void assertInvalidInteriorRingN(String wkt, int index, String geometryType)
@@ -2109,7 +2138,7 @@ public void testSTGeometryType()
.hasType(VARCHAR)
.isEqualTo("ST_LineString");
- assertThat(assertions.function("ST_GeometryType", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1))')"))
+ assertThat(assertions.function("ST_GeometryType", "ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))')"))
.hasType(VARCHAR)
.isEqualTo("ST_Polygon");
@@ -2121,7 +2150,7 @@ public void testSTGeometryType()
.hasType(VARCHAR)
.isEqualTo("ST_MultiLineString");
- assertThat(assertions.function("ST_GeometryType", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 4, 4 4, 4 1)), ((1 1, 1 4, 4 4, 4 1)))')"))
+ assertThat(assertions.function("ST_GeometryType", "ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 4, 4 4, 4 1, 1 1)), ((1 1, 1 4, 4 4, 4 1, 1 1)))')"))
.hasType(VARCHAR)
.isEqualTo("ST_MultiPolygon");
@@ -2160,16 +2189,14 @@ public void testSTGeometryFromBinary()
assertGeomFromBinary("GEOMETRYCOLLECTION (POINT (1 2), LINESTRING (0 0, 1 2, 3 4), POLYGON ((0 0, 1 0, 1 1, 0 1, 0 0)))");
// The EWKB representation of "SRID=4326;POINT (1 1)".
- assertThat(assertions.expression("ST_AsText(ST_GeomFromBinary(wkb))")
- .binding("wkb", "x'0101000020E6100000000000000000F03F000000000000F03F'"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (1 1)");
+ assertSpatialEquals(assertions,
+ "ST_GeomFromBinary(x'0101000020E6100000000000000000F03F000000000000F03F')",
+ "POINT (1 1)");
// array of geometries
- assertThat(assertions.expression("transform(a, wkb -> ST_AsText(ST_GeomFromBinary(wkb)))")
- .binding("a", "ARRAY[ST_AsBinary(ST_Point(1, 2)), ST_AsBinary(ST_Point(3, 4))]"))
- .hasType(new ArrayType(VARCHAR))
- .isEqualTo(ImmutableList.of("POINT (1 2)", "POINT (3 4)"));
+ assertSpatialArrayEquals(assertions,
+ "transform(ARRAY[ST_AsBinary(ST_Point(1, 2)), ST_AsBinary(ST_Point(3, 4))], wkb -> ST_GeomFromBinary(wkb))",
+ "POINT (1 2)", "POINT (3 4)");
// invalid geometries
assertGeomFromBinary("MULTIPOINT ((0 0), (0 1), (1 1), (0 1))");
@@ -2182,10 +2209,9 @@ public void testSTGeometryFromBinary()
private void assertGeomFromBinary(String wkt)
{
- assertThat(assertions.expression("ST_AsText(ST_GeomFromBinary(geometry))")
- .binding("geometry", "ST_AsBinary(ST_GeometryFromText('%s'))".formatted(wkt)))
- .hasType(VARCHAR)
- .isEqualTo(wkt);
+ assertSpatialEquals(assertions,
+ "ST_GeomFromBinary(ST_AsBinary(ST_GeometryFromText('%s')))".formatted(wkt),
+ wkt);
}
@Test
@@ -2209,6 +2235,8 @@ public void testGeometryFromHadoopShape()
assertGeometryFromHadoopShape("000000000408000000000000000000F03F00000000000000400000000000000840000000000000104002000000000000000000F03F000000000000004000000000000008400000000000001040", "MULTIPOINT ((1 2), (3 4))");
assertGeometryFromHadoopShape("000000000503000000000000000000F03F000000000000F03F0000000000001440000000000000104002000000040000000000000002000000000000000000F03F000000000000F03F0000000000001440000000000000F03F0000000000000040000000000000104000000000000010400000000000001040", "MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))");
assertGeometryFromHadoopShape("000000000605000000000000000000F03F000000000000F03F00000000000018400000000000001840020000000A0000000000000005000000000000000000F03F000000000000F03F000000000000F03F0000000000000840000000000000084000000000000008400000000000000840000000000000F03F000000000000F03F000000000000F03F0000000000000040000000000000104000000000000000400000000000001840000000000000184000000000000018400000000000001840000000000000104000000000000000400000000000001040", "MULTIPOLYGON (((1 1, 3 1, 3 3, 1 3, 1 1)), ((2 4, 6 4, 6 6, 2 6, 2 4)))");
+ assertGeometryFromHadoopShape("0000000006050000000000000000000000000000000000000000000000000014400000000000001440020000000A0000000000000005000000000000000000000000000000000000000000000000000000000000000000F03F000000000000F03F000000000000F03F000000000000F03F0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000014400000000000000000000000000000144000000000000014400000000000000000000000000000144000000000000000000000000000000000", "POLYGON ((0 0, 1 0, 1 1, 0 1, 0 0), (0 0, 0 5, 5 5, 5 0, 0 0))");
+ assertGeometryFromHadoopShape("0000000006050000000000000000000000000000000000000000000000000018400000000000001840020000000A0000000000000005000000000000000000000000000000000000000000000000000000000000000000F03F000000000000F03F000000000000F03F000000000000F03F0000000000000000000000000000000000000000000000000000000000000040000000000000004000000000000000400000000000001840000000000000184000000000000018400000000000001840000000000000004000000000000000400000000000000040", "MULTIPOLYGON (((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 2, 6 2, 6 6, 2 6, 2 2)))");
// given hadoop shape is too short
assertTrinoExceptionThrownBy(assertions.function("geometry_from_hadoop_shape", "from_hex('1234')")::evaluate)
@@ -2228,14 +2256,20 @@ public void testGeometryFromHadoopShape()
// shape type is invalid for given shape
assertTrinoExceptionThrownBy(assertions.function("geometry_from_hadoop_shape", "from_hex('000000000501000000000000000000F03F0000000000000040')")::evaluate)
.hasMessage("Invalid Hadoop shape");
+
+ // part indices invalid
+ assertTrinoExceptionThrownBy(assertions.function("geometry_from_hadoop_shape", "from_hex('000000000503000000000000000000F03F000000000000F03F0000000000001440000000000000104002000000040000000200000001000000000000000000F03F000000000000F03F0000000000001440000000000000F03F0000000000000040000000000000104000000000000010400000000000001040')")::evaluate)
+ .hasMessage("Invalid Hadoop shape");
+
+ assertTrinoExceptionThrownBy(assertions.function("geometry_from_hadoop_shape", "from_hex('000000000605000000000000000000F03F000000000000F03F00000000000018400000000000001840020000000A000000000000000B0000000000000000F03F000000000000F03F000000000000F03F0000000000000840000000000000084000000000000008400000000000000840000000000000F03F000000000000F03F000000000000F03F0000000000000040000000000000104000000000000000400000000000001840000000000000184000000000000018400000000000001840000000000000104000000000000000400000000000001040')")::evaluate)
+ .hasMessage("Invalid Hadoop shape");
}
private void assertGeometryFromHadoopShape(String hadoopHex, String expectedWkt)
{
- assertThat(assertions.expression("ST_AsText(geometry_from_hadoop_shape(geometry))")
- .binding("geometry", "from_hex('%s')".formatted(hadoopHex)))
- .hasType(VARCHAR)
- .isEqualTo(expectedWkt);
+ assertSpatialEquals(assertions,
+ "geometry_from_hadoop_shape(from_hex('%s'))".formatted(hadoopHex),
+ expectedWkt);
}
@Test
@@ -2274,7 +2308,7 @@ public void testSphericalGeographyJsonConversion()
// invalid geometries should return as is.
assertGeographyToAndFromJson("MULTIPOINT ((0 0), (0 1), (1 1), (0 1))");
assertGeographyToAndFromJson("LINESTRING (0 0, 0 1, 0 1, 1 1, 1 0, 0 0)");
- assertGeographyToAndFromJson("LINESTRING (0 0, 1 1, 1 0, 0 1)");
+ assertGeographyToAndFromJson("LINESTRING (0 0, 1 1, 1 0, 0 1, 0 0)");
// extra properties are stripped from JSON
assertValidGeometryJson("{\"type\":\"Point\", \"coordinates\":[0,0], \"mykey\":\"myvalue\"}", "POINT (0 0)");
@@ -2308,16 +2342,16 @@ public void testSphericalGeographyJsonConversion()
private void assertGeographyToAndFromJson(String wkt)
{
- assertThat(assertions.function("ST_AsText", "to_geometry(from_geojson_geometry(to_geojson_geometry(to_spherical_geography(ST_GeometryFromText('%s')))))".formatted(wkt)))
- .hasType(VARCHAR)
- .isEqualTo(wkt);
+ assertSpatialEquals(assertions,
+ "to_geometry(from_geojson_geometry(to_geojson_geometry(to_spherical_geography(ST_GeometryFromText('%s')))))".formatted(wkt),
+ wkt);
}
private void assertValidGeometryJson(String json, String wkt)
{
- assertThat(assertions.function("ST_AsText", "to_geometry(from_geojson_geometry('%s'))".formatted(json)))
- .hasType(VARCHAR)
- .isEqualTo(wkt);
+ assertSpatialEquals(assertions,
+ "to_geometry(from_geojson_geometry('%s'))".formatted(json),
+ wkt);
}
private void assertInvalidGeometryJson(String json, String message)
@@ -2362,25 +2396,140 @@ public void testGeometryJsonConversion()
// invalid geometries should return as is.
assertGeometryToAndFromJson("MULTIPOINT ((0 0), (0 1), (1 1), (0 1))");
assertGeometryToAndFromJson("LINESTRING (0 0, 0 1, 0 1, 1 1, 1 0, 0 0)");
- assertGeometryToAndFromJson("LINESTRING (0 0, 1 1, 1 0, 0 1)");
+ assertGeometryToAndFromJson("LINESTRING (0 0, 1 1, 1 0, 0 1, 0 0)");
}
private void assertGeometryToAndFromJson(String wkt)
{
- assertThat(assertions.function("ST_AsText", "to_geometry(from_geojson_geometry(to_geojson_geometry(ST_GeometryFromText('%s'))))".formatted(wkt)))
- .hasType(VARCHAR)
- .isEqualTo(wkt);
+ assertSpatialEquals(assertions,
+ "to_geometry(from_geojson_geometry(to_geojson_geometry(ST_GeometryFromText('%s'))))".formatted(wkt),
+ wkt);
}
@Test
public void testSTGeomFromKML()
{
- assertThat(assertions.expression("ST_AsText(ST_GeomFromKML(geometry))")
- .binding("geometry", "'-2,2'"))
- .hasType(VARCHAR)
- .isEqualTo("POINT (-2 2)");
+ assertSpatialEquals(assertions,
+ "ST_GeomFromKML('-2,2')",
+ "POINT (-2 2)");
assertTrinoExceptionThrownBy(assertions.function("ST_GeomFromKML", "''")::evaluate)
.hasMessage("Invalid KML: ");
}
+
+ @Test
+ public void testSridFunctions()
+ {
+ // ST_SRID - default SRID is 0
+ assertThat(assertions.function("ST_SRID", "ST_Point(1, 2)"))
+ .hasType(INTEGER)
+ .isEqualTo(0);
+
+ // ST_SetSRID and ST_SRID - set and retrieve SRID
+ assertThat(assertions.function("ST_SRID", "ST_SetSRID(ST_Point(1, 2), 4326)"))
+ .hasType(INTEGER)
+ .isEqualTo(4326);
+
+ // SRID propagation through unary operations
+ assertThat(assertions.function("ST_SRID", "ST_Buffer(ST_SetSRID(ST_Point(1, 2), 3857), 1.0)"))
+ .hasType(INTEGER)
+ .isEqualTo(3857);
+
+ assertThat(assertions.function("ST_SRID", "ST_Centroid(ST_SetSRID(ST_GeometryFromText('POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))'), 4326))"))
+ .hasType(INTEGER)
+ .isEqualTo(4326);
+
+ assertThat(assertions.function("ST_SRID", "ST_ConvexHull(ST_SetSRID(ST_GeometryFromText('MULTIPOINT ((0 0), (1 1), (0 1))'), 4326))"))
+ .hasType(INTEGER)
+ .isEqualTo(4326);
+
+ assertThat(assertions.function("ST_SRID", "ST_Envelope(ST_SetSRID(ST_GeometryFromText('LINESTRING (0 0, 1 1)'), 4326))"))
+ .hasType(INTEGER)
+ .isEqualTo(4326);
+
+ // SRID propagation through binary operations - matching SRIDs
+ assertThat(assertions.function("ST_SRID", "ST_Intersection(ST_SetSRID(ST_GeometryFromText('POLYGON ((0 0, 0 2, 2 2, 2 0, 0 0))'), 4326), ST_SetSRID(ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))'), 4326))"))
+ .hasType(INTEGER)
+ .isEqualTo(4326);
+
+ // SRID 0 is a wildcard - matches any SRID
+ assertThat(assertions.function("ST_SRID", "ST_Intersection(ST_SetSRID(ST_GeometryFromText('POLYGON ((0 0, 0 2, 2 2, 2 0, 0 0))'), 4326), ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))'))"))
+ .hasType(INTEGER)
+ .isEqualTo(4326);
+
+ assertThat(assertions.function("ST_SRID", "ST_Intersection(ST_GeometryFromText('POLYGON ((0 0, 0 2, 2 2, 2 0, 0 0))'), ST_SetSRID(ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))'), 3857))"))
+ .hasType(INTEGER)
+ .isEqualTo(3857);
+
+ // MaxInt stress test - large SRID values
+ assertThat(assertions.function("ST_SRID", "ST_SetSRID(ST_Point(1, 2), 2147483647)"))
+ .hasType(INTEGER)
+ .isEqualTo(2147483647);
+
+ // ST_AsEWKB - pass-through since internal format is EWKB
+ assertThat(assertions.function("ST_AsEWKB", "ST_Point(1, 2)"))
+ .hasType(VARBINARY);
+
+ // ST_AsEWKT - returns EWKT with SRID prefix when SRID is non-zero
+ assertThat(assertions.function("ST_AsEWKT", "ST_SetSRID(ST_Point(1, 2), 4326)"))
+ .hasType(VARCHAR)
+ .isEqualTo("SRID=4326;POINT (1 2)");
+
+ // ST_AsEWKT - returns plain WKT when SRID is 0
+ assertThat(assertions.function("ST_AsEWKT", "ST_Point(1, 2)"))
+ .hasType(VARCHAR)
+ .isEqualTo("POINT (1 2)");
+ }
+
+ @Test
+ public void testSridMismatchValidation()
+ {
+ // Binary operations with mismatched SRIDs should throw
+ assertTrinoExceptionThrownBy(() -> assertions.function("ST_Intersection",
+ "ST_SetSRID(ST_GeometryFromText('POLYGON ((0 0, 0 2, 2 2, 2 0, 0 0))'), 4326)",
+ "ST_SetSRID(ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))'), 3857)").evaluate())
+ .hasMessage("SRID mismatch: 4326 vs 3857");
+
+ assertTrinoExceptionThrownBy(() -> assertions.function("ST_Difference",
+ "ST_SetSRID(ST_GeometryFromText('POLYGON ((0 0, 0 2, 2 2, 2 0, 0 0))'), 4326)",
+ "ST_SetSRID(ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))'), 3857)").evaluate())
+ .hasMessage("SRID mismatch: 4326 vs 3857");
+
+ assertTrinoExceptionThrownBy(() -> assertions.function("ST_Union",
+ "ST_SetSRID(ST_GeometryFromText('POLYGON ((0 0, 0 2, 2 2, 2 0, 0 0))'), 4326)",
+ "ST_SetSRID(ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))'), 3857)").evaluate())
+ .hasMessage("SRID mismatch: 4326 vs 3857");
+
+ // Boolean operations with mismatched SRIDs should throw
+ assertTrinoExceptionThrownBy(() -> assertions.function("ST_Contains",
+ "ST_SetSRID(ST_GeometryFromText('POLYGON ((0 0, 0 2, 2 2, 2 0, 0 0))'), 4326)",
+ "ST_SetSRID(ST_Point(1, 1), 3857)").evaluate())
+ .hasMessage("SRID mismatch: 4326 vs 3857");
+
+ assertTrinoExceptionThrownBy(() -> assertions.function("ST_Intersects",
+ "ST_SetSRID(ST_GeometryFromText('POLYGON ((0 0, 0 2, 2 2, 2 0, 0 0))'), 4326)",
+ "ST_SetSRID(ST_Point(1, 1), 3857)").evaluate())
+ .hasMessage("SRID mismatch: 4326 vs 3857");
+
+ assertTrinoExceptionThrownBy(() -> assertions.function("ST_Distance",
+ "ST_SetSRID(ST_Point(0, 0), 4326)",
+ "ST_SetSRID(ST_Point(1, 1), 3857)").evaluate())
+ .hasMessage("SRID mismatch: 4326 vs 3857");
+ }
+
+ @Test
+ public void testSridWithAsBinary()
+ {
+ // ST_AsBinary should strip SRID (produce OGC WKB, not EWKB)
+ // Re-reading from WKB should have SRID 0
+ assertThat(assertions.function("ST_SRID", "ST_GeomFromBinary(ST_AsBinary(ST_SetSRID(ST_Point(1, 2), 4326)))"))
+ .hasType(INTEGER)
+ .isEqualTo(0);
+
+ // ST_AsEWKB should preserve SRID
+ // The internal format is EWKB, so ST_AsEWKB is a pass-through
+ assertThat(assertions.function("ST_SRID", "ST_GeomFromBinary(ST_AsEWKB(ST_SetSRID(ST_Point(1, 2), 4326)))"))
+ .hasType(INTEGER)
+ .isEqualTo(4326);
+ }
}
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestGeoSpatialQueries.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestGeoSpatialQueries.java
index 5e96edaa3efd..d1c55fce409e 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestGeoSpatialQueries.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestGeoSpatialQueries.java
@@ -21,6 +21,7 @@
import org.junit.jupiter.api.Test;
import static io.airlift.testing.Closeables.closeAllSuppress;
+import static io.trino.plugin.geospatial.GeoTestUtils.spatiallyEquals;
import static io.trino.plugin.geospatial.GeometryType.GEOMETRY;
import static io.trino.plugin.geospatial.SphericalGeographyType.SPHERICAL_GEOGRAPHY;
import static io.trino.testing.TestingSession.testSessionBuilder;
@@ -68,18 +69,18 @@ public void testGeometryResult()
.row("POINT (52.233 21.016)")
.build());
- assertThat(query("SELECT ST_GeometryFromText('POLYGON((0 0, 0 1, 1 1, 1 1, 1 0, 0 0))')"))
- .result().matches(MaterializedResult.resultBuilder(getSession(), GEOMETRY)
- .row("POLYGON ((0 0, 1 0, 1 1, 1 1, 0 1, 0 0))")
- .build());
+ MaterializedResult actual = computeActual("SELECT ST_GeometryFromText('POLYGON((0 0, 0 1, 1 1, 1 1, 1 0, 0 0))')");
+ assertThat(actual.getTypes()).containsExactly(GEOMETRY);
+ String actualWkt = (String) actual.getOnlyValue();
+ assertThat(spatiallyEquals(actualWkt, "POLYGON ((0 0, 0 1, 1 1, 1 1, 1 0, 0 0))")).isTrue();
}
@Test
public void testSphericalGeographyResult()
{
- assertThat(query("SELECT to_spherical_geography(ST_GeometryFromText('POLYGON((0 0, 0 1, 1 1, 1 1, 1 0, 0 0))'))"))
- .result().matches(MaterializedResult.resultBuilder(getSession(), SPHERICAL_GEOGRAPHY)
- .row("POLYGON ((0 0, 1 0, 1 1, 1 1, 0 1, 0 0))")
- .build());
+ MaterializedResult actual = computeActual("SELECT to_spherical_geography(ST_GeometryFromText('POLYGON((0 0, 0 1, 1 1, 1 1, 1 0, 0 0))'))");
+ assertThat(actual.getTypes()).containsExactly(SPHERICAL_GEOGRAPHY);
+ String actualWkt = (String) actual.getOnlyValue();
+ assertThat(spatiallyEquals(actualWkt, "POLYGON ((0 0, 0 1, 1 1, 1 1, 1 0, 0 0))")).isTrue();
}
}
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSpatialJoinOperator.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSpatialJoinOperator.java
index 91565ebe934b..01e460a95b55 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSpatialJoinOperator.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSpatialJoinOperator.java
@@ -48,6 +48,7 @@
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
+import org.locationtech.jts.geom.Geometry;
import java.util.List;
import java.util.Optional;
@@ -92,14 +93,14 @@ public class TestSpatialJoinOperator
newLeaf(new Rectangle(6, -2, 15, 15), 0))));
// 2 intersecting polygons: A and B
- private static final Slice POLYGON_A = stGeometryFromText(Slices.utf8Slice("POLYGON ((0 0, -0.5 2.5, 0 5, 2.5 5.5, 5 5, 5.5 2.5, 5 0, 2.5 -0.5, 0 0))"));
- private static final Slice POLYGON_B = stGeometryFromText(Slices.utf8Slice("POLYGON ((4 4, 3.5 7, 4 10, 7 10.5, 10 10, 10.5 7, 10 4, 7 3.5, 4 4))"));
+ private static final Geometry POLYGON_A = stGeometryFromText(Slices.utf8Slice("POLYGON ((0 0, -0.5 2.5, 0 5, 2.5 5.5, 5 5, 5.5 2.5, 5 0, 2.5 -0.5, 0 0))"));
+ private static final Geometry POLYGON_B = stGeometryFromText(Slices.utf8Slice("POLYGON ((4 4, 3.5 7, 4 10, 7 10.5, 10 10, 10.5 7, 10 4, 7 3.5, 4 4))"));
// A set of points: X in A, Y in A and B, Z in B, W outside of A and B
- private static final Slice POINT_X = stPoint(1, 1);
- private static final Slice POINT_Y = stPoint(4.5, 4.5);
- private static final Slice POINT_Z = stPoint(6, 6);
- private static final Slice POINT_W = stPoint(20, 20);
+ private static final Geometry POINT_X = stPoint(1, 1);
+ private static final Geometry POINT_Y = stPoint(4.5, 4.5);
+ private static final Geometry POINT_Z = stPoint(6, 6);
+ private static final Geometry POINT_W = stPoint(20, 20);
private ExecutorService executor;
private ScheduledExecutorService scheduledExecutor;
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSpatialPartitioningInternalAggregation.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSpatialPartitioningInternalAggregation.java
index 548f1c08fa14..5d2cedf13c25 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSpatialPartitioningInternalAggregation.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSpatialPartitioningInternalAggregation.java
@@ -13,10 +13,6 @@
*/
package io.trino.plugin.geospatial;
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Ints;
import io.airlift.slice.Slice;
@@ -36,13 +32,18 @@
import io.trino.testing.QueryRunner;
import io.trino.testing.StandaloneQueryRunner;
import org.junit.jupiter.api.Test;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.Point;
import java.util.List;
import java.util.OptionalInt;
import static com.google.common.math.DoubleMath.roundToInt;
import static io.trino.geospatial.KdbTree.buildKdbTree;
-import static io.trino.geospatial.serde.GeometrySerde.serialize;
+import static io.trino.geospatial.serde.JtsGeometrySerde.serialize;
import static io.trino.operator.aggregation.AggregationTestUtils.createGroupByIdBlock;
import static io.trino.operator.aggregation.AggregationTestUtils.getFinalBlock;
import static io.trino.operator.aggregation.AggregationTestUtils.getGroupValue;
@@ -56,6 +57,8 @@
public class TestSpatialPartitioningInternalAggregation
{
+ private static final GeometryFactory GEOMETRY_FACTORY = new GeometryFactory();
+
@Test
public void test()
{
@@ -71,7 +74,7 @@ public void test(int partitionCount)
TestingAggregationFunction function = new TestingFunctionResolution(runner)
.getAggregateFunction("spatial_partitioning", fromTypes(GEOMETRY, INTEGER));
- List geometries = makeGeometries();
+ List geometries = makeGeometries();
Block geometryBlock = makeGeometryBlock(geometries);
BlockBuilder blockBuilder = INTEGER.createFixedSizeBlockBuilder(1);
@@ -95,52 +98,51 @@ public void test(int partitionCount)
assertThat(groupValue).isEqualTo(expectedValue.toStringUtf8());
}
- private List makeGeometries()
+ private List makeGeometries()
{
- ImmutableList.Builder geometries = ImmutableList.builder();
+ ImmutableList.Builder geometries = ImmutableList.builder();
for (int i = 0; i < 10; i++) {
for (int j = 0; j < 10; j++) {
- geometries.add(new OGCPoint(new Point(-10 + i, -10 + j), null));
+ geometries.add(GEOMETRY_FACTORY.createPoint(new Coordinate(-10 + i, -10 + j)));
}
}
for (int i = 0; i < 5; i++) {
for (int j = 0; j < 5; j++) {
- geometries.add(new OGCPoint(new Point(-10 + 2 * i, 2 * j), null));
+ geometries.add(GEOMETRY_FACTORY.createPoint(new Coordinate(-10 + 2 * i, 2 * j)));
}
}
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 4; j++) {
- geometries.add(new OGCPoint(new Point(2.5 * i, -10 + 2.5 * j), null));
+ geometries.add(GEOMETRY_FACTORY.createPoint(new Coordinate(2.5 * i, -10 + 2.5 * j)));
}
}
for (int i = 0; i < 3; i++) {
for (int j = 0; j < 3; j++) {
- geometries.add(new OGCPoint(new Point(5 * i, 5 * j), null));
+ geometries.add(GEOMETRY_FACTORY.createPoint(new Coordinate(5 * i, 5 * j)));
}
}
return geometries.build();
}
- private Block makeGeometryBlock(List geometries)
+ private Block makeGeometryBlock(List geometries)
{
BlockBuilder builder = GEOMETRY.createBlockBuilder(null, geometries.size());
- for (OGCGeometry geometry : geometries) {
+ for (Geometry geometry : geometries) {
GEOMETRY.writeSlice(builder, serialize(geometry));
}
return builder.build();
}
- private Slice getSpatialPartitioning(Rectangle extent, List geometries, int partitionCount)
+ private Slice getSpatialPartitioning(Rectangle extent, List geometries, int partitionCount)
{
ImmutableList.Builder rectangles = ImmutableList.builder();
- for (OGCGeometry geometry : geometries) {
- Envelope envelope = new Envelope();
- geometry.getEsriGeometry().queryEnvelope(envelope);
- rectangles.add(new Rectangle(envelope.getXMin(), envelope.getYMin(), envelope.getXMax(), envelope.getYMax()));
+ for (Point geometry : geometries) {
+ Envelope envelope = geometry.getEnvelopeInternal();
+ rectangles.add(new Rectangle(envelope.getMinX(), envelope.getMinY(), envelope.getMaxX(), envelope.getMaxY()));
}
return KdbTreeUtils.toJson(buildKdbTree(roundToInt(geometries.size() * 1.0 / partitionCount, CEILING), extent, rectangles.build()));
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSphericalGeoFunctions.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSphericalGeoFunctions.java
index fe6f69de8523..cc8dcb33c197 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSphericalGeoFunctions.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/TestSphericalGeoFunctions.java
@@ -34,6 +34,8 @@
import static com.google.common.io.Resources.getResource;
import static io.airlift.slice.Slices.utf8Slice;
+import static io.trino.geospatial.serde.JtsGeometrySerde.serialize;
+import static io.trino.plugin.geospatial.GeoTestUtils.spatiallyEquals;
import static io.trino.plugin.geospatial.GeometryType.GEOMETRY;
import static io.trino.plugin.geospatial.SphericalGeographyType.SPHERICAL_GEOGRAPHY;
import static io.trino.spi.type.DoubleType.DOUBLE;
@@ -78,18 +80,22 @@ public void testGetObjectValue()
"MULTIPOINT ((-40.2 28.9), (-40.2 31.9))",
"LINESTRING (-40.2 28.9, -40.2 31.9, -37.2 31.9)",
"MULTILINESTRING ((-40.2 28.9, -40.2 31.9), (-40.2 31.9, -37.2 31.9))",
- "POLYGON ((-40.2 28.9, -37.2 28.9, -37.2 31.9, -40.2 31.9, -40.2 28.9))",
- "POLYGON ((-40.2 28.9, -37.2 28.9, -37.2 31.9, -40.2 31.9, -40.2 28.9), (-39.2 29.9, -39.2 30.9, -38.2 30.9, -38.2 29.9, -39.2 29.9))",
- "MULTIPOLYGON (((-40.2 28.9, -37.2 28.9, -37.2 31.9, -40.2 31.9, -40.2 28.9)), ((-39.2 29.9, -38.2 29.9, -38.2 30.9, -39.2 30.9, -39.2 29.9)))",
- "GEOMETRYCOLLECTION (POINT (-40.2 28.9), LINESTRING (-40.2 28.9, -40.2 31.9, -37.2 31.9), POLYGON ((-40.2 28.9, -37.2 28.9, -37.2 31.9, -40.2 31.9, -40.2 28.9)))");
+ "POLYGON ((-40.2 28.9, -40.2 31.9, -37.2 31.9, -37.2 28.9, -40.2 28.9))",
+ "POLYGON ((-40.2 28.9, -40.2 31.9, -37.2 31.9, -37.2 28.9, -40.2 28.9), (-39.2 29.9, -38.2 29.9, -38.2 30.9, -39.2 30.9, -39.2 29.9))",
+ "MULTIPOLYGON (((-40.2 28.9, -40.2 31.9, -37.2 31.9, -37.2 28.9, -40.2 28.9)), ((-36.2 28.9, -36.2 31.9, -33.2 31.9, -33.2 28.9, -36.2 28.9)))",
+ "GEOMETRYCOLLECTION (POINT (-40.2 28.9), LINESTRING (-40.2 28.9, -40.2 31.9, -37.2 31.9), POLYGON ((-40.2 28.9, -40.2 31.9, -37.2 31.9, -37.2 28.9, -40.2 28.9)))");
BlockBuilder builder = SPHERICAL_GEOGRAPHY.createBlockBuilder(null, wktList.size());
for (String wkt : wktList) {
- SPHERICAL_GEOGRAPHY.writeSlice(builder, GeoFunctions.toSphericalGeography(GeoFunctions.stGeometryFromText(utf8Slice(wkt))));
+ SPHERICAL_GEOGRAPHY.writeSlice(builder, serialize(GeoFunctions.toSphericalGeography(GeoFunctions.stGeometryFromText(utf8Slice(wkt)))));
}
Block block = builder.build();
for (int i = 0; i < wktList.size(); i++) {
- assertThat(wktList.get(i)).isEqualTo(SPHERICAL_GEOGRAPHY.getObjectValue(block, i));
+ String expected = wktList.get(i);
+ String actual = (String) SPHERICAL_GEOGRAPHY.getObjectValue(block, i);
+ assertThat(spatiallyEquals(expected, actual))
+ .withFailMessage("Geometry mismatch at index %d!\nExpected: %s\nActual: %s", i, expected, actual)
+ .isTrue();
}
}
@@ -150,9 +156,9 @@ public void testToAndFromSphericalGeography()
.hasType(GEOMETRY)
.matches("ST_GeometryFromText('POLYGON ((-40.2 28.9, -37.2 28.9, -37.2 31.9, -40.2 31.9, -40.2 28.9), (-39.2 29.9, -39.2 30.9, -38.2 30.9, -38.2 29.9, -39.2 29.9))')");
- assertThat(assertions.function("to_geometry", toSphericalGeography("MULTIPOLYGON (((-40.2 28.9, -37.2 28.9, -37.2 31.9, -40.2 31.9, -40.2 28.9)), ((-39.2 29.9, -38.2 29.9, -38.2 30.9, -39.2 30.9, -39.2 29.9)))")))
+ assertThat(assertions.function("to_geometry", toSphericalGeography("MULTIPOLYGON (((-40.2 28.9, -37.2 28.9, -37.2 31.9, -40.2 31.9, -40.2 28.9)), ((-36.2 28.9, -33.2 28.9, -33.2 31.9, -36.2 31.9, -36.2 28.9)))")))
.hasType(GEOMETRY)
- .matches("ST_GeometryFromText('MULTIPOLYGON (((-40.2 28.9, -37.2 28.9, -37.2 31.9, -40.2 31.9, -40.2 28.9)), ((-39.2 29.9, -38.2 29.9, -38.2 30.9, -39.2 30.9, -39.2 29.9)))')");
+ .matches("ST_GeometryFromText('MULTIPOLYGON (((-40.2 28.9, -37.2 28.9, -37.2 31.9, -40.2 31.9, -40.2 28.9)), ((-36.2 28.9, -33.2 28.9, -33.2 31.9, -36.2 31.9, -36.2 28.9)))')");
assertThat(assertions.function("to_geometry", toSphericalGeography("GEOMETRYCOLLECTION (POINT (-40.2 28.9), LINESTRING (-40.2 28.9, -40.2 31.9, -37.2 31.9), POLYGON ((-40.2 28.9, -37.2 28.9, -37.2 31.9, -40.2 31.9, -40.2 28.9)))")))
.hasType(GEOMETRY)
@@ -177,7 +183,7 @@ public void testToAndFromSphericalGeography()
assertTrinoExceptionThrownBy(assertions.function("to_spherical_geography", "ST_GeometryFromText('POLYGON ((-40.2 28.9, -40.2 31.9, -37.2 131.9, -37.2 28.9, -40.2 28.9), (-39.2 29.9, -39.2 30.9, -38.2 30.9, -38.2 29.9, -39.2 29.9))')")::evaluate)
.hasMessage("Latitude must be between -90 and 90");
- assertTrinoExceptionThrownBy(assertions.function("to_spherical_geography", "ST_GeometryFromText('MULTIPOLYGON (((-40.2 28.9, -40.2 31.9, -37.2 31.9, -37.2 28.9, -40.2 28.9)), ((-39.2 29.9, -39.2 30.9, 238.2 30.9, -38.2 29.9, -39.2 29.9)))')")::evaluate)
+ assertTrinoExceptionThrownBy(assertions.function("to_spherical_geography", "ST_GeometryFromText('MULTIPOLYGON (((-40.2 28.9, -40.2 31.9, -37.2 31.9, -37.2 28.9, -40.2 28.9)), ((-36.2 28.9, -36.2 31.9, 238.2 31.9, -33.2 28.9, -36.2 28.9)))')")::evaluate)
.hasMessage("Longitude must be between -180 and 180");
assertTrinoExceptionThrownBy(assertions.function("to_spherical_geography", "ST_GeometryFromText('GEOMETRYCOLLECTION (POINT (-40.2 28.9), LINESTRING (-40.2 28.9, -40.2 131.9, -37.2 31.9), POLYGON ((-40.2 28.9, -40.2 31.9, -37.2 31.9, -37.2 28.9, -40.2 28.9)))')")::evaluate)
@@ -218,9 +224,9 @@ public void testArea()
.hasType(DOUBLE)
.isEqualTo((Object) null);
- // Invalid polygon (too few vertices)
- assertTrinoExceptionThrownBy(assertions.expression("ST_Area(to_spherical_geography(ST_GeometryFromText('POLYGON((90 0, 0 0))')))")::evaluate)
- .hasMessage("Polygon is not valid: a loop contains less then 3 vertices.");
+ // Invalid polygon (consecutive identical vertices)
+ assertTrinoExceptionThrownBy(assertions.expression("ST_Area(to_spherical_geography(ST_GeometryFromText('POLYGON((90 0, 0 0, 0 0, 90 0))')))")::evaluate)
+ .hasMessage("Polygon is not valid: it has two identical consecutive vertices");
// Invalid data type (point)
assertTrinoExceptionThrownBy(assertions.expression("ST_Area(to_spherical_geography(ST_GeometryFromText('POINT (0 1)')))")::evaluate)
@@ -234,22 +240,22 @@ public void testArea()
assertThat(assertions.function("ST_Area", toSphericalGeography("POLYGON((-135 85, -45 85, 45 85, 135 85, -135 85))")))
.satisfies(approximatelyEqualTo(619.00E9, 0.00001));
- assertThat(assertions.function("ST_Area", toSphericalGeography("POLYGON((0 0, 0 1, 1 1, 1 0))")))
+ assertThat(assertions.function("ST_Area", toSphericalGeography("POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))")))
.satisfies(approximatelyEqualTo(123.64E8, 0.00001));
- assertThat(assertions.function("ST_Area", toSphericalGeography("POLYGON((-122.150124 37.486095, -122.149201 37.486606, -122.145725 37.486580, -122.145923 37.483961 , -122.149324 37.482480 , -122.150837 37.483238, -122.150901 37.485392))")))
+ assertThat(assertions.function("ST_Area", toSphericalGeography("POLYGON((-122.150124 37.486095, -122.149201 37.486606, -122.145725 37.486580, -122.145923 37.483961 , -122.149324 37.482480 , -122.150837 37.483238, -122.150901 37.485392, -122.150124 37.486095))")))
.satisfies(approximatelyEqualTo(163290.93943446054, 0.00001));
double angleOfOneKm = 0.008993201943349;
- assertThat(assertions.function("ST_Area", toSphericalGeography(format("POLYGON((0 0, %.15f 0, %.15f %.15f, 0 %.15f))", angleOfOneKm, angleOfOneKm, angleOfOneKm, angleOfOneKm))))
+ assertThat(assertions.function("ST_Area", toSphericalGeography(format("POLYGON((0 0, %.15f 0, %.15f %.15f, 0 %.15f, 0 0))", angleOfOneKm, angleOfOneKm, angleOfOneKm, angleOfOneKm))))
.satisfies(approximatelyEqualTo(1E6, 0.00001));
// 1/4th of an hemisphere, ie 1/8th of the planet, should be close to 4PiR2/8 = 637.58E11
- assertThat(assertions.function("ST_Area", toSphericalGeography("POLYGON((90 0, 0 0, 0 90))")))
+ assertThat(assertions.function("ST_Area", toSphericalGeography("POLYGON((90 0, 0 0, 0 90, 90 0))")))
.satisfies(approximatelyEqualTo(637.58E11, 0.00001));
//A Polygon with a large hole
- assertThat(assertions.function("ST_Area", toSphericalGeography("POLYGON((90 0, 0 0, 0 90), (89 1, 1 1, 1 89))")))
+ assertThat(assertions.function("ST_Area", toSphericalGeography("POLYGON((90 0, 0 0, 0 90, 90 0), (89 1, 1 1, 1 89, 89 1))")))
.satisfies(approximatelyEqualTo(348.04E10, 0.00001));
Path geometryPath = new File(getResource("us-states.tsv").toURI()).toPath();
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/AbstractTestGeoAggregationFunctions.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/AbstractTestGeoAggregationFunctions.java
index 54dbe7072df6..c14b93f67c7f 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/AbstractTestGeoAggregationFunctions.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/AbstractTestGeoAggregationFunctions.java
@@ -13,10 +13,9 @@
*/
package io.trino.plugin.geospatial.aggregation;
-import com.esri.core.geometry.ogc.OGCGeometry;
import io.airlift.slice.Slice;
import io.trino.block.BlockAssertions;
-import io.trino.geospatial.serde.GeometrySerde;
+import io.trino.geospatial.serde.JtsGeometrySerde;
import io.trino.metadata.TestingFunctionResolution;
import io.trino.plugin.geospatial.GeoPlugin;
import io.trino.spi.Page;
@@ -26,6 +25,9 @@
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.parallel.Execution;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKTReader;
import java.util.Arrays;
import java.util.Collections;
@@ -65,9 +67,19 @@ public final void destroyTestFunctions()
protected void assertAggregatedGeometries(String testDescription, String expectedWkt, String... wkts)
{
+ WKTReader wktReader = new WKTReader();
List geometrySlices = Arrays.stream(wkts)
- .map(text -> text == null ? null : OGCGeometry.fromText(text))
- .map(input -> input == null ? null : GeometrySerde.serialize(input))
+ .map(text -> {
+ if (text == null) {
+ return null;
+ }
+ try {
+ return JtsGeometrySerde.serialize(wktReader.read(text));
+ }
+ catch (ParseException e) {
+ throw new RuntimeException(e);
+ }
+ })
.collect(Collectors.toList());
// Add a custom equality assertion because the resulting geometry may have
@@ -79,11 +91,17 @@ protected void assertAggregatedGeometries(String testDescription, String expecte
if (left == null || right == null) {
return false;
}
- OGCGeometry leftGeometry = OGCGeometry.fromText(left.toString());
- OGCGeometry rightGeometry = OGCGeometry.fromText(right.toString());
- // Check for equality by getting the difference
- return leftGeometry.difference(rightGeometry).isEmpty() &&
- rightGeometry.difference(leftGeometry).isEmpty();
+ try {
+ Geometry leftGeometry = wktReader.read(left.toString());
+ Geometry rightGeometry = wktReader.read(right.toString());
+ if (leftGeometry.isEmpty() && rightGeometry.isEmpty()) {
+ return leftGeometry.getGeometryType().equals(rightGeometry.getGeometryType());
+ }
+ return leftGeometry.equalsTopo(rightGeometry);
+ }
+ catch (ParseException e) {
+ throw new RuntimeException(e);
+ }
};
// Test in forward and reverse order to verify that ordering doesn't affect the output
assertAggregation(
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryConvexHullGeoAggregation.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryConvexHullGeoAggregation.java
index aa0725ebd11e..3a7a60ad6f31 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryConvexHullGeoAggregation.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryConvexHullGeoAggregation.java
@@ -14,6 +14,9 @@
package io.trino.plugin.geospatial.aggregation;
import org.junit.jupiter.api.Test;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKTReader;
import java.io.File;
import java.nio.file.Files;
@@ -21,6 +24,8 @@
import java.util.List;
import static com.google.common.io.Resources.getResource;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
public class TestGeometryConvexHullGeoAggregation
extends AbstractTestGeoAggregationFunctions
@@ -342,6 +347,40 @@ public void testGeometryCollection()
"POLYGON ((1 1, 3 1, 3 3, 1 3, 1 1))", "POINT (5 2)");
}
+ @Test
+ public void testSridMismatchWithEmptyGeometryInput()
+ throws ParseException
+ {
+ GeometryState state = new GeometryStateFactory.SingleGeometryState();
+ state.setGeometry(geometry("POINT (1 2)", 4326));
+
+ assertThatThrownBy(() -> ConvexHullAggregation.input(state, geometry("POINT EMPTY", 3857)))
+ .hasMessage("SRID mismatch: 4326 vs 3857");
+ }
+
+ @Test
+ public void testEmptyGeometryCombinePropagatesWildcardSrid()
+ throws ParseException
+ {
+ GeometryState state = new GeometryStateFactory.SingleGeometryState();
+ state.setGeometry(geometry("POINT (1 2)", 0));
+
+ GeometryState otherState = new GeometryStateFactory.SingleGeometryState();
+ otherState.setGeometry(geometry("POINT EMPTY", 4326));
+
+ ConvexHullAggregation.combine(state, otherState);
+
+ assertThat(state.getGeometry().getSRID()).isEqualTo(4326);
+ }
+
+ private static Geometry geometry(String wkt, int srid)
+ throws ParseException
+ {
+ Geometry geometry = new WKTReader().read(wkt);
+ geometry.setSRID(srid);
+ return geometry;
+ }
+
@Override
protected String getFunctionName()
{
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryStateFactory.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryStateFactory.java
index 592f4b106d70..5d359e4ca4c8 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryStateFactory.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryStateFactory.java
@@ -13,8 +13,9 @@
*/
package io.trino.plugin.geospatial.aggregation;
-import com.esri.core.geometry.ogc.OGCGeometry;
import org.junit.jupiter.api.Test;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKTReader;
import static org.assertj.core.api.Assertions.assertThat;
@@ -32,10 +33,11 @@ public void testCreateSingleStateEmpty()
@Test
public void testCreateSingleStatePresent()
+ throws ParseException
{
GeometryState state = factory.createSingleState();
- state.setGeometry(OGCGeometry.fromText("POINT (1 2)"));
- assertThat(OGCGeometry.fromText("POINT (1 2)")).isEqualTo(state.getGeometry());
+ state.setGeometry(new WKTReader().read("POINT (1 2)"));
+ assertThat(state.getGeometry().toText()).isEqualTo("POINT (1 2)");
assertThat(state.getEstimatedSize() > 0)
.describedAs("Estimated memory size was " + state.getEstimatedSize())
.isTrue();
@@ -53,6 +55,7 @@ public void testCreateGroupedStateEmpty()
@Test
public void testCreateGroupedStatePresent()
+ throws ParseException
{
GeometryState state = factory.createGroupedState();
assertThat(state.getGeometry()).isNull();
@@ -61,13 +64,13 @@ public void testCreateGroupedStatePresent()
groupedState.setGroupId(1);
assertThat(state.getGeometry()).isNull();
- groupedState.setGeometry(OGCGeometry.fromText("POINT (1 2)"));
- assertThat(state.getGeometry()).isEqualTo(OGCGeometry.fromText("POINT (1 2)"));
+ groupedState.setGeometry(new WKTReader().read("POINT (1 2)"));
+ assertThat(state.getGeometry().toText()).isEqualTo("POINT (1 2)");
groupedState.setGroupId(2);
assertThat(state.getGeometry()).isNull();
- groupedState.setGeometry(OGCGeometry.fromText("POINT (3 4)"));
- assertThat(state.getGeometry()).isEqualTo(OGCGeometry.fromText("POINT (3 4)"));
+ groupedState.setGeometry(new WKTReader().read("POINT (3 4)"));
+ assertThat(state.getGeometry().toText()).isEqualTo("POINT (3 4)");
groupedState.setGroupId(1);
assertThat(state.getGeometry()).isNotNull();
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryStateSerializer.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryStateSerializer.java
index 82a8196f3fd2..86d7441c2a5e 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryStateSerializer.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryStateSerializer.java
@@ -13,7 +13,6 @@
*/
package io.trino.plugin.geospatial.aggregation;
-import com.esri.core.geometry.ogc.OGCGeometry;
import io.trino.operator.aggregation.state.StateCompiler;
import io.trino.plugin.geospatial.GeometryType;
import io.trino.spi.block.Block;
@@ -21,20 +20,25 @@
import io.trino.spi.function.AccumulatorStateFactory;
import io.trino.spi.function.AccumulatorStateSerializer;
import org.junit.jupiter.api.Test;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKTReader;
import static io.trino.plugin.geospatial.aggregation.GeometryStateFactory.GroupedGeometryState;
import static org.assertj.core.api.Assertions.assertThat;
public class TestGeometryStateSerializer
{
+ private static final WKTReader WKT_READER = new WKTReader();
+
@Test
public void testSerializeDeserialize()
+ throws ParseException
{
AccumulatorStateFactory factory = StateCompiler.generateStateFactory(GeometryState.class);
AccumulatorStateSerializer serializer = StateCompiler.generateStateSerializer(GeometryState.class);
GeometryState state = factory.createSingleState();
- state.setGeometry(OGCGeometry.fromText("POINT (1 2)"));
+ state.setGeometry(WKT_READER.read("POINT (1 2)"));
BlockBuilder builder = GeometryType.GEOMETRY.createBlockBuilder(null, 1);
serializer.serialize(state, builder);
@@ -45,11 +49,12 @@ public void testSerializeDeserialize()
state.setGeometry(null);
serializer.deserialize(block, 0, state);
- assertThat(state.getGeometry().asText()).isEqualTo("POINT (1 2)");
+ assertThat(state.getGeometry().toText()).isEqualTo("POINT (1 2)");
}
@Test
public void testSerializeDeserializeGrouped()
+ throws ParseException
{
AccumulatorStateFactory factory = StateCompiler.generateStateFactory(GeometryState.class);
AccumulatorStateSerializer serializer = StateCompiler.generateStateSerializer(GeometryState.class);
@@ -57,10 +62,10 @@ public void testSerializeDeserializeGrouped()
// Add state to group 1
state.setGroupId(1);
- state.setGeometry(OGCGeometry.fromText("POINT (1 2)"));
+ state.setGeometry(WKT_READER.read("POINT (1 2)"));
// Add another state to group 2, to show that this doesn't affect the group under test (group 1)
state.setGroupId(2);
- state.setGeometry(OGCGeometry.fromText("POINT (2 3)"));
+ state.setGeometry(WKT_READER.read("POINT (2 3)"));
// Return to group 1
state.setGroupId(1);
@@ -74,10 +79,10 @@ public void testSerializeDeserializeGrouped()
serializer.deserialize(block, 0, state);
// Assert the state of group 1
- assertThat(state.getGeometry().asText()).isEqualTo("POINT (1 2)");
+ assertThat(state.getGeometry().toText()).isEqualTo("POINT (1 2)");
// Verify nothing changed in group 2
state.setGroupId(2);
- assertThat(state.getGeometry().asText()).isEqualTo("POINT (2 3)");
+ assertThat(state.getGeometry().toText()).isEqualTo("POINT (2 3)");
// Groups we did not touch are null
state.setGroupId(3);
assertThat(state.getGeometry()).isNull();
diff --git a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryUnionGeoAggregation.java b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryUnionGeoAggregation.java
index dfcc8a7f87f2..6fe1eff402af 100644
--- a/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryUnionGeoAggregation.java
+++ b/plugin/trino-geospatial/src/test/java/io/trino/plugin/geospatial/aggregation/TestGeometryUnionGeoAggregation.java
@@ -20,15 +20,20 @@
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKTReader;
import java.util.Arrays;
import java.util.List;
-import static io.trino.plugin.geospatial.GeometryType.GEOMETRY;
+import static io.trino.plugin.geospatial.GeoTestUtils.assertSpatialEquals;
+import static io.trino.testing.assertions.TrinoExceptionAssert.assertTrinoExceptionThrownBy;
import static java.lang.String.format;
import static java.util.Collections.reverse;
import static java.util.stream.Collectors.toList;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
@TestInstance(PER_CLASS)
@@ -316,17 +321,63 @@ protected String getFunctionName()
private void assertArrayAggAndGeometryUnion(String expectedWkt, String[] wkts)
{
+ if (wkts.length == 0) {
+ return;
+ }
List wktList = Arrays.stream(wkts).map(wkt -> format("ST_GeometryFromText('%s')", wkt)).collect(toList());
- String wktArray = format("ARRAY[%s]", COMMA_JOINER.join(wktList));
- // ST_Union(ARRAY[ST_GeometryFromText('...'), ...])
- assertThat(assertions.function("geometry_union", wktArray))
- .hasType(GEOMETRY)
- .isEqualTo(expectedWkt);
+ String wktArray = "ARRAY[" + COMMA_JOINER.join(wktList) + "]";
+ assertSpatialEquals(assertions, "geometry_union(" + wktArray + ")", expectedWkt);
reverse(wktList);
- wktArray = format("ARRAY[%s]", COMMA_JOINER.join(wktList));
- assertThat(assertions.function("geometry_union", wktArray))
- .hasType(GEOMETRY)
- .isEqualTo(expectedWkt);
+ wktArray = "ARRAY[" + COMMA_JOINER.join(wktList) + "]";
+ assertSpatialEquals(assertions, "geometry_union(" + wktArray + ")", expectedWkt);
+ }
+
+ @Test
+ public void testSridMismatchInAggregation()
+ {
+ // geometry_union (array version) should throw when geometries have mismatched SRIDs
+ assertTrinoExceptionThrownBy(() -> assertions.function("geometry_union",
+ "ARRAY[ST_SetSRID(ST_Point(1, 2), 4326), ST_SetSRID(ST_Point(3, 4), 3857)]").evaluate())
+ .hasMessage("SRID mismatch: 4326 vs 3857");
+
+ // Matching SRIDs should preserve SRID
+ assertThat(assertions.function("ST_SRID",
+ "geometry_union(ARRAY[ST_SetSRID(ST_Point(1, 2), 4326), ST_SetSRID(ST_Point(3, 4), 4326)])"))
+ .isEqualTo(4326);
+ }
+
+ @Test
+ public void testSridMismatchWithEmptyGeometryInput()
+ throws ParseException
+ {
+ GeometryState state = new GeometryStateFactory.SingleGeometryState();
+ state.setGeometry(geometry("POINT (1 2)", 4326));
+
+ assertThatThrownBy(() -> GeometryUnionAgg.input(state, geometry("POINT EMPTY", 3857)))
+ .hasMessage("SRID mismatch: 4326 vs 3857");
+ }
+
+ @Test
+ public void testEmptyGeometryCombinePropagatesWildcardSrid()
+ throws ParseException
+ {
+ GeometryState state = new GeometryStateFactory.SingleGeometryState();
+ state.setGeometry(geometry("POINT (1 2)", 0));
+
+ GeometryState otherState = new GeometryStateFactory.SingleGeometryState();
+ otherState.setGeometry(geometry("POINT EMPTY", 4326));
+
+ GeometryUnionAgg.combine(state, otherState);
+
+ assertThat(state.getGeometry().getSRID()).isEqualTo(4326);
+ }
+
+ private static Geometry geometry(String wkt, int srid)
+ throws ParseException
+ {
+ Geometry geometry = new WKTReader().read(wkt);
+ geometry.setSRID(srid);
+ return geometry;
}
}
diff --git a/plugin/trino-hive/pom.xml b/plugin/trino-hive/pom.xml
index fc96c8a7eade..05610aaa10bd 100644
--- a/plugin/trino-hive/pom.xml
+++ b/plugin/trino-hive/pom.xml
@@ -577,7 +577,13 @@
software.amazon.awssdk:retries
+
+ org.locationtech.jts:jts-core
+
+
+ org.locationtech.jts:jts-core
+
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/BackgroundHiveSplitLoader.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/BackgroundHiveSplitLoader.java
index f7f6236ead7c..fd460d8c4b14 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/BackgroundHiveSplitLoader.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/BackgroundHiveSplitLoader.java
@@ -614,7 +614,7 @@ private ListenableFuture getTransactionalSplits(Location path, boolean spl
for (FileEntry entry : acidState.originalFiles()) {
// Hive requires "original" files of transactional tables to conform to the bucketed tables naming pattern, to match them with delete deltas.
- acidInfoBuilder.addOriginalFile(entry.location(), entry.length(), getRequiredBucketNumber(entry.location()));
+ getBucketNumber(entry.location()).ifPresent(bucketId -> acidInfoBuilder.addOriginalFile(entry.location(), entry.length(), bucketId));
}
if (tableBucketInfo.isPresent()) {
@@ -662,7 +662,7 @@ private static Optional acidInfo(boolean fullAcid, AcidInfo.Builder bu
private static Optional acidInfoForOriginalFiles(boolean fullAcid, AcidInfo.Builder builder, Location location)
{
- return fullAcid ? Optional.of(builder.buildWithRequiredOriginalFiles(getRequiredBucketNumber(location))) : Optional.empty();
+ return fullAcid ? getBucketNumber(location).map(builder::buildWithRequiredOriginalFiles) : Optional.empty();
}
private Iterator createInternalHiveSplitIterator(TrinoFileSystem fileSystem, Location location, InternalHiveSplitFactory splitFactory, boolean splittable, Optional acidInfo)
@@ -718,9 +718,9 @@ private List getBucketedSplits(
ListMultimap bucketFiles = ArrayListMultimap.create();
for (TrinoFileStatus file : files) {
String fileName = Location.of(file.getPath()).fileName();
- OptionalInt bucket = getBucketNumber(fileName);
+ Optional bucket = getBucketNumber(fileName);
if (bucket.isPresent()) {
- bucketFiles.put(bucket.getAsInt(), file);
+ bucketFiles.put(bucket.get(), file);
continue;
}
@@ -814,22 +814,21 @@ static void validateFileBuckets(ListMultimap bucketFil
}
}
- private static int getRequiredBucketNumber(Location location)
+ private static Optional getBucketNumber(Location location)
{
- return getBucketNumber(location.fileName())
- .orElseThrow(() -> new IllegalStateException("Cannot get bucket number from location: " + location));
+ return getBucketNumber(location.fileName());
}
@VisibleForTesting
- static OptionalInt getBucketNumber(String name)
+ static Optional getBucketNumber(String name)
{
for (Pattern pattern : BUCKET_PATTERNS) {
Matcher matcher = pattern.matcher(name);
if (matcher.matches()) {
- return OptionalInt.of(parseInt(matcher.group(1)));
+ return Optional.of(parseInt(matcher.group(1)));
}
}
- return OptionalInt.empty();
+ return Optional.empty();
}
public static boolean hasAttemptId(String bucketFilename)
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastore.java
index 55410295dd05..1f9203d0671d 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastore.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastore.java
@@ -324,7 +324,8 @@ public Map getTableColumnStatistics(String databas
.stopOnIllegalExceptions()
.run("getTableColumnStatistics", stats.getGetTableColumnStatistics().wrap(() -> {
try (ThriftMetastoreClient client = createMetastoreClient()) {
- return groupStatisticsByColumn(client.getTableColumnStatistics(databaseName, tableName, ImmutableList.copyOf(columnNames)));
+ List tableColumnStatistics = client.getTableColumnStatistics(databaseName, tableName, ImmutableList.copyOf(columnNames));
+ return groupStatisticsByColumn(databaseName, tableName, tableColumnStatistics);
}
}));
}
@@ -346,7 +347,7 @@ public Map> getPartitionColumnStatisti
.filter(entry -> !entry.getValue().isEmpty())
.collect(toImmutableMap(
Map.Entry::getKey,
- entry -> groupStatisticsByColumn(entry.getValue())));
+ entry -> groupStatisticsByColumn(databaseName, tableName, entry.getValue())));
}
@Override
@@ -402,11 +403,11 @@ private Map> getPartitionColumnStatistics(Stri
}
}
- private static Map groupStatisticsByColumn(List statistics)
+ private static Map groupStatisticsByColumn(String databaseName, String tableName, List statistics)
{
Map statisticsByColumn = new HashMap<>();
for (ColumnStatisticsObj stats : statistics) {
- HiveColumnStatistics newColumnStatistics = ThriftMetastoreUtil.fromMetastoreApiColumnStatistics(stats);
+ HiveColumnStatistics newColumnStatistics = ThriftMetastoreUtil.fromMetastoreApiColumnStatistics(databaseName, tableName, stats);
if (statisticsByColumn.containsKey(stats.getColName())) {
HiveColumnStatistics existingColumnStatistics = statisticsByColumn.get(stats.getColName());
if (!newColumnStatistics.equals(existingColumnStatistics)) {
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java
index 77a7b47e924f..6a0f8057c44a 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java
@@ -23,6 +23,7 @@
import com.google.common.primitives.Shorts;
import io.airlift.compress.v3.zstd.ZstdDecompressor;
import io.airlift.json.JsonCodec;
+import io.airlift.log.Logger;
import io.trino.hive.thrift.metastore.BinaryColumnStatsData;
import io.trino.hive.thrift.metastore.BooleanColumnStatsData;
import io.trino.hive.thrift.metastore.ColumnStatisticsObj;
@@ -165,6 +166,8 @@
public final class ThriftMetastoreUtil
{
+ private static final Logger log = Logger.get(ThriftMetastoreUtil.class);
+
private static final JsonCodec LANGUAGE_FUNCTION_CODEC = jsonCodec(LanguageFunction.class);
private static final String PUBLIC_ROLE_NAME = "public";
private static final String ADMIN_ROLE_NAME = "admin";
@@ -331,11 +334,11 @@ public static Stream listEnabledRoles(ConnectorIdentity identity, Functi
}
return Stream.concat(
- roles,
- listApplicableRoles(principal, listRoleGrants)
- .map(RoleGrant::getRoleName)
- // The admin role must be enabled explicitly. If it is, it was added above.
- .filter(Predicate.isEqual(ADMIN_ROLE_NAME).negate()))
+ roles,
+ listApplicableRoles(principal, listRoleGrants)
+ .map(RoleGrant::getRoleName)
+ // The admin role must be enabled explicitly. If it is, it was added above.
+ .filter(Predicate.isEqual(ADMIN_ROLE_NAME).negate()))
// listApplicableRoles may return role which was already added explicitly above.
.distinct();
}
@@ -435,8 +438,8 @@ public static boolean isAvroTableWithSchemaSet(io.trino.hive.thrift.metastore.Ta
return serdeInfo.getSerializationLib() != null &&
((table.getParameters().get(AVRO_SCHEMA_URL_KEY) != null ||
(serdeInfo.getParameters() != null && serdeInfo.getParameters().get(AVRO_SCHEMA_URL_KEY) != null)) ||
- (table.getParameters().get(AVRO_SCHEMA_LITERAL_KEY) != null ||
- (serdeInfo.getParameters() != null && serdeInfo.getParameters().get(AVRO_SCHEMA_LITERAL_KEY) != null))) &&
+ (table.getParameters().get(AVRO_SCHEMA_LITERAL_KEY) != null ||
+ (serdeInfo.getParameters() != null && serdeInfo.getParameters().get(AVRO_SCHEMA_LITERAL_KEY) != null))) &&
serdeInfo.getSerializationLib().equals(AVRO.getSerde());
}
@@ -533,7 +536,7 @@ public static Partition fromMetastoreApiPartition(io.trino.hive.thrift.metastore
* Both formats store values as seconds since epoch in HMS, which we convert to microseconds
* for Trino's internal representation.
*/
- public static HiveColumnStatistics fromMetastoreApiColumnStatistics(ColumnStatisticsObj columnStatistics)
+ public static HiveColumnStatistics fromMetastoreApiColumnStatistics(String databaseName, String tableName, ColumnStatisticsObj columnStatistics)
{
if (columnStatistics.getStatsData().isSetLongStats()) {
LongColumnStatsData longStatsData = columnStatistics.getStatsData().getLongStats();
@@ -623,7 +626,17 @@ public static HiveColumnStatistics fromMetastoreApiColumnStatistics(ColumnStatis
OptionalLong distinctValuesWithNullCount = timestampStatsData.isSetNumDVs() ? OptionalLong.of(timestampStatsData.getNumDVs()) : OptionalLong.empty();
return createIntegerColumnStatistics(min, max, nullsCount, distinctValuesWithNullCount);
}
- throw new TrinoException(HIVE_INVALID_METADATA, "Invalid column statistics data: " + columnStatistics);
+ log.warn("Unsupported column statistics data in table %s.%s: %s", databaseName, tableName, columnStatistics);
+ return new HiveColumnStatistics(
+ Optional.empty(),
+ Optional.empty(),
+ Optional.empty(),
+ Optional.empty(),
+ Optional.empty(),
+ OptionalLong.empty(),
+ OptionalDouble.empty(),
+ OptionalLong.empty(),
+ OptionalLong.empty());
}
private static Optional fromMetastoreDate(Date date)
@@ -748,6 +761,9 @@ private static StorageDescriptor makeStorageDescriptor(String tableName, List bucketProperty = storage.getBucketProperty();
if (bucketProperty.isPresent()) {
@@ -1050,7 +1066,7 @@ public static boolean isAvroTableWithSchemaSet(Table table)
return AVRO.getSerde().equals(table.getStorage().getStorageFormat().getSerDeNullable()) &&
((table.getParameters().get(AVRO_SCHEMA_URL_KEY) != null ||
(table.getStorage().getSerdeParameters().get(AVRO_SCHEMA_URL_KEY) != null)) ||
- (table.getParameters().get(AVRO_SCHEMA_LITERAL_KEY) != null ||
- (table.getStorage().getSerdeParameters().get(AVRO_SCHEMA_LITERAL_KEY) != null)));
+ (table.getParameters().get(AVRO_SCHEMA_LITERAL_KEY) != null ||
+ (table.getStorage().getSerdeParameters().get(AVRO_SCHEMA_LITERAL_KEY) != null)));
}
}
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestBackgroundHiveSplitLoader.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestBackgroundHiveSplitLoader.java
index 14f0572711c2..5645e82f8231 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestBackgroundHiveSplitLoader.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestBackgroundHiveSplitLoader.java
@@ -68,7 +68,6 @@
import java.util.List;
import java.util.Map;
import java.util.Optional;
-import java.util.OptionalInt;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
@@ -413,27 +412,27 @@ public void testCachedDirectoryLister()
public void testGetBucketNumber()
{
// legacy Presto naming pattern
- assertThat(getBucketNumber("20190526_072952_00009_fn7s5_bucket-00234")).isEqualTo(OptionalInt.of(234));
- assertThat(getBucketNumber("20190526_072952_00009_fn7s5_bucket-00234.txt")).isEqualTo(OptionalInt.of(234));
- assertThat(getBucketNumber("20190526_235847_87654_fn7s5_bucket-56789")).isEqualTo(OptionalInt.of(56789));
+ assertThat(getBucketNumber("20190526_072952_00009_fn7s5_bucket-00234")).isEqualTo(Optional.of(234));
+ assertThat(getBucketNumber("20190526_072952_00009_fn7s5_bucket-00234.txt")).isEqualTo(Optional.of(234));
+ assertThat(getBucketNumber("20190526_235847_87654_fn7s5_bucket-56789")).isEqualTo(Optional.of(56789));
// Hive
- assertThat(getBucketNumber("0234_0")).isEqualTo(OptionalInt.of(234));
- assertThat(getBucketNumber("000234_0")).isEqualTo(OptionalInt.of(234));
- assertThat(getBucketNumber("0234_99")).isEqualTo(OptionalInt.of(234));
- assertThat(getBucketNumber("0234_0.txt")).isEqualTo(OptionalInt.of(234));
- assertThat(getBucketNumber("0234_0_copy_1")).isEqualTo(OptionalInt.of(234));
+ assertThat(getBucketNumber("0234_0")).isEqualTo(Optional.of(234));
+ assertThat(getBucketNumber("000234_0")).isEqualTo(Optional.of(234));
+ assertThat(getBucketNumber("0234_99")).isEqualTo(Optional.of(234));
+ assertThat(getBucketNumber("0234_0.txt")).isEqualTo(Optional.of(234));
+ assertThat(getBucketNumber("0234_0_copy_1")).isEqualTo(Optional.of(234));
// starts with non-zero
- assertThat(getBucketNumber("234_99")).isEqualTo(OptionalInt.of(234));
- assertThat(getBucketNumber("1234_0_copy_1")).isEqualTo(OptionalInt.of(1234));
+ assertThat(getBucketNumber("234_99")).isEqualTo(Optional.of(234));
+ assertThat(getBucketNumber("1234_0_copy_1")).isEqualTo(Optional.of(1234));
// Hive ACID
- assertThat(getBucketNumber("bucket_1234")).isEqualTo(OptionalInt.of(1234));
- assertThat(getBucketNumber("bucket_01234")).isEqualTo(OptionalInt.of(1234));
+ assertThat(getBucketNumber("bucket_1234")).isEqualTo(Optional.of(1234));
+ assertThat(getBucketNumber("bucket_01234")).isEqualTo(Optional.of(1234));
// not matching
- assertThat(getBucketNumber("0234.txt")).isEqualTo(OptionalInt.empty());
- assertThat(getBucketNumber("0234.txt")).isEqualTo(OptionalInt.empty());
+ assertThat(getBucketNumber("0234.txt")).isEqualTo(Optional.empty());
+ assertThat(getBucketNumber("0234.txt")).isEqualTo(Optional.empty());
}
@Test
@@ -632,6 +631,42 @@ public void testFullAcidTableWithOriginalFiles()
assertThat(splits).contains(fileLocations.get(1).toString());
}
+ @Test
+ public void testFullAcidTableWithOriginalFilesWithoutBuckets()
+ throws Exception
+ {
+ TrinoFileSystemFactory fileSystemFactory = new MemoryFileSystemFactory();
+ TrinoFileSystem fileSystem = fileSystemFactory.create(ConnectorIdentity.ofUser("test"));
+ Location tableLocation = Location.of("memory:///my_table");
+
+ Table table = table(
+ tableLocation.toString(),
+ List.of(),
+ Optional.empty(),
+ Map.of(TRANSACTIONAL, "true"));
+
+ Location originalFile = tableLocation.appendPath("data.csv");
+ try (OutputStream outputStream = fileSystem.newOutputFile(originalFile).create()) {
+ outputStream.write("test".getBytes(UTF_8));
+ }
+
+ // ValidWriteIdsList is of format $.::::
+ // This writeId list has high watermark transaction=3
+ ValidWriteIdList validWriteIdsList = new ValidWriteIdList(format("4$%s.%s:3:9223372036854775807::", table.getDatabaseName(), table.getTableName()));
+
+ BackgroundHiveSplitLoader backgroundHiveSplitLoader = backgroundHiveSplitLoader(
+ fileSystemFactory,
+ TupleDomain.all(),
+ Optional.empty(),
+ table,
+ Optional.empty(),
+ Optional.of(validWriteIdsList));
+ HiveSplitSource hiveSplitSource = hiveSplitSource(backgroundHiveSplitLoader);
+ backgroundHiveSplitLoader.start(hiveSplitSource);
+ List splits = drain(hiveSplitSource);
+ assertThat(splits).contains(originalFile.toString());
+ }
+
@Test
public void testVersionValidationNoOrcAcidVersionFile()
throws Exception
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestEsriTable.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestEsriTable.java
index 16bc7ded4ab9..a77dcf312bf5 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestEsriTable.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestEsriTable.java
@@ -14,6 +14,7 @@
package io.trino.plugin.hive;
import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
import com.google.common.io.Resources;
import io.trino.filesystem.Location;
import io.trino.filesystem.TrinoFileSystem;
@@ -25,19 +26,24 @@
import io.trino.testing.QueryRunner;
import org.intellij.lang.annotations.Language;
import org.junit.jupiter.api.Test;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKBReader;
+import org.locationtech.jts.io.WKTWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URL;
-import java.util.Arrays;
-import java.util.HexFormat;
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
+import java.util.Set;
import java.util.UUID;
+import java.util.stream.Collectors;
-import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.trino.plugin.hive.TestingHiveUtils.getConnectorService;
-import static io.trino.testing.QueryAssertions.assertEqualsIgnoreOrder;
-import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.assertj.core.api.Assertions.assertThat;
public class TestEsriTable
extends AbstractTestQueryFramework
@@ -53,7 +59,7 @@ protected QueryRunner createQueryRunner()
@Test
public void testCreateExternalTableWithData()
- throws IOException
+ throws IOException, ParseException
{
URL resourceLocation = Resources.getResource("esri/counties.json");
TrinoFileSystem fileSystem = getConnectorService(getQueryRunner(), TrinoFileSystemFactory.class).create(ConnectorIdentity.ofUser("test"));
@@ -67,8 +73,6 @@ public void testCreateExternalTableWithData()
Resources.copy(resourceLocation, out);
}
- List expected = readExpectedResults("esri/counties_expected.txt");
-
// ESRI format is read-only, so create data files using the text file format
@Language("SQL") String createCountiesTableSql =
"""
@@ -81,8 +85,36 @@ CREATE TABLE counties (
assertUpdate(createCountiesTableSql);
MaterializedResult result = computeActual("SELECT * FROM counties");
-
- assertEqualsIgnoreOrder(result.getMaterializedRows(), expected);
+ List rows = result.getMaterializedRows();
+
+ // Verify we got the expected counties
+ assertThat(rows).hasSize(3);
+
+ // Verify we have all expected county names
+ Set countyNames = rows.stream()
+ .map(row -> (String) row.getField(0))
+ .collect(Collectors.toSet());
+ assertThat(countyNames).isEqualTo(ImmutableSet.of("San Francisco", "Madera", "San Mateo"));
+
+ // Load expected WKT values
+ Map expectedWkt = loadExpectedWkt("esri/counties_expected.txt");
+
+ // Verify each county has a valid geometry by converting WKB to WKT and comparing
+ WKBReader wkbReader = new WKBReader();
+ WKTWriter wktWriter = new WKTWriter();
+ for (MaterializedRow row : rows) {
+ String name = (String) row.getField(0);
+ byte[] bytes = (byte[]) row.getField(1);
+
+ // Parse WKB and convert to WKT
+ Geometry geometry = wkbReader.read(bytes);
+ String actualWkt = wktWriter.write(geometry);
+
+ // Verify WKT matches expected value
+ assertThat(actualWkt)
+ .describedAs("WKT for county: %s", name)
+ .isEqualTo(expectedWkt.get(name));
+ }
assertQueryFails(
"INSERT INTO counties VALUES ('esri fails writes', X'0102030405')",
@@ -92,29 +124,36 @@ CREATE TABLE counties (
assertUpdate("DROP TABLE counties");
}
- private static List readExpectedResults(String resourcePath)
- throws IOException
+ @Test
+ public void testLoadExpectedWktHandlesLineEndings()
{
- URL resourceUrl = Resources.getResource(resourcePath);
- List lines = Resources.readLines(resourceUrl, UTF_8);
-
- return lines.stream()
- .map(line -> {
- String[] parts = line.split("\t"); // Assuming tab-separated values
- return new MaterializedRow(Arrays.asList(
- parts[0], // name
- hexToBytes(parts[1]) // hex string for boundaryshape
- ));
- })
- .collect(toImmutableList());
+ assertThat(parseExpectedWkt("San Francisco\tPOINT (1 2)\nMadera\tPOINT (3 4)\n"))
+ .isEqualTo(ImmutableMap.of(
+ "San Francisco", "POINT (1 2)",
+ "Madera", "POINT (3 4)"));
+
+ assertThat(parseExpectedWkt("San Francisco\tPOINT (1 2)\r\nMadera\tPOINT (3 4)\r\ninvalid\r\n"))
+ .isEqualTo(ImmutableMap.of(
+ "San Francisco", "POINT (1 2)",
+ "Madera", "POINT (3 4)"));
}
- private static byte[] hexToBytes(String hex)
+ private static Map loadExpectedWkt(String resourceName)
+ throws IOException
{
- // Remove 'X' prefix, spaces, and single quotes if present
- hex = hex.replaceAll("^X'|'$", "") // Remove X' and trailing '
- .replaceAll("\\s+", ""); // Remove all whitespace
+ String content = Resources.toString(Resources.getResource(resourceName), StandardCharsets.UTF_8);
+ return parseExpectedWkt(content);
+ }
- return HexFormat.of().parseHex(hex);
+ private static Map parseExpectedWkt(String content)
+ {
+ Map expected = new HashMap<>();
+ for (String line : content.split("\\R")) {
+ String[] parts = line.split("\t", 2);
+ if (parts.length == 2) {
+ expected.put(parts[0], parts[1]);
+ }
+ }
+ return expected;
}
}
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestThriftMetastoreUtil.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestThriftMetastoreUtil.java
index e2297be2f189..b422867ab51d 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestThriftMetastoreUtil.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestThriftMetastoreUtil.java
@@ -231,6 +231,27 @@ public void testPartitionRoundTrip()
assertThat(metastoreApiPartition).isEqualTo(TEST_PARTITION);
}
+ @Test
+ public void testUnbucketedTableUsesHiveSentinelBucketCount()
+ {
+ Table bucketed = ThriftMetastoreUtil.fromMetastoreApiTable(TEST_TABLE, TEST_SCHEMA);
+ Table.Builder builder = Table.builder(bucketed);
+ builder.getStorageBuilder().setBucketProperty(Optional.empty());
+ Table unbucketed = builder.build();
+
+ io.trino.hive.thrift.metastore.Table metastoreApiTable = ThriftMetastoreUtil.toMetastoreApiTable(unbucketed, NO_PRIVILEGES);
+ assertThat(metastoreApiTable.getSd().getNumBuckets()).isEqualTo(-1);
+ }
+
+ @Test
+ public void testBucketedTablePreservesBucketCount()
+ {
+ Table table = ThriftMetastoreUtil.fromMetastoreApiTable(TEST_TABLE, TEST_SCHEMA);
+
+ io.trino.hive.thrift.metastore.Table metastoreApiTable = ThriftMetastoreUtil.toMetastoreApiTable(table, NO_PRIVILEGES);
+ assertThat(metastoreApiTable.getSd().getNumBuckets()).isEqualTo(TEST_TABLE.getSd().getNumBuckets());
+ }
+
@Test
public void testHiveSchemaTable()
{
@@ -275,7 +296,7 @@ public void testLongStatsToColumnStatistics()
longColumnStatsData.setNumNulls(1);
longColumnStatsData.setNumDVs(20);
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", BIGINT_TYPE_NAME, longStats(longColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setIntegerStatistics(new IntegerStatistics(OptionalLong.of(0), OptionalLong.of(100)))
@@ -289,7 +310,7 @@ public void testEmptyLongStatsToColumnStatistics()
{
LongColumnStatsData emptyLongColumnStatsData = new LongColumnStatsData();
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", BIGINT_TYPE_NAME, longStats(emptyLongColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setIntegerStatistics(new IntegerStatistics(OptionalLong.empty(), OptionalLong.empty()))
@@ -305,7 +326,7 @@ public void testDoubleStatsToColumnStatistics()
doubleColumnStatsData.setNumNulls(1);
doubleColumnStatsData.setNumDVs(20);
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", DOUBLE_TYPE_NAME, doubleStats(doubleColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setDoubleStatistics(new DoubleStatistics(OptionalDouble.of(0), OptionalDouble.of(100)))
@@ -319,7 +340,7 @@ public void testEmptyDoubleStatsToColumnStatistics()
{
DoubleColumnStatsData emptyDoubleColumnStatsData = new DoubleColumnStatsData();
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", DOUBLE_TYPE_NAME, doubleStats(emptyDoubleColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setDoubleStatistics(new DoubleStatistics(OptionalDouble.empty(), OptionalDouble.empty()))
@@ -337,7 +358,7 @@ public void testDecimalStatsToColumnStatistics()
decimalColumnStatsData.setNumNulls(1);
decimalColumnStatsData.setNumDVs(20);
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", DECIMAL_TYPE_NAME, decimalStats(decimalColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setDecimalStatistics(new DecimalStatistics(Optional.of(low), Optional.of(high)))
@@ -351,7 +372,7 @@ public void testEmptyDecimalStatsToColumnStatistics()
{
DecimalColumnStatsData emptyDecimalColumnStatsData = new DecimalColumnStatsData();
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", DECIMAL_TYPE_NAME, decimalStats(emptyDecimalColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setDecimalStatistics(new DecimalStatistics(Optional.empty(), Optional.empty()))
@@ -366,7 +387,7 @@ public void testBooleanStatsToColumnStatistics()
booleanColumnStatsData.setNumFalses(10);
booleanColumnStatsData.setNumNulls(0);
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", BOOLEAN_TYPE_NAME, booleanStats(booleanColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setBooleanStatistics(new BooleanStatistics(OptionalLong.of(100), OptionalLong.of(10)))
@@ -379,7 +400,7 @@ public void testImpalaGeneratedBooleanStatistics()
{
BooleanColumnStatsData statsData = new BooleanColumnStatsData(1L, -1L, 2L);
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", BOOLEAN_TYPE_NAME, booleanStats(statsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setBooleanStatistics(new BooleanStatistics(OptionalLong.empty(), OptionalLong.empty()))
@@ -392,7 +413,7 @@ public void testEmptyBooleanStatsToColumnStatistics()
{
BooleanColumnStatsData emptyBooleanColumnStatsData = new BooleanColumnStatsData();
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", BOOLEAN_TYPE_NAME, booleanStats(emptyBooleanColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setBooleanStatistics(new BooleanStatistics(OptionalLong.empty(), OptionalLong.empty()))
@@ -408,7 +429,7 @@ public void testDateStatsToColumnStatistics()
dateColumnStatsData.setNumNulls(1);
dateColumnStatsData.setNumDVs(20);
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", DATE_TYPE_NAME, dateStats(dateColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setDateStatistics(new DateStatistics(Optional.of(LocalDate.ofEpochDay(1000)), Optional.of(LocalDate.ofEpochDay(2000))))
@@ -422,7 +443,7 @@ public void testEmptyDateStatsToColumnStatistics()
{
DateColumnStatsData emptyDateColumnStatsData = new DateColumnStatsData();
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", DATE_TYPE_NAME, dateStats(emptyDateColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setDateStatistics(new DateStatistics(Optional.empty(), Optional.empty()))
@@ -438,7 +459,7 @@ public void testStringStatsToColumnStatistics()
stringColumnStatsData.setNumNulls(1);
stringColumnStatsData.setNumDVs(20);
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", STRING_TYPE_NAME, stringStats(stringColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setMaxValueSizeInBytes(100)
@@ -453,7 +474,7 @@ public void testEmptyStringColumnStatsData()
{
StringColumnStatsData emptyStringColumnStatsData = new StringColumnStatsData();
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", STRING_TYPE_NAME, stringStats(emptyStringColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder().build());
}
@@ -466,7 +487,7 @@ public void testBinaryStatsToColumnStatistics()
binaryColumnStatsData.setAvgColLen(22.2);
binaryColumnStatsData.setNumNulls(2);
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", BINARY_TYPE_NAME, binaryStats(binaryColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder()
.setMaxValueSizeInBytes(100)
@@ -480,7 +501,7 @@ public void testEmptyBinaryStatsToColumnStatistics()
{
BinaryColumnStatsData emptyBinaryColumnStatsData = new BinaryColumnStatsData();
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", BINARY_TYPE_NAME, binaryStats(emptyBinaryColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual).isEqualTo(HiveColumnStatistics.builder().build());
}
@@ -492,7 +513,7 @@ public void testSingleDistinctValue()
doubleColumnStatsData.setNumNulls(10);
doubleColumnStatsData.setNumDVs(1);
ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj("my_col", DOUBLE_TYPE_NAME, doubleStats(doubleColumnStatsData));
- HiveColumnStatistics actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ HiveColumnStatistics actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual.getNullsCount()).isEqualTo(OptionalLong.of(10));
assertThat(actual.getDistinctValuesWithNullCount()).isEqualTo(OptionalLong.of(1));
@@ -501,7 +522,7 @@ public void testSingleDistinctValue()
doubleColumnStatsData.setNumNulls(10);
doubleColumnStatsData.setNumDVs(1);
columnStatisticsObj = new ColumnStatisticsObj("my_col", DOUBLE_TYPE_NAME, doubleStats(doubleColumnStatsData));
- actual = fromMetastoreApiColumnStatistics(columnStatisticsObj);
+ actual = fromMetastoreApiColumnStatistics("fake_db", "fake_tbl", columnStatisticsObj);
assertThat(actual.getNullsCount()).isEqualTo(OptionalLong.of(10));
assertThat(actual.getDistinctValuesWithNullCount()).isEqualTo(OptionalLong.of(1));
diff --git a/plugin/trino-hive/src/test/resources/esri/counties_expected.txt b/plugin/trino-hive/src/test/resources/esri/counties_expected.txt
index 43bb687594fd..0425b33194a3 100644
--- a/plugin/trino-hive/src/test/resources/esri/counties_expected.txt
+++ b/plugin/trino-hive/src/test/resources/esri/counties_expected.txt
@@ -1,3 +1,3 @@
-San Francisco X'00 00 00 00 03 05 00 00 00 45 d6 1a 4a ed a0 5e c0 23 a0 c2 11 a4 da 42 40 03 d9 6b 8d d5 96 5e c0 0b 12 b8 4b dd e7 42 40 01 00 00 00 5f 00 00 00 00 00 00 00 98 85 61 c2 27 a0 5e c0 0e b1 b0 14 a4 da 42 40 b8 3e ac 37 6a a0 5e c0 fa 0c a8 37 a3 dc 42 40 89 2b 4a 45 80 a0 5e c0 f1 5d 82 53 1f de 42 40 16 0f 22 ef 99 a0 5e c0 b5 21 54 a9 d9 df 42 40 f8 27 0e 02 9a a0 5e c0 aa fd 78 ef da df 42 40 80 24 54 6a b6 a0 5e c0 f0 6d 60 90 c4 e1 42 40 e9 46 58 54 c4 a0 5e c0 74 7b 49 63 b4 e2 42 40 45 d6 1a 4a ed a0 5e c0 5a 62 65 34 f2 e3 42 40 74 04 8f 76 a1 a0 5e c0 ea 1b dd ea 6d e4 42 40 df 15 c1 ff 56 a0 5e c0 ad 85 59 68 e7 e4 42 40 11 bc 16 4e 8d 9f 5e c0 42 d7 a6 f9 da e4 42 40 48 70 23 65 8b 9f 5e c0 f5 48 83 db da e4 42 40 fa 7c 94 11 17 9f 5e c0 75 22 c1 54 33 e5 42 40 82 71 70 e9 98 9e 5e c0 2e 57 3f 36 c9 e7 42 40 db 70 00 fd 19 9e 5e c0 a8 b4 4e 8b 82 e7 42 40 ac c4 da c6 ae 9d 5e c0 7e 89 84 dd fe e6 42 40 6a 10 d3 53 b0 9c 5e c0 7f 50 d4 ec 53 e7 42 40 48 a2 32 d2 57 9c 5e c0 d9 d0 0a 83 71 e7 42 40 81 76 fc a3 42 9b 5e c0 6c 91 a5 2b ce e7 42 40 0a e9 f0 9c 26 9b 5e c0 ef 54 df 91 cf e7 42 40 38 e6 82 07 e6 9a 5e c0 c4 94 54 cb d2 e7 42 40 d8 41 b2 b0 13 9a 5e c0 0b 12 b8 4b dd e7 42 40 44 d8 6a fe 93 99 5e c0 e5 a0 5a f4 3d e7 42 40 b3 c5 56 1a 7b 99 5e c0 e2 96 30 e5 1e e7 42 40 5f 00 c7 b4 3d 99 5e c0 db ff e0 11 90 e6 42 40 ac 33 5a c6 f2 98 5e c0 17 28 5b c2 e1 e5 42 40 b4 b9 82 33 de 98 5e c0 05 00 41 e6 b1 e5 42 40 20 c3 41 86 ad 98 5e c0 79 87 ff a9 40 e5 42 40 f0 63 e4 20 a9 98 5e c0 20 37 18 70 36 e5 42 40 20 6b ad 80 94 98 5e c0 3f 2e 73 94 43 e4 42 40 1c 7c 4c 26 68 98 5e c0 a3 b6 d7 58 39 e2 42 40 fd a6 24 94 41 98 5e c0 2e a7 0d 32 73 e0 42 40 9e d8 9f 6f 1d 98 5e c0 90 b6 b2 a2 c9 de 42 40 38 18 35 8b 0e 98 5e c0 94 f3 91 49 1a de 42 40 d4 19 c0 fd 0d 98 5e c0 7a ba fd c7 13 de 42 40 e2 3f 95 0b e5 97 5e c0 6e 69 35 24 ee dd 42 40 e7 50 61 9d af 97 5e c0 f7 8b 74 06 bd dd 42 40 b6 f3 31 8a af 97 5e c0 8c ea 42 30 bd dd 42 40 
07 58 22 df a7 97 5e c0 fb 56 f9 e5 cd dd 42 40 67 57 0f 57 88 97 5e c0 f4 7d db 9b 12 de 42 40 ad 7e 7a 2f 88 97 5e c0 e8 b2 5d 90 12 de 42 40 a0 36 12 ff 63 97 5e c0 90 46 b3 0e 08 de 42 40 03 d9 6b 8d d5 96 5e c0 4f d0 47 68 60 dd 42 40 30 d1 00 e5 26 97 5e c0 ce b2 9a 6e 85 db 42 40 2a eb 6a d1 b4 97 5e c0 66 32 12 69 d9 db 42 40 07 c4 d0 60 03 98 5e c0 39 44 d9 24 94 db 42 40 c7 40 b5 44 0c 99 5e c0 1a df 17 97 aa da 42 40 95 ee ae b3 21 99 5e c0 1a df 17 97 aa da 42 40 39 f0 6a b9 33 99 5e c0 b6 81 3b 50 a7 da 42 40 f2 7d 71 a9 4a 99 5e c0 1a df 17 97 aa da 42 40 1c 96 06 7e 54 99 5e c0 1a df 17 97 aa da 42 40 4a 5f 08 39 ef 99 5e c0 45 da c6 9f a8 da 42 40 2a c4 23 f1 f2 99 5e c0 1b 47 ac c5 a7 da 42 40 29 76 34 0e f5 99 5e c0 b6 81 3b 50 a7 da 42 40 9c 50 88 80 43 9a 5e c0 09 a8 70 04 a9 da 42 40 a1 81 58 36 73 9a 5e c0 1a df 17 97 aa da 42 40 18 e9 45 ed 7e 9a 5e c0 ce a9 64 00 a8 da 42 40 e1 25 38 f5 81 9a 5e c0 b6 81 3b 50 a7 da 42 40 f7 b1 82 df 86 9a 5e c0 b6 81 3b 50 a7 da 42 40 d3 f8 85 57 92 9a 5e c0 b6 81 3b 50 a7 da 42 40 13 9d 65 16 a1 9a 5e c0 1a df 17 97 aa da 42 40 d9 e8 9c 9f e2 9a 5e c0 b6 81 3b 50 a7 da 42 40 8b a8 89 3e 1f 9b 5e c0 b6 81 3b 50 a7 da 42 40 a0 54 fb 74 3c 9b 5e c0 44 a6 7c 08 aa da 42 40 80 b9 16 2d 40 9b 5e c0 67 7c 5f 5c aa da 42 40 20 7d 93 a6 41 9b 5e c0 a9 6b ed 7d aa da 42 40 16 88 9e 94 49 9b 5e c0 61 8b dd 3e ab da 42 40 a1 f6 5b 3b 51 9b 5e c0 49 2f 6a f7 ab da 42 40 67 80 0b b2 65 9b 5e c0 7d 3c f4 dd ad da 42 40 68 59 f7 8f 85 9b 5e c0 68 b0 a9 f3 a8 da 42 40 d8 9b 18 92 93 9b 5e c0 b6 81 3b 50 a7 da 42 40 34 2b db 87 bc 9b 5e c0 87 fd 9e 58 a7 da 42 40 18 d1 76 4c dd 9b 5e c0 23 a0 c2 11 a4 da 42 40 04 8f 6f ef 1a 9c 5e c0 f7 3c 7f da a8 da 42 40 bb 63 b1 4d 2a 9c 5e c0 ea 5a 7b 9f aa da 42 40 97 aa b4 c5 35 9c 5e c0 ea 5a 7b 9f aa da 42 40 10 94 db f6 3d 9c 5e c0 ea 5a 7b 9f aa da 42 40 9e 09 4d 12 4b 9c 5e c0 87 fd 9e 58 a7 da 42 40 8d b7 95 5e 9b 9c 5e c0 87 fd 9e 58 a7 da 42 40 80 bb ec d7 9d 9c 5e c0 87 fd 
9e 58 a7 da 42 40 85 e9 7b 0d c1 9c 5e c0 87 fd 9e 58 a7 da 42 40 a9 33 f7 90 f0 9c 5e c0 23 a0 c2 11 a4 da 42 40 28 7c b6 0e 0e 9d 5e c0 87 fd 9e 58 a7 da 42 40 04 54 38 82 54 9d 5e c0 87 fd 9e 58 a7 da 42 40 8b fb 8f 4c 87 9d 5e c0 87 fd 9e 58 a7 da 42 40 92 eb a6 94 d7 9d 5e c0 23 a0 c2 11 a4 da 42 40 d2 8f 86 53 e6 9d 5e c0 87 fd 9e 58 a7 da 42 40 ae d6 89 cb f1 9d 5e c0 87 fd 9e 58 a7 da 42 40 a0 a9 d7 2d 02 9e 5e c0 87 fd 9e 58 a7 da 42 40 83 4f 73 f2 22 9e 5e c0 87 fd 9e 58 a7 da 42 40 ae 67 08 c7 2c 9e 5e c0 ea 5a 7b 9f aa da 42 40 57 ea 59 10 ca 9e 5e c0 87 fd 9e 58 a7 da 42 40 33 c2 db 83 10 9f 5e c0 87 fd 9e 58 a7 da 42 40 0f 09 df fb 1b 9f 5e c0 87 fd 9e 58 a7 da 42 40 98 85 61 c2 27 a0 5e c0 0e b1 b0 14 a4 da 42 40'
-Madera X'00 00 00 00 03 05 00 00 00 84 d8 99 42 e7 22 5e c0 c0 79 71 e2 ab 61 42 40 fd 68 38 65 6e c1 5d c0 78 5f 95 0b 95 e3 42 40 01 00 00 00 30 03 00 00 00 00 00 00 e4 13 b2 f3 36 d1 5d c0 6d 1c b1 16 9f de 42 40 dd 23 9b ab e6 d0 5d c0 bf 9c d9 ae d0 dd 42 40 ba d9 1f 28 b7 d0 5d c0 f8 e1 20 21 ca dd 42 40 f6 d4 ea ab ab d0 5d c0 ea 23 f0 87 9f dd 42 40 17 f2 08 6e a4 d0 5d c0 31 40 a2 09 14 dd 42 40 e8 16 ba 12 81 d0 5d c0 7a 55 67 b5 c0 dc 42 40 28 2a 1b d6 54 d0 5d c0 32 e3 6d a5 d7 dc 42 40 bd 6d a6 42 3c d0 5d c0 07 ed d5 c7 43 dd 42 40 e1 26 a3 ca 30 d0 5d c0 23 d8 b8 fe 5d dd 42 40 fe 80 07 06 10 d0 5d c0 76 6c 04 e2 75 dd 42 40 34 9d 9d 0c 8e cf 5d c0 a9 4c 31 07 41 dd 42 40 7f de 54 a4 c2 ce 5d c0 cf f5 7d 38 48 dc 42 40 dd 95 5d 30 b8 ce 5d c0 22 fa b5 f5 d3 db 42 40 e3 df 67 5c 38 ce 5d c0 6e fc 89 ca 86 db 42 40 eb 02 5e 66 d8 cd 5d c0 24 29 e9 61 68 db 42 40 e2 73 27 d8 7f cd 5d c0 84 11 fb 04 50 dc 42 40 24 9b ab e6 39 cd 5d c0 c3 ba f1 ee c8 dc 42 40 06 83 6b ee e8 cc 5d c0 eb 74 20 eb a9 dd 42 40 8d 62 b9 a5 d5 cc 5d c0 50 a7 3c ba 11 de 42 40 39 63 98 13 b4 cc 5d c0 4b af cd c6 4a de 42 40 4f 1e 16 6a 4d cc 5d c0 41 45 d5 af 74 de 42 40 08 aa 46 af 06 cc 5d c0 b5 e0 45 5f 41 de 42 40 18 43 39 d1 ae cb 5d c0 26 8e 3c 10 59 de 42 40 52 ba f4 2f 49 cb 5d c0 e7 8c 28 ed 0d de 42 40 b1 fd 64 8c 0f cb 5d c0 c0 ec 9e 3c 2c de 42 40 3f 1c 24 44 f9 ca 5d c0 55 4c a5 9f 70 de 42 40 ee ec 2b 0f d2 ca 5d c0 88 da 36 8c 82 de 42 40 4e d2 fc 31 ad ca 5d c0 da e3 85 74 78 de 42 40 d6 c5 6d 34 80 ca 5d c0 ad 6a 49 47 39 de 42 40 44 88 2b 67 ef c9 5d c0 7f a6 5e b7 08 de 42 40 79 ad 84 ee 92 c9 5d c0 72 33 dc 80 cf dd 42 40 b3 cd 8d e9 09 c9 5d c0 3e 77 82 fd d7 dd 42 40 6b 10 e6 76 2f c8 5d c0 dd 7a 4d 0f 0a de 42 40 06 a0 51 ba f4 c7 5d c0 37 8c 82 e0 f1 dd 42 40 1d ca 50 15 53 c7 5d c0 2c 0e 67 7e 35 dd 42 40 51 a4 fb 39 05 c7 5d c0 1f a2 d1 1d c4 dc 42 40 e1 27 0e a0 df c6 5d c0 c6 dd 20 5a 2b dc 42 40 9f aa 42 03 b1 c6 5d c0 94 be 10 72 de db 42 40 cd 04 
c3 b9 86 c6 5d c0 29 26 6f 80 99 db 42 40 89 28 26 6f 80 c6 5d c0 8b 6d 52 d1 58 db 42 40 5b d2 51 0e 66 c6 5d c0 5e 11 fc 6f 25 db 42 40 10 5d 50 df 32 c6 5d c0 67 0e 49 2d 94 da 42 40 11 1a c1 c6 f5 c5 5d c0 92 ca 14 73 10 da 42 40 e9 49 99 d4 d0 c5 5d c0 73 be d8 7b f1 d9 42 40 9e 42 ae d4 b3 c4 5d c0 bc 96 90 0f 7a d8 42 40 8d 80 0a 47 90 c4 5d c0 5b 5b 78 5e 2a d8 42 40 0c 3c f7 1e 2e c4 5d c0 19 3a 76 50 89 d7 42 40 99 84 0b 79 04 c4 5d c0 14 3f c6 dc b5 d6 42 40 7e 3b 89 08 ff c3 5d c0 fb 3c 46 79 e6 d5 42 40 0a a1 83 2e e1 c3 5d c0 e2 ca d9 3b a3 d5 42 40 54 e6 e6 1b d1 c3 5d c0 c6 a6 95 42 20 d5 42 40 b3 25 ab 22 dc c3 5d c0 cf dc 43 c2 f7 d4 42 40 eb 36 a8 fd d6 c3 5d c0 c2 2f f5 f3 a6 d4 42 40 33 34 9e 08 e2 c3 5d c0 77 4c dd 95 5d d4 42 40 9c 1a 68 3e e7 c3 5d c0 ec f7 c4 3a 55 d4 42 40 f1 82 88 d4 b4 c3 5d c0 46 45 9c 4e b2 d3 42 40 2d 0a bb 28 7a c3 5d c0 d8 d6 4f ff 59 d3 42 40 96 21 8e 75 71 c3 5d c0 38 2c 0d fc a8 d2 42 40 2a 6f 47 38 2d c3 5d c0 56 9e 40 d8 29 d2 42 40 16 2f 16 86 c8 c2 5d c0 ac ad d8 5f 76 d1 42 40 0d e3 6e 10 ad c2 5d c0 98 15 8a 74 3f d1 42 40 96 ed 43 de 72 c2 5d c0 a7 02 ee 79 fe d0 42 40 bf 46 92 20 5c c2 5d c0 a5 49 29 e8 f6 d0 42 40 57 b5 a4 a3 1c c2 5d c0 9c a6 cf 0e b8 d0 42 40 7a 54 fc df 11 c2 5d c0 f8 36 fd d9 8f d0 42 40 83 18 e8 da 17 c2 5d c0 22 c4 95 b3 77 d0 42 40 d1 af ad 9f fe c1 5d c0 78 0c 8f fd 2c d0 42 40 c8 b7 77 0d fa c1 5d c0 8c 2c 99 63 79 cf 42 40 5f 25 1f bb 0b c2 5d c0 c1 8d 94 2d 92 ce 42 40 7e c6 85 03 21 c2 5d c0 d0 98 49 d4 0b ce 42 40 54 e5 7b 46 22 c2 5d c0 39 62 2d 3e 05 ce 42 40 19 1e fb 59 2c c2 5d c0 3d 28 28 45 2b cd 42 40 dd 7a 4d 0f 0a c2 5d c0 ea b1 2d 03 ce cc 42 40 0f d0 7d 39 b3 c1 5d c0 4f 5c 8e 57 20 cc 42 40 48 15 c5 ab ac c1 5d c0 41 9e 5d be f5 cb 42 40 57 42 77 49 9c c1 5d c0 6c 94 f5 9b 89 cb 42 40 fd 68 38 65 6e c1 5d c0 51 87 15 6e f9 ca 42 40 1e fe 9a ac 51 c8 5d c0 0a 12 db dd 03 c0 42 40 75 ad bd 4f 55 c8 5d c0 fb 76 12 11 fe bf 42 40 7f 50 17 29 94 cb 5d c0 8e 5c 37 a5 
bc ba 42 40 c0 ae 26 4f 59 d2 5d c0 ae 0d 15 e3 fc af 42 40 6d 00 36 20 42 d3 5d c0 45 49 48 a4 6d ae 42 40 05 6a 31 78 98 d3 5d c0 90 f5 d4 ea ab ad 42 40 4b 93 52 d0 ed d3 5d c0 97 e1 3f dd 40 ad 42 40 ba f8 db 9e 20 d4 5d c0 eb c8 91 ce c0 ac 42 40 d1 57 90 66 2c d4 5d c0 59 db 14 8f 8b ac 42 40 f7 af ac 34 29 d4 5d c0 5d 1a bf f0 4a ac 42 40 6d 1b 46 41 f0 d3 5d c0 f0 c2 d6 6c e5 ab 42 40 7a fd 49 7c ee d3 5d c0 56 0e 2d b2 9d ab 42 40 7f bd c2 82 fb d3 5d c0 a3 20 78 7c 7b ab 42 40 3b c6 15 17 47 d4 5d c0 58 8e 90 81 3c ab 42 40 6d 59 be 2e c3 d4 5d c0 17 65 36 c8 24 ab 42 40 c6 f8 30 7b d9 d4 5d c0 df c2 ba f1 ee aa 42 40 9c 31 cc 09 da d4 5d c0 aa 0d 4e 44 bf aa 42 40 24 45 64 58 c5 d4 5d c0 e6 cb 0b b0 8f aa 42 40 15 03 24 9a 40 d4 5d c0 a1 f7 c6 10 00 aa 42 40 da 1b 7c 61 32 d4 5d c0 86 00 e0 d8 b3 a9 42 40 0f 46 ec 13 40 d4 5d c0 39 64 03 e9 62 a9 42 40 c5 54 fa 09 67 d4 5d c0 cb 12 9d 65 16 a9 42 40 08 8e cb b8 a9 d4 5d c0 bc 94 ba 64 1c a9 42 40 65 17 0c ae b9 d4 5d c0 da 90 7f 66 10 a9 42 40 4c c6 31 92 3d d5 5d c0 d9 24 3f e2 57 a8 42 40 7b a4 c1 6d 6d d5 5d c0 b8 ce bf 5d f6 a7 42 40 b4 00 6d ab 59 d5 5d c0 aa 81 e6 73 ee a6 42 40 52 49 9d 80 26 d5 5d c0 6e 6d e1 79 a9 a6 42 40 72 6d a8 18 e7 d4 5d c0 f8 16 d6 8d 77 a5 42 40 34 bc 59 83 f7 d4 5d c0 97 91 7a 4f e5 a4 42 40 6e a6 42 3c 12 d5 5d c0 71 ac 8b db 68 a4 42 40 2e 59 15 e1 26 d5 5d c0 17 9c c1 df 2f a4 42 40 cf d8 97 6c 3c d5 5d c0 ca 18 1f 66 2f a3 42 40 6c 7b bb 25 39 d5 5d c0 34 10 cb 66 0e a3 42 40 be 6a 65 c2 2f d5 5d c0 00 c9 74 e8 f4 a2 42 40 70 b4 e3 86 df d4 5d c0 28 0c ca 34 9a a2 42 40 68 ea 75 8b c0 d4 5d c0 1a db 6b 41 ef a1 42 40 1b 67 d3 11 c0 d4 5d c0 7e c6 85 03 21 a1 42 40 bc 41 b4 56 b4 d4 5d c0 28 d1 92 c7 d3 a0 42 40 b1 db 67 95 99 d4 5d c0 8a 76 15 52 7e a0 42 40 eb 71 df 6a 9d d4 5d c0 3d 10 59 a4 89 9f 42 40 d5 08 fd 4c bd d4 5d c0 e7 1a 66 68 3c 9f 42 40 bc 40 49 81 05 d5 5d c0 4c 4f 58 e2 01 9f 42 40 cf 87 67 09 32 d5 5d c0 34 d7 69 a4 a5 9e 42 40 9c 87 13 98 4e d5 
5d c0 24 b5 50 32 39 9d 42 40 2b 31 cf 4a 5a d5 5d c0 f9 a3 a8 33 f7 9c 42 40 de ad 2c d1 59 d5 5d c0 eb 8e c5 36 a9 9c 42 40 31 b3 cf 63 94 d5 5d c0 9d 4a 06 80 2a 9c 42 40 f8 6d 88 f1 9a d5 5d c0 b5 fe 96 00 fc 9b 42 40 e8 f6 92 c6 68 d5 5d c0 97 e6 56 08 ab 9b 42 40 61 18 b0 e4 2a d5 5d c0 19 3a 76 50 89 9b 42 40 55 16 85 5d 14 d5 5d c0 d4 2b 65 19 e2 9a 42 40 cd b1 bc ab 1e d5 5d c0 3b 71 39 5e 81 9a 42 40 6f 65 89 ce 32 d5 5d c0 c7 7f 81 20 40 9a 42 40 8a ca 86 35 95 d5 5d c0 52 b9 89 5a 9a 99 42 40 6c 08 8e cb b8 d5 5d c0 cf 85 91 5e d4 98 42 40 3c 15 70 cf f3 d5 5d c0 34 9e 08 e2 3c 98 42 40 2d 96 22 f9 4a d6 5d c0 bd 6e 11 18 eb 97 42 40 20 b3 b3 e8 9d d6 5d c0 58 6f d4 0a d3 97 42 40 d0 61 be bc 00 d7 5d c0 75 73 f1 b7 3d 97 42 40 8b 8b a3 72 13 d7 5d c0 af 3e 1e fa ee 96 42 40 ce fc 6a 0e 10 d7 5d c0 b0 c7 44 4a b3 95 42 40 af 06 28 0d 35 d7 5d c0 f1 b7 3d 41 62 95 42 40 44 c1 8c 29 58 d7 5d c0 b2 0d dc 81 3a 95 42 40 23 2f 6b 62 81 d7 5d c0 4e 0e 9f 74 22 95 42 40 07 eb ff 1c e6 d7 5d c0 db 15 fa 60 19 95 42 40 95 2c 27 a1 f4 d7 5d c0 41 f2 ce a1 0c 95 42 40 38 48 88 f2 05 d8 5d c0 93 8d 07 5b ec 94 42 40 bf 61 a2 41 0a d8 5d c0 54 e3 a5 9b c4 94 42 40 c0 af 91 24 08 d8 5d c0 6f 2d 93 e1 78 94 42 40 6e 6c 76 a4 fa d7 5d c0 3d 7c 99 28 42 94 42 40 9b 20 ea 3e 00 d8 5d c0 51 15 53 e9 27 94 42 40 73 d7 12 f2 41 d8 5d c0 9e d0 eb 4f e2 93 42 40 1b 0d e0 2d 90 d8 5d c0 a8 3a e4 66 b8 93 42 40 43 55 4c a5 9f d8 5d c0 ca 51 80 28 98 93 42 40 03 08 1f 4a b4 d8 5d c0 a0 51 ba f4 2f 93 42 40 93 56 7c 43 e1 d8 5d c0 1e 8a 02 7d 22 93 42 40 da ca 4b fe 27 d9 5d c0 30 46 24 0a 2d 93 42 40 6c 97 36 1c 96 d9 5d c0 e9 65 14 cb 2d 93 42 40 4b ad f7 1b ed d9 5d c0 2e e7 52 5c 55 94 42 40 bc 94 ba 64 1c da 5d c0 88 48 4d bb 98 94 42 40 90 2f a1 82 c3 da 5d c0 18 42 ce fb ff 94 42 40 4a 5e 9d 63 40 db 5d c0 5b ea 20 af 07 95 42 40 0f 5f 26 8a 90 db 5d c0 51 4d 49 d6 e1 94 42 40 2d b2 9d ef a7 db 5d c0 96 23 64 20 cf 94 42 40 2b 4d 4a 41 b7 db 5d c0 ef e3 68 8e ac 94 42 40 
5a b7 41 ed b7 db 5d c0 00 3c a2 42 75 93 42 40 5f 5d 15 a8 c5 db 5d c0 44 a5 11 33 fb 92 42 40 46 b6 f3 fd d4 db 5d c0 19 02 80 63 cf 92 42 40 3f 52 44 86 55 dc 5d c0 61 fa 5e 43 70 92 42 40 28 b7 ed 7b d4 dc 5d c0 cf d7 2c 97 8d 92 42 40 95 0e d6 ff 39 dd 5d c0 51 82 fe 42 8f 92 42 40 bf 80 5e b8 73 dd 5d c0 e2 e4 7e 87 a2 92 42 40 60 00 e1 43 89 dd 5d c0 02 66 be 83 9f 92 42 40 8a e4 2b 81 94 dd 5d c0 be a0 85 04 8c 92 42 40 3a b1 87 f6 b1 dd 5d c0 28 60 3b 18 b1 91 42 40 e1 95 24 cf f5 dd 5d c0 fe f3 34 60 90 90 42 40 d5 04 51 f7 01 de 5d c0 4c dd 95 5d 30 90 42 40 8f 72 30 9b 00 de 5d c0 ae d9 ca 4b fe 8f 42 40 cf 6b ec 12 d5 dd 5d c0 f8 16 d6 8d 77 8f 42 40 98 f6 cd fd d5 dd 5d c0 d7 68 39 d0 43 8f 42 40 92 ca 14 73 10 de 5d c0 6f 7f 2e 1a 32 8e 42 40 4b 58 1b 63 27 de 5d c0 93 1b 45 d6 1a 8e 42 40 d4 0b 3e cd c9 de 5d c0 0d e2 03 3b fe 8d 42 40 fb 1f 60 ad da de 5d c0 56 47 8e 74 06 8e 42 40 8f a6 7a 32 ff de 5d c0 b7 7c 24 25 3d 8e 42 40 65 ff 3c 0d 18 df 5d c0 60 3a ad db a0 8e 42 40 a6 62 63 5e 47 df 5d c0 7d 41 0b 09 18 8f 42 40 d3 87 2e a8 6f df 5d c0 2f 35 42 3f 53 8f 42 40 40 4b 57 b0 8d df 5d c0 29 25 04 ab ea 8f 42 40 54 e4 10 71 73 df 5d c0 ac c6 12 d6 c6 90 42 40 bc 59 83 f7 55 df 5d c0 39 99 b8 55 10 91 42 40 fe 5f 75 e4 48 df 5d c0 6e 88 f1 9a 57 91 42 40 ae 0c aa 0d 4e df 5d c0 90 bb 08 53 94 91 42 40 b3 7e 33 31 5d df 5d c0 99 f0 4b fd bc 91 42 40 a1 66 48 15 c5 df 5d c0 cf 69 16 68 77 92 42 40 7d 04 fe f0 f3 df 5d c0 f1 f4 4a 59 86 92 42 40 bb 80 97 19 36 e0 5d c0 4e 60 3a ad db 92 42 40 f8 51 0d fb 3d e0 5d c0 55 bf d2 f9 f0 92 42 40 d6 e6 ff 55 47 e0 5d c0 67 81 76 87 14 93 42 40 33 8d 26 17 63 e0 5d c0 fe 0e 45 81 3e 93 42 40 e1 79 a9 d8 98 e0 5d c0 f5 81 e4 9d 43 93 42 40 11 a8 fe 41 24 e1 5d c0 d6 ab c8 e8 80 92 42 40 70 b2 0d dc 81 e1 5d c0 23 66 f6 79 8c 90 42 40 0b 0d c4 b2 99 e1 5d c0 23 db f9 7e 6a 90 42 40 fb cb ee c9 c3 e1 5d c0 04 c8 d0 b1 83 90 42 40 c9 90 63 eb 19 e2 5d c0 5b ea 20 af 07 91 42 40 4c df 6b 08 8e e2 5d c0 19 75 
ad bd 4f 91 42 40 f0 fa cc 59 9f e2 5d c0 e7 df 2e fb 75 91 42 40 47 c7 d5 c8 ae e2 5d c0 4c 53 04 38 bd 91 42 40 42 ec 4c a1 f3 e2 5d c0 eb 55 64 74 40 92 42 40 1a a6 b6 d4 41 e3 5d c0 a2 0c 55 31 95 92 42 40 35 29 05 dd 5e e3 5d c0 4b ae 62 f1 9b 92 42 40 8a 5b 05 31 d0 e3 5d c0 b2 9b 19 fd 68 92 42 40 cd 5a 0a 48 fb e3 5d c0 25 c9 73 7d 1f 92 42 40 6e a6 42 3c 12 e4 5d c0 36 b0 55 82 c5 91 42 40 a7 eb 89 ae 0b e4 5d c0 a0 8b 86 8c 47 91 42 40 bc 5d 2f 4d 11 e4 5d c0 1a 4e 99 9b 6f 90 42 40 3b 8f 8a ff 3b e4 5d c0 09 c2 15 50 a8 8f 42 40 12 a4 52 ec 68 e4 5d c0 e5 61 a1 d6 34 8f 42 40 12 a4 52 ec 68 e4 5d c0 06 f4 c2 9d 0b 8f 42 40 99 d4 d0 06 60 e4 5d c0 f1 2d ac 1b ef 8e 42 40 7c b5 a3 38 47 e4 5d c0 e5 45 26 e0 d7 8e 42 40 33 4f ae 29 90 e3 5d c0 03 42 eb e1 cb 8e 42 40 09 a6 9a 59 4b e3 5d c0 b4 b0 a7 1d fe 8e 42 40 da e4 f0 49 27 e3 5d c0 eb 8d 5a 61 fa 8e 42 40 5a 68 e7 34 0b e3 5d c0 1e a8 53 1e dd 8e 42 40 7b 49 63 b4 8e e2 5d c0 56 2a a8 a8 fa 8d 42 40 6b bb 09 be 69 e2 5d c0 36 e5 0a ef 72 8d 42 40 0a 9f ad 83 83 e2 5d c0 a8 71 6f 7e c3 8c 42 40 52 d6 6f 26 a6 e2 5d c0 3c 31 eb c5 50 8c 42 40 d2 52 79 3b c2 e2 5d c0 3f c7 47 8b 33 8c 42 40 b6 f8 14 00 e3 e2 5d c0 f7 e6 37 4c 34 8c 42 40 a9 f8 bf 23 2a e3 5d c0 d0 98 49 d4 0b 8c 42 40 2a 3a 92 cb 7f e3 5d c0 a3 02 27 db c0 8b 42 40 82 ff ad 64 c7 e3 5d c0 e6 03 02 9d 49 8b 42 40 46 0a 65 e1 eb e3 5d c0 cb 49 28 7d 21 8a 42 40 40 be 84 0a 0e e4 5d c0 ba 82 6d c4 93 89 42 40 2b 18 95 d4 09 e4 5d c0 23 f5 9e ca 69 89 42 40 e1 44 f4 6b eb e3 5d c0 4a 27 12 4c 35 89 42 40 db 36 8c 82 e0 e3 5d c0 ac 74 77 9d 0d 89 42 40 d2 c6 11 6b f1 e3 5d c0 6b d2 6d 89 5c 88 42 40 6d 55 12 d9 07 e4 5d c0 27 6b d4 43 34 88 42 40 7b a1 80 ed 60 e4 5d c0 0c 05 6c 07 23 88 42 40 d2 8b da fd 2a e5 5d c0 70 95 27 10 76 88 42 40 7a 8a 1c 22 6e e5 5d c0 f5 f2 3b 4d 66 88 42 40 e9 0f cd 3c b9 e5 5d c0 3e 5e 48 87 87 88 42 40 d6 3b dc 0e 0d e6 5d c0 5b cd 3a e3 fb 88 42 40 32 ff e8 9b 34 e6 5d c0 8b a8 89 3e 1f 89 42 40 78 b7 b2 44 
67 e6 5d c0 0e a4 8b 4d 2b 89 42 40 ae 45 0b d0 b6 e6 5d c0 12 bf 62 0d 17 89 42 40 e6 76 2f f7 c9 e6 5d c0 5e ba 49 0c 02 89 42 40 8c d6 51 d5 04 e7 5d c0 0b 28 d4 d3 47 88 42 40 c6 89 af 76 14 e7 5d c0 67 44 69 6f f0 87 42 40 10 06 9e 7b 0f e7 5d c0 89 b3 22 6a a2 87 42 40 61 aa 99 b5 14 e7 5d c0 af 60 1b f1 64 87 42 40 9e 06 0c 92 3e e7 5d c0 a3 22 4e 27 d9 86 42 40 a7 b0 52 41 45 e7 5d c0 d4 9a e6 1d a7 86 42 40 f6 cf d3 80 41 e7 5d c0 9b ad bc e4 7f 86 42 40 3f 70 95 27 10 e7 5d c0 a3 3c f3 72 d8 85 42 40 2a 1b d6 54 16 e7 5d c0 ba d9 1f 28 b7 85 42 40 25 08 57 40 a1 e7 5d c0 21 07 25 cc b4 83 42 40 24 42 23 d8 b8 e7 5d c0 2e 8c f4 a2 76 83 42 40 81 b4 ff 01 d6 e7 5d c0 51 a3 90 64 56 83 42 40 5d 89 40 f5 0f e8 5d c0 84 bd 89 21 39 83 42 40 9a 5c 8c 81 75 e8 5d c0 d0 62 29 92 af 82 42 40 8a e4 2b 81 94 e8 5d c0 ef e3 68 8e ac 82 42 40 eb 74 20 eb a9 e8 5d c0 38 32 8f fc c1 82 42 40 a2 97 51 2c b7 e8 5d c0 e4 85 74 78 08 83 42 40 c2 32 36 74 b3 e8 5d c0 fe 0e 45 81 3e 83 42 40 98 35 b1 c0 57 e8 5d c0 d2 51 0e 66 13 84 42 40 b5 c6 a0 13 42 e8 5d c0 fd 68 38 65 6e 84 42 40 bd 6e 11 18 eb e8 5d c0 c5 ab ac 6d 8a 85 42 40 43 e1 b3 75 70 e9 5d c0 f2 b0 50 6b 9a 85 42 40 39 0c e6 af 90 e9 5d c0 b0 3c 48 4f 91 85 42 40 57 7c 43 e1 b3 e9 5d c0 71 00 fd be 7f 85 42 40 78 27 9f 1e db e9 5d c0 77 2c b6 49 45 85 42 40 04 af 96 3b 33 ea 5d c0 2c 49 9e eb fb 84 42 40 9a ef e0 27 0e ea 5d c0 ed 65 db 69 6b 82 42 40 cd ae 7b 2b 12 ea 5d c0 c3 d3 2b 65 19 82 42 40 4b 72 c0 ae 26 ea 5d c0 21 07 25 cc b4 81 42 40 c8 24 23 67 61 ea 5d c0 82 37 a4 51 81 81 42 40 0e 13 0d 52 f0 ea 5d c0 9a 94 82 6e 2f 81 42 40 67 63 25 e6 59 eb 5d c0 e4 10 71 73 2a 81 42 40 cf 9f 36 aa d3 eb 5d c0 99 b7 ea 3a 54 81 42 40 df 70 1f b9 35 ec 5d c0 13 7d 3e ca 88 81 42 40 83 31 22 51 68 ec 5d c0 25 5d 33 f9 66 81 42 40 ef c9 c3 42 ad ec 5d c0 e4 f3 8a a7 1e 81 42 40 f0 85 c9 54 c1 ec 5d c0 d9 05 83 6b ee 80 42 40 c9 ab 73 0c c8 ec 5d c0 4f cf bb b1 a0 80 42 40 91 b7 5c fd d8 ec 5d c0 9f 94 49 0d 6d 80 
42 40 d1 3d eb 1a 2d ed 5d c0 7e 55 2e 54 fe 7f 42 40 d0 5f e8 11 a3 ed 5d c0 39 0e bc 5a ee 7e 42 40 90 32 e2 02 d0 ee 5d c0 0f 80 b8 ab 57 7d 42 40 84 0c e4 d9 e5 ee 5d c0 83 89 3f 8a 3a 7d 42 40 fc 17 08 02 64 ef 5d c0 da e4 f0 49 27 7c 42 40 43 1b 80 0d 88 ef 5d c0 de cb 7d 72 14 7a 42 40 6f 82 6f 9a 3e ef 5d c0 29 cd e6 71 18 7a 42 40 1d 3c 13 9a 24 ef 5d c0 9a 06 45 f3 00 7a 42 40 8e 21 00 38 f6 ee 5d c0 a6 d3 ba 0d 6a 79 42 40 7e 55 2e 54 fe ee 5d c0 e8 31 ca 33 2f 79 42 40 8c d9 92 55 11 ef 5d c0 c5 e7 4e b0 ff 78 42 40 8b fc fa 21 36 ef 5d c0 c4 0b 22 52 d3 78 42 40 d2 ff 72 2d 5a ef 5d c0 8b c6 da df d9 78 42 40 97 00 fc 53 aa ef 5d c0 7d 3d 5f b3 5c 78 42 40 83 50 de c7 d1 ef 5d c0 61 fb c9 18 1f 78 42 40 d8 11 87 6c 20 f0 5d c0 45 7f 68 e6 c9 77 42 40 26 e3 18 c9 1e f0 5d c0 e3 ff 8e a8 50 77 42 40 6d 55 12 d9 07 f0 5d c0 80 80 b5 6a d7 76 42 40 66 87 f8 87 2d f0 5d c0 d6 fd 63 21 3a 76 42 40 90 30 0c 58 72 f0 5d c0 c9 ae b4 8c d4 75 42 40 e5 f1 b4 fc c0 f0 5d c0 d7 db 66 2a c4 75 42 40 a9 87 68 74 07 f1 5d c0 2d 7b 12 d8 9c 75 42 40 cc 62 62 f3 71 f1 5d c0 02 63 7d 03 93 75 42 40 36 1f d7 86 8a f1 5d c0 3c 86 c7 7e 16 75 42 40 8b 4f 01 30 9e f1 5d c0 da 75 6f 45 62 74 42 40 1f d7 86 8a 71 f2 5d c0 c0 24 95 29 e6 72 42 40 d8 64 8d 7a 88 f2 5d c0 88 bd 50 c0 76 72 42 40 4a ef 1b 5f 7b f2 5d c0 ec 89 ae 0b 3f 72 42 40 35 63 d1 74 76 f2 5d c0 c1 e0 9a 3b fa 71 42 40 11 19 56 f1 46 f2 5d c0 18 5e 49 f2 5c 71 42 40 60 ea e7 4d 45 f2 5d c0 ef 92 38 2b a2 70 42 40 5f 7b 66 49 80 f2 5d c0 61 fb c9 18 1f 70 42 40 f8 70 c9 71 a7 f2 5d c0 61 fb c9 18 1f 70 42 40 1f 68 05 86 ac f2 5d c0 61 fb c9 18 1f 70 42 40 cb d9 3b a3 ad f2 5d c0 de 1c ae d5 1e 70 42 40 05 c4 24 5c c8 f2 5d c0 29 cd e6 71 18 70 42 40 9f e3 a3 c5 19 f3 5d c0 65 c6 db 4a af 6f 42 40 7a 19 c5 72 4b f3 5d c0 68 23 d7 4d 29 6f 42 40 80 9b c5 8b 85 f3 5d c0 77 2e 8c f4 a2 6e 42 40 14 97 e3 15 88 f3 5d c0 b0 73 d3 66 9c 6e 42 40 8c 11 89 42 cb f3 5d c0 3f 58 c6 86 6e 6e 42 40 af 5b 04 c6 fa f3 5d c0 
95 d5 74 3d d1 6d 42 40 28 45 2b f7 02 f4 5d c0 33 c5 1c 04 1d 6d 42 40 d1 e6 38 b7 09 f4 5d c0 5c 3b 51 12 12 6d 42 40 84 65 6c e8 66 f4 5d c0 7b 15 19 1d 90 6c 42 40 c3 9a ca a2 b0 f4 5d c0 6c e8 66 7f a0 6c 42 40 2e ad 86 c4 3d f5 5d c0 b4 b0 a7 1d fe 6c 42 40 dc 82 a5 ba 80 f5 5d c0 4f b0 ff 3a 37 6d 42 40 aa 9c f6 94 9c f5 5d c0 eb e3 a1 ef 6e 6d 42 40 c6 87 d9 cb b6 f5 5d c0 b1 c0 57 74 eb 6d 42 40 31 44 4e 5f cf f5 5d c0 3f c7 47 8b 33 6e 42 40 78 47 c6 6a f3 f5 5d c0 6a df dc 5f 3d 6e 42 40 76 ff 58 88 0e f6 5d c0 0c 22 52 d3 2e 6e 42 40 d8 7f 9d 9b 36 f6 5d c0 23 dc 64 54 19 6e 42 40 df de 35 e8 4b f6 5d c0 b1 c0 57 74 eb 6d 42 40 1f 83 15 a7 5a f6 5d c0 5c ff ae cf 9c 6d 42 40 d0 42 02 46 97 f6 5d c0 7a 37 16 14 06 6d 42 40 50 8b c1 c3 b4 f6 5d c0 b3 7c 5d 86 ff 6c 42 40 82 02 ef e4 d3 f6 5d c0 96 22 f9 4a 20 6d 42 40 82 02 ef e4 d3 f6 5d c0 d4 d3 47 e0 0f 6d 42 40 a3 73 7e 8a e3 f6 5d c0 3f 90 bc 73 28 6d 42 40 25 95 29 e6 20 f7 5d c0 45 80 d3 bb 78 6d 42 40 3a b2 f2 cb 60 f7 5d c0 54 8b 88 62 f2 6c 42 40 64 ca 87 a0 6a f7 5d c0 e3 de fc 86 89 6c 42 40 30 0e 2e 1d 73 f7 5d c0 bf d4 cf 9b 8a 6c 42 40 d3 2f 11 6f 9d f7 5d c0 aa 99 b5 14 90 6c 42 40 21 01 a3 cb 9b f7 5d c0 38 a0 a5 2b d8 6c 42 40 e8 dd 58 50 18 f8 5d c0 b7 0a 62 a0 6b 6d 42 40 24 ee b1 f4 a1 f8 5d c0 1a 8a 3b de e4 6d 42 40 f2 d0 77 b7 b2 f8 5d c0 47 38 2d 78 d1 6d 42 40 f9 66 9b 1b d3 f8 5d c0 1b 68 3e e7 6e 6d 42 40 7c 2b 12 13 d4 f8 5d c0 6c b2 46 3d 44 6d 42 40 b2 f4 a1 0b ea f8 5d c0 71 76 6b 99 0c 6d 42 40 23 a1 2d e7 52 f9 5d c0 38 31 24 27 13 6d 42 40 ea 5b e6 74 59 f9 5d c0 29 04 72 89 23 6d 42 40 46 7c 27 66 bd f9 5d c0 d4 d3 47 e0 0f 6d 42 40 91 28 b4 ac fb f9 5d c0 38 a0 a5 2b d8 6c 42 40 df 8a c4 04 35 fa 5d c0 ab 08 37 19 55 6c 42 40 6d 91 b4 1b 7d fa 5d c0 00 a8 e2 c6 2d 6c 42 40 5e f5 80 79 c8 fa 5d c0 5b 44 14 93 37 6c 42 40 b3 b6 29 1e 17 fb 5d c0 13 63 99 7e 89 6c 42 40 62 c0 92 ab 58 fb 5d c0 cc f0 9f 6e a0 6c 42 40 4c c5 c6 bc 8e fb 5d c0 69 02 45 2c 62 6c 42 40 2f 6b 
62 81 af fb 5d c0 14 41 9c 87 13 6c 42 40 b6 81 3b 50 a7 fb 5d c0 f8 55 b9 50 f9 6b 42 40 2f 6b 62 81 af fb 5d c0 a2 25 8f a7 e5 6b 42 40 c4 3f 6c e9 d1 fb 5d c0 32 e8 84 d0 41 6b 42 40 bd 71 52 98 f7 fb 5d c0 96 b4 e2 1b 0a 6b 42 40 e0 4c 4c 17 62 fc 5d c0 6b 0b cf 4b c5 6a 42 40 9b 1d a9 be f3 fc 5d c0 7e c7 f0 d8 cf 6a 42 40 7e 54 c3 7e 4f fd 5d c0 dd 94 f2 5a 09 6b 42 40 cf a0 a1 7f 82 fd 5d c0 a5 2c 43 1c eb 6a 42 40 54 3a 58 ff e7 fd 5d c0 ad 35 94 da 8b 6a 42 40 9b 20 ea 3e 00 fe 5d c0 cc b6 d3 d6 88 6a 42 40 74 b4 aa 25 1d fe 5d c0 1b 2a c6 f9 9b 6a 42 40 15 ff 77 44 85 fe 5d c0 86 8e 1d 54 e2 6a 42 40 8d 0b 07 42 b2 fe 5d c0 7f 16 4b 91 7c 6b 42 40 29 5f d0 42 02 ff 5d c0 99 82 35 ce a6 6b 42 40 ff b3 e6 c7 5f ff 5d c0 ea 07 75 91 42 6b 42 40 4d d9 e9 07 75 ff 5d c0 a1 f4 85 90 f3 6a 42 40 a7 58 35 08 73 ff 5d c0 ae 9d 28 09 89 6a 42 40 5a b8 ac c2 66 ff 5d c0 f7 8e 1a 13 62 6a 42 40 9c f8 6a 47 71 ff 5d c0 36 06 9d 10 3a 6a 42 40 63 9c bf 09 85 ff 5d c0 0e a1 4a cd 1e 6a 42 40 90 d7 83 49 f1 ff 5d c0 8f 52 09 4f e8 69 42 40 8d 43 fd 2e 6c 00 5e c0 71 39 5e 81 e8 69 42 40 19 20 d1 04 8a 00 5e c0 85 7a fa 08 fc 69 42 40 49 d7 4c be d9 00 5e c0 87 33 bf 9a 03 6a 42 40 86 93 34 7f 4c 01 5e c0 5d e2 c8 03 91 69 42 40 a8 52 b3 07 5a 01 5e c0 64 93 fc 88 5f 69 42 40 00 53 06 0e 68 01 5e c0 2d 26 36 1f d7 68 42 40 43 38 66 d9 93 01 5e c0 e5 7a db 4c 85 68 42 40 ba 9f 53 90 9f 01 5e c0 61 dd 78 77 64 68 42 40 5c 56 61 33 c0 01 5e c0 d5 b2 b5 be 48 68 42 40 50 51 f5 2b 9d 02 5e c0 79 e8 bb 5b 59 68 42 40 c7 66 47 aa ef 02 5e c0 76 fa 41 5d a4 68 42 40 5c e3 33 d9 3f 03 5e c0 3f e1 ec d6 32 69 42 40 ec f9 9a e5 b2 03 5e c0 4b e6 58 de 55 69 42 40 db a7 e3 31 03 04 5e c0 d8 0b 05 6c 07 69 42 40 6c 77 0f d0 7d 04 5e c0 55 17 f0 32 c3 68 42 40 75 af 93 fa b2 04 5e c0 86 c8 e9 eb f9 68 42 40 88 10 57 ce de 04 5e c0 d7 16 9e 97 8a 69 42 40 9e 95 b4 e2 1b 05 5e c0 0b 5e f4 15 a4 69 42 40 b5 70 59 85 cd 05 5e c0 a6 42 3c 12 2f 69 42 40 b9 a7 ab 3b 16 06 5e c0 78 7f bc 57 
ad 68 42 40 0e be 30 99 2a 06 5e c0 40 34 f3 e4 9a 68 42 40 60 ea e7 4d 45 06 5e c0 fa 2a f9 d8 5d 68 42 40 82 56 60 c8 ea 06 5e c0 a2 7c 41 0b 09 68 42 40 c3 9f e1 cd 1a 07 5e c0 a3 01 bc 05 12 68 42 40 05 6e dd cd 53 07 5e c0 e5 7b 46 22 34 68 42 40 5b 61 fa 5e 43 08 5e c0 59 dd ea 39 e9 67 42 40 a7 06 9a cf b9 08 5e c0 26 56 46 23 9f 67 42 40 6c af 05 bd 37 09 5e c0 9b 54 34 d6 fe 66 42 40 f2 ee c8 58 6d 09 5e c0 df a5 d4 25 e3 66 42 40 a6 d2 4f 38 bb 09 5e c0 c9 e6 aa 79 8e 66 42 40 b3 96 02 d2 fe 09 5e c0 6b 47 71 8e 3a 66 42 40 24 0d 6e 6b 0b 0a 5e c0 65 ff 3c 0d 18 66 42 40 de 05 4a 0a 2c 0a 5e c0 6e dd cd 53 1d 66 42 40 9f ae ee 58 6c 0a 5e c0 8e 06 f0 16 48 66 42 40 8c 2c 99 63 79 0a 5e c0 85 d0 41 97 70 66 42 40 15 e1 26 a3 ca 0a 5e c0 3e ae 0d 15 e3 66 42 40 57 41 0c 74 ed 0a 5e c0 c1 8c 29 58 e3 66 42 40 94 32 a9 a1 0d 0b 5e c0 b4 1f 29 22 c3 66 42 40 e5 0d 30 f3 1d 0b 5e c0 4f ac 53 e5 7b 66 42 40 72 fb e5 93 15 0b 5e c0 c9 05 67 f0 f7 65 42 40 51 f8 6c 1d 1c 0b 5e c0 a2 b7 78 78 cf 65 42 40 3e 42 cd 90 2a 0b 5e c0 fc ab c7 7d ab 65 42 40 28 47 01 a2 60 0b 5e c0 13 60 58 fe 7c 65 42 40 a4 c5 19 c3 9c 0b 5e c0 12 bf 62 0d 17 65 42 40 ff 95 95 26 a5 0b 5e c0 3a 02 b8 59 bc 64 42 40 fb 3d b1 4e 95 0b 5e c0 f7 3c 7f da a8 64 42 40 8d b7 95 5e 9b 0b 5e c0 64 ca 87 a0 6a 64 42 40 6f 0c 01 c0 b1 0b 5e c0 8f 17 d2 e1 21 64 42 40 ca de 52 ce 17 0c 5e c0 a9 c0 c9 36 70 63 42 40 19 a9 f7 54 4e 0c 5e c0 d3 a4 14 74 7b 63 42 40 78 d3 2d 3b c4 0c 5e c0 df a8 15 a6 ef 63 42 40 c0 21 54 a9 d9 0c 5e c0 9e 95 b4 e2 1b 64 42 40 6b 99 0c c7 f3 0c 5e c0 16 be be d6 a5 64 42 40 e2 78 3e 03 ea 0c 5e c0 d9 5c 35 cf 11 65 42 40 25 3b 36 02 f1 0c 5e c0 2c a0 50 4f 1f 65 42 40 f9 65 30 46 24 0d 5e c0 c8 cd 70 03 3e 65 42 40 95 d6 df 12 80 0d 5e c0 ed f5 ee 8f f7 64 42 40 2d 98 f8 a3 a8 0d 5e c0 24 47 3a 03 23 65 42 40 d3 da 34 b6 d7 0d 5e c0 ce 6d c2 bd 32 65 42 40 cb 2c 42 b1 15 0e 5e c0 ea 3c 2a fe ef 64 42 40 76 50 89 eb 18 0e 5e c0 08 1d 74 09 87 64 42 40 ea 41 41 29 5a 0e 
5e c0 aa 49 f0 86 34 64 42 40 ff 3e e3 c2 81 0e 5e c0 de e8 63 3e 20 64 42 40 52 61 6c 21 c8 0e 5e c0 df 6d de 38 29 64 42 40 43 37 fb 03 e5 0e 5e c0 88 67 09 32 02 64 42 40 cd 23 7f 30 f0 0e 5e c0 e7 fb a9 f1 d2 63 42 40 cd 57 c9 c7 ee 0e 5e c0 31 ed 9b fb ab 63 42 40 70 97 fd ba d3 0e 5e c0 de 8d 05 85 41 63 42 40 90 12 bb b6 b7 0e 5e c0 20 96 cd 1c 92 62 42 40 f4 a3 e1 94 b9 0e 5e c0 b4 71 c4 5a 7c 62 42 40 cf 9f 36 aa d3 0e 5e c0 71 ac 8b db 68 62 42 40 80 d7 67 ce fa 0e 5e c0 87 4f 3a 91 60 62 42 40 0f 0c 20 7c 28 0f 5e c0 02 66 be 83 9f 62 42 40 16 88 9e 94 49 0f 5e c0 bd 8f a3 39 b2 62 42 40 dc bc 71 52 98 0f 5e c0 01 32 74 ec a0 62 42 40 23 d7 4d 29 af 0f 5e c0 4a 97 fe 25 a9 62 42 40 84 bb b3 76 db 0f 5e c0 36 72 dd 94 f2 62 42 40 4c aa b6 9b e0 0f 5e c0 e9 65 14 cb 2d 63 42 40 1c 7d cc 07 04 10 5e c0 32 cb 9e 04 36 63 42 40 72 18 cc 5f 21 10 5e c0 4a d0 5f e8 11 63 42 40 e4 4a 3d 0b 42 10 5e c0 01 4f 5a b8 ac 62 42 40 39 7e a8 34 62 10 5e c0 e5 63 77 81 92 62 42 40 bd 01 66 be 83 10 5e c0 cf c0 c8 cb 9a 62 42 40 25 76 6d 6f b7 10 5e c0 93 70 21 8f e0 62 42 40 de 3a ff 76 d9 10 5e c0 c5 ad 82 18 e8 62 42 40 3d 65 35 5d 4f 11 5e c0 e9 7d e3 6b cf 62 42 40 24 25 3d 0c ad 11 5e c0 53 24 5f 09 a4 62 42 40 05 35 7c 0b eb 11 5e c0 d4 d6 88 60 1c 62 42 40 09 8d 60 e3 fa 11 5e c0 8b 71 fe 26 14 62 42 40 3a cd 02 ed 0e 12 5e c0 95 d4 09 68 22 62 42 40 75 02 9a 08 1b 12 5e c0 55 c1 a8 a4 4e 62 42 40 ad df 4c 4c 17 12 5e c0 bf d4 cf 9b 8a 62 42 40 26 8f a7 e5 07 12 5e c0 70 43 8c d7 bc 62 42 40 ef e5 3e 39 0a 12 5e c0 3d 29 93 1a da 62 42 40 d7 fa 22 a1 2d 12 5e c0 8c d6 51 d5 04 63 42 40 d1 91 5c fe 43 12 5e c0 34 f3 e4 9a 02 63 42 40 ba f7 70 c9 71 12 5e c0 ef 57 01 be db 62 42 40 15 e5 d2 f8 85 12 5e c0 cc 24 ea 05 9f 62 42 40 f2 42 3a 3c 84 12 5e c0 6a 13 27 f7 3b 62 42 40 6f f6 07 ca 6d 12 5e c0 43 1d 56 b8 e5 61 42 40 12 f5 82 4f 73 12 5e c0 4a eb 6f 09 c0 61 42 40 41 7f a1 47 8c 12 5e c0 c0 79 71 e2 ab 61 42 40 1e 4e 60 3a ad 12 5e c0 fb 94 63 b2 b8 61 42 40 
9e 96 1f b8 ca 12 5e c0 75 ab e7 a4 f7 61 42 40 17 66 a1 9d d3 12 5e c0 57 06 d5 06 27 62 42 40 3f 8e e6 c8 ca 12 5e c0 f5 0f 22 19 72 62 42 40 93 e2 e3 13 b2 12 5e c0 e5 9d 43 19 aa 62 42 40 b1 e1 e9 95 b2 12 5e c0 70 43 8c d7 bc 62 42 40 ae 48 4c 50 c3 12 5e c0 2c f2 eb 87 d8 62 42 40 b2 4b 54 6f 0d 13 5e c0 c0 07 af 5d da 62 42 40 11 39 7d 3d 5f 13 5e c0 08 1d 74 09 87 62 42 40 b8 5b 92 03 76 13 5e c0 22 e1 7b 7f 83 62 42 40 77 2b 4b 74 96 13 5e c0 71 20 24 0b 98 62 42 40 92 74 cd e4 9b 13 5e c0 0c 03 96 5c c5 62 42 40 42 cd 90 2a 8a 13 5e c0 62 be bc 00 fb 62 42 40 ea 21 1a dd 41 13 5e c0 ef ff e3 84 09 63 42 40 59 8b 4f 01 30 13 5e c0 db 32 e0 2c 25 63 42 40 30 2c 7f be 2d 13 5e c0 e5 b8 53 3a 58 63 42 40 fb 07 91 0c 39 13 5e c0 2b a5 67 7a 89 63 42 40 47 92 20 5c 01 14 5e c0 f3 57 c8 5c 19 64 42 40 0d 50 1a 6a 14 14 5e c0 b3 d0 ce 69 16 64 42 40 5a d6 fd 63 21 14 5e c0 1d 03 b2 d7 bb 63 42 40 5b 44 14 93 37 14 5e c0 d2 fb c6 d7 9e 63 42 40 a5 4b ff 92 54 14 5e c0 a1 a1 7f 82 8b 63 42 40 d6 19 df 17 97 14 5e c0 42 99 46 93 8b 63 42 40 dd 61 13 99 b9 14 5e c0 37 fb 03 e5 b6 63 42 40 61 df 4e 22 c2 14 5e c0 8e b0 a8 88 d3 63 42 40 7d 76 c0 75 c5 14 5e c0 dc 46 03 78 0b 64 42 40 67 2b 2f f9 9f 14 5e c0 33 de 56 7a 6d 64 42 40 4a 97 fe 25 a9 14 5e c0 40 4b 57 b0 8d 64 42 40 c2 35 77 f4 bf 14 5e c0 64 5b 06 9c a5 64 42 40 20 09 fb 76 12 15 5e c0 d6 ab c8 e8 80 64 42 40 4f 1e 16 6a 4d 15 5e c0 8a c8 b0 8a 37 64 42 40 29 b2 d6 50 6a 15 5e c0 e5 47 fc 8a 35 64 42 40 d9 af 3b dd 79 15 5e c0 8e 40 bc ae 5f 64 42 40 35 7d 76 c0 75 15 5e c0 dc f3 fc 69 a3 64 42 40 bb 0e d5 94 64 15 5e c0 af 27 ba 2e fc 64 42 40 49 4d bb 98 66 15 5e c0 ef e1 92 e3 4e 65 42 40 25 7a 19 c5 72 15 5e c0 74 96 59 84 62 65 42 40 b6 2d ca 6c 90 15 5e c0 98 bd 6c 3b 6d 65 42 40 7b 2e 53 93 e0 15 5e c0 7a fc de a6 3f 65 42 40 21 20 5f 42 05 16 5e c0 c1 c5 8a 1a 4c 65 42 40 04 c6 fa 06 26 16 5e c0 77 82 fd d7 b9 65 42 40 38 6b f0 be 2a 16 5e c0 b2 4b 54 6f 0d 66 42 40 c7 7f 81 20 40 16 5e c0 79 74 
23 2c 2a 66 42 40 38 4a 5e 9d 63 16 5e c0 f2 0c 1a fa 27 66 42 40 f6 29 c7 64 71 16 5e c0 9c dc ef 50 14 66 42 40 ff 21 fd f6 75 16 5e c0 d1 07 cb d8 d0 65 42 40 58 91 d1 01 49 16 5e c0 75 e8 f4 bc 1b 65 42 40 a6 d1 e4 62 0c 16 5e c0 ce 34 61 fb c9 64 42 40 90 f7 aa 95 09 16 5e c0 75 92 ad 2e a7 64 42 40 24 44 f9 82 16 16 5e c0 f6 60 52 7c 7c 64 42 40 f5 13 ce 6e 2d 16 5e c0 28 64 e7 6d 6c 64 42 40 30 a0 17 ee 5c 16 5e c0 f4 f8 bd 4d 7f 64 42 40 64 58 c5 1b 99 16 5e c0 19 c8 b3 cb b7 64 42 40 f4 8c 7d c9 c6 16 5e c0 17 49 bb d1 c7 64 42 40 7f a1 47 8c 9e 17 5e c0 5f 5e 80 7d 74 64 42 40 d6 ad 9e 93 de 17 5e c0 b9 6d df a3 fe 64 42 40 59 4f ad be ba 17 5e c0 f1 0e f0 a4 85 65 42 40 c3 7d e4 d6 a4 17 5e c0 59 18 22 a7 af 65 42 40 33 8c bb 41 b4 17 5e c0 95 2c 27 a1 f4 65 42 40 b6 2f a0 17 ee 17 5e c0 29 98 31 05 6b 66 42 40 b5 fb 55 80 ef 17 5e c0 66 bd 18 ca 89 66 42 40 6b 45 9b e3 dc 17 5e c0 3c f5 48 83 db 66 42 40 d4 2c d0 ee 90 17 5e c0 39 2a 37 51 4b 67 42 40 2b f9 d8 5d a0 17 5e c0 01 69 ff 03 ac 67 42 40 c2 86 a7 57 ca 17 5e c0 ac 8f 87 be bb 67 42 40 ad bf 25 00 ff 17 5e c0 78 25 c9 73 7d 67 42 40 f6 44 d7 85 1f 18 5e c0 70 98 68 90 82 67 42 40 75 3c 66 a0 32 18 5e c0 1f 82 aa d1 ab 67 42 40 9e cf 80 7a 33 18 5e c0 cd 76 85 3e 58 68 42 40 d1 77 b7 b2 44 18 5e c0 c9 03 91 45 9a 68 42 40 4c c4 5b e7 df 18 5e c0 1b d6 54 16 85 69 42 40 52 0c 90 68 02 19 5e c0 3d 10 59 a4 89 69 42 40 0a 68 22 6c 78 19 5e c0 01 30 9e 41 43 69 42 40 8b 4f 01 30 9e 19 5e c0 e2 ae 5e 45 46 69 42 40 41 bc ae 5f b0 19 5e c0 4e d3 67 07 5c 69 42 40 83 6d c4 93 dd 19 5e c0 8f aa 26 88 ba 69 42 40 4d 2c f0 15 dd 19 5e c0 b6 a0 f7 c6 10 6a 42 40 ee 26 f8 a6 e9 19 5e c0 a6 b9 15 c2 6a 6a 42 40 ff 76 d9 af 3b 1a 5e c0 08 03 cf bd 87 6b 42 40 24 26 a8 e1 5b 1a 5e c0 20 5f 42 05 87 6b 42 40 53 96 21 8e 75 1a 5e c0 de 02 09 8a 1f 6b 42 40 99 48 69 36 8f 1a 5e c0 e1 98 65 4f 02 6b 42 40 a9 85 92 c9 a9 1a 5e c0 50 53 cb d6 fa 6a 42 40 82 e5 08 19 c8 1a 5e c0 3d ba 11 16 15 6b 42 40 9c 17 27 be 
da 1a 5e c0 20 eb a9 d5 57 6b 42 40 68 5e 0e bb ef 1a 5e c0 0e d7 6a 0f 7b 6b 42 40 62 f5 47 18 06 1b 5e c0 3e 78 ed d2 86 6b 42 40 72 86 e2 8e 37 1b 5e c0 69 73 9c db 84 6b 42 40 28 f3 8f be 49 1b 5e c0 cb b9 14 57 95 6b 42 40 b2 9b 19 fd 68 1b 5e c0 a5 87 a1 d5 c9 6b 42 40 43 e1 b3 75 70 1b 5e c0 6e 52 d1 58 fb 6b 42 40 c9 74 e8 f4 bc 1b 5e c0 e6 22 be 13 b3 6c 42 40 19 1b ba d9 1f 1c 5e c0 c7 bd f9 0d 13 6d 42 40 18 5f b4 c7 0b 1c 5e c0 0d 35 0a 49 66 6d 42 40 db 6c ac c4 3c 1c 5e c0 76 c3 b6 45 99 6d 42 40 c6 4d 0d 34 9f 1c 5e c0 81 26 c2 86 a7 6d 42 40 ae 11 c1 38 b8 1c 5e c0 dc 9e 20 b1 dd 6d 42 40 ff e6 c5 89 af 1c 5e c0 00 00 00 00 00 6e 42 40 7a 52 26 35 b4 1c 5e c0 34 47 56 7e 19 6e 42 40 dd 7a 4d 0f 0a 1d 5e c0 de e8 63 3e 20 6e 42 40 0e db 16 65 36 1d 5e c0 7c 43 e1 b3 75 6e 42 40 50 e2 73 27 d8 1c 5e c0 08 ad 87 2f 13 6f 42 40 92 e8 65 14 cb 1c 5e c0 61 38 d7 30 43 6f 42 40 44 fd 2e 6c cd 1c 5e c0 a1 67 b3 ea 73 6f 42 40 32 03 95 f1 ef 1c 5e c0 50 51 f5 2b 9d 6f 42 40 f8 54 4e 7b 4a 1d 5e c0 87 a2 40 9f c8 6f 42 40 c2 33 a1 49 62 1d 5e c0 12 4e 0b 5e f4 6f 42 40 23 be 13 b3 5e 1d 5e c0 c8 96 e5 eb 32 70 42 40 b6 4b 1b 0e 4b 1d 5e c0 e8 dc ed 7a 69 70 42 40 4b 1e 4f cb 0f 1d 5e c0 5e 64 02 7e 8d 70 42 40 55 13 44 dd 07 1d 5e c0 ff cf 61 be bc 70 42 40 57 23 bb d2 32 1d 5e c0 7f 2e 1a 32 1e 71 42 40 6c 08 8e cb b8 1d 5e c0 c9 af 1f 62 83 71 42 40 90 4f c8 ce db 1d 5e c0 c9 cb 9a 58 e0 71 42 40 c5 90 9c 4c dc 1d 5e c0 0b 28 d4 d3 47 72 42 40 16 6d 8e 73 9b 1d 5e c0 ce 8a a8 89 3e 73 42 40 36 90 2e 36 ad 1d 5e c0 c2 da 18 3b e1 73 42 40 40 85 23 48 a5 1d 5e c0 cd 3d 24 7c ef 73 42 40 1b 82 e3 32 6e 1d 5e c0 d7 a0 2f bd fd 73 42 40 a6 0b b1 fa 23 1d 5e c0 75 5a b7 41 ed 73 42 40 69 72 31 06 d6 1c 5e c0 34 86 39 41 9b 74 42 40 8a 75 aa 7c cf 1c 5e c0 64 b2 b8 ff c8 74 42 40 80 d4 26 4e ee 1c 5e c0 af 44 a0 fa 07 75 42 40 42 76 de c6 66 1d 5e c0 3c f4 dd ad 2c 75 42 40 a8 e3 31 03 95 1d 5e c0 87 86 c5 a8 6b 75 42 40 a6 7b 9d d4 97 1d 5e c0 e6 b1 66 64 90 75 
42 40 15 36 03 5c 90 1d 5e c0 9b 3b fa 5f ae 75 42 40 81 24 ec db 49 1d 5e c0 f8 89 03 e8 f7 75 42 40 10 ae 80 42 3d 1d 5e c0 a9 f8 bf 23 2a 76 42 40 06 85 41 99 46 1d 5e c0 f7 20 04 e4 4b 76 42 40 f7 e8 0d f7 91 1d 5e c0 bd e0 d3 9c bc 76 42 40 07 7d e9 ed cf 1d 5e c0 73 9c db 84 7b 77 42 40 a2 0b ea 5b e6 1d 5e c0 47 3e af 78 ea 77 42 40 1a fb 92 8d 07 1e 5e c0 cd c8 20 77 11 78 42 40 5f ed 28 ce 51 1e 5e c0 40 12 f6 ed 24 78 42 40 3d 0a d7 a3 70 1e 5e c0 02 84 0f 25 5a 78 42 40 70 23 65 8b a4 1e 5e c0 9f 91 08 8d 60 79 42 40 58 ad 4c f8 a5 1e 5e c0 cb a2 b0 8b a2 79 42 40 ba 82 6d c4 93 1e 5e c0 8d 41 27 84 0e 7a 42 40 1d ac ff 73 98 1e 5e c0 cc eb 88 43 36 7a 42 40 ca fc a3 6f d2 1e 5e c0 1a a4 e0 29 e4 7a 42 40 6e 6c 76 a4 fa 1e 5e c0 b9 fe 5d 9f 39 7b 42 40 29 3e 3e 21 3b 1f 5e c0 a6 5e b7 08 8c 7b 42 40 ce c1 33 a1 49 1f 5e c0 2d e9 28 07 b3 7b 42 40 05 6b 9c 4d 47 1f 5e c0 1b d5 e9 40 d6 7b 42 40 c3 10 39 7d 3d 1f 5e c0 77 f6 95 07 e9 7b 42 40 30 bb 27 0f 0b 1f 5e c0 e9 62 d3 4a 21 7c 42 40 aa 9e cc 3f fa 1e 5e c0 19 8f 52 09 4f 7c 42 40 5c e7 df 2e fb 1e 5e c0 39 b8 74 cc 79 7c 42 40 ce a6 23 80 9b 1f 5e c0 e3 c6 2d e6 e7 7c 42 40 73 2a 19 00 aa 1f 5e c0 36 8f c3 60 fe 7c 42 40 76 df 31 3c f6 1f 5e c0 dc 2a 88 81 ae 7d 42 40 b8 05 4b 75 01 20 5e c0 7d 96 e7 c1 dd 7d 42 40 59 31 5c 1d 00 20 5e c0 70 61 dd 78 77 7e 42 40 d4 b9 a2 94 10 20 5e c0 c9 e5 3f a4 df 7e 42 40 f3 57 c8 5c 19 20 5e c0 9c df 30 d1 20 7f 42 40 7d 3e ca 88 0b 20 5e c0 4c fd bc a9 48 7f 42 40 66 4a eb 6f 09 20 5e c0 dd ea 39 e9 7d 7f 42 40 b8 59 bc 58 18 20 5e c0 69 e1 b2 0a 9b 7f 42 40 92 23 9d 81 91 20 5e c0 0d e2 03 3b fe 7f 42 40 50 1d ab 94 9e 20 5e c0 06 48 34 81 22 80 42 40 67 2b 2f f9 9f 20 5e c0 9c 50 88 80 43 80 42 40 ad 35 94 da 8b 20 5e c0 20 45 9d b9 87 80 42 40 c4 77 62 d6 8b 20 5e c0 e1 b6 b6 f0 bc 80 42 40 57 cd 73 44 be 20 5e c0 17 d3 4c f7 3a 81 42 40 1e dd 08 8b 8a 21 5e c0 a0 e1 cd 1a bc 81 42 40 16 89 09 6a f8 21 5e c0 fc 8e e1 b1 9f 81 42 40 58 e3 6c 3a 02 22 5e c0 
ea 7a a2 eb c2 81 42 40 a0 89 b0 e1 e9 21 5e c0 71 21 8f e0 46 82 42 40 c1 e0 9a 3b fa 21 5e c0 63 d0 09 a1 83 82 42 40 46 61 17 45 0f 22 5e c0 87 14 03 24 9a 82 42 40 35 0c 1f 11 53 22 5e c0 70 44 f7 ac 6b 82 42 40 06 10 3e 94 68 22 5e c0 da e3 85 74 78 82 42 40 ba c0 e5 b1 66 22 5e c0 e0 47 35 ec f7 82 42 40 f2 5e b5 32 e1 22 5e c0 1b 7f a2 b2 61 83 42 40 84 d8 99 42 e7 22 5e c0 14 ce 6e 2d 93 83 42 40 d6 73 d2 fb c6 22 5e c0 a2 25 8f a7 e5 83 42 40 a4 8e 8e ab 91 22 5e c0 2c ba f5 9a 1e 84 42 40 59 87 a3 ab 74 22 5e c0 17 10 5a 0f 5f 84 42 40 47 e5 26 6a 69 22 5e c0 10 23 84 47 1b 85 42 40 fd 4a e7 c3 b3 22 5e c0 70 23 65 8b a4 85 42 40 6f 7e c3 44 83 1f 5e c0 f8 aa 95 09 bf 8a 42 40 f3 90 29 1f 82 1e 5e c0 aa d4 ec 81 56 8c 42 40 1c 5c 3a e6 3c 1e 5e c0 0b 97 55 d8 0c 8c 42 40 3f 56 f0 db 10 1e 5e c0 f0 6a b9 33 13 8c 42 40 b0 01 11 e2 ca 1d 5e c0 87 a6 ec f4 83 8c 42 40 20 5f 42 05 87 1d 5e c0 a1 12 d7 31 ae 8c 42 40 a6 0e f2 7a 30 1d 5e c0 f7 b0 17 0a d8 8c 42 40 7f 32 c6 87 d9 1c 5e c0 d9 e8 9c 9f e2 8c 42 40 76 de c6 66 47 1c 5e c0 70 07 ea 94 47 8d 42 40 da a9 b9 dc 60 1b 5e c0 3a 91 60 aa 99 8d 42 40 1c 7a 8b 87 f7 1a 5e c0 37 c6 4e 78 09 8e 42 40 e2 76 68 58 8c 1a 5e c0 cf 87 67 09 32 8e 42 40 f3 e2 c4 57 3b 1a 5e c0 5d 31 23 bc 3d 8e 42 40 99 9e b0 c4 03 1a 5e c0 7c d5 ca 84 5f 8e 42 40 00 a9 4d 9c dc 19 5e c0 0d 54 c6 bf cf 8e 42 40 22 52 d3 2e a6 19 5e c0 73 63 7a c2 12 8f 42 40 f1 2a 6b 9b e2 18 5e c0 cc b3 92 56 7c 8f 42 40 b8 20 5b 96 af 18 5e c0 59 69 52 0a ba 8f 42 40 87 1a 85 24 b3 18 5e c0 eb 1c 03 b2 d7 8f 42 40 ad a3 aa 09 a2 18 5e c0 7e 38 48 88 f2 8f 42 40 b5 c6 a0 13 42 18 5e c0 9a 98 2e c4 ea 8f 42 40 fa d1 70 ca dc 17 5e c0 23 4c 51 2e 8d 8f 42 40 5a b8 ac c2 66 17 5e c0 13 d5 5b 03 5b 8f 42 40 6d 1d 1c ec 4d 17 5e c0 6b b8 c8 3d 5d 8f 42 40 79 01 f6 d1 a9 16 5e c0 57 ea 59 10 ca 8f 42 40 d4 0e 7f 4d d6 15 5e c0 01 17 64 cb f2 8f 42 40 67 ba d7 49 7d 15 5e c0 f3 cb 60 8c 48 90 42 40 f6 09 a0 18 59 15 5e c0 9f 94 49 0d 6d 90 42 40 cb 0f 
5c e5 09 15 5e c0 5c 93 6e 4b e4 90 42 40 f6 0a 0b ee 07 15 5e c0 65 36 c8 24 23 91 42 40 00 e3 19 34 f4 14 5e c0 84 0e ba 84 43 91 42 40 0c 03 96 5c c5 14 5e c0 dc 65 bf ee 74 91 42 40 40 12 f6 ed 24 14 5e c0 a3 e5 40 0f b5 91 42 40 a2 25 8f a7 e5 13 5e c0 86 1c 5b cf 10 92 42 40 ba dd cb 7d 72 13 5e c0 22 8b 34 f1 0e 92 42 40 bf 0f 07 09 51 13 5e c0 d0 b4 c4 ca 68 92 42 40 9b 02 99 9d 45 13 5e c0 3d 0d 18 24 7d 92 42 40 06 2b 4e b5 16 13 5e c0 63 b8 3a 00 e2 92 42 40 af b0 e0 7e c0 12 5e c0 63 0e 82 8e 56 93 42 40 a6 b5 69 6c af 12 5e c0 6a 6d 1a db 6b 93 42 40 f6 9a 1e 14 94 12 5e c0 7b 12 d8 9c 83 93 42 40 8d 24 41 b8 02 12 5e c0 72 6e 13 ee 95 93 42 40 ea 5a 7b 9f aa 11 5e c0 27 33 de 56 7a 93 42 40 9d a1 b8 e3 4d 11 5e c0 88 68 74 07 b1 93 42 40 7b 68 1f 2b f8 10 5e c0 12 da 72 2e c5 93 42 40 18 0b 43 e4 f4 10 5e c0 a0 15 18 b2 ba 93 42 40 3a 05 f9 d9 c8 10 5e c0 76 a5 65 a4 de 93 42 40 4d 32 72 16 f6 0f 5e c0 ec a0 12 d7 31 94 42 40 ca 17 b4 90 80 0f 5e c0 90 85 e8 10 38 94 42 40 64 cb f2 75 19 0f 5e c0 78 63 41 61 50 94 42 40 3b a7 59 a0 dd 0e 5e c0 99 f4 f7 52 78 94 42 40 3b 53 e8 bc c6 0e 5e c0 12 4c 35 b3 96 94 42 40 0f ed 63 05 bf 0e 5e c0 6e 8a c7 45 b5 94 42 40 ad 35 94 da 8b 0e 5e c0 4d 15 8c 4a ea 94 42 40 c5 c9 fd 0e 45 0e 5e c0 67 47 aa ef fc 94 42 40 1d 1c ec 4d 0c 0e 5e c0 32 75 57 76 c1 94 42 40 ef e3 68 8e ac 0d 5e c0 73 12 4a 5f 08 95 42 40 90 dc 9a 74 5b 0d 5e c0 41 f2 ce a1 0c 95 42 40 20 98 a3 c7 ef 0c 5e c0 8e ca 4d d4 d2 94 42 40 42 0a 9e 42 ae 0c 5e c0 79 78 cf 81 e5 94 42 40 f8 16 d6 8d 77 0c 5e c0 a4 a7 c8 21 e2 94 42 40 41 bb 43 8a 01 0c 5e c0 2e 8f 35 23 83 94 42 40 52 b5 dd 04 df 0b 5e c0 f1 9c 2d 20 b4 94 42 40 03 97 c7 9a 91 0b 5e c0 ae f4 da 6c ac 94 42 40 a4 c6 84 98 4b 0b 5e c0 89 24 7a 19 c5 94 42 40 04 00 c7 9e 3d 0b 5e c0 5a f0 a2 af 20 95 42 40 40 12 f6 ed 24 0b 5e c0 9f 3a 56 29 3d 95 42 40 73 bb 97 fb e4 0a 5e c0 c2 dc ee e5 3e 95 42 40 f8 fb c5 6c c9 0a 5e c0 d4 b6 61 14 04 95 42 40 c2 15 50 a8 a7 0a 5e c0 46 07 24 61 
df 94 42 40 37 6d c6 69 88 0a 5e c0 f3 8f be 49 d3 94 42 40 47 af 06 28 0d 0a 5e c0 d0 d5 56 ec 2f 95 42 40 b6 f2 92 ff c9 09 5e c0 80 b9 16 2d 40 95 42 40 bc 90 0e 0f 61 09 5e c0 2d b1 32 1a f9 94 42 40 a8 a7 8f c0 1f 09 5e c0 89 b5 f8 14 00 95 42 40 78 b4 71 c4 5a 08 5e c0 43 aa 28 5e 65 95 42 40 aa 0c e3 6e 10 08 5e c0 31 b4 3a 39 43 95 42 40 11 52 b7 b3 af 07 5e c0 82 38 0f 27 30 95 42 40 47 c8 40 9e 5d 07 5e c0 6e 31 3f 37 34 95 42 40 8d 09 31 97 54 07 5e c0 93 c6 68 1d 55 95 42 40 af 44 a0 fa 07 07 5e c0 2a 1a 6b 7f 67 95 42 40 c9 1f 0c 3c f7 06 5e c0 83 14 3c 85 5c 95 42 40 8b fd 65 f7 e4 06 5e c0 7e 74 ea ca 67 95 42 40 8f 18 3d b7 d0 06 5e c0 68 05 86 ac 6e 95 42 40 7e 74 ea ca 67 06 5e c0 9a 5e 62 2c d3 95 42 40 7a 8e c8 77 29 06 5e c0 ba d8 b4 52 08 96 42 40 e2 3c 9c c0 74 05 5e c0 c3 0d f8 fc 30 96 42 40 a8 aa d0 40 2c 05 5e c0 ed 2b 0f d2 53 96 42 40 89 61 87 31 e9 04 5e c0 00 e1 43 89 96 96 42 40 63 d3 4a 21 90 04 5e c0 95 81 03 5a ba 96 42 40 ac e2 8d cc 23 04 5e c0 ae 62 f1 9b c2 96 42 40 f8 a5 7e de 54 03 5e c0 67 d1 3b 15 70 97 42 40 68 ec 4b 36 1e 03 5e c0 b1 35 5b 79 c9 97 42 40 7b 4e 7a df f8 02 5e c0 4a 99 d4 d0 06 98 42 40 69 8f 17 d2 e1 00 5e c0 6a 6d 1a db 6b 9b 42 40 b4 92 56 7c 43 ff 5d c0 eb e1 cb 44 11 9e 42 40 a6 29 02 9c de fe 5d c0 21 21 ca 17 b4 9e 42 40 06 bb 61 db a2 fe 5d c0 67 81 76 87 14 9f 42 40 d7 88 60 1c 5c fe 5d c0 fd 88 5f b1 86 9f 42 40 ea 20 af 07 93 fc 5d c0 a2 98 bc 01 66 a2 42 40 d7 6b 7a 50 50 fc 5d c0 12 dd b3 ae d1 a2 42 40 28 0d 35 0a 49 fc 5d c0 71 02 d3 69 dd a2 42 40 b3 7d c8 5b ae fb 5d c0 45 68 04 1b d7 a3 42 40 24 44 f9 82 16 f8 5d c0 ca 6e 66 f4 a3 a9 42 40 4b 8f a6 7a 32 f7 5d c0 81 79 c8 94 0f ab 42 40 0a 81 5c e2 c8 f6 5d c0 0c cc 0a 45 ba ab 42 40 da aa 24 b2 0f f2 5d c0 72 c0 ae 26 4f b3 42 40 c1 01 2d 5d c1 f0 5d c0 e5 d5 39 06 64 b5 42 40 ad 31 e8 84 d0 ef 5d c0 16 fc 36 c4 78 b5 42 40 85 b2 f0 f5 b5 ee 5d c0 da c9 e0 28 79 b5 42 40 0d c7 f3 19 50 ed 5d c0 b0 02 7c b7 79 b5 42 40 d4 61 85 5b 3e ed 
5d c0 80 7e df bf 79 b5 42 40 99 d7 11 87 6c ec 5d c0 74 d0 25 1c 7a b5 42 40 20 60 ad da 35 ec 5d c0 e5 43 50 35 7a b5 42 40 c5 1c 04 1d ad e9 5d c0 32 ad 4d 63 7b b5 42 40 13 ba 4b e2 ac e9 5d c0 2c d4 9a e6 1d b9 42 40 44 a6 7c 08 aa e9 5d c0 ac 74 77 9d 0d bb 42 40 f3 02 ec a3 53 e9 5d c0 22 a5 d9 3c 0e bb 42 40 be 2e c3 7f ba e8 5d c0 5e d7 2f d8 0d bb 42 40 0e 83 f9 2b 64 e7 5d c0 c7 bd f9 0d 13 bb 42 40 ca 8c b7 95 5e e7 5d c0 ee 79 fe b4 51 bf 42 40 9a 5c 8c 81 75 e5 5d c0 83 c3 0b 22 52 bf 42 40 d3 6a 48 dc 63 e5 5d c0 23 bb d2 32 52 bf 42 40 88 83 84 28 5f e5 5d c0 a9 4a 5b 5c e3 c3 42 40 e4 6a 64 57 5a e5 5d c0 e2 23 62 4a 24 c7 42 40 a3 95 7b 81 59 e5 5d c0 27 f8 a6 e9 b3 c7 42 40 82 ca f8 f7 19 d6 5d c0 66 2d 05 a4 fd df 42 40 29 e8 f6 92 c6 d3 5d c0 78 5f 95 0b 95 e3 42 40 6b ba 9e e8 ba d3 5d c0 b6 4b 1b 0e 4b e3 42 40 0b cf 4b c5 c6 d2 5d c0 cf f5 7d 38 48 e2 42 40 e8 15 4f 3d d2 d2 5d c0 51 69 c4 cc 3e e1 42 40 e1 b6 b6 f0 bc d2 5d c0 b5 35 22 18 07 e1 42 40 be 6c 3b 6d 8d d2 5d c0 19 02 80 63 cf e0 42 40 2f f7 c9 51 80 d2 5d c0 0b 44 4f ca a4 e0 42 40 a8 e0 f0 82 88 d2 5d c0 37 a9 68 ac fd df 42 40 4c e2 ac 88 9a d2 5d c0 e2 78 3e 03 ea df 42 40 6b f2 94 d5 74 d2 5d c0 fa 44 9e 24 5d df 42 40 a8 3a e4 66 b8 d1 5d c0 c2 dd 59 bb ed de 42 40 3e ed f0 d7 64 d1 5d c0 6d 1c b1 16 9f de 42 40 e4 13 b2 f3 36 d1 5d c0 6d 1c b1 16 9f de 42 40'
-San Mateo X'00 00 00 00 03 05 00 00 00 56 0c 57 07 40 a1 5e c0 65 69 93 81 bd 8d 42 40 73 a0 87 da 36 85 5e c0 7d 3c f4 dd ad da 42 40 01 00 00 00 4f 01 00 00 00 00 00 00 a1 f6 5b 3b 51 9b 5e c0 49 2f 6a f7 ab da 42 40 16 88 9e 94 49 9b 5e c0 61 8b dd 3e ab da 42 40 20 7d 93 a6 41 9b 5e c0 a9 6b ed 7d aa da 42 40 80 b9 16 2d 40 9b 5e c0 67 7c 5f 5c aa da 42 40 a0 54 fb 74 3c 9b 5e c0 44 a6 7c 08 aa da 42 40 8b a8 89 3e 1f 9b 5e c0 b6 81 3b 50 a7 da 42 40 d9 e8 9c 9f e2 9a 5e c0 b6 81 3b 50 a7 da 42 40 13 9d 65 16 a1 9a 5e c0 1a df 17 97 aa da 42 40 d3 f8 85 57 92 9a 5e c0 b6 81 3b 50 a7 da 42 40 f7 b1 82 df 86 9a 5e c0 b6 81 3b 50 a7 da 42 40 e1 25 38 f5 81 9a 5e c0 b6 81 3b 50 a7 da 42 40 18 e9 45 ed 7e 9a 5e c0 ce a9 64 00 a8 da 42 40 a1 81 58 36 73 9a 5e c0 1a df 17 97 aa da 42 40 9c 50 88 80 43 9a 5e c0 09 a8 70 04 a9 da 42 40 29 76 34 0e f5 99 5e c0 b6 81 3b 50 a7 da 42 40 2a c4 23 f1 f2 99 5e c0 1b 47 ac c5 a7 da 42 40 4a 5f 08 39 ef 99 5e c0 45 da c6 9f a8 da 42 40 1c 96 06 7e 54 99 5e c0 1a df 17 97 aa da 42 40 f2 7d 71 a9 4a 99 5e c0 1a df 17 97 aa da 42 40 39 f0 6a b9 33 99 5e c0 b6 81 3b 50 a7 da 42 40 95 ee ae b3 21 99 5e c0 1a df 17 97 aa da 42 40 c7 40 b5 44 0c 99 5e c0 1a df 17 97 aa da 42 40 2f be 69 04 2a 99 5e c0 21 34 53 5c 90 da 42 40 c2 47 4d de ce 98 5e c0 e4 9d cd 6c eb d6 42 40 b5 82 3c f5 55 98 5e c0 51 63 9d cf b9 d5 42 40 49 3c 12 61 f4 97 5e c0 b0 63 92 2b c3 d4 42 40 af 47 6f d3 09 98 5e c0 2d 3f e8 77 81 d3 42 40 d3 ea 98 5b 2f 98 5e c0 34 e0 b5 20 41 d3 42 40 6d 89 dd d8 ca 98 5e c0 03 be 9e c9 00 d3 42 40 a4 df 72 66 b5 98 5e c0 03 6a 5b ea 9e d1 42 40 72 28 46 54 6e 98 5e c0 6d 75 95 f2 4f d1 42 40 c6 c1 09 9d 63 97 5e c0 39 6c ad 98 27 d0 42 40 78 9d 96 66 bd 96 5e c0 eb 9d 98 72 d0 ce 42 40 5a d8 8c 04 f3 96 5e c0 5c 11 f2 51 3a ce 42 40 b5 7b ed 09 b4 97 5e c0 ee b7 d7 8d a5 ce 42 40 8b a4 45 4b e4 97 5e c0 88 ac 7a 1b 90 ce 42 40 9a dd 55 29 2f 98 5e c0 18 b8 a2 25 a6 cd 42 40 ea 0e d5 14 3a 98 5e c0 83 24 b9 05 84 cd 42 40 41 
8e f4 c7 0a 98 5e c0 1c 4e 56 73 40 cd 42 40 67 57 7a d3 0d 97 5e c0 3b 8a 22 16 d7 cb 42 40 99 da 8b f7 fa 96 5e c0 4b 0f c2 a2 d5 cb 42 40 e1 6b f5 90 5e 95 5e c0 09 aa b0 e9 b5 cb 42 40 b5 96 14 ce 54 94 5e c0 54 d8 45 78 a1 cb 42 40 c4 c2 f1 45 2f 94 5e c0 42 7b 38 83 2b cb 42 40 9d 05 89 a2 34 94 5e c0 2f 1e 2b 8e b5 ca 42 40 29 24 ad c8 93 93 5e c0 c5 f7 5b 78 a9 c9 42 40 98 d8 43 51 6f 93 5e c0 0a bb ca 0c a5 c9 42 40 c3 66 54 40 02 93 5e c0 9f dd 6d d4 97 c9 42 40 f3 e3 53 26 73 92 5e c0 9d 58 ef 7b 86 c9 42 40 6c cb 3e 64 d5 90 5e c0 af 46 de 54 54 c9 42 40 5b 4b 2d 0a d0 90 5e c0 66 8d cc ae 53 c9 42 40 1f d6 20 7c c6 90 5e c0 cb 86 e3 19 48 c9 42 40 99 d3 96 17 72 90 5e c0 bf 46 8b ce e1 c8 42 40 5a a1 8b 1a 1f 90 5e c0 de a1 f3 36 7d c8 42 40 4f 7f cf 3a 0f 90 5e c0 f8 16 c3 b4 5a c8 42 40 36 3f a5 bc f8 8f 5e c0 79 1d 09 cf 29 c8 42 40 d8 93 d2 c8 a3 8f 5e c0 3c b8 3f 21 71 c7 42 40 b0 64 cd e4 8a 8f 5e c0 4a d2 eb a9 50 c7 42 40 34 42 ec f0 5c 8f 5e c0 a1 78 bc b9 14 c7 42 40 47 98 c7 c3 01 8f 5e c0 8c 25 c7 cc 9d c6 42 40 6b 65 d0 6c 69 8e 5e c0 3b 99 94 18 d7 c5 42 40 be d3 77 1b 4a 8e 5e c0 7b 2e 2d 3f ae c5 42 40 fc 38 17 82 b6 8d 5e c0 35 6f eb b9 ed c4 42 40 f4 43 06 fa 94 8c 5e c0 a1 1b 16 d5 c2 c4 42 40 83 26 0b 2b 7a 8c 5e c0 dc e4 96 47 e0 c2 42 40 30 7a d6 de c7 8a 5e c0 3b 14 ef c4 87 c0 42 40 be 56 16 9a 70 8a 5e c0 5e 82 cf 7d 5f c0 42 40 17 3f a9 c3 f6 89 5e c0 5c f3 37 42 27 c0 42 40 c8 82 b7 92 93 89 5e c0 64 eb 96 83 57 c0 42 40 2d e4 72 15 f8 88 5e c0 21 c0 b5 16 03 c1 42 40 3a b9 be f4 61 88 5e c0 45 b9 3d af 77 c0 42 40 bc 1c d6 56 2c 88 5e c0 6a 1f 15 ba 01 c0 42 40 24 6b f1 d9 0b 88 5e c0 2d e3 a1 9d 1f c0 42 40 88 78 0c 73 6e 87 5e c0 79 50 f9 6c b0 c0 42 40 7b 9f aa 42 03 87 5e c0 c8 23 b8 91 b2 bf 42 40 73 a0 87 da 36 85 5e c0 9d 64 ab cb 29 bd 42 40 b7 5f 3e 59 31 86 5e c0 f0 6d fa b3 1f bb 42 40 36 e9 b6 44 2e 86 5e c0 20 b6 f4 68 aa bb 42 40 ca fb 38 9a 23 87 5e c0 77 31 cd 74 af bb 42 40 03 b2 d7 bb 3f 87 5e c0 0e f3 e5 
05 d8 bb 42 40 c2 33 a1 49 62 87 5e c0 20 b6 f4 68 aa bb 42 40 9a 0b 5c 1e 6b 87 5e c0 df a9 80 7b 9e bb 42 40 39 0b 7b da e1 87 5e c0 07 0b 27 69 fe ba 42 40 82 e5 08 19 c8 87 5e c0 c1 71 19 37 35 ba 42 40 c8 ce db d8 ec 87 5e c0 cb a1 45 b6 f3 b9 42 40 9d 2d 20 b4 1e 88 5e c0 ea 79 37 16 14 ba 42 40 07 b3 09 30 2c 88 5e c0 17 ba 12 81 ea b9 42 40 5c 3c bc e7 c0 88 5e c0 a3 07 3e 06 2b ba 42 40 4b b0 38 9c f9 88 5e c0 c0 b1 67 cf 65 ba 42 40 82 90 2c 60 02 89 5e c0 19 c9 1e a1 66 ba 42 40 13 0a 11 70 08 89 5e c0 ad fb c7 42 74 ba 42 40 e5 b9 be 0f 07 89 5e c0 b7 2a 89 ec 83 ba 42 40 35 ef 38 45 47 89 5e c0 69 e4 f3 8a a7 ba 42 40 17 2a ff 5a 5e 89 5e c0 47 02 0d 36 75 ba 42 40 49 9d 80 26 c2 89 5e c0 77 11 a6 28 97 ba 42 40 52 98 f7 38 d3 89 5e c0 43 ad 69 de 71 ba 42 40 ce 88 d2 de e0 89 5e c0 c7 b9 4d b8 57 ba 42 40 ff ae cf 9c f5 89 5e c0 0c 79 04 37 52 ba 42 40 a2 0e 2b dc f2 89 5e c0 4c 8c 65 fa 25 ba 42 40 a8 53 1e dd 08 8a 5e c0 36 cc d0 78 22 ba 42 40 02 bc 05 12 14 8a 5e c0 23 bf 7e 88 0d ba 42 40 f3 8e 53 74 24 8a 5e c0 86 1c 5b cf 10 ba 42 40 3a 3e 5a 9c 31 8a 5e c0 76 70 b0 37 31 ba 42 40 50 01 30 9e 41 8a 5e c0 53 ce 17 7b 2f ba 42 40 bc 3c 9d 2b 4a 8a 5e c0 f2 d2 4d 62 10 ba 42 40 08 ac 1c 5a 64 8a 5e c0 b1 34 f0 a3 1a ba 42 40 fd 68 38 65 6e 8a 5e c0 d0 7b 63 08 00 ba 42 40 dd b5 84 7c d0 8a 5e c0 cf db d8 ec 48 b9 42 40 4c 8d d0 cf d4 8a 5e c0 4d 4e ed 0c 53 b9 42 40 57 3e cb f3 e0 8a 5e c0 c8 b6 0c 38 4b b9 42 40 5d fe 43 fa ed 8a 5e c0 6b 7e fc a5 45 b9 42 40 9c 33 a2 b4 37 8b 5e c0 17 2c d5 05 bc b8 42 40 d6 e6 ff 55 47 8b 5e c0 ef 58 6c 93 8a b8 42 40 9d a1 b8 e3 4d 8b 5e c0 00 c4 5d bd 8a b8 42 40 76 fb ac 32 53 8b 5e c0 b2 d5 e5 94 80 b8 42 40 3f c6 dc b5 84 8b 5e c0 18 0a d8 0e 46 b8 42 40 b8 af 03 e7 8c 8b 5e c0 26 37 8a ac 35 b8 42 40 0b 2a aa 7e a5 8b 5e c0 b4 ac fb c7 42 b8 42 40 9b 3b fa 5f ae 8b 5e c0 2e 90 a0 f8 31 b8 42 40 d9 43 fb 58 c1 8b 5e c0 35 42 3f 53 af b7 42 40 7c 45 b7 5e d3 8b 5e c0 5b 5d 4e 09 88 b7 42 40 b5 1b 7d cc 07 
8c 5e c0 3f 72 6b d2 6d b7 42 40 c4 5b e7 df 2e 8c 5e c0 74 ee 76 bd 34 b7 42 40 4a f0 86 34 2a 8c 5e c0 4e 7d 20 79 e7 b6 42 40 b7 28 b3 41 26 8c 5e c0 e4 c0 ab e5 ce b6 42 40 8a 94 66 f3 38 8c 5e c0 a4 fa ce 2f 4a b6 42 40 4a f0 86 34 2a 8c 5e c0 88 0f ec f8 2f b6 42 40 ca a7 c7 b6 0c 8c 5e c0 17 f4 de 18 02 b6 42 40 26 a6 0b b1 fa 8b 5e c0 ed 28 ce 51 47 b5 42 40 ee 60 c4 3e 01 8c 5e c0 18 b0 e4 2a 16 b5 42 40 c9 38 46 b2 47 8c 5e c0 99 67 25 ad f8 b4 42 40 1f 69 70 5b 5b 8c 5e c0 8b a9 f4 13 ce b4 42 40 60 ac 6f 60 72 8c 5e c0 fe 0b 04 01 32 b4 42 40 28 44 c0 21 54 8c 5e c0 ff cc 20 3e b0 b3 42 40 78 60 00 e1 43 8c 5e c0 c3 7f ba 81 02 b3 42 40 c9 38 46 b2 47 8c 5e c0 39 f1 d5 8e e2 b2 42 40 de 39 94 a1 2a 8c 5e c0 f1 ba 7e c1 6e b2 42 40 75 39 25 20 26 8c 5e c0 ed 7d aa 0a 0d b2 42 40 11 ab 3f c2 30 8c 5e c0 a8 18 e7 6f 42 b1 42 40 1f 69 70 5b 5b 8c 5e c0 54 57 3e cb f3 b0 42 40 93 ff c9 df bd 8c 5e c0 9d b9 87 84 ef af 42 40 9e 5f 94 a0 bf 8c 5e c0 b0 c7 44 4a b3 af 42 40 97 74 94 83 d9 8c 5e c0 01 c1 1c 3d 7e af 42 40 65 8e e5 5d f5 8c 5e c0 74 07 b1 33 85 ae 42 40 d2 ac 6c 1f f2 8c 5e c0 6a a5 10 c8 25 ae 42 40 fe 26 14 22 e0 8c 5e c0 06 2c b9 8a c5 ad 42 40 97 e3 15 88 9e 8c 5e c0 da fe 95 95 26 ad 42 40 6b 0f 7b a1 80 8c 5e c0 f7 94 9c 13 7b ac 42 40 b0 c8 af 1f 62 8c 5e c0 bb 63 b1 4d 2a ac 42 40 a9 f7 54 4e 7b 8b 5e c0 2f e1 d0 5b 3c aa 42 40 39 d6 c5 6d 34 8b 5e c0 b1 fb 8e e1 b1 a9 42 40 ec 34 d2 52 79 8b 5e c0 32 8f fc c1 c0 a9 42 40 97 8c 63 24 7b 8b 5e c0 f6 5c a6 26 c1 a9 42 40 e8 33 a0 de 8c 8b 5e c0 8b a6 b3 93 c1 a9 42 40 72 a8 df 85 ad 8b 5e c0 27 66 bd 18 ca a9 42 40 8a 72 69 fc c2 8b 5e c0 14 59 6b 28 b5 a9 42 40 06 63 44 a2 d0 8b 5e c0 a4 df be 0e 9c a9 42 40 05 32 3b 8b de 8b 5e c0 a9 c0 c9 36 70 a9 42 40 7c d6 35 5a 0e 8c 5e c0 6b d6 19 df 17 a9 42 40 a6 7f 49 2a 53 8c 5e c0 7a 72 4d 81 cc a8 42 40 d4 10 55 f8 33 8c 5e c0 23 bd a8 dd af a8 42 40 df 33 12 a1 11 8c 5e c0 ec 6b 5d 6a 84 a8 42 40 33 6b 29 20 ed 8b 5e c0 2b a3 91 cf 2b a8 42 
40 55 85 06 62 d9 8b 5e c0 0c 06 d7 dc d1 a7 42 40 08 5a 81 21 ab 8b 5e c0 20 99 0e 9d 9e a7 42 40 fa d5 1c 20 98 8b 5e c0 6f bb d0 5c a7 a7 42 40 d0 ee 90 62 80 8b 5e c0 ea e8 b8 1a d9 a7 42 40 05 6e dd cd 53 8b 5e c0 8d f0 f6 20 04 a8 42 40 c7 4b 37 89 41 8b 5e c0 26 8f a7 e5 07 a8 42 40 aa 60 54 52 27 8b 5e c0 f6 0a 0b ee 07 a8 42 40 79 58 a8 35 cd 8a 5e c0 85 5e 7f 12 9f a7 42 40 b9 c2 bb 5c c4 8a 5e c0 2b 4e b5 16 66 a7 42 40 15 3c 85 5c a9 8a 5e c0 91 99 0b 5c 1e a7 42 40 1d 38 67 44 69 8a 5e c0 b1 c3 98 f4 f7 a6 42 40 22 8d 0a 9c 6c 8a 5e c0 bc b3 76 db 85 a6 42 40 cf 66 d5 e7 6a 8a 5e c0 f4 c4 73 b6 80 a6 42 40 27 d8 7f 9d 9b 8a 5e c0 a6 29 02 9c de a5 42 40 2d af 5c 6f 9b 8a 5e c0 c8 40 9e 5d be a5 42 40 1c 78 b5 dc 99 8a 5e c0 45 62 82 1a be a5 42 40 73 68 91 ed 7c 8a 5e c0 a5 30 ef 71 a6 a5 42 40 1f d7 86 8a 71 8a 5e c0 eb 8b 84 b6 9c a5 42 40 a3 03 92 b0 6f 8a 5e c0 5a 2f 86 72 a2 a5 42 40 41 45 d5 af 74 8a 5e c0 d9 3d 79 58 a8 a5 42 40 15 74 7b 49 63 8a 5e c0 06 84 d6 c3 97 a5 42 40 a1 47 8c 9e 5b 8a 5e c0 6e 19 70 96 92 a5 42 40 62 a3 ac df 4c 8a 5e c0 db 6b 41 ef 8d a5 42 40 13 7e a9 9f 37 8a 5e c0 13 9a 24 96 94 a5 42 40 1f d8 f1 5f 20 8a 5e c0 78 25 c9 73 7d a5 42 40 d3 a2 3e c9 1d 8a 5e c0 e4 0f 06 9e 7b a5 42 40 09 8a 1f 63 ee 89 5e c0 fb 3e 1c 24 44 a5 42 40 f5 48 83 db da 89 5e c0 82 1b 29 5b 24 a5 42 40 2b 84 d5 58 c2 89 5e c0 b1 c4 03 ca a6 a4 42 40 ad fa 5c 6d c5 89 5e c0 aa 0a 0d c4 b2 9f 42 40 b3 d1 39 3f c5 89 5e c0 e1 46 ca 16 49 9f 42 40 98 6e 12 83 c0 89 5e c0 65 54 19 c6 dd 9e 42 40 4c 36 1e 6c b1 89 5e c0 27 16 f8 8a 6e 9d 42 40 62 f6 b2 ed b4 89 5e c0 9d 64 ab cb 29 9d 42 40 10 58 39 b4 c8 89 5e c0 2c d7 db 66 2a 9c 42 40 1c 06 f3 57 c8 89 5e c0 27 db c0 1d a8 9b 42 40 28 f1 b9 13 ec 89 5e c0 49 0f 43 ab 93 9b 42 40 09 8a 1f 63 ee 89 5e c0 49 0f 43 ab 93 9b 42 40 32 8e 91 ec 11 8a 5e c0 49 0f 43 ab 93 9b 42 40 de 02 09 8a 1f 8a 5e c0 49 0f 43 ab 93 9b 42 40 50 fd 83 48 86 8b 5e c0 b1 a4 dc 7d 8e 9b 42 40 7f 50 17 29 94 8b 5e c0 d6 
e2 53 00 8c 9b 42 40 48 c1 53 c8 95 8b 5e c0 89 45 0c 3b 8c 9b 42 40 2a 8e 03 af 96 8b 5e c0 a6 2a 6d 71 8d 9b 42 40 b8 74 cc 79 c6 8d 5e c0 dc 9f 8b 86 8c 9b 42 40 60 e8 11 a3 e7 8d 5e c0 13 49 f4 32 8a 9b 42 40 04 92 b0 6f 27 8e 5e c0 26 1c 7a 8b 87 9b 42 40 07 b3 09 30 2c 8e 5e c0 0e c0 06 44 88 9b 42 40 27 88 ba 0f 40 8e 5e c0 ee 3e c7 47 8b 9b 42 40 da 55 48 f9 49 8e 5e c0 5f 7e a7 c9 8c 9b 42 40 c7 b9 4d b8 57 8e 5e c0 96 5b 5a 0d 89 9b 42 40 c1 02 98 32 70 8e 5e c0 e6 e5 b0 fb 8e 9b 42 40 77 a0 4e 79 74 8e 5e c0 f2 5f 20 08 90 9b 42 40 b8 74 cc 79 c6 8e 5e c0 5f 7e a7 c9 8c 9b 42 40 a9 2e e0 65 86 8f 5e c0 fb 20 cb 82 89 9b 42 40 1a a2 0a 7f 86 8f 5e c0 13 80 7f 4a 95 9a 42 40 20 45 9d b9 87 8f 5e c0 3b e2 90 0d a4 99 42 40 af d1 72 a0 87 8f 5e c0 67 0b 08 ad 87 99 42 40 1a a2 0a 7f 86 8f 5e c0 c6 8a 1a 4c c3 98 42 40 5a b7 41 ed b7 8f 5e c0 43 56 b7 7a 4e 98 42 40 ad 68 73 9c db 8f 5e c0 91 27 49 d7 4c 98 42 40 02 61 a7 58 35 91 5e c0 f1 2f 82 c6 4c 98 42 40 b6 d9 58 89 79 92 5e c0 54 54 fd 4a e7 97 42 40 c9 21 e2 e6 54 94 5e c0 4b 93 52 d0 ed 97 42 40 71 1e 4e 60 3a 94 5e c0 81 cc ce a2 77 96 42 40 d0 5e 7d 3c f4 93 5e c0 eb 53 8e c9 e2 92 42 40 d8 f5 0b 76 c3 92 5e c0 e8 6b 96 cb 46 8f 42 40 8b 1b b7 98 9f 92 5e c0 44 4e 5f cf d7 8e 42 40 45 83 14 3c 85 92 5e c0 27 6a 69 6e 85 8e 42 40 45 0c cf bb c0 92 5e c0 65 69 93 81 bd 8d 42 40 ee 41 08 c8 97 93 5e c0 ba 67 5d a3 e5 8e 42 40 12 bf 62 0d 17 94 5e c0 d2 54 4f e6 1f 8f 42 40 9c a3 8e 8e ab 94 5e c0 82 ff ad 64 c7 8e 42 40 6f 62 48 4e 26 95 5e c0 37 fe 44 65 c3 8e 42 40 85 5b 3e 92 92 95 5e c0 cd 91 95 5f 06 8f 42 40 6b 83 13 d1 af 95 5e c0 9f cb d4 24 78 8f 42 40 38 be f6 cc 92 95 5e c0 7b bd fb e3 bd 90 42 40 9d a0 4d 0e 9f 95 5e c0 ae 2c d1 59 66 91 42 40 0a f8 35 92 04 96 5e c0 72 31 06 d6 71 92 42 40 4a 99 d4 d0 06 97 5e c0 d0 0d 4d d9 e9 93 42 40 45 81 3e 91 27 97 5e c0 61 6d 8c 9d f0 94 42 40 8a 76 15 52 7e 97 5e c0 af 98 11 de 1e 96 42 40 a7 96 ad f5 45 98 5e c0 e8 31 ca 33 2f 97 42 40 6b b6 f2 
92 ff 98 5e c0 bd c7 99 26 6c 97 42 40 b8 58 51 83 69 99 5e c0 b7 9a 75 c6 f7 97 42 40 87 df 4d b7 ec 99 5e c0 f2 ec f2 ad 0f 99 42 40 5d a3 e5 40 0f 9a 5e c0 ee ce da 6d 17 9c 42 40 c5 a9 d6 c2 2c 9a 5e c0 c8 94 0f 41 d5 9c 42 40 45 bd e0 d3 9c 9a 5e c0 de 1d 19 ab cd 9d 42 40 05 15 55 bf d2 9a 5e c0 89 7b 2c 7d e8 9e 42 40 88 bc e5 ea c7 9a 5e c0 42 af 3f 89 cf 9f 42 40 26 8e 3c 10 59 9a 5e c0 3e eb 1a 2d 07 a2 42 40 84 f3 54 f4 0c 9a 5e c0 6d 3d 9f 7d 1c a6 42 40 7d 08 aa 46 af 99 5e c0 95 ba 64 1c 23 ab 42 40 81 26 c2 86 a7 99 5e c0 eb 73 b5 15 fb ad 42 40 16 68 77 48 31 9a 5e c0 4b b0 38 9c f9 af 42 40 11 f2 77 31 33 9a 5e c0 32 4b cf 70 fe af 42 40 ff 36 12 f6 ac 9a 5e c0 cb b0 12 5e 32 b1 42 40 e7 08 1b 79 c2 9a 5e c0 62 5c 42 c4 68 b1 42 40 b3 b6 29 1e 17 9b 5e c0 39 27 f6 d0 3e b2 42 40 5f 3a 02 de 0f 9c 5e c0 f0 21 f0 24 61 b6 42 40 0c 9d c0 e0 0f 9c 5e c0 9b 7f 9c 30 61 b6 42 40 43 aa 28 5e 65 9c 5e c0 26 fe 28 ea cc b7 42 40 3c dc 0e 0d 8b 9c 5e c0 ae 61 86 c6 13 bb 42 40 ce 91 60 9e a0 9c 5e c0 9f 56 d1 1f 9a bb 42 40 98 fc 4f fe ee 9c 5e c0 bd 35 b0 55 82 bd 42 40 61 20 0a 0b 90 9d 5e c0 da 7a 37 1b e9 be 42 40 37 8c 82 e0 f1 9d 5e c0 2b 6a 30 0d c3 bf 42 40 76 c1 e0 9a 3b 9e 5e c0 80 2b d9 b1 11 c0 42 40 e9 ba 06 5f 62 9e 5e c0 2e b7 e0 d0 ef bf 42 40 c5 ee 46 68 62 9e 5e c0 f8 14 cb c8 ef bf 42 40 ba 7f 92 ea 6a 9e 5e c0 76 e9 1f 59 e8 bf 42 40 89 37 36 6b f1 9e 5e c0 fb 5b 8c cd 72 bf 42 40 35 61 fb c9 18 9f 5e c0 60 21 73 65 50 bf 42 40 f9 87 2d 3d 9a 9f 5e c0 6f bd a6 07 05 bf 42 40 2a 90 d9 59 f4 9f 5e c0 19 af 79 55 67 bf 42 40 a9 d8 98 d7 11 a0 5e c0 30 08 12 77 bf bf 42 40 66 0c 47 ca 26 a0 5e c0 9c ff da 10 fe bf 42 40 02 13 c6 b9 7d a0 5e c0 b3 c3 f9 dc 01 c1 42 40 09 ec 21 5d ae a0 5e c0 24 71 80 36 93 c1 42 40 fd 4c bd 6e 11 a1 5e c0 5f 0c e5 44 bb c2 42 40 d7 a0 f6 c0 1b a1 5e c0 a9 88 f3 1f 2f c3 42 40 56 0c 57 07 40 a1 5e c0 a6 09 db 4f c6 c4 42 40 9a 3b 61 67 3d a1 5e c0 49 11 1b ee d3 c4 42 40 4b 1e 4f cb 0f a1 5e c0 50 a8 a7 8f c0 
c5 42 40 cb d5 8f 4d f2 a0 5e c0 fa 08 fc e1 e7 c5 42 40 40 16 a2 43 e0 a0 5e c0 b2 49 7e c4 af c6 42 40 ce 1c 92 5a 28 a1 5e c0 ce fd d5 e3 be c9 42 40 12 fc 73 7a 1d a1 5e c0 69 07 8c e8 1c cb 42 40 a6 ba 80 97 19 a1 5e c0 5e 67 43 fe 99 cb 42 40 48 77 93 a7 c3 a0 5e c0 27 87 e2 e3 fb cb 42 40 db 06 df b7 7c a0 5e c0 22 cf d8 b2 4c cc 42 40 06 2b 4e b5 16 a0 5e c0 5c 3c bc e7 c0 cc 42 40 07 05 c6 6a d3 9f 5e c0 0d 7b b3 96 2e ce 42 40 b3 bf 20 8e d1 9f 5e c0 2a 82 f5 b4 38 ce 42 40 15 c7 81 57 cb 9f 5e c0 25 5a f2 78 5a ce 42 40 fd e7 99 e4 b2 9f 5e c0 b4 56 2e f5 9b d0 42 40 6d 1c b1 16 9f 9f 5e c0 5c 3d 27 bd 6f d2 42 40 41 29 4d 44 b4 9f 5e c0 24 8f e8 1a 09 d5 42 40 7e 85 7a 1f b5 9f 5e c0 5b de eb 00 24 d5 42 40 09 c7 e0 26 b8 9f 5e c0 fb d0 14 2a 83 d5 42 40 45 4b 1e 4f cb 9f 5e c0 95 f2 5a 09 dd d7 42 40 08 95 f6 a7 08 a0 5e c0 7e fe eb dc b4 d9 42 40 26 6f 24 e9 1e a0 5e c0 b5 2c 82 06 60 da 42 40 98 85 61 c2 27 a0 5e c0 0e b1 b0 14 a4 da 42 40 0f 09 df fb 1b 9f 5e c0 87 fd 9e 58 a7 da 42 40 33 c2 db 83 10 9f 5e c0 87 fd 9e 58 a7 da 42 40 57 ea 59 10 ca 9e 5e c0 87 fd 9e 58 a7 da 42 40 ae 67 08 c7 2c 9e 5e c0 ea 5a 7b 9f aa da 42 40 83 4f 73 f2 22 9e 5e c0 87 fd 9e 58 a7 da 42 40 a0 a9 d7 2d 02 9e 5e c0 87 fd 9e 58 a7 da 42 40 ae d6 89 cb f1 9d 5e c0 87 fd 9e 58 a7 da 42 40 d2 8f 86 53 e6 9d 5e c0 87 fd 9e 58 a7 da 42 40 92 eb a6 94 d7 9d 5e c0 23 a0 c2 11 a4 da 42 40 8b fb 8f 4c 87 9d 5e c0 87 fd 9e 58 a7 da 42 40 04 54 38 82 54 9d 5e c0 87 fd 9e 58 a7 da 42 40 28 7c b6 0e 0e 9d 5e c0 87 fd 9e 58 a7 da 42 40 a9 33 f7 90 f0 9c 5e c0 23 a0 c2 11 a4 da 42 40 85 e9 7b 0d c1 9c 5e c0 87 fd 9e 58 a7 da 42 40 80 bb ec d7 9d 9c 5e c0 87 fd 9e 58 a7 da 42 40 8d b7 95 5e 9b 9c 5e c0 87 fd 9e 58 a7 da 42 40 9e 09 4d 12 4b 9c 5e c0 87 fd 9e 58 a7 da 42 40 10 94 db f6 3d 9c 5e c0 ea 5a 7b 9f aa da 42 40 97 aa b4 c5 35 9c 5e c0 ea 5a 7b 9f aa da 42 40 bb 63 b1 4d 2a 9c 5e c0 ea 5a 7b 9f aa da 42 40 04 8f 6f ef 1a 9c 5e c0 f7 3c 7f da a8 da 42 40 18 d1 76 4c dd 9b 5e 
c0 23 a0 c2 11 a4 da 42 40 34 2b db 87 bc 9b 5e c0 87 fd 9e 58 a7 da 42 40 d8 9b 18 92 93 9b 5e c0 b6 81 3b 50 a7 da 42 40 68 59 f7 8f 85 9b 5e c0 68 b0 a9 f3 a8 da 42 40 67 80 0b b2 65 9b 5e c0 7d 3c f4 dd ad da 42 40 a1 f6 5b 3b 51 9b 5e c0 49 2f 6a f7 ab da 42 40'
+San Francisco POLYGON ((-122.5024267151224 37.70813234927674, -122.506483 37.723731, -122.50782901995821 37.73533099999975, -122.5093953926665 37.74882999999992, -122.50939990408358 37.74886887939222, -122.51113374917077 37.763811156353654, -122.511983 37.77113, -122.514483 37.780829, -122.50985492674391 37.7846044139513, -122.50531 37.788312, -122.4929995748537 37.78793259283567, -122.492883 37.787929, -122.485783 37.790629, -122.478083 37.810828, -122.47033619921733 37.80867139189769, -122.46379252780997 37.80465287181413, -122.44826217281084 37.80724869124332, -122.44286017365187 37.80815160778156, -122.4259424176653 37.810979324191834, -122.42423175360105 37.81102202801582, -122.42028987677702 37.81112043027818, -122.40745179565363 37.81144091117684, -122.3996578257948 37.80657820153446, -122.39813860391477 37.80563034889725, -122.39439124521003 37.80127166258145, -122.38981779869818 37.7959521241284, -122.38856208578255 37.79449155973274, -122.38559109136668 37.791035890359986, -122.3853227835059 37.79072381193532, -122.38406388221165 37.7833123743444, -122.38135678738803 37.76737509282, -122.37900260524107 37.75351548830655, -122.37679663286187 37.74052842832782, -122.3758876818182 37.73517722727334, -122.37585395583966 37.7349786747845, -122.37335481238509 37.73383, -122.37009367469328 37.732331091792666, -122.37008910062909 37.73233607547181, -122.3696210704221 37.73284601855081, -122.36769653795419 37.73494289605506, -122.36768710100542 37.734941526175874, -122.36547829416895 37.73462089303678, -122.3567842057419 37.729504618706976, -122.36174893455313 37.71500952294956, -122.37041125718375 37.717572339902105, -122.37520618808058 37.71545849426952, -122.39137380314342 37.708331, -122.392682 37.708331, -122.393782 37.708231, -122.395182 37.708331, -122.395782 37.708331, -122.405226 37.708271, -122.405453 37.708245, -122.405582 37.708231, -122.41037 37.708283, -122.413282 37.708331, -122.413997 37.708252, -122.414182 37.708231, -122.414482 37.708231, 
-122.415182 37.708231, -122.416082 37.708331, -122.420082 37.708231, -122.423782 37.708231, -122.425565 37.708314, -122.425792 37.708324, -122.425882 37.708328, -122.426366 37.708351, -122.426833 37.708373, -122.428082 37.708431, -122.430027 37.708281, -122.430882 37.708231, -122.433382 37.708232, -122.435382 37.708132, -122.439144 37.708278, -122.440082 37.708332, -122.440782 37.708332, -122.441282 37.708332, -122.442082 37.708232, -122.446983 37.708232, -122.447134 37.708232, -122.449283 37.708232, -122.452183 37.708132, -122.453983 37.708232, -122.458283 37.708232, -122.461383 37.708232, -122.466283 37.708132, -122.467183 37.708232, -122.467883 37.708232, -122.468883 37.708232, -122.470883 37.708232, -122.471483 37.708332, -122.481083 37.708232, -122.485383 37.708232, -122.486083 37.708232, -122.5024267151224 37.70813234927674))
+Madera POLYGON ((-119.268979 37.73923, -119.264079 37.732931, -119.261179 37.732731, -119.260478 37.731431, -119.260036 37.727174, -119.257878 37.724631, -119.255178 37.725331, -119.253678 37.728631, -119.252978 37.729431, -119.250978 37.73016, -119.243045 37.728547, -119.23063 37.720954, -119.229992 37.717406, -119.22219 37.715051, -119.216333 37.714123, -119.210928 37.721192, -119.206659 37.724882, -119.201717 37.731748, -119.20054 37.734916, -119.198491 37.736657, -119.192225 37.737936, -119.187908 37.73637, -119.182545 37.737093, -119.176342 37.7348, -119.172824 37.735725, -119.171464 37.737812, -119.169071 37.738359, -119.166821 37.738051, -119.164075 37.736123, -119.155237 37.734641, -119.149593 37.732895, -119.14123 37.733154, -119.127897 37.734682, -119.124312 37.733944, -119.114446 37.728195, -119.109694 37.724735, -119.107399 37.720073, -119.104554 37.717726, -119.101973 37.715622, -119.101589 37.713648, -119.099979 37.71208, -119.096855 37.707647, -119.093126 37.703627, -119.090871 37.702682, -119.073476 37.691225, -119.071306 37.688793, -119.065315 37.683878, -119.062773 37.677425, -119.062441 37.671096, -119.060619 37.669044, -119.059638 37.665047, -119.060311 37.663811, -119.059997 37.661345, -119.060671 37.659106, -119.060989 37.658851, -119.057912 37.653879, -119.054331 37.651184, -119.0538 37.645782, -119.049635 37.641902, -119.043489 37.636425, -119.041813 37.634749, -119.038261 37.632766, -119.036873 37.632535, -119.032998 37.630617, -119.032341 37.62939, -119.032706 37.628653, -119.031166 37.626373, -119.030887 37.620892, -119.031966 37.613836, -119.033265 37.609736, -119.033342 37.609535, -119.033957 37.602883, -119.031864 37.600037, -119.026564 37.594737, -119.026164 37.593437, -119.025164 37.590137, -119.022363 37.585737, -119.129985 37.500118, -119.130207 37.499941, -119.180918 37.458882, -119.286701 37.374905, -119.300911 37.362721, -119.306181 37.356809, -119.31139 37.353542, -119.314491 37.349634, -119.31521 37.348009, -119.315015 
37.346037, -119.311539 37.342939, -119.311431 37.34075, -119.312226 37.339706, -119.316839 37.337784, -119.324413 37.33706, -119.325774 37.335417, -119.325808 37.333962, -119.324545 37.33251, -119.316443 37.328127, -119.315575 37.325801, -119.316411 37.323331, -119.318789 37.320996, -119.322859 37.321179, -119.323833 37.320813, -119.331883 37.315182, -119.334804 37.312206, -119.333598 37.304152, -119.330475 37.302047, -119.326605 37.292711, -119.327607 37.288248, -119.329238 37.28445, -119.330498 37.282711, -119.331813 37.274884, -119.331613 37.273877, -119.33104 37.273099, -119.326143 37.270331, -119.324252 37.265114, -119.324223 37.25882, -119.323507 37.256463, -119.321874 37.253855, -119.322108 37.246388, -119.324054 37.244031, -119.328461 37.242245, -119.331179 37.23943, -119.332922 37.228308, -119.333636 37.226294, -119.333607 37.223914, -119.337182 37.220047, -119.337582 37.218628, -119.33452 37.216157, -119.330743 37.215128, -119.329368 37.210025, -119.329997 37.207073, -119.331226 37.205082, -119.337232 37.200023, -119.339404 37.193981, -119.343006 37.189358, -119.348326 37.186862, -119.353388 37.186128, -119.35942 37.181571, -119.360562 37.179168, -119.360355 37.169534, -119.362613 37.167061, -119.364756 37.165848, -119.367272 37.165114, -119.37342 37.164837, -119.374306 37.164448, -119.375363 37.163463, -119.375626 37.16225, -119.375497 37.159939, -119.374673 37.158269, -119.375015 37.157468, -119.379025 37.155344, -119.3838 37.154065, -119.384744 37.153081, -119.386004 37.149901, -119.388749 37.14949, -119.393066 37.149812, -119.399787 37.149835, -119.405097 37.158855, -119.407983 37.160911, -119.418183 37.164062, -119.425805 37.164297, -119.430697 37.163142, -119.432125 37.162571, -119.43306 37.161516, -119.433101 37.152016, -119.433939 37.148291, -119.434875 37.146954, -119.44272 37.144051, -119.450469 37.144946, -119.456665 37.144997, -119.460188 37.145585, -119.461503 37.145493, -119.462189 37.144898, -119.463987 37.138217, -119.468128 37.129406, 
-119.46887 37.126476, -119.468787 37.124948, -119.46613 37.120836, -119.466186 37.119257, -119.469754 37.110904, -119.471154 37.110194, -119.481067 37.109321, -119.482097 37.109572, -119.484326 37.111241, -119.485843 37.114284, -119.488731 37.117921, -119.49119 37.119728, -119.493023 37.124349, -119.491421 37.131068, -119.489622 37.133311, -119.488824 37.135486, -119.489139 37.137339, -119.490063 37.13858, -119.496404 37.144269, -119.499264 37.144725, -119.503302 37.147329, -119.503783 37.147979, -119.504354 37.149064, -119.506048 37.150345, -119.509329 37.150501, -119.517838 37.144559, -119.523551 37.129287, -119.525006 37.12825, -119.527575 37.129019, -119.532832 37.133047, -119.539919 37.135246, -119.540976 37.136413, -119.541918 37.138587, -119.54612 37.142592, -119.550893 37.145178, -119.552665 37.145384, -119.559582 37.143829, -119.562212 37.141586, -119.563613 37.13884, -119.563213 37.134996, -119.563556 37.128406, -119.566162 37.122324, -119.568904 37.1188, -119.568904 37.117542, -119.568361 37.116672, -119.566847 37.115963, -119.555674 37.115597, -119.551474 37.11713, -119.549273 37.117016, -119.547559 37.116123, -119.53996 37.109212, -119.537704 37.10507, -119.539277 37.099716, -119.541391 37.096215, -119.543105 37.095323, -119.545105 37.095346, -119.549447 37.094111, -119.554675 37.091823, -119.559045 37.088184, -119.561272 37.079147, -119.563357 37.074822, -119.5631 37.073541, -119.561244 37.071939, -119.560578 37.070728, -119.56161 37.065324, -119.562979 37.064095, -119.568416 37.063569, -119.580749 37.066103, -119.584847 37.065622, -119.589431 37.066636, -119.594547 37.070187, -119.596961 37.071266, -119.600053 37.071634, -119.604908 37.071016, -119.606077 37.070375, -119.60967 37.064692, -119.610624 37.062025, -119.61032 37.059644, -119.610639 37.057768, -119.613194 37.053502, -119.613602 37.051975, -119.613373 37.050778, -119.610361 37.045668, -119.610738 37.044652, -119.619217 37.028955, -119.620657 37.027058, -119.622437 37.026074, -119.625974 
37.025181, -119.632172 37.020983, -119.634064 37.020891, -119.635371 37.021545, -119.63618 37.023696, -119.635953 37.025345, -119.630356 37.031842, -119.629033 37.034619, -119.639349 37.043287, -119.647489 37.043775, -119.649456 37.043497, -119.651604 37.042961, -119.653999 37.041177, -119.659377 37.038938, -119.657114 37.018903, -119.657359 37.0164, -119.658611 37.01333, -119.662195 37.011759, -119.670918 37.00926, -119.677362 37.009108, -119.684794 37.010383, -119.690779 37.011987, -119.693867 37.010955, -119.698075 37.008748, -119.6993 37.007276, -119.69971 37.004904, -119.700744 37.003328, -119.705878 36.999949, -119.713078 36.991649, -119.731446 36.979238, -119.732779 36.978349, -119.740479 36.969949, -119.742679 36.953749, -119.738196 36.953871, -119.736609 36.953154, -119.733778 36.948549, -119.734273 36.946753, -119.735433 36.945303, -119.737679 36.943949, -119.739879 36.944149, -119.744771 36.940329, -119.747179 36.938449, -119.751979 36.935849, -119.751879 36.932149, -119.750479 36.928449, -119.752779 36.923649, -119.756979 36.920549, -119.761779 36.920049, -119.76608 36.918849, -119.77258 36.918549, -119.77408 36.914749, -119.77528 36.909249, -119.78818 36.897649, -119.78958 36.894249, -119.78878 36.892549, -119.78848 36.890449, -119.78558 36.885649, -119.78548 36.879949, -119.78908 36.875949, -119.79147 36.875949, -119.79178 36.875949, -119.791848 36.875941, -119.793479 36.875746, -119.798448 36.872537, -119.80148 36.868448, -119.805026 36.864348, -119.805181 36.864148, -119.809281 36.862748, -119.812181 36.857948, -119.812681 36.852448, -119.813093 36.852114, -119.818781 36.848148, -119.823281 36.848648, -119.831895 36.851505, -119.835982 36.853248, -119.837682 36.854948, -119.839282 36.858748, -119.840782 36.860948, -119.842982 36.861248, -119.844637 36.860804, -119.847083 36.860148, -119.848383 36.858748, -119.849283 36.856348, -119.852983 36.851748, -119.854783 36.851548, -119.856683 36.852548, -119.856683 36.852047, -119.857638 36.852797, 
-119.861383 36.855247, -119.865283 36.851147, -119.865883 36.847947, -119.866401 36.84798, -119.868984 36.848147, -119.868884 36.850347, -119.876484 36.854847, -119.884885 36.858547, -119.885908 36.857955, -119.887885 36.854947, -119.887944 36.853645, -119.889285 36.851947, -119.895685 36.852147, -119.896085 36.852647, -119.902185 36.852047, -119.905986 36.850347, -119.909486 36.846347, -119.913886 36.845147, -119.918486 36.845446, -119.923286 36.847946, -119.927287 36.848646, -119.930587 36.846746, -119.932587 36.844346, -119.932087 36.843546, -119.932587 36.842946, -119.934687 36.837946, -119.936987 36.836246, -119.943487 36.834146, -119.952377 36.834468, -119.957977 36.836223, -119.96109 36.8353, -119.967285 36.832393, -119.968765 36.832301, -119.970529 36.832885, -119.976884 36.835032, -119.97963 36.839739, -119.984513 36.841028, -119.990221 36.837969, -119.991518 36.835558, -119.991396 36.832307, -119.990647 36.831118, -119.991289 36.829897, -119.992495 36.829065, -119.999102 36.827402, -120.006603 36.827408, -120.008424 36.828004, -120.01329 36.828235, -120.020294 36.824738, -120.02112 36.823228, -120.021976 36.819065, -120.024649 36.816568, -120.025364 36.815566, -120.027356 36.81472, -120.040843 36.815227, -120.045878 36.817516, -120.050772 36.821864, -120.057794 36.822933, -120.062695 36.820539, -120.070179 36.818457, -120.073424 36.820127, -120.076099 36.824542, -120.079827 36.82532, -120.090669 36.821749, -120.095107 36.81779, -120.09635 36.817227, -120.09798 36.815364, -120.10808 36.812776, -120.111011 36.81305, -120.11449 36.814091, -120.129112 36.811805, -120.136341 36.809544, -120.144027 36.804652, -120.147299 36.803807, -120.152052 36.801223, -120.156178 36.798662, -120.156947 36.797609, -120.158938 36.79777, -120.162863 36.799075, -120.163659 36.800311, -120.168618 36.803805, -120.170743 36.803813, -120.172707 36.80283, -120.173703 36.800656, -120.173192 36.796629, -120.173591 36.795394, -120.174473 36.794296, -120.177773 36.792877, -120.181443 
36.789766, -120.181955 36.786998, -120.180988 36.786403, -120.181358 36.784504, -120.182724 36.782284, -120.188953 36.776862, -120.192281 36.777205, -120.199477 36.780751, -120.200785 36.782101, -120.202379 36.786311, -120.201783 36.789606, -120.20221 36.790018, -120.205339 36.790955, -120.210942 36.788805, -120.213418 36.790131, -120.216291 36.790611, -120.220074 36.788574, -120.220271 36.785371, -120.224253 36.782853, -120.22667 36.782234, -120.230965 36.782508, -120.232728 36.781317, -120.23341 36.779875, -120.233324 36.778686, -120.231673 36.775437, -120.229963 36.770084, -120.230077 36.76942, -120.231669 36.768825, -120.234058 36.768572, -120.236846 36.770493, -120.238866 36.771064, -120.243672 36.770536, -120.245066 36.770787, -120.24777 36.773028, -120.248084 36.774835, -120.250246 36.775086, -120.252037 36.773984, -120.254031 36.770896, -120.255994 36.770096, -120.258041 36.770349, -120.261196 36.772478, -120.263273 36.772708, -120.270469 36.771955, -120.276187 36.770631, -120.279971 36.766491, -120.280938 36.76624, -120.282161 36.766675, -120.2829 36.768025, -120.282672 36.769855, -120.281732 36.771388, -120.281874 36.772281, -120.284035 36.773585, -120.2854 36.773517, -120.288195 36.772331, -120.289427 36.770478, -120.289321 36.767455, -120.287951 36.764823, -120.288288 36.763673, -120.289812 36.763058, -120.291823 36.763449, -120.293623 36.76537, -120.294166 36.766816, -120.293627 36.769107, -120.292119 36.770816, -120.29215 36.771388, -120.293171 36.772233, -120.297695 36.772289, -120.302688 36.769746, -120.304078 36.769638, -120.306058 36.770265, -120.30639 36.771648, -120.305308 36.773285, -120.300895 36.773728, -120.299805 36.774572, -120.299667 36.77613, -120.300357 36.777633, -120.312583 36.782024, -120.313746 36.781934, -120.314538 36.77917, -120.315892 36.778285, -120.317662 36.777695, -120.321722 36.777697, -120.323828 36.779019, -120.324349 36.779893, -120.324552 36.7816, -120.322264 36.784591, -120.322824 36.785574, -120.324216 36.786304, 
-120.329252 36.785184, -120.33285 36.782945, -120.334614 36.782884, -120.335563 36.78417, -120.335312 36.786237, -120.334264 36.788946, -120.334387 36.79147, -120.33513 36.792069, -120.33694 36.792396, -120.341832 36.791005, -120.344071 36.791385, -120.346071 36.794734, -120.346359 36.797285, -120.347664 36.798162, -120.34983 36.798095, -120.350671 36.797495, -120.35095 36.795436, -120.348206 36.789909, -120.344506 36.787414, -120.344335 36.786352, -120.345124 36.785049, -120.346523 36.784559, -120.349422 36.785135, -120.353095 36.786859, -120.355883 36.787348, -120.369052 36.784805, -120.37296 36.789021, -120.370773 36.793141, -120.369436 36.794423, -120.370377 36.796528, -120.373907 36.800141, -120.373993 36.80108, -120.372857 36.803574, -120.368221 36.806986, -120.369163 36.809937, -120.371725 36.810417, -120.374939 36.808516, -120.376924 36.808672, -120.37809 36.809931, -120.378142 36.815193, -120.379193 36.817208, -120.388666 36.824374, -120.390772 36.824513, -120.397975 36.822365, -120.40028 36.822457, -120.40139 36.823121, -120.404149 36.826005, -120.404119 36.828637, -120.404886 36.831383, -120.409893 36.84008, -120.411858 36.840058, -120.413425 36.8369, -120.414991 36.836008, -120.416613 36.83578, -120.418463 36.836581, -120.419601 36.838618, -120.420882 36.839693, -120.422247 36.840052, -120.425266 36.839992, -120.426376 36.840495, -120.428283 36.842097, -120.428739 36.843608, -120.433408 36.849215, -120.439444 36.852144, -120.438219 36.854684, -120.441209 36.85624, -120.447217 36.856675, -120.448744 36.858328, -120.448214 36.859375, -120.448499 36.860153, -120.453739 36.860359, -120.456445 36.862967, -120.450693 36.867773, -120.449895 36.869238, -120.450038 36.870725, -120.452145 36.871984, -120.457671 36.87331, -120.459124 36.874645, -120.458905 36.876554, -120.457706 36.878219, -120.454089 36.879318, -120.453605 36.88076, -120.456227 36.883734, -120.464404 36.886822, -120.466541 36.889659, -120.466571 36.892817, -120.462613 36.900346, -120.463697 
36.905311, -120.463213 36.905746, -120.459851 36.906181, -120.455321 36.905678, -120.450563 36.910988, -120.450164 36.912384, -120.452045 36.914306, -120.459398 36.915426, -120.46222 36.917348, -120.462392 36.918469, -120.461936 36.919384, -120.457633 36.921628, -120.456864 36.923161, -120.457434 36.924191, -120.462034 36.927631, -120.465816 36.933457, -120.467185 36.936843, -120.469211 36.938033, -120.473743 36.938627, -120.475625 36.940251, -120.478793 36.948259, -120.47888 36.950273, -120.477769 36.953568, -120.478055 36.954781, -120.481594 36.960088, -120.484048 36.962696, -120.487984 36.965211, -120.488869 36.966401, -120.488727 36.967476, -120.488128 36.968049, -120.48505 36.969766, -120.484024 36.971162, -120.484081 36.972467, -120.493866 36.975827, -120.494751 36.976513, -120.499404 36.981888, -120.500089 36.98333, -120.500007 36.988021, -120.501012 36.9912, -120.501548 36.993189, -120.500704 36.994405, -120.500576 36.99603, -120.501486 36.996919, -120.508881 36.999946, -120.509679 37.001053, -120.509764 37.00206, -120.508536 37.004142, -120.508535 37.005766, -120.511613 37.009612, -120.524081 37.013553, -120.530787 37.012686, -120.531386 37.013761, -120.5299 37.017788, -120.530898 37.019642, -120.532182 37.020329, -120.53632 37.018911, -120.537633 37.019301, -120.537518 37.023191, -120.544995 37.026419, -120.545365 37.027929, -120.543395 37.030446, -120.540141 37.032184, -120.538371 37.034151, -120.537684 37.039895, -120.542222 37.044084, -120.492387 37.083955, -120.476692 37.09639, -120.472467 37.094142, -120.469779 37.094336, -120.465508 37.097777, -120.461366 37.099066, -120.456084 37.100343, -120.450777 37.100666, -120.441858 37.103747, -120.427787 37.106252, -120.421358 37.109664, -120.414816 37.110902, -120.409872 37.111259, -120.40648 37.11229, -120.40409 37.115715, -120.400768 37.11776, -120.388831 37.120982, -120.385717 37.122865, -120.385934 37.12377, -120.38489 37.124589, -120.379033 37.124352, -120.372851 37.121496, -120.365647 37.119965, 
-120.364131 37.120033, -120.354115 37.123354, -120.341205 37.124597, -120.335772 37.127214, -120.333563 37.128328, -120.328729 37.131967, -120.328609 37.133885, -120.327405 37.134873, -120.324546 37.136381, -120.314754 37.138338, -120.310892 37.141138, -120.303863 37.141081, -120.301821 37.143823, -120.301124 37.144444, -120.298261 37.147522, -120.292999 37.151079, -120.291957 37.151729, -120.290288 37.152454, -120.281416 37.153013, -120.276039 37.152171, -120.270379 37.15384, -120.265147 37.154455, -120.264947 37.154135, -120.262259 37.155232, -120.249395 37.157771, -120.242222 37.157961, -120.235929 37.158703, -120.232277 37.159922, -120.23088 37.160849, -120.230409 37.161782, -120.227286 37.1634, -120.222965 37.163969, -120.219501 37.162154, -120.213657 37.164318, -120.208707 37.164448, -120.202135 37.162684, -120.198136 37.163254, -120.194797 37.163151, -120.187594 37.160252, -120.185487 37.161747, -120.180762 37.161512, -120.176489 37.162265, -120.175636 37.16506, -120.174129 37.165929, -120.170226 37.165982, -120.168544 37.164187, -120.166483 37.163067, -120.164576 37.162698, -120.157053 37.165525, -120.152954 37.166021, -120.146549 37.163852, -120.142563 37.164065, -120.13054 37.167156, -120.126003 37.166114, -120.120099 37.165532, -120.115089 37.165656, -120.114538 37.16666, -120.109862 37.167221, -120.10884 37.166886, -120.107725 37.16723, -120.106489 37.16744, -120.100085 37.170507, -120.096281 37.172129, -120.085251 37.17337, -120.080826 37.174433, -120.076733 37.176469, -120.071297 37.177562, -120.064685 37.177814, -120.052055 37.183108, -120.048719 37.185836, -120.04644 37.187708, -120.013783 37.214229, -119.988494 37.234902, -119.982337 37.239871, -119.97869 37.242814, -119.974372 37.246298, -119.946474 37.268738, -119.942402 37.272024, -119.941958 37.272382, -119.932517 37.280002, -119.876374 37.325316, -119.862456 37.336413, -119.856011 37.341622, -119.782208 37.400853, -119.761802 37.417115, -119.747102 37.417748, -119.729856 37.41776, -119.708014 
37.417777, -119.706931 37.417778, -119.694124 37.417789, -119.690787 37.417792, -119.651191 37.417828, -119.651177 37.446225, -119.651003 37.461353, -119.64573 37.461372, -119.636383 37.46136, -119.615489 37.461519, -119.615148 37.494681, -119.585297 37.494694, -119.58422 37.494696, -119.583933 37.530376, -119.583639 37.555795, -119.583588 37.560178, -119.345335 37.749928, -119.308995 37.777986, -119.308283 37.775728, -119.293382 37.767829, -119.294082 37.759729, -119.292782 37.758029, -119.289882 37.756329, -119.289082 37.755029, -119.289582 37.749929, -119.290682 37.749329, -119.288381 37.74503, -119.27688 37.74163, -119.27178 37.73923, -119.268979 37.73923))
+San Mateo POLYGON ((-122.426833 37.708373, -122.426366 37.708351, -122.425882 37.708328, -122.425792 37.708324, -122.425565 37.708314, -122.423782 37.708231, -122.420082 37.708231, -122.416082 37.708331, -122.415182 37.708231, -122.414482 37.708231, -122.414182 37.708231, -122.413997 37.708252, -122.413282 37.708331, -122.41037 37.708283, -122.405582 37.708231, -122.405453 37.708245, -122.405226 37.708271, -122.395782 37.708331, -122.395182 37.708331, -122.393782 37.708231, -122.392682 37.708331, -122.39137380314342 37.708331, -122.39318952871757 37.70753053724935, -122.38762624308495 37.67905960121041, -122.380246457188 37.66973300156554, -122.3742907217103 37.662206121901704, -122.37559972636403 37.652388561631916, -122.37789049084977 37.65042504196791, -122.38738080629783 37.64846153498731, -122.38607178896069 37.637662214885516, -122.38173395968786 37.635252306917074, -122.36545492127252 37.626208386115145, -122.35531010349757 37.615736317176776, -122.35858262781531 37.61115478820531, -122.3703636950657 37.614427309352195, -122.3733089618783 37.61377280702533, -122.3778785074741 37.60663290448446, -122.37854500586005 37.6055915025727, -122.37565802461997 37.60352937425725, -122.36021887729076 37.59250141798517, -122.35906780870063 37.59245714641164, -122.33389686552938 37.59148903967077, -122.31767608652702 37.590865167717, -122.315385328383 37.58726539857754, -122.31571257954643 37.58366562943808, -122.30589501293493 37.57548431864374, -122.30366927743523 37.5753494253059, -122.2970124077065 37.57494597782647, -122.2882781810038 37.5744166296656, -122.2630243886212 37.5728860936482, -122.26269773886945 37.57286629665286, -122.2621145554226 37.57251285180754, -122.256963631908 37.56939107704738, -122.25189841876764 37.56632124801739, -122.25092954886826 37.5652681305491, -122.24955669534106 37.563775901270624, -122.24437161030016 37.55813994990646, -122.24285240228596 37.55714916243285, -122.24004767486696 37.55531999304481, -122.23448271266635 
37.55169067119559, -122.22518463469684 37.54562670951642, -122.22327315046184 37.5443800898183, -122.21426441448051 37.53850482936688, -122.19659281360492 37.53719581202973, -122.19495655144605 37.522469471625044, -122.16844912476586 37.504143349397474, -122.16312267476584 37.50291416771209, -122.15568629768346 37.501198079414024, -122.14963214798911 37.50267071599117, -122.14014183254103 37.50790675997292, -122.13097876191469 37.50365248216489, -122.12770625028037 37.50005270034201, -122.12572334838893 37.50096483616503, -122.11611629693846 37.50538408444931, -122.109574 37.497637, -122.081473 37.477838, -122.096762 37.461905, -122.096574 37.466138, -122.111548 37.466292, -122.113265 37.46753, -122.115374 37.466138, -122.115913 37.465774, -122.12316 37.460889, -122.121588 37.454749, -122.123831 37.45275, -122.126874 37.453738, -122.127697 37.452469, -122.136774 37.454438, -122.140235 37.456232, -122.14077 37.456257, -122.14114 37.456673, -122.141056 37.457151, -122.144975 37.458238, -122.146384 37.456702, -122.152475 37.457738, -122.153517 37.4566, -122.15435 37.455802, -122.155616 37.455634, -122.155448 37.454284, -122.156791 37.454177, -122.157475 37.453538, -122.158475 37.453638, -122.159278 37.454627, -122.160255 37.454574, -122.160777 37.453625, -122.162375 37.453938, -122.162988 37.453126, -122.168975 37.447538, -122.169239 37.447847, -122.16998 37.447608, -122.170775 37.447438, -122.175275 37.443238, -122.176229 37.441729, -122.176629 37.441734, -122.176953 37.441424, -122.179975 37.439638, -122.180475 37.439138, -122.181976 37.439538, -122.182518 37.439025, -122.183676 37.435038, -122.184776 37.433839, -122.187976 37.433039, -122.190361 37.431297, -122.190076 37.428939, -122.189835 37.428189, -122.190976 37.424139, -122.190076 37.423339, -122.188276 37.421939, -122.187176 37.416239, -122.187576 37.414739, -122.191876 37.413839, -122.193076 37.412539, -122.194481 37.407776, -122.192635 37.403816, -122.191643 37.398514, -122.191876 37.397539, -122.190102 
37.394005, -122.189827 37.391023, -122.190476 37.38484, -122.193076 37.38244, -122.199089 37.374497, -122.199196 37.372659, -122.200776 37.37104, -122.202476 37.36344, -122.202278 37.360528, -122.20118 37.357591, -122.197176 37.35274, -122.195351 37.347506, -122.193489 37.345041, -122.179401 37.329967, -122.175075 37.325741, -122.17928 37.326195, -122.179391 37.326207, -122.180473 37.32622, -122.182466 37.32648, -122.183776 37.325841, -122.184609 37.325075, -122.185458 37.323737, -122.188376 37.321041, -122.192576 37.318741, -122.190672 37.317867, -122.188576 37.316541, -122.186348 37.313837, -122.185143 37.311092, -122.18232 37.309528, -122.18116 37.309795, -122.179711 37.311313, -122.17699 37.312626, -122.175875 37.312741, -122.174275 37.312742, -122.168775 37.309542, -122.168235 37.307803, -122.166587 37.305614, -122.162675 37.304442, -122.162879 37.30096, -122.162775 37.300803, -122.165748 37.295856, -122.165737 37.294872, -122.165641 37.294864, -122.163875 37.294142, -122.16318 37.293845, -122.163067 37.29402, -122.163372 37.2942, -122.16231 37.293694, -122.161842 37.293536, -122.160942 37.293394, -122.159645 37.293597, -122.158226 37.292891, -122.158068 37.292835, -122.155175 37.291142, -122.153983 37.290172, -122.152487 37.28634, -122.152675 37.247643, -122.152664 37.244418, -122.152375 37.241143, -122.151454 37.229936, -122.151668 37.227838, -122.152875 37.220044, -122.152853 37.216068, -122.155034 37.215444, -122.155175 37.215444, -122.157344 37.215444, -122.158175 37.215444, -122.180071 37.215286, -122.180918 37.21521, -122.181017 37.215217, -122.181072 37.215254, -122.215239 37.215226, -122.217263 37.215155, -122.221157 37.215074, -122.221447 37.215096, -122.22266 37.215188, -122.223265 37.215234, -122.224104 37.21512, -122.225598 37.215301, -122.225859 37.215333, -122.230864 37.215234, -122.242578 37.215134, -122.242584 37.207681, -122.242659 37.200319, -122.242653 37.199453, -122.242584 37.19346, -122.245601 37.189895, -122.247779 37.189845, 
-122.268881 37.189843, -122.288668 37.186746, -122.317682 37.186945, -122.316063 37.175526, -122.311782 37.147546, -122.29318 37.119348, -122.290991 37.115961, -122.289382 37.113447, -122.2930135271027 37.1073457689079, -122.306139 37.116383, -122.313907 37.118161, -122.322971 37.11546, -122.330463 37.115338, -122.337071 37.117382, -122.338856 37.120854, -122.337085 37.130795, -122.337833 37.135936, -122.344029 37.144099, -122.359791 37.155574, -122.36179 37.163593, -122.367085 37.172817, -122.37927 37.181128, -122.390599 37.182988, -122.397065 37.187249, -122.405073 37.195791, -122.407181 37.219465, -122.408982 37.225258, -122.415822 37.232839, -122.419113 37.24147, -122.418452 37.248521, -122.411686 37.265844, -122.4070406751253 37.29774446750057, -122.401323 37.337009, -122.40085 37.359225, -122.409258 37.374805, -122.40937458718714 37.374952412812846, -122.41680671480934 37.384349593282955, -122.41811969413867 37.38600972404744, -122.423286 37.392542, -122.43846845834331 37.42483960843299, -122.43846911249074 37.4248409999993, -122.443687 37.435941, -122.445987 37.461541, -122.4473033850729 37.465641, -122.452087 37.48054, -122.46191669453539 37.49148884018878, -122.467888 37.49814, -122.472388 37.50054, -122.47475410134543 37.49950610134543, -122.47475630690595 37.49950513759717, -122.47527565294777 37.49927820261193, -122.48348503393767 37.4956910071705, -122.485888 37.494641, -122.493789 37.492341, -122.499289 37.495341, -122.501089 37.49803055172413, -122.50236756265949 37.499940974171494, -122.5076736864394 37.50786935992746, -122.51064232170769 37.51230508109646, -122.516689 37.52134, -122.51731895527642 37.52487563506413, -122.519533 37.537302, -122.51937279220002 37.537717593399584, -122.516589 37.544939, -122.514789 37.546139, -122.513688 37.552239, -122.518088 37.576138, -122.51742421461702 37.586819713948735, -122.517187 37.590637, -122.51194180870982 37.593624578103395, -122.50761219768485 37.59609065615065, -122.501386 37.599637, -122.4972788747583 
37.61079677356438, -122.4971652335942 37.6111055563709, -122.496786 37.612136, -122.49529376058486 37.629759452463446, -122.494085 37.644035, -122.4953776124503 37.66434036593844, -122.4954298683642 37.665161242647365, -122.49561473799635 37.668065319217895, -122.496784 37.686433, -122.50052832680024 37.70083199999952, -122.5018866401484 37.706055463373026, -122.5024267151224 37.70813234927674, -122.486083 37.708232, -122.485383 37.708232, -122.481083 37.708232, -122.471483 37.708332, -122.470883 37.708232, -122.468883 37.708232, -122.467883 37.708232, -122.467183 37.708232, -122.466283 37.708132, -122.461383 37.708232, -122.458283 37.708232, -122.453983 37.708232, -122.452183 37.708132, -122.449283 37.708232, -122.447134 37.708232, -122.446983 37.708232, -122.442082 37.708232, -122.441282 37.708332, -122.440782 37.708332, -122.440082 37.708332, -122.439144 37.708278, -122.435382 37.708132, -122.433382 37.708232, -122.430882 37.708231, -122.430027 37.708281, -122.428082 37.708431, -122.426833 37.708373))
diff --git a/plugin/trino-iceberg/pom.xml b/plugin/trino-iceberg/pom.xml
index 128e9775b976..f3aa390bd3fc 100644
--- a/plugin/trino-iceberg/pom.xml
+++ b/plugin/trino-iceberg/pom.xml
@@ -466,6 +466,7 @@
kms
runtime
+
software.amazon.awssdk
retries-spi
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/ExpressionConverter.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/ExpressionConverter.java
index a634b711f678..5954aeb5b799 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/ExpressionConverter.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/ExpressionConverter.java
@@ -14,16 +14,26 @@
package io.trino.plugin.iceberg;
import com.google.common.base.VerifyException;
+import com.google.common.math.LongMath;
+import io.airlift.slice.Slice;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.Range;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.type.ArrayType;
+import io.trino.spi.type.DecimalType;
+import io.trino.spi.type.Int128;
+import io.trino.spi.type.LongTimestamp;
+import io.trino.spi.type.LongTimestampWithTimeZone;
import io.trino.spi.type.MapType;
import io.trino.spi.type.RowType;
import io.trino.spi.type.Type;
+import io.trino.spi.type.VarbinaryType;
+import io.trino.spi.type.VarcharType;
import org.apache.iceberg.expressions.Expression;
import org.apache.iceberg.expressions.Expressions;
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
@@ -33,11 +43,33 @@
import java.util.function.BiFunction;
import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Verify.verify;
import static io.trino.plugin.hive.util.HiveUtil.isStructuralType;
import static io.trino.plugin.iceberg.IcebergMetadataColumn.isMetadataColumnId;
-import static io.trino.plugin.iceberg.IcebergTypes.convertTrinoValueToIceberg;
+import static io.trino.plugin.iceberg.util.Timestamps.compareTimestampNanosToRange;
+import static io.trino.plugin.iceberg.util.Timestamps.compareTimestampTzNanosToRange;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampToNanos;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampTzToMicros;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampTzToNanos;
+import static io.trino.spi.type.BigintType.BIGINT;
+import static io.trino.spi.type.BooleanType.BOOLEAN;
+import static io.trino.spi.type.DateType.DATE;
+import static io.trino.spi.type.DoubleType.DOUBLE;
+import static io.trino.spi.type.IntegerType.INTEGER;
+import static io.trino.spi.type.RealType.REAL;
+import static io.trino.spi.type.TimeType.TIME_MICROS;
+import static io.trino.spi.type.TimestampType.TIMESTAMP_MICROS;
+import static io.trino.spi.type.TimestampType.TIMESTAMP_NANOS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_NANOS;
+import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_MICROSECOND;
import static io.trino.spi.type.UuidType.UUID;
+import static io.trino.spi.type.UuidType.trinoUuidToJavaUuid;
+import static java.lang.Float.intBitsToFloat;
+import static java.lang.Math.toIntExact;
import static java.lang.String.format;
+import static java.math.RoundingMode.UNNECESSARY;
+import static java.util.Objects.requireNonNull;
import static org.apache.iceberg.expressions.Expressions.alwaysFalse;
import static org.apache.iceberg.expressions.Expressions.alwaysTrue;
import static org.apache.iceberg.expressions.Expressions.equal;
@@ -112,7 +144,10 @@ private static Expression toIcebergExpression(String columnName, Type type, Doma
List rangeExpressions = new ArrayList<>();
for (Range range : orderedRanges) {
if (range.isSingleValue()) {
- icebergValues.add(convertTrinoValueToIceberg(type, range.getLowBoundedValue()));
+ // skip out-of-range values (they are implicitly false)
+ if (range(type, range.getSingleValue()) == ValueInRange.IN_RANGE) {
+ icebergValues.add(convertTrinoValueToIceberg(type, range.getSingleValue()));
+ }
}
else {
rangeExpressions.add(toIcebergExpression(columnName, range));
@@ -132,33 +167,43 @@ private static Expression toIcebergExpression(String columnName, Range range)
Type type = range.getType();
if (range.isSingleValue()) {
- Object icebergValue = convertTrinoValueToIceberg(type, range.getSingleValue());
- return equal(columnName, icebergValue);
+ return switch (range(type, range.getSingleValue())) {
+ case BELOW_RANGE, ABOVE_RANGE -> alwaysFalse();
+ case IN_RANGE -> equal(columnName, convertTrinoValueToIceberg(type, range.getSingleValue()));
+ };
}
List conjuncts = new ArrayList<>(2);
if (!range.isLowUnbounded()) {
- Object icebergLow = convertTrinoValueToIceberg(type, range.getLowBoundedValue());
- Expression lowBound;
- if (range.isLowInclusive()) {
- lowBound = greaterThanOrEqual(columnName, icebergLow);
- }
- else {
- lowBound = greaterThan(columnName, icebergLow);
- }
- conjuncts.add(lowBound);
+ conjuncts.add(switch (range(type, range.getLowBoundedValue())) {
+ case ABOVE_RANGE -> alwaysFalse();
+ case BELOW_RANGE -> alwaysTrue();
+ case IN_RANGE -> {
+ Object icebergLow = convertTrinoValueToIceberg(type, range.getLowBoundedValue());
+ if (range.isLowInclusive()) {
+ yield greaterThanOrEqual(columnName, icebergLow);
+ }
+ else {
+ yield greaterThan(columnName, icebergLow);
+ }
+ }
+ });
}
if (!range.isHighUnbounded()) {
- Object icebergHigh = convertTrinoValueToIceberg(type, range.getHighBoundedValue());
- Expression highBound;
- if (range.isHighInclusive()) {
- highBound = lessThanOrEqual(columnName, icebergHigh);
- }
- else {
- highBound = lessThan(columnName, icebergHigh);
- }
- conjuncts.add(highBound);
+ conjuncts.add(switch (range(type, range.getHighBoundedValue())) {
+ case ABOVE_RANGE -> alwaysTrue();
+ case BELOW_RANGE -> alwaysFalse();
+ case IN_RANGE -> {
+ Object icebergHigh = convertTrinoValueToIceberg(type, range.getHighBoundedValue());
+ if (range.isHighInclusive()) {
+ yield lessThanOrEqual(columnName, icebergHigh);
+ }
+ else {
+ yield lessThan(columnName, icebergHigh);
+ }
+ }
+ });
}
return and(conjuncts);
@@ -235,4 +280,111 @@ private static Expression combine(List expressions, BiFunction ValueInRange.BELOW_RANGE;
+ case 1 -> ValueInRange.ABOVE_RANGE;
+ default -> ValueInRange.IN_RANGE;
+ };
+ }
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ return switch (compareTimestampTzNanosToRange((LongTimestampWithTimeZone) value)) {
+ case -1 -> ValueInRange.BELOW_RANGE;
+ case 1 -> ValueInRange.ABOVE_RANGE;
+ default -> ValueInRange.IN_RANGE;
+ };
+ }
+ // all other types are in range
+ return ValueInRange.IN_RANGE;
+ }
+
+ /**
+ * Convert value from Trino representation to Iceberg representation for use in expressions.
+ * For nano timestamps, the value must be verified to be in range before calling this method.
+ */
+ private static Object convertTrinoValueToIceberg(Type type, Object trinoNativeValue)
+ {
+ requireNonNull(trinoNativeValue, "trinoNativeValue is null");
+ // this method should not be used for values outside supported range
+ verify(range(type, trinoNativeValue) == ValueInRange.IN_RANGE);
+
+ if (type == BOOLEAN) {
+ //noinspection RedundantCast
+ return (boolean) trinoNativeValue;
+ }
+
+ if (type == INTEGER) {
+ return toIntExact((long) trinoNativeValue);
+ }
+
+ if (type == BIGINT) {
+ //noinspection RedundantCast
+ return (long) trinoNativeValue;
+ }
+
+ if (type == REAL) {
+ return intBitsToFloat(toIntExact((long) trinoNativeValue));
+ }
+
+ if (type == DOUBLE) {
+ //noinspection RedundantCast
+ return (double) trinoNativeValue;
+ }
+
+ if (type instanceof DecimalType decimalType) {
+ if (decimalType.isShort()) {
+ return BigDecimal.valueOf((long) trinoNativeValue).movePointLeft(decimalType.getScale());
+ }
+ return new BigDecimal(((Int128) trinoNativeValue).toBigInteger(), decimalType.getScale());
+ }
+
+ if (type == DATE) {
+ return toIntExact((long) trinoNativeValue);
+ }
+
+ if (type.equals(TIME_MICROS)) {
+ return LongMath.divide((long) trinoNativeValue, PICOSECONDS_PER_MICROSECOND, UNNECESSARY);
+ }
+
+ if (type.equals(TIMESTAMP_MICROS)) {
+ //noinspection RedundantCast
+ return (long) trinoNativeValue;
+ }
+
+ if (type.equals(TIMESTAMP_TZ_MICROS)) {
+ return timestampTzToMicros((LongTimestampWithTimeZone) trinoNativeValue);
+ }
+
+ // The value has been verified to be in range
+ if (type.equals(TIMESTAMP_NANOS)) {
+ return Expressions.nanos(timestampToNanos((LongTimestamp) trinoNativeValue));
+ }
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ return Expressions.nanos(timestampTzToNanos((LongTimestampWithTimeZone) trinoNativeValue));
+ }
+
+ if (type instanceof VarcharType) {
+ return ((Slice) trinoNativeValue).toStringUtf8();
+ }
+
+ if (type instanceof VarbinaryType) {
+ return ByteBuffer.wrap(((Slice) trinoNativeValue).getBytes());
+ }
+
+ if (type == UUID) {
+ return trinoUuidToJavaUuid(((Slice) trinoNativeValue));
+ }
+
+ throw new UnsupportedOperationException("Unsupported type: " + type);
+ }
}
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergAvroDataConversion.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergAvroDataConversion.java
index 0848639259ef..6369f518df16 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergAvroDataConversion.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergAvroDataConversion.java
@@ -15,6 +15,7 @@
import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slices;
+import io.trino.plugin.iceberg.util.Timestamps;
import io.trino.spi.Page;
import io.trino.spi.TrinoException;
import io.trino.spi.block.ArrayBlockBuilder;
@@ -28,6 +29,8 @@
import io.trino.spi.type.DecimalType;
import io.trino.spi.type.Decimals;
import io.trino.spi.type.Int128;
+import io.trino.spi.type.LongTimestamp;
+import io.trino.spi.type.LongTimestampWithTimeZone;
import io.trino.spi.type.MapType;
import io.trino.spi.type.RowType;
import io.trino.spi.type.Type;
@@ -59,9 +62,12 @@
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
-import static io.trino.plugin.iceberg.util.Timestamps.getTimestampTz;
+import static io.trino.plugin.iceberg.util.Timestamps.getTimestampTzMicros;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampToNanos;
import static io.trino.plugin.iceberg.util.Timestamps.timestampTzFromMicros;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampTzFromNanos;
import static io.trino.plugin.iceberg.util.Timestamps.timestampTzToMicros;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampTzToNanos;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
@@ -71,7 +77,9 @@
import static io.trino.spi.type.RealType.REAL;
import static io.trino.spi.type.TimeType.TIME_MICROS;
import static io.trino.spi.type.TimestampType.TIMESTAMP_MICROS;
+import static io.trino.spi.type.TimestampType.TIMESTAMP_NANOS;
import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_NANOS;
import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_MICROSECOND;
import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND;
import static io.trino.spi.type.UuidType.UUID;
@@ -83,9 +91,13 @@
import static org.apache.iceberg.types.Type.TypeID.FIXED;
import static org.apache.iceberg.util.DateTimeUtil.microsFromTimestamp;
import static org.apache.iceberg.util.DateTimeUtil.microsFromTimestamptz;
+import static org.apache.iceberg.util.DateTimeUtil.nanosFromTimestamp;
+import static org.apache.iceberg.util.DateTimeUtil.nanosFromTimestamptz;
import static org.apache.iceberg.util.DateTimeUtil.timeFromMicros;
import static org.apache.iceberg.util.DateTimeUtil.timestampFromMicros;
+import static org.apache.iceberg.util.DateTimeUtil.timestampFromNanos;
import static org.apache.iceberg.util.DateTimeUtil.timestamptzFromMicros;
+import static org.apache.iceberg.util.DateTimeUtil.timestamptzFromNanos;
public final class IcebergAvroDataConversion
{
@@ -187,9 +199,19 @@ public static Object toIcebergAvroObject(Type type, org.apache.iceberg.types.Typ
return timestampFromMicros(epochMicros);
}
if (type.equals(TIMESTAMP_TZ_MICROS)) {
- long epochUtcMicros = timestampTzToMicros(getTimestampTz(block, position));
+ long epochUtcMicros = timestampTzToMicros(getTimestampTzMicros(block, position));
return timestamptzFromMicros(epochUtcMicros);
}
+ if (type.equals(TIMESTAMP_NANOS)) {
+ LongTimestamp timestamp = (LongTimestamp) TIMESTAMP_NANOS.getObject(block, position);
+ long epochNanos = timestampToNanos(timestamp);
+ return timestampFromNanos(epochNanos);
+ }
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ LongTimestampWithTimeZone timestamp = (LongTimestampWithTimeZone) TIMESTAMP_TZ_NANOS.getObject(block, position);
+ long epochUtcNanos = timestampTzToNanos(timestamp);
+ return timestamptzFromNanos(epochUtcNanos);
+ }
if (type.equals(UUID)) {
return trinoUuidToJavaUuid(UUID.getSlice(block, position));
}
@@ -312,6 +334,16 @@ public static void serializeToTrinoBlock(Type type, org.apache.iceberg.types.Typ
type.writeObject(builder, timestampTzFromMicros(epochUtcMicros));
return;
}
+ if (type.equals(TIMESTAMP_NANOS)) {
+ long epochNanos = nanosFromTimestamp((LocalDateTime) object);
+ type.writeObject(builder, Timestamps.timestampFromNanos(epochNanos));
+ return;
+ }
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ long epochUtcNanos = nanosFromTimestamptz((OffsetDateTime) object);
+ type.writeObject(builder, timestampTzFromNanos(epochUtcNanos));
+ return;
+ }
if (type.equals(UUID)) {
type.writeSlice(builder, javaUuidToTrinoUuid((UUID) object));
return;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java
index 9675be45dc63..e97d84267918 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java
@@ -433,7 +433,9 @@
import static io.trino.spi.type.SmallintType.SMALLINT;
import static io.trino.spi.type.TimeType.TIME_MICROS;
import static io.trino.spi.type.TimestampType.TIMESTAMP_MICROS;
+import static io.trino.spi.type.TimestampType.TIMESTAMP_NANOS;
import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_NANOS;
import static io.trino.spi.type.Timestamps.MICROSECONDS_PER_MILLISECOND;
import static io.trino.spi.type.TinyintType.TINYINT;
import static io.trino.spi.type.VarcharType.VARCHAR;
@@ -486,6 +488,7 @@ public class IcebergMetadata
implements ConnectorMetadata
{
private static final Logger log = Logger.get(IcebergMetadata.class);
+ private static final int TIMESTAMP_NANOS_SUPPORTED_MIN_VERSION = 3;
private static final int OPTIMIZE_MAX_SUPPORTED_TABLE_VERSION = 3;
private static final int CLEANING_UP_PROCEDURES_MAX_SUPPORTED_TABLE_VERSION = 3;
private static final String RETENTION_THRESHOLD = "retention_threshold";
@@ -1393,22 +1396,28 @@ public Optional<ConnectorTableLayout> getNewTableLayout(ConnectorSession session
@Override
public Optional<io.trino.spi.type.Type> getSupportedType(ConnectorSession session, Map<String, Object> tableProperties, io.trino.spi.type.Type type)
{
- io.trino.spi.type.Type newType = coerceType(type);
+ io.trino.spi.type.Type newType = coerceType(type, getFormatVersion(tableProperties));
if (type.equals(newType)) {
return Optional.empty();
}
return Optional.of(newType);
}
- private io.trino.spi.type.Type coerceType(io.trino.spi.type.Type type)
+ private io.trino.spi.type.Type coerceType(io.trino.spi.type.Type type, int formatVersion)
{
if (type == TINYINT || type == SMALLINT) {
return INTEGER;
}
- if (type instanceof TimestampWithTimeZoneType) {
+ if (type instanceof TimestampWithTimeZoneType timestampTzType) {
+ if (formatVersion >= TIMESTAMP_NANOS_SUPPORTED_MIN_VERSION && timestampTzType.getPrecision() > 6) {
+ return TIMESTAMP_TZ_NANOS;
+ }
return TIMESTAMP_TZ_MICROS;
}
- if (type instanceof TimestampType) {
+ if (type instanceof TimestampType timestampType) {
+ if (formatVersion >= TIMESTAMP_NANOS_SUPPORTED_MIN_VERSION && timestampType.getPrecision() > 6) {
+ return TIMESTAMP_NANOS;
+ }
return TIMESTAMP_MICROS;
}
if (type instanceof TimeType) {
@@ -1418,14 +1427,14 @@ private io.trino.spi.type.Type coerceType(io.trino.spi.type.Type type)
return VARCHAR;
}
if (type instanceof ArrayType arrayType) {
- return new ArrayType(coerceType(arrayType.getElementType()));
+ return new ArrayType(coerceType(arrayType.getElementType(), formatVersion));
}
if (type instanceof MapType mapType) {
- return new MapType(coerceType(mapType.getKeyType()), coerceType(mapType.getValueType()), typeManager.getTypeOperators());
+ return new MapType(coerceType(mapType.getKeyType(), formatVersion), coerceType(mapType.getValueType(), formatVersion), typeManager.getTypeOperators());
}
if (type instanceof RowType rowType) {
return RowType.from(rowType.getFields().stream()
- .map(field -> new RowType.Field(field.getName(), coerceType(field.getType())))
+ .map(field -> new RowType.Field(field.getName(), coerceType(field.getType(), formatVersion)))
.collect(toImmutableList()));
}
return type;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSink.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSink.java
index a7842d0c1fb4..8b51e6f3b3a1 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSink.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSink.java
@@ -32,6 +32,7 @@
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.SortOrder;
import io.trino.spi.type.DecimalType;
+import io.trino.spi.type.LongTimestamp;
import io.trino.spi.type.Type;
import io.trino.spi.type.TypeManager;
import io.trino.spi.type.VarbinaryType;
@@ -66,8 +67,11 @@
import static io.trino.plugin.iceberg.IcebergSessionProperties.isSortedWritingEnabled;
import static io.trino.plugin.iceberg.IcebergUtil.getTopLevelColumns;
import static io.trino.plugin.iceberg.PartitionTransforms.getColumnTransform;
-import static io.trino.plugin.iceberg.util.Timestamps.getTimestampTz;
+import static io.trino.plugin.iceberg.util.Timestamps.getTimestampTzMicros;
+import static io.trino.plugin.iceberg.util.Timestamps.getTimestampTzNanos;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampToNanos;
import static io.trino.plugin.iceberg.util.Timestamps.timestampTzToMicros;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampTzToNanos;
import static io.trino.spi.block.RowBlock.getRowFieldsFromBlock;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
@@ -79,7 +83,9 @@
import static io.trino.spi.type.SmallintType.SMALLINT;
import static io.trino.spi.type.TimeType.TIME_MICROS;
import static io.trino.spi.type.TimestampType.TIMESTAMP_MICROS;
+import static io.trino.spi.type.TimestampType.TIMESTAMP_NANOS;
import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_NANOS;
import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_MICROSECOND;
import static io.trino.spi.type.TinyintType.TINYINT;
import static io.trino.spi.type.UuidType.UUID;
@@ -511,7 +517,13 @@ public static Object getIcebergValue(Block block, int position, Type type)
return TIMESTAMP_MICROS.getLong(block, position);
}
if (type.equals(TIMESTAMP_TZ_MICROS)) {
- return timestampTzToMicros(getTimestampTz(block, position));
+ return timestampTzToMicros(getTimestampTzMicros(block, position));
+ }
+ if (type.equals(TIMESTAMP_NANOS)) {
+ return timestampToNanos((LongTimestamp) TIMESTAMP_NANOS.getObject(block, position));
+ }
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ return timestampTzToNanos(getTimestampTzNanos(block, position));
}
if (type instanceof VarbinaryType varbinaryType) {
return varbinaryType.getSlice(block, position).toByteBuffer();
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergTypes.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergTypes.java
index f5cdbd1b37f5..51c3f15bd6b6 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergTypes.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergTypes.java
@@ -19,6 +19,7 @@
import io.trino.spi.type.DecimalType;
import io.trino.spi.type.Decimals;
import io.trino.spi.type.Int128;
+import io.trino.spi.type.LongTimestamp;
import io.trino.spi.type.LongTimestampWithTimeZone;
import io.trino.spi.type.UuidType;
import io.trino.spi.type.VarbinaryType;
@@ -33,8 +34,12 @@
import static io.airlift.slice.Slices.utf8Slice;
import static io.trino.plugin.base.io.ByteBuffers.getWrappedBytes;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampFromNanos;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampToNanos;
import static io.trino.plugin.iceberg.util.Timestamps.timestampTzFromMicros;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampTzFromNanos;
import static io.trino.plugin.iceberg.util.Timestamps.timestampTzToMicros;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampTzToNanos;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.DateType.DATE;
@@ -43,7 +48,9 @@
import static io.trino.spi.type.RealType.REAL;
import static io.trino.spi.type.TimeType.TIME_MICROS;
import static io.trino.spi.type.TimestampType.TIMESTAMP_MICROS;
+import static io.trino.spi.type.TimestampType.TIMESTAMP_NANOS;
import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_NANOS;
import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_MICROSECOND;
import static io.trino.spi.type.UuidType.javaUuidToTrinoUuid;
import static io.trino.spi.type.UuidType.trinoUuidToJavaUuid;
@@ -60,8 +67,8 @@ private IcebergTypes() {}
/**
* Convert value from Trino representation to Iceberg representation.
- *
- * Note: This accepts a Trino type because, currently, no two Iceberg types translate to one Trino type.
+ * Returns raw Java values suitable for Iceberg's Conversions.toByteBuffer().
+ * For nano timestamps, this will throw TrinoException if the value is outside the supported range.
*/
public static Object convertTrinoValueToIceberg(io.trino.spi.type.Type type, Object trinoNativeValue)
{
@@ -114,6 +121,16 @@ public static Object convertTrinoValueToIceberg(io.trino.spi.type.Type type, Obj
return timestampTzToMicros((LongTimestampWithTimeZone) trinoNativeValue);
}
+ if (type.equals(TIMESTAMP_NANOS)) {
+ // Will throw TrinoException if out of range
+ return timestampToNanos((LongTimestamp) trinoNativeValue);
+ }
+
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ // Will throw TrinoException if out of range
+ return timestampTzToNanos((LongTimestampWithTimeZone) trinoNativeValue);
+ }
+
if (type instanceof VarcharType) {
return ((Slice) trinoNativeValue).toStringUtf8();
}
@@ -185,6 +202,13 @@ public static Object convertIcebergValueToTrino(Type icebergType, Object value)
}
return epochMicros;
}
+ if (icebergType instanceof Types.TimestampNanoType icebergTimestampNanoType) {
+ long epochNanos = (long) value;
+ if (icebergTimestampNanoType.shouldAdjustToUTC()) {
+ return timestampTzFromNanos(epochNanos);
+ }
+ return timestampFromNanos(epochNanos);
+ }
if (icebergType instanceof Types.UUIDType) {
return javaUuidToTrinoUuid((UUID) value);
}
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java
index ee05ce629b63..1d85f8fda563 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java
@@ -166,7 +166,9 @@
import static io.trino.plugin.iceberg.TypeConverter.toIcebergType;
import static io.trino.plugin.iceberg.TypeConverter.toIcebergTypeForNewColumn;
import static io.trino.plugin.iceberg.TypeConverter.toTrinoType;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampFromNanos;
import static io.trino.plugin.iceberg.util.Timestamps.timestampTzFromMicros;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampTzFromNanos;
import static io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static io.trino.spi.StandardErrorCode.INVALID_ARGUMENTS;
import static io.trino.spi.StandardErrorCode.INVALID_TABLE_PROPERTY;
@@ -183,7 +185,9 @@
import static io.trino.spi.type.RealType.REAL;
import static io.trino.spi.type.TimeType.TIME_MICROS;
import static io.trino.spi.type.TimestampType.TIMESTAMP_MICROS;
+import static io.trino.spi.type.TimestampType.TIMESTAMP_NANOS;
import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_NANOS;
import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_MICROSECOND;
import static io.trino.spi.type.UuidType.javaUuidToTrinoUuid;
import static java.lang.Boolean.parseBoolean;
@@ -749,6 +753,12 @@ public static Object deserializePartitionValue(Type type, String valueString, St
if (type.equals(TIMESTAMP_TZ_MICROS)) {
return timestampTzFromMicros(parseLong(valueString));
}
+ if (type.equals(TIMESTAMP_NANOS)) {
+ return timestampFromNanos(parseLong(valueString));
+ }
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ return timestampTzFromNanos(parseLong(valueString));
+ }
if (type instanceof VarcharType varcharType) {
Slice value = utf8Slice(valueString);
if (!varcharType.isUnbounded() && SliceUtf8.countCodePoints(value) > varcharType.getBoundedLength()) {
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/PartitionData.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/PartitionData.java
index 33e564a0e13c..bd67906fbb8d 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/PartitionData.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/PartitionData.java
@@ -138,8 +138,9 @@ public static Object getValue(JsonNode partitionValue, Type type)
case DATE:
return partitionValue.asInt();
case LONG:
- case TIMESTAMP:
case TIME:
+ case TIMESTAMP:
+ case TIMESTAMP_NANO:
return partitionValue.asLong();
case FLOAT:
if (partitionValue.asText().equalsIgnoreCase("NaN")) {
@@ -168,8 +169,6 @@ public static Object getValue(JsonNode partitionValue, Type type)
return rescale(
partitionValue.decimalValue(),
createDecimalType(decimalType.precision(), decimalType.scale()));
- // TODO https://github.com/trinodb/trino/issues/19753 Support Iceberg timestamp types with nanosecond precision
- case TIMESTAMP_NANO:
// TODO https://github.com/trinodb/trino/issues/24538 Support variant type
case VARIANT:
case GEOMETRY:
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/PartitionTransforms.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/PartitionTransforms.java
index 4dc1dbfc3422..b1f6957b639d 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/PartitionTransforms.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/PartitionTransforms.java
@@ -23,6 +23,7 @@
import io.trino.spi.type.DecimalType;
import io.trino.spi.type.FixedWidthType;
import io.trino.spi.type.Int128;
+import io.trino.spi.type.LongTimestamp;
import io.trino.spi.type.LongTimestampWithTimeZone;
import io.trino.spi.type.Type;
import io.trino.spi.type.VarcharType;
@@ -40,8 +41,11 @@
import java.util.regex.Pattern;
import static io.airlift.slice.SliceUtf8.offsetOfCodePoint;
-import static io.trino.plugin.iceberg.util.Timestamps.getTimestampTz;
+import static io.trino.plugin.iceberg.util.Timestamps.getTimestampTzMicros;
+import static io.trino.plugin.iceberg.util.Timestamps.getTimestampTzNanos;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampToNanos;
import static io.trino.plugin.iceberg.util.Timestamps.timestampTzToMicros;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampTzToNanos;
import static io.trino.spi.predicate.Utils.nativeValueToBlock;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.DateType.DATE;
@@ -51,10 +55,13 @@
import static io.trino.spi.type.IntegerType.INTEGER;
import static io.trino.spi.type.TimeType.TIME_MICROS;
import static io.trino.spi.type.TimestampType.TIMESTAMP_MICROS;
+import static io.trino.spi.type.TimestampType.TIMESTAMP_NANOS;
import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_NANOS;
import static io.trino.spi.type.Timestamps.MICROSECONDS_PER_MILLISECOND;
import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_DAY;
import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_HOUR;
+import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND;
import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_MICROSECOND;
import static io.trino.spi.type.TypeUtils.readNativeValue;
import static io.trino.spi.type.UuidType.UUID;
@@ -87,10 +94,16 @@ public static ColumnTransform getColumnTransform(PartitionField field, Type sour
return yearsFromDate();
}
if (sourceType.equals(TIMESTAMP_MICROS)) {
- return yearsFromTimestamp();
+ return yearsFromTimestampMicros();
}
if (sourceType.equals(TIMESTAMP_TZ_MICROS)) {
- return yearsFromTimestampWithTimeZone();
+ return yearsFromTimestampMicrosWithTimeZone();
+ }
+ if (sourceType.equals(TIMESTAMP_NANOS)) {
+ return yearsFromTimestampNanos();
+ }
+ if (sourceType.equals(TIMESTAMP_TZ_NANOS)) {
+ return yearsFromTimestampNanosWithTimeZone();
}
throw new UnsupportedOperationException("Unsupported type for 'year': " + field);
case "month":
@@ -98,10 +111,16 @@ public static ColumnTransform getColumnTransform(PartitionField field, Type sour
return monthsFromDate();
}
if (sourceType.equals(TIMESTAMP_MICROS)) {
- return monthsFromTimestamp();
+ return monthsFromTimestampMicros();
}
if (sourceType.equals(TIMESTAMP_TZ_MICROS)) {
- return monthsFromTimestampWithTimeZone();
+ return monthsFromTimestampMicrosWithTimeZone();
+ }
+ if (sourceType.equals(TIMESTAMP_NANOS)) {
+ return monthsFromTimestampNanos();
+ }
+ if (sourceType.equals(TIMESTAMP_TZ_NANOS)) {
+ return monthsFromTimestampNanosWithTimeZone();
}
throw new UnsupportedOperationException("Unsupported type for 'month': " + field);
case "day":
@@ -109,18 +128,30 @@ public static ColumnTransform getColumnTransform(PartitionField field, Type sour
return daysFromDate();
}
if (sourceType.equals(TIMESTAMP_MICROS)) {
- return daysFromTimestamp();
+ return daysFromTimestampMicros();
}
if (sourceType.equals(TIMESTAMP_TZ_MICROS)) {
- return daysFromTimestampWithTimeZone();
+ return daysFromTimestampMicrosWithTimeZone();
+ }
+ if (sourceType.equals(TIMESTAMP_NANOS)) {
+ return daysFromTimestampNanos();
+ }
+ if (sourceType.equals(TIMESTAMP_TZ_NANOS)) {
+ return daysFromTimestampNanosWithTimeZone();
}
throw new UnsupportedOperationException("Unsupported type for 'day': " + field);
case "hour":
if (sourceType.equals(TIMESTAMP_MICROS)) {
- return hoursFromTimestamp();
+ return hoursFromTimestampMicros();
}
if (sourceType.equals(TIMESTAMP_TZ_MICROS)) {
- return hoursFromTimestampWithTimeZone();
+ return hoursFromTimestampMicrosWithTimeZone();
+ }
+ if (sourceType.equals(TIMESTAMP_NANOS)) {
+ return hoursFromTimestampNanos();
+ }
+ if (sourceType.equals(TIMESTAMP_TZ_NANOS)) {
+ return hoursFromTimestampNanosWithTimeZone();
}
throw new UnsupportedOperationException("Unsupported type for 'hour': " + field);
case "void":
@@ -170,10 +201,16 @@ public static ColumnTransform getColumnTransform(IcebergPartitionFunction field)
yield yearsFromDate();
}
if (type.equals(TIMESTAMP_MICROS)) {
- yield yearsFromTimestamp();
+ yield yearsFromTimestampMicros();
}
if (type.equals(TIMESTAMP_TZ_MICROS)) {
- yield yearsFromTimestampWithTimeZone();
+ yield yearsFromTimestampMicrosWithTimeZone();
+ }
+ if (type.equals(TIMESTAMP_NANOS)) {
+ yield yearsFromTimestampNanos();
+ }
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ yield yearsFromTimestampNanosWithTimeZone();
}
throw new UnsupportedOperationException("Unsupported type for 'year': " + field);
}
@@ -182,10 +219,16 @@ public static ColumnTransform getColumnTransform(IcebergPartitionFunction field)
yield monthsFromDate();
}
if (type.equals(TIMESTAMP_MICROS)) {
- yield monthsFromTimestamp();
+ yield monthsFromTimestampMicros();
}
if (type.equals(TIMESTAMP_TZ_MICROS)) {
- yield monthsFromTimestampWithTimeZone();
+ yield monthsFromTimestampMicrosWithTimeZone();
+ }
+ if (type.equals(TIMESTAMP_NANOS)) {
+ yield monthsFromTimestampNanos();
+ }
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ yield monthsFromTimestampNanosWithTimeZone();
}
throw new UnsupportedOperationException("Unsupported type for 'month': " + field);
}
@@ -194,19 +237,31 @@ public static ColumnTransform getColumnTransform(IcebergPartitionFunction field)
yield daysFromDate();
}
if (type.equals(TIMESTAMP_MICROS)) {
- yield daysFromTimestamp();
+ yield daysFromTimestampMicros();
}
if (type.equals(TIMESTAMP_TZ_MICROS)) {
- yield daysFromTimestampWithTimeZone();
+ yield daysFromTimestampMicrosWithTimeZone();
+ }
+ if (type.equals(TIMESTAMP_NANOS)) {
+ yield daysFromTimestampNanos();
+ }
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ yield daysFromTimestampNanosWithTimeZone();
}
throw new UnsupportedOperationException("Unsupported type for 'day': " + field);
}
case HOUR -> {
if (type.equals(TIMESTAMP_MICROS)) {
- yield hoursFromTimestamp();
+ yield hoursFromTimestampMicros();
}
if (type.equals(TIMESTAMP_TZ_MICROS)) {
- yield hoursFromTimestampWithTimeZone();
+ yield hoursFromTimestampMicrosWithTimeZone();
+ }
+ if (type.equals(TIMESTAMP_NANOS)) {
+ yield hoursFromTimestampNanos();
+ }
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ yield hoursFromTimestampNanosWithTimeZone();
}
throw new UnsupportedOperationException("Unsupported type for 'hour': " + field);
}
@@ -283,10 +338,16 @@ private static Hasher getBucketingHash(Type type)
return PartitionTransforms::hashTime;
}
if (type.equals(TIMESTAMP_MICROS)) {
- return PartitionTransforms::hashTimestamp;
+ return PartitionTransforms::hashTimestampMicros;
}
if (type.equals(TIMESTAMP_TZ_MICROS)) {
- return PartitionTransforms::hashTimestampWithTimeZone;
+ return PartitionTransforms::hashTimestampMicrosWithTimeZone;
+ }
+ if (type.equals(TIMESTAMP_NANOS)) {
+ return PartitionTransforms::hashTimestampNanos;
+ }
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ return PartitionTransforms::hashTimestampNanosWithTimeZone;
}
if (type instanceof VarcharType) {
return PartitionTransforms::hashVarchar;
@@ -336,7 +397,7 @@ private static ColumnTransform daysFromDate()
ValueTransform.from(DATE, transform));
}
- private static ColumnTransform yearsFromTimestamp()
+ private static ColumnTransform yearsFromTimestampMicros()
{
LongUnaryOperator transform = epochMicros -> epochYear(floorDiv(epochMicros, MICROSECONDS_PER_MILLISECOND));
return new ColumnTransform(
@@ -348,7 +409,7 @@ private static ColumnTransform yearsFromTimestamp()
ValueTransform.from(TIMESTAMP_MICROS, transform));
}
- private static ColumnTransform monthsFromTimestamp()
+ private static ColumnTransform monthsFromTimestampMicros()
{
LongUnaryOperator transform = epochMicros -> epochMonth(floorDiv(epochMicros, MICROSECONDS_PER_MILLISECOND));
return new ColumnTransform(
@@ -360,7 +421,7 @@ private static ColumnTransform monthsFromTimestamp()
ValueTransform.from(TIMESTAMP_MICROS, transform));
}
- private static ColumnTransform daysFromTimestamp()
+ private static ColumnTransform daysFromTimestampMicros()
{
LongUnaryOperator transform = epochMicros -> epochDay(floorDiv(epochMicros, MICROSECONDS_PER_MILLISECOND));
return new ColumnTransform(
@@ -372,7 +433,7 @@ private static ColumnTransform daysFromTimestamp()
ValueTransform.from(TIMESTAMP_MICROS, transform));
}
- private static ColumnTransform hoursFromTimestamp()
+ private static ColumnTransform hoursFromTimestampMicros()
{
LongUnaryOperator transform = epochMicros -> epochHour(floorDiv(epochMicros, MICROSECONDS_PER_MILLISECOND));
return new ColumnTransform(
@@ -384,7 +445,7 @@ private static ColumnTransform hoursFromTimestamp()
ValueTransform.from(TIMESTAMP_MICROS, transform));
}
- private static ColumnTransform yearsFromTimestampWithTimeZone()
+ private static ColumnTransform yearsFromTimestampMicrosWithTimeZone()
{
ToLongFunction<LongTimestampWithTimeZone> transform = value -> epochYear(value.getEpochMillis());
return new ColumnTransform(
@@ -392,11 +453,11 @@ private static ColumnTransform yearsFromTimestampWithTimeZone()
false,
true,
true,
- block -> extractTimestampWithTimeZone(block, transform),
- ValueTransform.fromTimestampTzTransform(transform));
+ block -> extractTimestampMicrosWithTimeZone(block, transform),
+ ValueTransform.fromTimestampTzMicrosTransform(transform));
}
- private static ColumnTransform monthsFromTimestampWithTimeZone()
+ private static ColumnTransform monthsFromTimestampMicrosWithTimeZone()
{
ToLongFunction<LongTimestampWithTimeZone> transform = value -> epochMonth(value.getEpochMillis());
return new ColumnTransform(
@@ -404,11 +465,11 @@ private static ColumnTransform monthsFromTimestampWithTimeZone()
false,
true,
true,
- block -> extractTimestampWithTimeZone(block, transform),
- ValueTransform.fromTimestampTzTransform(transform));
+ block -> extractTimestampMicrosWithTimeZone(block, transform),
+ ValueTransform.fromTimestampTzMicrosTransform(transform));
}
- private static ColumnTransform daysFromTimestampWithTimeZone()
+ private static ColumnTransform daysFromTimestampMicrosWithTimeZone()
{
ToLongFunction<LongTimestampWithTimeZone> transform = value -> epochDay(value.getEpochMillis());
return new ColumnTransform(
@@ -416,11 +477,11 @@ private static ColumnTransform daysFromTimestampWithTimeZone()
false,
true,
true,
- block -> extractTimestampWithTimeZone(block, transform),
- ValueTransform.fromTimestampTzTransform(transform));
+ block -> extractTimestampMicrosWithTimeZone(block, transform),
+ ValueTransform.fromTimestampTzMicrosTransform(transform));
}
- private static ColumnTransform hoursFromTimestampWithTimeZone()
+ private static ColumnTransform hoursFromTimestampMicrosWithTimeZone()
{
ToLongFunction transform = value -> epochHour(value.getEpochMillis());
return new ColumnTransform(
@@ -428,11 +489,125 @@ private static ColumnTransform hoursFromTimestampWithTimeZone()
false,
true,
true,
- block -> extractTimestampWithTimeZone(block, transform),
- ValueTransform.fromTimestampTzTransform(transform));
+ block -> extractTimestampMicrosWithTimeZone(block, transform),
+ ValueTransform.fromTimestampTzMicrosTransform(transform));
+ }
+
+ // Nano timestamp transforms (local timestamp without timezone)
+
+ private static ColumnTransform yearsFromTimestampNanos()
+ {
+ ToLongFunction transform = value -> epochYear(floorDiv(timestampToNanos(value), NANOSECONDS_PER_MILLISECOND));
+ return new ColumnTransform(
+ INTEGER,
+ false,
+ true,
+ true,
+ block -> extractTimestampNanos(block, transform),
+ ValueTransform.fromTimestampNanosTransform(transform));
+ }
+
+ private static ColumnTransform monthsFromTimestampNanos()
+ {
+ ToLongFunction transform = value -> epochMonth(floorDiv(timestampToNanos(value), NANOSECONDS_PER_MILLISECOND));
+ return new ColumnTransform(
+ INTEGER,
+ false,
+ true,
+ true,
+ block -> extractTimestampNanos(block, transform),
+ ValueTransform.fromTimestampNanosTransform(transform));
+ }
+
+ private static ColumnTransform daysFromTimestampNanos()
+ {
+ ToLongFunction transform = value -> epochDay(floorDiv(timestampToNanos(value), NANOSECONDS_PER_MILLISECOND));
+ return new ColumnTransform(
+ INTEGER,
+ false,
+ true,
+ true,
+ block -> extractTimestampNanos(block, transform),
+ ValueTransform.fromTimestampNanosTransform(transform));
+ }
+
+ private static ColumnTransform hoursFromTimestampNanos()
+ {
+ ToLongFunction transform = value -> epochHour(floorDiv(timestampToNanos(value), NANOSECONDS_PER_MILLISECOND));
+ return new ColumnTransform(
+ INTEGER,
+ false,
+ true,
+ true,
+ block -> extractTimestampNanos(block, transform),
+ ValueTransform.fromTimestampNanosTransform(transform));
+ }
+
+ // Nano timestamp with timezone transforms (instant, stored as UTC)
+
+ private static ColumnTransform yearsFromTimestampNanosWithTimeZone()
+ {
+ ToLongFunction transform = value -> epochYear(floorDiv(timestampTzToNanos(value), NANOSECONDS_PER_MILLISECOND));
+ return new ColumnTransform(
+ INTEGER,
+ false,
+ true,
+ true,
+ block -> extractTimestampNanosWithTimeZone(block, transform),
+ ValueTransform.fromTimestampTzNanosTransform(transform));
+ }
+
+ private static ColumnTransform monthsFromTimestampNanosWithTimeZone()
+ {
+ ToLongFunction transform = value -> epochMonth(floorDiv(timestampTzToNanos(value), NANOSECONDS_PER_MILLISECOND));
+ return new ColumnTransform(
+ INTEGER,
+ false,
+ true,
+ true,
+ block -> extractTimestampNanosWithTimeZone(block, transform),
+ ValueTransform.fromTimestampTzNanosTransform(transform));
}
- private static Block extractTimestampWithTimeZone(Block block, ToLongFunction function)
+ private static ColumnTransform daysFromTimestampNanosWithTimeZone()
+ {
+ ToLongFunction transform = value -> epochDay(floorDiv(timestampTzToNanos(value), NANOSECONDS_PER_MILLISECOND));
+ return new ColumnTransform(
+ INTEGER,
+ false,
+ true,
+ true,
+ block -> extractTimestampNanosWithTimeZone(block, transform),
+ ValueTransform.fromTimestampTzNanosTransform(transform));
+ }
+
+ private static ColumnTransform hoursFromTimestampNanosWithTimeZone()
+ {
+ ToLongFunction transform = value -> epochHour(floorDiv(timestampTzToNanos(value), NANOSECONDS_PER_MILLISECOND));
+ return new ColumnTransform(
+ INTEGER,
+ false,
+ true,
+ true,
+ block -> extractTimestampNanosWithTimeZone(block, transform),
+ ValueTransform.fromTimestampTzNanosTransform(transform));
+ }
+
+ private static Block extractTimestampNanos(Block block, ToLongFunction function)
+ {
+ BlockBuilder builder = INTEGER.createFixedSizeBlockBuilder(block.getPositionCount());
+ for (int position = 0; position < block.getPositionCount(); position++) {
+ if (block.isNull(position)) {
+ builder.appendNull();
+ continue;
+ }
+ LongTimestamp value = (LongTimestamp) TIMESTAMP_NANOS.getObject(block, position);
+ INTEGER.writeLong(builder, function.applyAsLong(value));
+ }
+ return builder.build();
+ }
+
+ private static Block extractTimestampMicrosWithTimeZone(Block block, ToLongFunction function)
{
BlockBuilder builder = INTEGER.createFixedSizeBlockBuilder(block.getPositionCount());
for (int position = 0; position < block.getPositionCount(); position++) {
@@ -440,7 +615,21 @@ private static Block extractTimestampWithTimeZone(Block block, ToLongFunction function)
+ {
+ BlockBuilder builder = INTEGER.createFixedSizeBlockBuilder(block.getPositionCount());
+ for (int position = 0; position < block.getPositionCount(); position++) {
+ if (block.isNull(position)) {
+ builder.appendNull();
+ continue;
+ }
+ LongTimestampWithTimeZone value = getTimestampTzNanos(block, position);
INTEGER.writeLong(builder, function.applyAsLong(value));
}
return builder.build();
@@ -485,14 +674,24 @@ private static int hashTime(Block block, int position)
return bucketHash(picos / PICOSECONDS_PER_MICROSECOND);
}
- private static int hashTimestamp(Block block, int position)
+ private static int hashTimestampMicros(Block block, int position)
{
return bucketHash(TIMESTAMP_MICROS.getLong(block, position));
}
- private static int hashTimestampWithTimeZone(Block block, int position)
+ private static int hashTimestampMicrosWithTimeZone(Block block, int position)
+ {
+ return bucketHash(timestampTzToMicros(getTimestampTzMicros(block, position)));
+ }
+
+ private static int hashTimestampNanos(Block block, int position)
+ {
+ return bucketHash(timestampToNanos((LongTimestamp) TIMESTAMP_NANOS.getObject(block, position)));
+ }
+
+ private static int hashTimestampNanosWithTimeZone(Block block, int position)
{
- return bucketHash(timestampTzToMicros(getTimestampTz(block, position)));
+ return bucketHash(timestampTzToNanos(getTimestampTzNanos(block, position)));
}
private static int hashVarchar(Block block, int position)
@@ -865,13 +1064,33 @@ static ValueTransform from(Type sourceType, LongUnaryOperator transform)
};
}
- static ValueTransform fromTimestampTzTransform(ToLongFunction transform)
+ static ValueTransform fromTimestampTzMicrosTransform(ToLongFunction transform)
+ {
+ return (block, position) -> {
+ if (block.isNull(position)) {
+ return null;
+ }
+ return transform.applyAsLong(getTimestampTzMicros(block, position));
+ };
+ }
+
+ static ValueTransform fromTimestampTzNanosTransform(ToLongFunction transform)
+ {
+ return (block, position) -> {
+ if (block.isNull(position)) {
+ return null;
+ }
+ return transform.applyAsLong(getTimestampTzNanos(block, position));
+ };
+ }
+
+ static ValueTransform fromTimestampNanosTransform(ToLongFunction transform)
{
return (block, position) -> {
if (block.isNull(position)) {
return null;
}
- return transform.applyAsLong(getTimestampTz(block, position));
+ return transform.applyAsLong((LongTimestamp) TIMESTAMP_NANOS.getObject(block, position));
};
}
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/TypeConverter.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/TypeConverter.java
index 5cde3b183d17..b7df1f27ac87 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/TypeConverter.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/TypeConverter.java
@@ -51,7 +51,9 @@
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.type.TimeType.TIME_MICROS;
import static io.trino.spi.type.TimestampType.TIMESTAMP_MICROS;
+import static io.trino.spi.type.TimestampType.TIMESTAMP_NANOS;
import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_NANOS;
import static io.trino.spi.type.UuidType.UUID;
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
@@ -86,8 +88,7 @@ public static Type toTrinoType(org.apache.iceberg.types.Type type, TypeManager t
case TIMESTAMP:
return ((Types.TimestampType) type).shouldAdjustToUTC() ? TIMESTAMP_TZ_MICROS : TIMESTAMP_MICROS;
case TIMESTAMP_NANO:
- // TODO https://github.com/trinodb/trino/issues/19753 Support Iceberg timestamp types with nanosecond precision
- break;
+ return ((Types.TimestampNanoType) type).shouldAdjustToUTC() ? TIMESTAMP_TZ_NANOS : TIMESTAMP_NANOS;
case STRING:
return VarcharType.createUnboundedVarcharType();
case UUID:
@@ -164,6 +165,12 @@ private static org.apache.iceberg.types.Type toIcebergTypeInternal(Type type, Op
if (type.equals(TIMESTAMP_TZ_MICROS)) {
return Types.TimestampType.withZone();
}
+ if (type.equals(TIMESTAMP_NANOS)) {
+ return Types.TimestampNanoType.withoutZone();
+ }
+ if (type.equals(TIMESTAMP_TZ_NANOS)) {
+ return Types.TimestampNanoType.withZone();
+ }
if (type.equals(UUID)) {
return Types.UUIDType.get();
}
@@ -180,10 +187,10 @@ private static org.apache.iceberg.types.Type toIcebergTypeInternal(Type type, Op
throw new TrinoException(NOT_SUPPORTED, format("Time precision (%s) not supported for Iceberg. Use \"time(6)\" instead.", timeType.getPrecision()));
}
if (type instanceof TimestampType timestampType) {
- throw new TrinoException(NOT_SUPPORTED, format("Timestamp precision (%s) not supported for Iceberg. Use \"timestamp(6)\" instead.", timestampType.getPrecision()));
+ throw new TrinoException(NOT_SUPPORTED, format("Timestamp precision (%s) not supported for Iceberg. Use \"timestamp(6)\" or \"timestamp(9)\" instead.", timestampType.getPrecision()));
}
if (type instanceof TimestampWithTimeZoneType timestampWithTimeZoneType) {
- throw new TrinoException(NOT_SUPPORTED, format("Timestamp precision (%s) not supported for Iceberg. Use \"timestamp(6) with time zone\" instead.", timestampWithTimeZoneType.getPrecision()));
+ throw new TrinoException(NOT_SUPPORTED, format("Timestamp precision (%s) not supported for Iceberg. Use \"timestamp(6) with time zone\" or \"timestamp(9) with time zone\" instead.", timestampWithTimeZoneType.getPrecision()));
}
throw new TrinoException(NOT_SUPPORTED, "Type not supported for Iceberg: " + type.getDisplayName());
}
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/GlueIcebergUtil.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/GlueIcebergUtil.java
index 119115c80b22..b42c5e2b5350 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/GlueIcebergUtil.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/GlueIcebergUtil.java
@@ -179,6 +179,7 @@ private static String toGlueTypeStringLossy(Type type)
case UUID:
return "string";
case TIMESTAMP:
+ case TIMESTAMP_NANO:
return "timestamp";
case FIXED:
case BINARY:
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/HiveSchemaUtil.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/HiveSchemaUtil.java
index e5b07b1d2131..603736a8cc41 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/HiveSchemaUtil.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/HiveSchemaUtil.java
@@ -42,9 +42,7 @@ private static String convertToTypeString(Type type)
case DOUBLE -> "double";
case DATE -> "date";
case TIME, STRING, UUID -> "string";
- case TIMESTAMP -> "timestamp";
- // TODO https://github.com/trinodb/trino/issues/19753 Support Iceberg timestamp types with nanosecond precision
- case TIMESTAMP_NANO -> throw new TrinoException(NOT_SUPPORTED, "Unsupported Iceberg type: TIMESTAMP_NANO");
+ case TIMESTAMP, TIMESTAMP_NANO -> "timestamp";
case FIXED, BINARY -> "binary";
case DECIMAL -> "decimal(%s,%s)".formatted(((DecimalType) type).precision(), ((DecimalType) type).scale());
case UNKNOWN, GEOMETRY, GEOGRAPHY -> throw new TrinoException(NOT_SUPPORTED, "Unsupported Iceberg type: " + type);
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/OrcMetrics.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/OrcMetrics.java
index 35debe802afd..a9660de54aa5 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/OrcMetrics.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/OrcMetrics.java
@@ -68,6 +68,9 @@
import static io.trino.plugin.iceberg.util.OrcIcebergIds.fileColumnsByIcebergId;
import static io.trino.plugin.iceberg.util.OrcTypeConverter.ORC_ICEBERG_ID_KEY;
import static io.trino.spi.type.Timestamps.MICROSECONDS_PER_MILLISECOND;
+import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND;
+import static java.lang.Math.addExact;
+import static java.lang.Math.multiplyExact;
import static java.lang.Math.toIntExact;
import static java.math.RoundingMode.UNNECESSARY;
import static java.util.function.Function.identity;
@@ -299,12 +302,29 @@ private static Optional toIcebergMinMax(ColumnStatistics orcColum
return Optional.empty();
}
// Since ORC timestamp statistics are truncated to millisecond precision, this can cause some column values to fall outside the stats range.
- // We are appending 999 microseconds to account for the fact that Trino ORC writer truncates timestamps.
- return Optional.of(new IcebergMinMax(icebergType, min * MICROSECONDS_PER_MILLISECOND, (max * MICROSECONDS_PER_MILLISECOND) + (MICROSECONDS_PER_MILLISECOND - 1), metricsModes));
+ // We are appending the max sub-millisecond value to account for the fact that ORC writer truncates timestamps.
+ if (icebergType.typeId() == TypeID.TIMESTAMP_NANO) {
+ return timestampMinMax(icebergType, metricsModes, min, max, NANOSECONDS_PER_MILLISECOND);
+ }
+ return timestampMinMax(icebergType, metricsModes, min, max, MICROSECONDS_PER_MILLISECOND);
}
return Optional.empty();
}
+ private static Optional timestampMinMax(Type icebergType, MetricsModes.MetricsMode metricsMode, long min, long max, long unit)
+ {
+ try {
+ long scaledMin = multiplyExact(min, unit);
+ long scaledMax = addExact(multiplyExact(max, unit), unit - 1);
+ return Optional.of(new IcebergMinMax(icebergType, scaledMin, scaledMax, metricsMode));
+ }
+ catch (ArithmeticException _) {
+ // ORC timestamp stats are millisecond-granularity hints. If widening them to Iceberg units overflows,
+ // drop the bounds entirely rather than emitting wrapped values.
+ return Optional.empty();
+ }
+ }
+
private static class IcebergMinMax
{
private final ByteBuffer min;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/OrcTypeConverter.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/OrcTypeConverter.java
index 2b757319a68e..4b6c39fbc6b5 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/OrcTypeConverter.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/OrcTypeConverter.java
@@ -27,6 +27,7 @@
import org.apache.iceberg.types.Types.MapType;
import org.apache.iceberg.types.Types.NestedField;
import org.apache.iceberg.types.Types.StructType;
+import org.apache.iceberg.types.Types.TimestampNanoType;
import org.apache.iceberg.types.Types.TimestampType;
import java.util.ArrayList;
@@ -42,6 +43,9 @@ public final class OrcTypeConverter
public static final String ORC_ICEBERG_REQUIRED_KEY = "iceberg.required";
public static final String ICEBERG_LONG_TYPE = "iceberg.long-type";
public static final String ICEBERG_BINARY_TYPE = "iceberg.binary-type";
+ public static final String ICEBERG_TIMESTAMP_UNIT = "iceberg.timestamp-unit";
+ public static final String ICEBERG_TIMESTAMP_UNIT_MICROS = "MICROS";
+ public static final String ICEBERG_TIMESTAMP_UNIT_NANOS = "NANOS";
private OrcTypeConverter() {}
@@ -68,10 +72,20 @@ private static List toOrcType(int nextFieldTypeIndex, Type type, Map {
OrcTypeKind timestampKind = ((TimestampType) type).shouldAdjustToUTC() ? OrcTypeKind.TIMESTAMP_INSTANT : OrcTypeKind.TIMESTAMP;
+ attributes = ImmutableMap.builder()
+ .putAll(attributes)
+ .put(ICEBERG_TIMESTAMP_UNIT, ICEBERG_TIMESTAMP_UNIT_MICROS)
+ .buildOrThrow();
+ yield ImmutableList.of(new OrcType(timestampKind, ImmutableList.of(), ImmutableList.of(), Optional.empty(), Optional.empty(), Optional.empty(), attributes));
+ }
+ case TIMESTAMP_NANO -> {
+ OrcTypeKind timestampKind = ((TimestampNanoType) type).shouldAdjustToUTC() ? OrcTypeKind.TIMESTAMP_INSTANT : OrcTypeKind.TIMESTAMP;
+ attributes = ImmutableMap.builder()
+ .putAll(attributes)
+ .put(ICEBERG_TIMESTAMP_UNIT, ICEBERG_TIMESTAMP_UNIT_NANOS)
+ .buildOrThrow();
yield ImmutableList.of(new OrcType(timestampKind, ImmutableList.of(), ImmutableList.of(), Optional.empty(), Optional.empty(), Optional.empty(), attributes));
}
- // TODO https://github.com/trinodb/trino/issues/19753 Support Iceberg timestamp types with nanosecond precision
- case TIMESTAMP_NANO -> throw new TrinoException(NOT_SUPPORTED, "Unsupported Iceberg type: TIMESTAMP_NANO");
case STRING -> ImmutableList.of(new OrcType(OrcTypeKind.STRING, ImmutableList.of(), ImmutableList.of(), Optional.empty(), Optional.empty(), Optional.empty(), attributes));
case FIXED, BINARY -> ImmutableList.of(new OrcType(OrcTypeKind.BINARY, ImmutableList.of(), ImmutableList.of(), Optional.empty(), Optional.empty(), Optional.empty(), attributes));
case DECIMAL -> {
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/Timestamps.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/Timestamps.java
index 502386ba13c7..9934bd24e2d7 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/Timestamps.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/util/Timestamps.java
@@ -14,19 +14,37 @@
package io.trino.plugin.iceberg.util;
import com.google.common.math.LongMath;
+import io.trino.spi.TrinoException;
import io.trino.spi.block.Block;
+import io.trino.spi.type.LongTimestamp;
import io.trino.spi.type.LongTimestampWithTimeZone;
+import static io.trino.spi.StandardErrorCode.NUMERIC_VALUE_OUT_OF_RANGE;
import static io.trino.spi.type.TimeZoneKey.UTC_KEY;
import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_NANOS;
import static io.trino.spi.type.Timestamps.MICROSECONDS_PER_MILLISECOND;
+import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MICROSECOND;
+import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND;
import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_MICROSECOND;
+import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND;
import static java.lang.Math.floorDiv;
import static java.lang.Math.floorMod;
+import static java.lang.Math.toIntExact;
import static java.math.RoundingMode.UNNECESSARY;
public final class Timestamps
{
+ // Nano timestamp range: Long.MIN_VALUE/MAX_VALUE nanos converted to micros/millis
+ private static final long MIN_NANO_EPOCH_MICROS = floorDiv(Long.MIN_VALUE, NANOSECONDS_PER_MICROSECOND);
+ private static final int MIN_NANO_OF_MICRO = toIntExact(floorMod(Long.MIN_VALUE, NANOSECONDS_PER_MICROSECOND));
+ private static final long MAX_NANO_EPOCH_MICROS = floorDiv(Long.MAX_VALUE, NANOSECONDS_PER_MICROSECOND);
+ private static final int MAX_NANO_OF_MICRO = toIntExact(floorMod(Long.MAX_VALUE, NANOSECONDS_PER_MICROSECOND));
+ private static final long MIN_NANO_EPOCH_MILLIS = floorDiv(Long.MIN_VALUE, NANOSECONDS_PER_MILLISECOND);
+ private static final int MIN_NANO_OF_MILLI = toIntExact(floorMod(Long.MIN_VALUE, NANOSECONDS_PER_MILLISECOND));
+ private static final long MAX_NANO_EPOCH_MILLIS = floorDiv(Long.MAX_VALUE, NANOSECONDS_PER_MILLISECOND);
+ private static final int MAX_NANO_OF_MILLI = toIntExact(floorMod(Long.MAX_VALUE, NANOSECONDS_PER_MILLISECOND));
+
private Timestamps() {}
public static long timestampTzToMicros(LongTimestampWithTimeZone timestamp)
@@ -42,8 +60,89 @@ public static LongTimestampWithTimeZone timestampTzFromMicros(long epochMicros)
return LongTimestampWithTimeZone.fromEpochMillisAndFraction(epochMillis, picosOfMillis, UTC_KEY);
}
- public static LongTimestampWithTimeZone getTimestampTz(Block block, int position)
+ public static LongTimestampWithTimeZone getTimestampTzMicros(Block block, int position)
{
return (LongTimestampWithTimeZone) TIMESTAMP_TZ_MICROS.getObject(block, position);
}
+
+ public static LongTimestampWithTimeZone getTimestampTzNanos(Block block, int position)
+ {
+ return (LongTimestampWithTimeZone) TIMESTAMP_TZ_NANOS.getObject(block, position);
+ }
+
+ public static int getNanosOfMicro(LongTimestamp timestamp)
+ {
+ return toIntExact(LongMath.divide(timestamp.getPicosOfMicro(), PICOSECONDS_PER_NANOSECOND, UNNECESSARY));
+ }
+
+ public static int getNanosOfMilli(LongTimestampWithTimeZone timestamp)
+ {
+ return toIntExact(LongMath.divide(timestamp.getPicosOfMilli(), PICOSECONDS_PER_NANOSECOND, UNNECESSARY));
+ }
+
+ public static int compareTimestampNanosToRange(LongTimestamp timestamp)
+ {
+ return compareToRange(timestamp.getEpochMicros(), getNanosOfMicro(timestamp), MIN_NANO_EPOCH_MICROS, MIN_NANO_OF_MICRO, MAX_NANO_EPOCH_MICROS, MAX_NANO_OF_MICRO);
+ }
+
+ public static int compareTimestampTzNanosToRange(LongTimestampWithTimeZone timestamp)
+ {
+ return compareToRange(timestamp.getEpochMillis(), getNanosOfMilli(timestamp), MIN_NANO_EPOCH_MILLIS, MIN_NANO_OF_MILLI, MAX_NANO_EPOCH_MILLIS, MAX_NANO_OF_MILLI);
+ }
+
+ // Nano timestamp conversions (local timestamp without timezone)
+ public static LongTimestamp timestampFromNanos(long epochNanos)
+ {
+ long epochMicros = floorDiv(epochNanos, NANOSECONDS_PER_MICROSECOND);
+ int picosOfMicro = toIntExact(floorMod(epochNanos, NANOSECONDS_PER_MICROSECOND)) * PICOSECONDS_PER_NANOSECOND;
+ return new LongTimestamp(epochMicros, picosOfMicro);
+ }
+
+ public static long timestampToNanos(LongTimestamp timestamp)
+ {
+ long epochMicros = timestamp.getEpochMicros();
+ int nanosOfMicro = getNanosOfMicro(timestamp);
+ if (isOutOfRange(epochMicros, nanosOfMicro, MIN_NANO_EPOCH_MICROS, MIN_NANO_OF_MICRO, MAX_NANO_EPOCH_MICROS, MAX_NANO_OF_MICRO)) {
+ throw new TrinoException(NUMERIC_VALUE_OUT_OF_RANGE, "Timestamp value is outside the range supported by Iceberg nano timestamps");
+ }
+ if (epochMicros == MIN_NANO_EPOCH_MICROS) {
+ return Long.MIN_VALUE + (nanosOfMicro - MIN_NANO_OF_MICRO);
+ }
+ return (epochMicros * NANOSECONDS_PER_MICROSECOND) + nanosOfMicro;
+ }
+
+ // Nano timestamp with timezone conversions (instant, stored as UTC)
+ public static LongTimestampWithTimeZone timestampTzFromNanos(long epochNanos)
+ {
+ long epochMillis = floorDiv(epochNanos, NANOSECONDS_PER_MILLISECOND);
+ int picosOfMilli = toIntExact(floorMod(epochNanos, NANOSECONDS_PER_MILLISECOND)) * PICOSECONDS_PER_NANOSECOND;
+ return LongTimestampWithTimeZone.fromEpochMillisAndFraction(epochMillis, picosOfMilli, UTC_KEY);
+ }
+
+ public static long timestampTzToNanos(LongTimestampWithTimeZone timestamp)
+ {
+ long epochMillis = timestamp.getEpochMillis();
+ int nanosOfMilli = getNanosOfMilli(timestamp);
+ if (isOutOfRange(epochMillis, nanosOfMilli, MIN_NANO_EPOCH_MILLIS, MIN_NANO_OF_MILLI, MAX_NANO_EPOCH_MILLIS, MAX_NANO_OF_MILLI)) {
+ throw new TrinoException(NUMERIC_VALUE_OUT_OF_RANGE, "Timestamp value is outside the range supported by Iceberg nano timestamps");
+ }
+ if (epochMillis == MIN_NANO_EPOCH_MILLIS) {
+ return Long.MIN_VALUE + (nanosOfMilli - MIN_NANO_OF_MILLI);
+ }
+ return (epochMillis * NANOSECONDS_PER_MILLISECOND) + nanosOfMilli;
+ }
+
+ private static boolean isOutOfRange(long epoch, int nanosFraction, long minEpoch, int minNanosFraction, long maxEpoch, int maxNanosFraction)
+ {
+ return compareToRange(epoch, nanosFraction, minEpoch, minNanosFraction, maxEpoch, maxNanosFraction) != 0;
+ }
+
+ private static int compareToRange(long epoch, int nanosFraction, long minEpoch, int minNanosFraction, long maxEpoch, int maxNanosFraction)
+ {
+ return epoch < minEpoch ||
+ (epoch == minEpoch && nanosFraction < minNanosFraction) ? -1 :
+ epoch > maxEpoch ||
+ (epoch == maxEpoch && nanosFraction > maxNanosFraction) ? 1 :
+ 0;
+ }
}
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
index f93d892e755e..fdc930664841 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
@@ -86,10 +86,12 @@
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
+import java.math.BigDecimal;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Instant;
+import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
@@ -168,6 +170,7 @@
import static io.trino.testing.assertions.Assert.assertEventually;
import static java.lang.String.format;
import static java.lang.String.join;
+import static java.math.RoundingMode.HALF_UP;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.time.ZoneOffset.UTC;
import static java.time.format.DateTimeFormatter.ISO_OFFSET_DATE_TIME;
@@ -193,6 +196,8 @@
public abstract class BaseIcebergConnectorTest
extends BaseConnectorTest
{
+ private static final DateTimeFormatter SQL_TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("uuuu-MM-dd HH:mm:ss");
+ private static final String ICEBERG_TIMESTAMP_PRECISION_FAILURE = "Timestamp precision \\(3\\) not supported for Iceberg. Use \"timestamp\\(6\\)\"(?: or \"timestamp\\(9\\)\")? instead";
private static final Pattern WITH_CLAUSE_EXTRACTOR = Pattern.compile(".*(WITH\\s*\\([^)]*\\))\\s*$", Pattern.DOTALL);
protected final IcebergFileFormat format;
@@ -3412,6 +3417,158 @@ else if (format == AVRO) {
assertUpdate("DROP TABLE test_year_transform_timestamptz");
}
+ @Test
+ public void testYearTransformTimestampNano()
+ {
+ assertUpdate("CREATE TABLE test_year_transform_ts_nano (d TIMESTAMP(9), b INTEGER) WITH (format_version = 3, partitioning = ARRAY['year(d)'])");
+
+ String values = "VALUES " +
+ "(NULL, 101)," +
+ "(TIMESTAMP '2020-01-15 12:30:45.123456789', 1)," +
+ "(TIMESTAMP '2020-06-30 23:59:59.999999999', 2)," +
+ "(TIMESTAMP '2021-01-01 00:00:00.000000001', 3)," +
+ "(TIMESTAMP '2021-12-31 23:59:59.999999999', 4)";
+ assertUpdate("INSERT INTO test_year_transform_ts_nano " + values, 5);
+ assertThat(query("SELECT * FROM test_year_transform_ts_nano"))
+ .matches(values);
+
+ // Verify partition structure
+ assertThat(query("SELECT partition.d_year, record_count FROM \"test_year_transform_ts_nano$partitions\" ORDER BY d_year NULLS FIRST"))
+ .skippingTypesCheck()
+ .matches("VALUES (NULL, BIGINT '1'), (50, BIGINT '2'), (51, BIGINT '2')");
+
+ // Verify predicate pushdown
+ assertThat(query("SELECT * FROM test_year_transform_ts_nano WHERE d IS NOT NULL"))
+ .isFullyPushedDown();
+ assertThat(query("SELECT * FROM test_year_transform_ts_nano WHERE d IS NULL"))
+ .isFullyPushedDown();
+ // Verify predicates work without overflow (key fix for nano timestamps)
+ assertThat(query("SELECT b FROM test_year_transform_ts_nano WHERE d >= DATE '2021-01-01' ORDER BY b"))
+ .matches("VALUES 3, 4");
+ assertThat(query("SELECT b FROM test_year_transform_ts_nano WHERE d >= TIMESTAMP '2021-01-01 00:00:00.000000000' ORDER BY b"))
+ .matches("VALUES 3, 4");
+ assertThat(query("SELECT b FROM test_year_transform_ts_nano WHERE d >= TIMESTAMP '2021-01-01 00:00:00.000000001' ORDER BY b"))
+ .matches("VALUES 3, 4");
+
+ // year()
+ assertThat(query("SELECT * FROM test_year_transform_ts_nano WHERE year(d) = 2020"))
+ .isFullyPushedDown();
+
+ // date_trunc
+ assertThat(query("SELECT * FROM test_year_transform_ts_nano WHERE date_trunc('year', d) = DATE '2020-01-01'"))
+ .isFullyPushedDown();
+
+ assertUpdate("DROP TABLE test_year_transform_ts_nano");
+ }
+
+ @Test
+ public void testYearTransformTimestampNanoWithTimeZone()
+ {
+ assertUpdate("CREATE TABLE test_year_transform_ts_nano_tz (d TIMESTAMP(9) WITH TIME ZONE, b INTEGER) WITH (format_version = 3, partitioning = ARRAY['year(d)'])");
+
+ String values = "VALUES " +
+ "(NULL, 101)," +
+ "(TIMESTAMP '2020-01-15 12:30:45.123456789 UTC', 1)," +
+ "(TIMESTAMP '2020-06-30 23:59:59.999999999 UTC', 2)," +
+ "(TIMESTAMP '2021-01-01 00:00:00.000000001 UTC', 3)," +
+ "(TIMESTAMP '2021-12-31 23:59:59.999999999 UTC', 4)";
+ assertUpdate("INSERT INTO test_year_transform_ts_nano_tz " + values, 5);
+ assertThat(query("SELECT * FROM test_year_transform_ts_nano_tz"))
+ .matches(values);
+
+ // Verify partition structure
+ assertThat(query("SELECT partition.d_year, record_count FROM \"test_year_transform_ts_nano_tz$partitions\" ORDER BY d_year NULLS FIRST"))
+ .skippingTypesCheck()
+ .matches("VALUES (NULL, BIGINT '1'), (50, BIGINT '2'), (51, BIGINT '2')");
+
+ // Verify predicate pushdown
+ assertThat(query("SELECT * FROM test_year_transform_ts_nano_tz WHERE d IS NOT NULL"))
+ .isFullyPushedDown();
+ assertThat(query("SELECT * FROM test_year_transform_ts_nano_tz WHERE d IS NULL"))
+ .isFullyPushedDown();
+ // Verify predicates work without overflow (key fix for nano timestamps)
+ assertThat(query("SELECT b FROM test_year_transform_ts_nano_tz WHERE d >= TIMESTAMP '2021-01-01 00:00:00.000000000 UTC' ORDER BY b"))
+ .matches("VALUES 3, 4");
+ assertThat(query("SELECT b FROM test_year_transform_ts_nano_tz WHERE d >= TIMESTAMP '2021-01-01 00:00:00.000000001 UTC' ORDER BY b"))
+ .matches("VALUES 3, 4");
+
+ // year()
+ assertThat(query("SELECT * FROM test_year_transform_ts_nano_tz WHERE year(d) = 2020"))
+ .isFullyPushedDown();
+
+ // date_trunc
+ assertThat(query("SELECT * FROM test_year_transform_ts_nano_tz WHERE date_trunc('year', d) = TIMESTAMP '2020-01-01 00:00:00.000000 UTC'"))
+ .isFullyPushedDown();
+
+ assertUpdate("DROP TABLE test_year_transform_ts_nano_tz");
+ }
+
+ @Test
+ public void testHourTransformTimestampNano()
+ {
+ assertUpdate("CREATE TABLE test_hour_transform_ts_nano (d TIMESTAMP(9), b INTEGER) WITH (format_version = 3, partitioning = ARRAY['hour(d)'])");
+
+ String values = "VALUES " +
+ "(NULL, 101)," +
+ "(TIMESTAMP '2024-01-15 10:00:00.000000001', 1)," +
+ "(TIMESTAMP '2024-01-15 10:59:59.999999999', 2)," +
+ "(TIMESTAMP '2024-01-15 11:00:00.000000001', 3)," +
+ "(TIMESTAMP '2024-01-15 11:30:45.123456789', 4)";
+ assertUpdate("INSERT INTO test_hour_transform_ts_nano " + values, 5);
+ assertThat(query("SELECT * FROM test_hour_transform_ts_nano"))
+ .matches(values);
+
+ // Verify predicate pushdown
+ assertThat(query("SELECT * FROM test_hour_transform_ts_nano WHERE d IS NOT NULL"))
+ .isFullyPushedDown();
+ assertThat(query("SELECT * FROM test_hour_transform_ts_nano WHERE d IS NULL"))
+ .isFullyPushedDown();
+ // Verify predicates work without overflow (key fix for nano timestamps)
+ assertThat(query("SELECT b FROM test_hour_transform_ts_nano WHERE d >= TIMESTAMP '2024-01-15 11:00:00.000000000' ORDER BY b"))
+ .matches("VALUES 3, 4");
+ assertThat(query("SELECT b FROM test_hour_transform_ts_nano WHERE d >= TIMESTAMP '2024-01-15 11:00:00.000000001' ORDER BY b"))
+ .matches("VALUES 3, 4");
+
+ // date_trunc
+ assertThat(query("SELECT * FROM test_hour_transform_ts_nano WHERE date_trunc('hour', d) = TIMESTAMP '2024-01-15 10:00:00'"))
+ .isFullyPushedDown();
+
+ assertUpdate("DROP TABLE test_hour_transform_ts_nano");
+ }
+
+ @Test
+ public void testHourTransformTimestampNanoWithTimeZone()
+ {
+ assertUpdate("CREATE TABLE test_hour_transform_ts_nano_tz (d TIMESTAMP(9) WITH TIME ZONE, b INTEGER) WITH (format_version = 3, partitioning = ARRAY['hour(d)'])");
+
+ String values = "VALUES " +
+ "(NULL, 101)," +
+ "(TIMESTAMP '2024-01-15 10:00:00.000000001 UTC', 1)," +
+ "(TIMESTAMP '2024-01-15 10:59:59.999999999 UTC', 2)," +
+ "(TIMESTAMP '2024-01-15 11:00:00.000000001 UTC', 3)," +
+ "(TIMESTAMP '2024-01-15 11:30:45.123456789 UTC', 4)";
+ assertUpdate("INSERT INTO test_hour_transform_ts_nano_tz " + values, 5);
+ assertThat(query("SELECT * FROM test_hour_transform_ts_nano_tz"))
+ .matches(values);
+
+ // Verify predicate pushdown
+ assertThat(query("SELECT * FROM test_hour_transform_ts_nano_tz WHERE d IS NOT NULL"))
+ .isFullyPushedDown();
+ assertThat(query("SELECT * FROM test_hour_transform_ts_nano_tz WHERE d IS NULL"))
+ .isFullyPushedDown();
+ // Verify predicates work without overflow (key fix for nano timestamps)
+ assertThat(query("SELECT b FROM test_hour_transform_ts_nano_tz WHERE d >= TIMESTAMP '2024-01-15 11:00:00.000000000 UTC' ORDER BY b"))
+ .matches("VALUES 3, 4");
+ assertThat(query("SELECT b FROM test_hour_transform_ts_nano_tz WHERE d >= TIMESTAMP '2024-01-15 11:00:00.000000001 UTC' ORDER BY b"))
+ .matches("VALUES 3, 4");
+
+ // date_trunc
+ assertThat(query("SELECT * FROM test_hour_transform_ts_nano_tz WHERE date_trunc('hour', d) = TIMESTAMP '2024-01-15 10:00:00.000000 UTC'"))
+ .isFullyPushedDown();
+
+ assertUpdate("DROP TABLE test_hour_transform_ts_nano_tz");
+ }
+
@Test
public void testTruncateTextTransform()
{
@@ -8976,9 +9133,45 @@ private List typeCoercionOnCreateTableAsSelectProvider()
protected Optional filterTypeCoercionOnCreateTableAsSelectProvider(TypeCoercionTestSetup setup)
{
+ if (formatVersion >= 3 &&
+ setup.newColumnType().equals("timestamp(6)") &&
+ setup.sourceValueLiteral().startsWith("TIMESTAMP '")) {
+ String timestampLiteral = setup.sourceValueLiteral().substring("TIMESTAMP '".length(), setup.sourceValueLiteral().length() - 1);
+ int fractionSeparator = timestampLiteral.indexOf('.');
+ if (fractionSeparator >= 0) {
+ String fractionalSeconds = timestampLiteral.substring(fractionSeparator + 1);
+ if (fractionalSeconds.length() > 6) {
+ return Optional.of(new TypeCoercionTestSetup(
+ setup.sourceValueLiteral(),
+ "timestamp(9)",
+ toTimestampNanosLiteral(timestampLiteral)));
+ }
+ }
+ }
+
return Optional.of(setup);
}
+ private static String toTimestampNanosLiteral(String timestampLiteral)
+ {
+ int fractionSeparator = timestampLiteral.indexOf('.');
+ String wholeSeconds = timestampLiteral.substring(0, fractionSeparator);
+ String fractionalSeconds = timestampLiteral.substring(fractionSeparator + 1);
+
+ long nanos = new BigDecimal("0." + fractionalSeconds)
+ .movePointRight(9)
+ .setScale(0, HALF_UP)
+ .longValueExact();
+
+ LocalDateTime timestamp = LocalDateTime.parse(wholeSeconds, SQL_TIMESTAMP_FORMATTER);
+ if (nanos == 1_000_000_000L) {
+ timestamp = timestamp.plusSeconds(1);
+ nanos = 0;
+ }
+
+ return "TIMESTAMP '%s.%09d'".formatted(timestamp.format(SQL_TIMESTAMP_FORMATTER), nanos);
+ }
+
private List typeCoercionOnCreateTableAsSelectData()
{
return ImmutableList.builder()
@@ -9009,6 +9202,16 @@ private List typeCoercionOnCreateTableAsSelectData()
.add(new TypeCoercionTestSetup("TIMESTAMP '1969-12-31 23:59:59.9999995'", "timestamp(6)", "TIMESTAMP '1970-01-01 00:00:00.000000'"))
.add(new TypeCoercionTestSetup("TIMESTAMP '1969-12-31 23:59:59.999999499999'", "timestamp(6)", "TIMESTAMP '1969-12-31 23:59:59.999999'"))
.add(new TypeCoercionTestSetup("TIMESTAMP '1969-12-31 23:59:59.9999994'", "timestamp(6)", "TIMESTAMP '1969-12-31 23:59:59.999999'"))
+ .add(new TypeCoercionTestSetup("TIMESTAMP '1970-01-01 00:00:00.1234567891'", "timestamp(6)", "TIMESTAMP '1970-01-01 00:00:00.123457'"))
+ .add(new TypeCoercionTestSetup("TIMESTAMP '1970-01-01 00:00:00.12345678949'", "timestamp(6)", "TIMESTAMP '1970-01-01 00:00:00.123457'"))
+ .add(new TypeCoercionTestSetup("TIMESTAMP '1970-01-01 00:00:00.123456789499'", "timestamp(6)", "TIMESTAMP '1970-01-01 00:00:00.123457'"))
+ .add(new TypeCoercionTestSetup("TIMESTAMP '1970-01-01 00:00:00.1234567895'", "timestamp(6)", "TIMESTAMP '1970-01-01 00:00:00.123457'"))
+ .add(new TypeCoercionTestSetup("TIMESTAMP '1970-01-01 00:00:00.111222333444'", "timestamp(6)", "TIMESTAMP '1970-01-01 00:00:00.111222'"))
+ .add(new TypeCoercionTestSetup("TIMESTAMP '1970-01-01 00:00:00.9999999995'", "timestamp(6)", "TIMESTAMP '1970-01-01 00:00:01.000000'"))
+ .add(new TypeCoercionTestSetup("TIMESTAMP '1970-01-01 23:59:59.9999999995'", "timestamp(6)", "TIMESTAMP '1970-01-02 00:00:00.000000'"))
+ .add(new TypeCoercionTestSetup("TIMESTAMP '1969-12-31 23:59:59.9999999995'", "timestamp(6)", "TIMESTAMP '1970-01-01 00:00:00.000000'"))
+ .add(new TypeCoercionTestSetup("TIMESTAMP '1969-12-31 23:59:59.999999999499'", "timestamp(6)", "TIMESTAMP '1970-01-01 00:00:00.000000'"))
+ .add(new TypeCoercionTestSetup("TIMESTAMP '1969-12-31 23:59:59.9999999994'", "timestamp(6)", "TIMESTAMP '1970-01-01 00:00:00.000000'"))
.add(new TypeCoercionTestSetup("TIME '00:00:00'", "time(6)", "TIME '00:00:00.000000'"))
.add(new TypeCoercionTestSetup("TIME '00:00:00.9'", "time(6)", "TIME '00:00:00.900000'"))
.add(new TypeCoercionTestSetup("TIME '00:00:00.56'", "time(6)", "TIME '00:00:00.560000'"))
@@ -9080,6 +9283,9 @@ public TypeCoercionTestSetup withNewValueLiteral(String newValueLiteral)
@Test
public void testAddColumnWithTypeCoercion()
{
+ String expectedTimestampType = formatVersion >= 3 ? "timestamp(9)" : "timestamp(6)";
+ String expectedTimestampWithTimeZoneType = formatVersion >= 3 ? "timestamp(9) with time zone" : "timestamp(6) with time zone";
+
testAddColumnWithTypeCoercion("tinyint", "integer");
testAddColumnWithTypeCoercion("smallint", "integer");
@@ -9091,12 +9297,12 @@ public void testAddColumnWithTypeCoercion()
testAddColumnWithTypeCoercion("timestamp(4) with time zone", "timestamp(6) with time zone");
testAddColumnWithTypeCoercion("timestamp(5) with time zone", "timestamp(6) with time zone");
testAddColumnWithTypeCoercion("timestamp(6) with time zone", "timestamp(6) with time zone");
- testAddColumnWithTypeCoercion("timestamp(7) with time zone", "timestamp(6) with time zone");
- testAddColumnWithTypeCoercion("timestamp(8) with time zone", "timestamp(6) with time zone");
- testAddColumnWithTypeCoercion("timestamp(9) with time zone", "timestamp(6) with time zone");
- testAddColumnWithTypeCoercion("timestamp(10) with time zone", "timestamp(6) with time zone");
- testAddColumnWithTypeCoercion("timestamp(11) with time zone", "timestamp(6) with time zone");
- testAddColumnWithTypeCoercion("timestamp(12) with time zone", "timestamp(6) with time zone");
+ testAddColumnWithTypeCoercion("timestamp(7) with time zone", expectedTimestampWithTimeZoneType);
+ testAddColumnWithTypeCoercion("timestamp(8) with time zone", expectedTimestampWithTimeZoneType);
+ testAddColumnWithTypeCoercion("timestamp(9) with time zone", expectedTimestampWithTimeZoneType);
+ testAddColumnWithTypeCoercion("timestamp(10) with time zone", expectedTimestampWithTimeZoneType);
+ testAddColumnWithTypeCoercion("timestamp(11) with time zone", expectedTimestampWithTimeZoneType);
+ testAddColumnWithTypeCoercion("timestamp(12) with time zone", expectedTimestampWithTimeZoneType);
testAddColumnWithTypeCoercion("timestamp", "timestamp(6)");
testAddColumnWithTypeCoercion("timestamp(0)", "timestamp(6)");
@@ -9106,12 +9312,12 @@ public void testAddColumnWithTypeCoercion()
testAddColumnWithTypeCoercion("timestamp(4)", "timestamp(6)");
testAddColumnWithTypeCoercion("timestamp(5)", "timestamp(6)");
testAddColumnWithTypeCoercion("timestamp(6)", "timestamp(6)");
- testAddColumnWithTypeCoercion("timestamp(7)", "timestamp(6)");
- testAddColumnWithTypeCoercion("timestamp(8)", "timestamp(6)");
- testAddColumnWithTypeCoercion("timestamp(9)", "timestamp(6)");
- testAddColumnWithTypeCoercion("timestamp(10)", "timestamp(6)");
- testAddColumnWithTypeCoercion("timestamp(11)", "timestamp(6)");
- testAddColumnWithTypeCoercion("timestamp(12)", "timestamp(6)");
+ testAddColumnWithTypeCoercion("timestamp(7)", expectedTimestampType);
+ testAddColumnWithTypeCoercion("timestamp(8)", expectedTimestampType);
+ testAddColumnWithTypeCoercion("timestamp(9)", expectedTimestampType);
+ testAddColumnWithTypeCoercion("timestamp(10)", expectedTimestampType);
+ testAddColumnWithTypeCoercion("timestamp(11)", expectedTimestampType);
+ testAddColumnWithTypeCoercion("timestamp(12)", expectedTimestampType);
testAddColumnWithTypeCoercion("time", "time(6)");
testAddColumnWithTypeCoercion("time(0)", "time(6)");
@@ -9528,7 +9734,8 @@ protected Optional filterSetColumnTypesDataProvider(SetColum
protected void verifySetColumnTypeFailurePermissible(Throwable e)
{
assertThat(e).hasMessageMatching(".*(Failed to set column type: Cannot change (column type:|type from .* to )" +
- "|Time(stamp)? precision \\(3\\) not supported for Iceberg. Use \"time(stamp)?\\(6\\)\" instead" +
+ "|Time precision \\(3\\) not supported for Iceberg. Use \"time\\(6\\)\" instead" +
+ "|" + ICEBERG_TIMESTAMP_PRECISION_FAILURE +
"|Type not supported for Iceberg: (tinyint|smallint|char\\(20\\))" +
"|Cannot update map keys).*");
}
@@ -9570,7 +9777,8 @@ protected Optional filterSetFieldTypesDataProvider(SetColumn
protected void verifySetFieldTypeFailurePermissible(Throwable e)
{
assertThat(e).hasMessageMatching(".*(Failed to set field type: Cannot change (column type:|type from .* to )" +
- "|Time(stamp)? precision \\(3\\) not supported for Iceberg. Use \"time(stamp)?\\(6\\)\" instead" +
+ "|Time precision \\(3\\) not supported for Iceberg. Use \"time\\(6\\)\" instead" +
+ "|" + ICEBERG_TIMESTAMP_PRECISION_FAILURE +
"|Type not supported for Iceberg: (tinyint|smallint|char\\(20\\))" +
"|Cannot update map keys).*");
}
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioOrcConnectorTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioOrcConnectorTest.java
index abcf97c4a1fd..fe2b4f298d23 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioOrcConnectorTest.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioOrcConnectorTest.java
@@ -168,12 +168,34 @@ public void testTimeType()
@Override
protected Optional filterTypeCoercionOnCreateTableAsSelectProvider(TypeCoercionTestSetup setup)
{
+ TypeCoercionTestSetup adjustedSetup = super.filterTypeCoercionOnCreateTableAsSelectProvider(setup).orElseThrow();
+
if (setup.sourceValueLiteral().equals("TIMESTAMP '1969-12-31 23:59:59.999999499999'")) {
- return Optional.of(setup.withNewValueLiteral("TIMESTAMP '1970-01-01 00:00:00.999999'"));
+ return Optional.of(adjustedSetup.withNewValueLiteral(adjustedSetup.newColumnType().equals("timestamp(9)")
+ ? "TIMESTAMP '1970-01-01 00:00:00.999999500'"
+ : "TIMESTAMP '1970-01-01 00:00:00.999999'"));
}
if (setup.sourceValueLiteral().equals("TIMESTAMP '1969-12-31 23:59:59.9999994'")) {
- return Optional.of(setup.withNewValueLiteral("TIMESTAMP '1970-01-01 00:00:00.999999'"));
+ return Optional.of(adjustedSetup.withNewValueLiteral(adjustedSetup.newColumnType().equals("timestamp(9)")
+ ? "TIMESTAMP '1970-01-01 00:00:00.999999400'"
+ : "TIMESTAMP '1970-01-01 00:00:00.999999'"));
+ }
+ if (setup.sourceValueLiteral().equals("TIMESTAMP '1969-12-31 23:59:59.9999995'") &&
+ adjustedSetup.newColumnType().equals("timestamp(9)")) {
+ return Optional.of(adjustedSetup.withNewValueLiteral("TIMESTAMP '1970-01-01 00:00:00.999999500'"));
+ }
+ if (setup.sourceValueLiteral().equals("TIMESTAMP '1969-12-31 23:59:59.999999999499'") &&
+ adjustedSetup.newColumnType().equals("timestamp(9)")) {
+ return Optional.of(adjustedSetup.withNewValueLiteral("TIMESTAMP '1970-01-01 00:00:00.999999999'"));
+ }
+ if (setup.sourceValueLiteral().equals("TIMESTAMP '1969-12-31 23:59:59.9999999994'") &&
+ adjustedSetup.newColumnType().equals("timestamp(9)")) {
+ return Optional.of(adjustedSetup.withNewValueLiteral("TIMESTAMP '1970-01-01 00:00:00.999999999'"));
+ }
+ if (setup.sourceValueLiteral().equals("TIMESTAMP '1969-12-31 23:59:59.9999999995'") &&
+ adjustedSetup.newColumnType().equals("timestamp(9)")) {
+ return Optional.of(adjustedSetup.withNewValueLiteral("TIMESTAMP '1970-01-01 00:00:00.000000000'"));
}
- return Optional.of(setup);
+ return Optional.of(adjustedSetup);
}
}
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestExpressionConverter.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestExpressionConverter.java
new file mode 100644
index 000000000000..92ba8dadfffb
--- /dev/null
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestExpressionConverter.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.iceberg;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import io.trino.spi.predicate.Domain;
+import io.trino.spi.predicate.Range;
+import io.trino.spi.predicate.TupleDomain;
+import io.trino.spi.predicate.ValueSet;
+import io.trino.spi.type.LongTimestamp;
+import io.trino.spi.type.LongTimestampWithTimeZone;
+import org.apache.iceberg.expressions.Expression;
+import org.apache.iceberg.expressions.Expressions;
+import org.junit.jupiter.api.Test;
+
+import static io.trino.plugin.iceberg.ColumnIdentity.primitiveColumnIdentity;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampFromNanos;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampToNanos;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampTzFromMicros;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampTzFromNanos;
+import static io.trino.plugin.iceberg.util.Timestamps.timestampTzToNanos;
+import static io.trino.spi.type.TimeZoneKey.UTC_KEY;
+import static io.trino.spi.type.TimestampType.TIMESTAMP_NANOS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_NANOS;
+import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MICROSECOND;
+import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND;
+import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND;
+import static java.lang.Math.floorDiv;
+import static java.lang.Math.floorMod;
+import static org.apache.iceberg.expressions.Expression.Operation.GT_EQ;
+import static org.apache.iceberg.expressions.Expression.Operation.LT_EQ;
+import static org.apache.iceberg.expressions.Expressions.alwaysFalse;
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class TestExpressionConverter
+{
+ private static final long MIN_NANO_EPOCH_MICROS = floorDiv(Long.MIN_VALUE, NANOSECONDS_PER_MICROSECOND);
+ private static final int MIN_NANO_OF_MICRO = (int) floorMod(Long.MIN_VALUE, NANOSECONDS_PER_MICROSECOND);
+ private static final long MAX_NANO_EPOCH_MICROS = floorDiv(Long.MAX_VALUE, NANOSECONDS_PER_MICROSECOND);
+ private static final int MAX_NANO_OF_MICRO = (int) floorMod(Long.MAX_VALUE, NANOSECONDS_PER_MICROSECOND);
+
+ private static final long MIN_NANO_EPOCH_MILLIS = floorDiv(Long.MIN_VALUE, NANOSECONDS_PER_MILLISECOND);
+ private static final int MIN_NANO_OF_MILLI = (int) floorMod(Long.MIN_VALUE, NANOSECONDS_PER_MILLISECOND);
+ private static final long MAX_NANO_EPOCH_MILLIS = floorDiv(Long.MAX_VALUE, NANOSECONDS_PER_MILLISECOND);
+ private static final int MAX_NANO_OF_MILLI = (int) floorMod(Long.MAX_VALUE, NANOSECONDS_PER_MILLISECOND);
+
+ private static final IcebergColumnHandle TIMESTAMP_NANOS_COLUMN = IcebergColumnHandle.optional(primitiveColumnIdentity(1, "ts_nano"))
+ .columnType(TIMESTAMP_NANOS)
+ .build();
+ private static final IcebergColumnHandle TIMESTAMP_TZ_NANOS_COLUMN = IcebergColumnHandle.optional(primitiveColumnIdentity(2, "ts_tz_nano"))
+ .columnType(TIMESTAMP_TZ_NANOS)
+ .build();
+
+ @Test
+ public void testTimestampNanosOutOfRangeSingleValuesAreAlwaysFalse()
+ {
+ assertThat(toIcebergExpression(TIMESTAMP_NANOS_COLUMN, Domain.singleValue(TIMESTAMP_NANOS, new LongTimestamp(MIN_NANO_EPOCH_MICROS - 1, 0))))
+ .isSameAs(alwaysFalse());
+ assertThat(toIcebergExpression(TIMESTAMP_NANOS_COLUMN, Domain.singleValue(TIMESTAMP_NANOS, new LongTimestamp(MIN_NANO_EPOCH_MICROS, (MIN_NANO_OF_MICRO - 1) * PICOSECONDS_PER_NANOSECOND))))
+ .isSameAs(alwaysFalse());
+ assertThat(toIcebergExpression(TIMESTAMP_NANOS_COLUMN, Domain.singleValue(TIMESTAMP_NANOS, new LongTimestamp(MAX_NANO_EPOCH_MICROS, (MAX_NANO_OF_MICRO + 1) * PICOSECONDS_PER_NANOSECOND))))
+ .isSameAs(alwaysFalse());
+ assertThat(toIcebergExpression(TIMESTAMP_NANOS_COLUMN, Domain.singleValue(TIMESTAMP_NANOS, new LongTimestamp(MAX_NANO_EPOCH_MICROS + 1, 0))))
+ .isSameAs(alwaysFalse());
+ }
+
+ @Test
+ public void testTimestampNanosExactBoundaryValuesAreInRange()
+ {
+ LongTimestamp minValue = timestampFromNanos(Long.MIN_VALUE);
+ assertThat(toIcebergExpression(TIMESTAMP_NANOS_COLUMN, Domain.singleValue(TIMESTAMP_NANOS, minValue)))
+ .hasToString(singleValueExpression("ts_nano", minValue));
+
+ LongTimestamp maxValue = timestampFromNanos(Long.MAX_VALUE);
+ assertThat(toIcebergExpression(TIMESTAMP_NANOS_COLUMN, Domain.singleValue(TIMESTAMP_NANOS, maxValue)))
+ .hasToString(singleValueExpression("ts_nano", maxValue));
+ }
+
+ @Test
+ public void testTimestampNanosOutOfRangeBoundsAreClipped()
+ {
+ LongTimestamp upperBound = new LongTimestamp(123_456_789L, 987_000);
+ assertThat(toIcebergExpression(
+ TIMESTAMP_NANOS_COLUMN,
+ Domain.create(ValueSet.ofRanges(Range.range(TIMESTAMP_NANOS, new LongTimestamp(MIN_NANO_EPOCH_MICROS - 1, 0), true, upperBound, true)), false)))
+ .hasToString(predicate(LT_EQ, "ts_nano", upperBound));
+
+ LongTimestamp lowerBound = new LongTimestamp(123_456_789L, 654_000);
+ assertThat(toIcebergExpression(
+ TIMESTAMP_NANOS_COLUMN,
+ Domain.create(ValueSet.ofRanges(Range.range(TIMESTAMP_NANOS, lowerBound, true, new LongTimestamp(MAX_NANO_EPOCH_MICROS + 1, 0), true)), false)))
+ .hasToString(predicate(GT_EQ, "ts_nano", lowerBound));
+ }
+
+ @Test
+ public void testTimestampTzNanosOutOfRangeSingleValuesAreAlwaysFalse()
+ {
+ assertThat(toIcebergExpression(TIMESTAMP_TZ_NANOS_COLUMN, Domain.singleValue(TIMESTAMP_TZ_NANOS, timestampTzFromEpochMicros(MIN_NANO_EPOCH_MICROS - 1))))
+ .isSameAs(alwaysFalse());
+ assertThat(toIcebergExpression(TIMESTAMP_TZ_NANOS_COLUMN, Domain.singleValue(TIMESTAMP_TZ_NANOS, LongTimestampWithTimeZone.fromEpochMillisAndFraction(MIN_NANO_EPOCH_MILLIS, (MIN_NANO_OF_MILLI - 1) * PICOSECONDS_PER_NANOSECOND, UTC_KEY))))
+ .isSameAs(alwaysFalse());
+ assertThat(toIcebergExpression(TIMESTAMP_TZ_NANOS_COLUMN, Domain.singleValue(TIMESTAMP_TZ_NANOS, LongTimestampWithTimeZone.fromEpochMillisAndFraction(MAX_NANO_EPOCH_MILLIS, (MAX_NANO_OF_MILLI + 1) * PICOSECONDS_PER_NANOSECOND, UTC_KEY))))
+ .isSameAs(alwaysFalse());
+ assertThat(toIcebergExpression(TIMESTAMP_TZ_NANOS_COLUMN, Domain.singleValue(TIMESTAMP_TZ_NANOS, timestampTzFromEpochMicros(MAX_NANO_EPOCH_MICROS + 1))))
+ .isSameAs(alwaysFalse());
+ }
+
+ @Test
+ public void testTimestampTzNanosExactBoundaryValuesAreInRange()
+ {
+ LongTimestampWithTimeZone minValue = timestampTzFromNanos(Long.MIN_VALUE);
+ assertThat(toIcebergExpression(TIMESTAMP_TZ_NANOS_COLUMN, Domain.singleValue(TIMESTAMP_TZ_NANOS, minValue)))
+ .hasToString(singleValueExpression("ts_tz_nano", minValue));
+
+ LongTimestampWithTimeZone maxValue = timestampTzFromNanos(Long.MAX_VALUE);
+ assertThat(toIcebergExpression(TIMESTAMP_TZ_NANOS_COLUMN, Domain.singleValue(TIMESTAMP_TZ_NANOS, maxValue)))
+ .hasToString(singleValueExpression("ts_tz_nano", maxValue));
+ }
+
+ @Test
+ public void testTimestampTzNanosOutOfRangeBoundsAreClipped()
+ {
+ LongTimestampWithTimeZone upperBound = LongTimestampWithTimeZone.fromEpochMillisAndFraction(123_456L, 789_000, UTC_KEY);
+ assertThat(toIcebergExpression(
+ TIMESTAMP_TZ_NANOS_COLUMN,
+ Domain.create(ValueSet.ofRanges(Range.range(TIMESTAMP_TZ_NANOS, timestampTzFromEpochMicros(MIN_NANO_EPOCH_MICROS - 1), true, upperBound, true)), false)))
+ .hasToString(predicate(LT_EQ, "ts_tz_nano", upperBound));
+
+ LongTimestampWithTimeZone lowerBound = LongTimestampWithTimeZone.fromEpochMillisAndFraction(123_456L, 654_000, UTC_KEY);
+ assertThat(toIcebergExpression(
+ TIMESTAMP_TZ_NANOS_COLUMN,
+ Domain.create(ValueSet.ofRanges(Range.range(TIMESTAMP_TZ_NANOS, lowerBound, true, timestampTzFromEpochMicros(MAX_NANO_EPOCH_MICROS + 1), true)), false)))
+ .hasToString(predicate(GT_EQ, "ts_tz_nano", lowerBound));
+ }
+
+ private static Expression toIcebergExpression(IcebergColumnHandle columnHandle, Domain domain)
+ {
+ return ExpressionConverter.toIcebergExpression(TupleDomain.withColumnDomains(ImmutableMap.of(columnHandle, domain)));
+ }
+
+ private static LongTimestampWithTimeZone timestampTzFromEpochMicros(long epochMicros)
+ {
+ return timestampTzFromMicros(epochMicros);
+ }
+
+ private static String predicate(Expression.Operation operation, String columnName, LongTimestamp timestamp)
+ {
+ return Expressions.predicate(operation, columnName, Expressions.nanos(timestampToNanos(timestamp))).toString();
+ }
+
+ private static String predicate(Expression.Operation operation, String columnName, LongTimestampWithTimeZone timestamp)
+ {
+ return Expressions.predicate(operation, columnName, Expressions.nanos(timestampTzToNanos(timestamp))).toString();
+ }
+
+ private static String singleValueExpression(String columnName, LongTimestamp timestamp)
+ {
+ return Expressions.in(columnName, ImmutableList.of(timestampToNanos(timestamp))).toString();
+ }
+
+ private static String singleValueExpression(String columnName, LongTimestampWithTimeZone timestamp)
+ {
+ return Expressions.in(columnName, ImmutableList.of(timestampTzToNanos(timestamp))).toString();
+ }
+}
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergV3.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergV3.java
index 156f220bff1c..4fa637f08d63 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergV3.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergV3.java
@@ -57,6 +57,9 @@
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.time.LocalDateTime;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@@ -74,6 +77,7 @@
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static io.trino.testing.TestingSession.testSessionBuilder;
+import static org.apache.iceberg.TableProperties.FORMAT_VERSION;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.keycloak.util.JsonSerialization.mapper;
@@ -82,10 +86,12 @@ public class TestIcebergV3
extends AbstractTestQueryFramework
{
private static final List ALL_FILE_FORMATS = List.of("PARQUET", "ORC", "AVRO");
+ private static final HadoopTables HADOOP_TABLES = new HadoopTables(new Configuration(false));
private HiveMetastore metastore;
private TrinoFileSystemFactory fileSystemFactory;
private TrinoCatalog catalog;
+ private Path dataDirectory;
@Override
protected QueryRunner createQueryRunner()
@@ -101,7 +107,7 @@ protected QueryRunner createQueryRunner()
queryRunner.installPlugin(new TpchPlugin());
queryRunner.createCatalog("tpch", "tpch");
- Path dataDirectory = queryRunner.getCoordinator().getBaseDataDir().resolve("iceberg_data");
+ dataDirectory = queryRunner.getCoordinator().getBaseDataDir().resolve("iceberg_data");
dataDirectory.toFile().mkdirs();
queryRunner.installPlugin(new TestingIcebergPlugin(dataDirectory));
@@ -454,12 +460,206 @@ SELECT count(*) = count_if(
assertUpdate("DROP TABLE " + tableName);
}
+ @Test
+ void testTimestampNano()
+ throws IOException
+ {
+ String tableName = "test_timestamp_nano_" + randomNameSuffix();
+ Path tableLocation = dataDirectory.resolve(tableName);
+
+ // Create table with timestamp_nano column using Iceberg API
+ Schema schema = new Schema(
+ Types.NestedField.required(1, "id", Types.IntegerType.get()),
+ Types.NestedField.optional(2, "ts_nano", Types.TimestampNanoType.withoutZone()));
+
+ Table table = HADOOP_TABLES.create(
+ schema,
+ PartitionSpec.unpartitioned(),
+ SortOrder.unsorted(),
+ ImmutableMap.of(FORMAT_VERSION, "3"),
+ tableLocation.toString());
+
+ // Write data with nanosecond precision
+ String dataPath = tableLocation.resolve("data")
+ .resolve("data-" + UUID.randomUUID() + ".parquet")
+ .toString();
+ try (DataWriter writer = Parquet.writeData(table.io().newOutputFile(dataPath))
+ .forTable(table)
+ .withSpec(table.spec())
+ .withPartition(null)
+ .createWriterFunc(GenericParquetWriter::create)
+ .build()) {
+ Record record = GenericRecord.create(schema);
+ record.setField("id", 1);
+ // 2024-01-15 12:30:45.123456789
+ record.setField("ts_nano", LocalDateTime.of(2024, 1, 15, 12, 30, 45, 123456789));
+ writer.write(record);
+ writer.close();
+
+ table.newFastAppend()
+ .appendFile(writer.toDataFile())
+ .commit();
+ }
+
+ // Register table in Trino and verify
+ String registered = "registered_timestamp_nano_" + randomNameSuffix();
+ assertUpdate("CALL system.register_table(CURRENT_SCHEMA, '%s', '%s')".formatted(registered, tableLocation));
+
+ assertThat(query("SELECT id, ts_nano FROM " + registered))
+ .matches("VALUES (1, TIMESTAMP '2024-01-15 12:30:45.123456789')");
+
+ assertUpdate("DROP TABLE " + registered);
+ }
+
+ @Test
+ void testTrinoTimestampNano()
+ {
+ for (String format : List.of("PARQUET", "ORC", "AVRO")) {
+ String tableName = "test_trino_timestamp_nano_" + randomNameSuffix();
+ try {
+ assertUpdate("CREATE TABLE " + tableName + " (id INTEGER, ts_nano TIMESTAMP(9)) WITH (format = '" + format + "')");
+
+ // Insert with full nanosecond precision
+ assertUpdate("INSERT INTO " + tableName + " VALUES (1, TIMESTAMP '2024-01-15 12:30:45.123456789')", 1);
+ assertUpdate("INSERT INTO " + tableName + " VALUES (2, TIMESTAMP '2024-06-30 23:59:59.999999999')", 1);
+ assertUpdate("INSERT INTO " + tableName + " VALUES (3, NULL)", 1);
+
+ // Verify data is read back correctly with nanosecond precision preserved
+ assertThat(query("SELECT id, ts_nano FROM " + tableName + " ORDER BY id"))
+ .matches("VALUES " +
+ "(INTEGER '1', TIMESTAMP '2024-01-15 12:30:45.123456789'), " +
+ "(INTEGER '2', TIMESTAMP '2024-06-30 23:59:59.999999999'), " +
+ "(INTEGER '3', NULL)");
+
+ // Test that nanosecond precision differences are preserved
+ assertUpdate("INSERT INTO " + tableName + " VALUES (4, TIMESTAMP '2024-01-15 12:30:45.123456780')", 1);
+
+ // Verify all rows including the one with different nanosecond precision
+ assertThat(query("SELECT id, ts_nano FROM " + tableName + " ORDER BY id"))
+ .matches("VALUES " +
+ "(INTEGER '1', TIMESTAMP '2024-01-15 12:30:45.123456789'), " +
+ "(INTEGER '2', TIMESTAMP '2024-06-30 23:59:59.999999999'), " +
+ "(INTEGER '3', NULL), " +
+ "(INTEGER '4', TIMESTAMP '2024-01-15 12:30:45.123456780')");
+ }
+ finally {
+ assertUpdate("DROP TABLE IF EXISTS " + tableName);
+ }
+ }
+ }
+
+ @Test
+ void testTimestampNanoPartition()
+ {
+ try (TestTable table = newTrinoTable("test_nano_partition", "(id INTEGER, x TIMESTAMP(9)) WITH (partitioning = ARRAY['x'])")) {
+ assertUpdate("INSERT INTO " + table.getName() + " VALUES (1, TIMESTAMP '2022-07-26 12:13:14.123456789')", 1);
+
+ assertThat(query("SELECT x FROM " + table.getName()))
+ .matches("VALUES TIMESTAMP '2022-07-26 12:13:14.123456789'");
+ assertThat(query("SELECT 1 FROM " + table.getName() + " WHERE x = TIMESTAMP '2022-07-26 12:13:14.123456789'"))
+ .matches("VALUES 1");
+ }
+ }
+
+ @Test
+ void testTimestampNanoWithTimeZone()
+ throws IOException
+ {
+ String tableName = "test_timestamp_nano_tz_" + randomNameSuffix();
+ Path tableLocation = dataDirectory.resolve(tableName);
+
+ // Create table with timestamp_nano (with UTC adjustment) column using Iceberg API
+ Schema schema = new Schema(
+ Types.NestedField.required(1, "id", Types.IntegerType.get()),
+ Types.NestedField.optional(2, "ts_nano_tz", Types.TimestampNanoType.withZone()));
+
+ Table table = HADOOP_TABLES.create(
+ schema,
+ PartitionSpec.unpartitioned(),
+ SortOrder.unsorted(),
+ ImmutableMap.of(FORMAT_VERSION, "3"),
+ tableLocation.toString());
+
+ // Write data with nanosecond precision
+ String dataPath = tableLocation.resolve("data")
+ .resolve("data-" + UUID.randomUUID() + ".parquet")
+ .toString();
+ try (DataWriter writer = Parquet.writeData(table.io().newOutputFile(dataPath))
+ .forTable(table)
+ .withSpec(table.spec())
+ .withPartition(null)
+ .createWriterFunc(GenericParquetWriter::create)
+ .build()) {
+ Record record = GenericRecord.create(schema);
+ record.setField("id", 1);
+ // 2024-01-15 12:30:45.123456789 UTC
+ record.setField("ts_nano_tz", OffsetDateTime.of(2024, 1, 15, 12, 30, 45, 123456789, ZoneOffset.UTC));
+ writer.write(record);
+ writer.close();
+
+ table.newFastAppend()
+ .appendFile(writer.toDataFile())
+ .commit();
+ }
+
+ // Register table in Trino and verify
+ String registered = "registered_timestamp_nano_tz_" + randomNameSuffix();
+ assertUpdate("CALL system.register_table(CURRENT_SCHEMA, '%s', '%s')".formatted(registered, tableLocation));
+
+ assertThat(query("SELECT id, ts_nano_tz FROM " + registered))
+ .matches("VALUES (1, TIMESTAMP '2024-01-15 12:30:45.123456789 UTC')");
+
+ assertUpdate("DROP TABLE " + registered);
+ }
+
+ @Test
+ void testTrinoTimestampNanoWithTimeZone()
+ {
+ for (String format : List.of("PARQUET", "ORC", "AVRO")) {
+ String tableName = "test_trino_timestamp_nano_tz_" + randomNameSuffix();
+ try {
+ assertUpdate("CREATE TABLE " + tableName + " (id INTEGER, ts_nano_tz TIMESTAMP(9) WITH TIME ZONE) WITH (format = '" + format + "')");
+
+ // Insert with full nanosecond precision
+ assertUpdate("INSERT INTO " + tableName + " VALUES (1, TIMESTAMP '2024-01-15 12:30:45.123456789 UTC')", 1);
+ assertUpdate("INSERT INTO " + tableName + " VALUES (2, TIMESTAMP '2024-06-30 23:59:59.999999999 UTC')", 1);
+ assertUpdate("INSERT INTO " + tableName + " VALUES (3, NULL)", 1);
+ // Insert with non-UTC timezone - should be normalized to UTC when read back
+ assertUpdate("INSERT INTO " + tableName + " VALUES (4, TIMESTAMP '2024-01-15 18:00:45.123456789 +05:30')", 1);
+
+ // Verify data is read back correctly with nanosecond precision preserved
+ // Note: row 4 was inserted as +05:30 but reads back as UTC
+ assertThat(query("SELECT id, ts_nano_tz FROM " + tableName + " ORDER BY id"))
+ .matches("VALUES " +
+ "(INTEGER '1', TIMESTAMP '2024-01-15 12:30:45.123456789 UTC'), " +
+ "(INTEGER '2', TIMESTAMP '2024-06-30 23:59:59.999999999 UTC'), " +
+ "(INTEGER '3', NULL), " +
+ "(INTEGER '4', TIMESTAMP '2024-01-15 12:30:45.123456789 UTC')");
+
+ // Test that nanosecond precision differences are preserved
+ assertUpdate("INSERT INTO " + tableName + " VALUES (5, TIMESTAMP '2024-01-15 12:30:45.123456780 UTC')", 1);
+
+ // Verify all rows including the one with different nanosecond precision
+ assertThat(query("SELECT id, ts_nano_tz FROM " + tableName + " ORDER BY id"))
+ .matches("VALUES " +
+ "(INTEGER '1', TIMESTAMP '2024-01-15 12:30:45.123456789 UTC'), " +
+ "(INTEGER '2', TIMESTAMP '2024-06-30 23:59:59.999999999 UTC'), " +
+ "(INTEGER '3', NULL), " +
+ "(INTEGER '4', TIMESTAMP '2024-01-15 12:30:45.123456789 UTC'), " +
+ "(INTEGER '5', TIMESTAMP '2024-01-15 12:30:45.123456780 UTC')");
+ }
+ finally {
+ assertUpdate("DROP TABLE IF EXISTS " + tableName);
+ }
+ }
+ }
+
@Test
void testV3RejectsAddFilesProcedure()
{
String tableName = "add_files_target_" + randomNameSuffix();
- assertUpdate("CREATE TABLE " + tableName + " (x integer) WITH (format = 'ORC', format_version = 3)");
+ assertUpdate("CREATE TABLE " + tableName + " (x integer) WITH (format = 'ORC')");
assertThat(query("ALTER TABLE " + tableName + " EXECUTE add_files(location => 'file:///tmp', format => 'ORC')"))
.failure()
@@ -474,7 +674,7 @@ void testV3RejectsAddFilesFromTableProcedure()
{
String tableName = "add_files_from_table_target_" + randomNameSuffix();
- assertUpdate("CREATE TABLE " + tableName + " (x integer) WITH (format = 'ORC', format_version = 3)");
+ assertUpdate("CREATE TABLE " + tableName + " (x integer) WITH (format = 'ORC')");
assertThat(query("ALTER TABLE " + tableName + " EXECUTE add_files_from_table(schema_name => 'tpch', table_name => 'non_existent')"))
.failure()
@@ -489,7 +689,7 @@ void testV3InitialDefault()
{
// Create a data file with only 'id' column
String tableName = "v3_defaults_" + randomNameSuffix();
- assertUpdate("CREATE TABLE " + tableName + " (id INTEGER) WITH (format_version = 3, format = 'ORC')");
+ assertUpdate("CREATE TABLE " + tableName + " (id INTEGER) WITH (format = 'ORC')");
assertUpdate("INSERT INTO " + tableName + " VALUES 1", 1);
// Add a value column (missing from file, has initial-default)
@@ -1125,7 +1325,7 @@ void testV3RejectsEncryptionKeysInMetadata()
Path hadoopTableLocation = Path.of(tempTable.location()).resolveSibling(hadoopTableName);
// Use HadoopTables to prevent stale caches from direct metadata.json modification
- Table icebergTable = new HadoopTables(new Configuration(false)).create(
+ Table icebergTable = HADOOP_TABLES.create(
new Schema(Types.NestedField.optional(1, "id", Types.IntegerType.get())),
PartitionSpec.unpartitioned(),
SortOrder.unsorted(),
@@ -1428,4 +1628,78 @@ private void injectEncryptionKeysIntoMetadataJson(Path tableLocation, String key
Path crc = metadataFile.resolveSibling("." + metadataFile.getFileName() + ".crc");
Files.deleteIfExists(crc);
}
+
+ @Test
+ void testOrcTimestampNanoFiltering()
+ {
+ String tableName = "test_orc_timestamp_nano_filtering_" + randomNameSuffix();
+ try {
+ assertUpdate("CREATE TABLE " + tableName + " (d TIMESTAMP(9), b INTEGER) WITH (format = 'ORC')");
+
+ // Insert data with nanosecond precision
+ assertUpdate("INSERT INTO " + tableName + " VALUES " +
+ "(TIMESTAMP '2024-01-15 10:00:00.000000001', 1)," +
+ "(TIMESTAMP '2024-01-15 10:59:59.999999999', 2)," +
+ "(TIMESTAMP '2024-01-15 11:00:00.000000001', 3)," +
+ "(TIMESTAMP '2024-01-15 11:30:45.123456789', 4)", 4);
+
+ // Verify the inserted rows round-trip with full nanosecond precision before exercising filters
+ assertThat(query("SELECT d, b FROM " + tableName + " ORDER BY b"))
+ .matches("VALUES " +
+ "(TIMESTAMP '2024-01-15 10:00:00.000000001', INTEGER '1'), " +
+ "(TIMESTAMP '2024-01-15 10:59:59.999999999', INTEGER '2'), " +
+ "(TIMESTAMP '2024-01-15 11:00:00.000000001', INTEGER '3'), " +
+ "(TIMESTAMP '2024-01-15 11:30:45.123456789', INTEGER '4')");
+
+ // Test filter at hour boundary - regression case that previously returned incorrect results
+ assertThat(query("SELECT b FROM " + tableName + " WHERE d >= TIMESTAMP '2024-01-15 11:00:00.000000000' ORDER BY b"))
+ .matches("VALUES INTEGER '3', INTEGER '4'");
+
+ // Test filter with slightly later timestamp
+ assertThat(query("SELECT b FROM " + tableName + " WHERE d >= TIMESTAMP '2024-01-15 11:00:00.000000001' ORDER BY b"))
+ .matches("VALUES INTEGER '3', INTEGER '4'");
+
+ // Test filter that should return all rows
+ assertThat(query("SELECT b FROM " + tableName + " WHERE d >= TIMESTAMP '2024-01-15 10:00:00.000000000' ORDER BY b"))
+ .matches("VALUES INTEGER '1', INTEGER '2', INTEGER '3', INTEGER '4'");
+
+ // Test filter that should return first two rows
+ assertThat(query("SELECT b FROM " + tableName + " WHERE d < TIMESTAMP '2024-01-15 11:00:00.000000000' ORDER BY b"))
+ .matches("VALUES INTEGER '1', INTEGER '2'");
+ }
+ finally {
+ assertUpdate("DROP TABLE IF EXISTS " + tableName);
+ }
+ }
+
+ @Test
+ void testOrcTimestampNanoWithTimeZoneFiltering()
+ {
+ String tableName = "test_orc_timestamp_nano_tz_filtering_" + randomNameSuffix();
+ try {
+ assertUpdate("CREATE TABLE " + tableName + " (d TIMESTAMP(9) WITH TIME ZONE, b INTEGER) WITH (format = 'ORC')");
+
+ // Insert data with nanosecond precision
+ assertUpdate("INSERT INTO " + tableName + " VALUES " +
+ "(TIMESTAMP '2024-01-15 10:00:00.000000001 UTC', 1)," +
+ "(TIMESTAMP '2024-01-15 10:59:59.999999999 UTC', 2)," +
+ "(TIMESTAMP '2024-01-15 11:00:00.000000001 UTC', 3)," +
+ "(TIMESTAMP '2024-01-15 11:30:45.123456789 UTC', 4)", 4);
+
+ // Verify the inserted rows round-trip with full nanosecond precision before exercising filters
+ assertThat(query("SELECT d, b FROM " + tableName + " ORDER BY b"))
+ .matches("VALUES " +
+ "(TIMESTAMP '2024-01-15 10:00:00.000000001 UTC', INTEGER '1'), " +
+ "(TIMESTAMP '2024-01-15 10:59:59.999999999 UTC', INTEGER '2'), " +
+ "(TIMESTAMP '2024-01-15 11:00:00.000000001 UTC', INTEGER '3'), " +
+ "(TIMESTAMP '2024-01-15 11:30:45.123456789 UTC', INTEGER '4')");
+
+ // Test filter at hour boundary - regression case that previously returned incorrect results
+ assertThat(query("SELECT b FROM " + tableName + " WHERE d >= TIMESTAMP '2024-01-15 11:00:00.000000000 UTC' ORDER BY b"))
+ .matches("VALUES INTEGER '3', INTEGER '4'");
+ }
+ finally {
+ assertUpdate("DROP TABLE IF EXISTS " + tableName);
+ }
+ }
}
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestPartitionFields.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestPartitionFields.java
index f181786748b8..d2f60cb17e6e 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestPartitionFields.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestPartitionFields.java
@@ -50,6 +50,14 @@ public void testParse()
assertParse("month(ts)", partitionSpec(builder -> builder.month("ts")));
assertParse("day(ts)", partitionSpec(builder -> builder.day("ts")));
assertParse("hour(ts)", partitionSpec(builder -> builder.hour("ts")));
+ assertParse("year(ts_nano)", partitionSpec(builder -> builder.year("ts_nano")));
+ assertParse("month(ts_nano)", partitionSpec(builder -> builder.month("ts_nano")));
+ assertParse("day(ts_nano)", partitionSpec(builder -> builder.day("ts_nano")));
+ assertParse("hour(ts_nano)", partitionSpec(builder -> builder.hour("ts_nano")));
+ assertParse("year(ts_nano_tz)", partitionSpec(builder -> builder.year("ts_nano_tz")));
+ assertParse("month(ts_nano_tz)", partitionSpec(builder -> builder.month("ts_nano_tz")));
+ assertParse("day(ts_nano_tz)", partitionSpec(builder -> builder.day("ts_nano_tz")));
+ assertParse("hour(ts_nano_tz)", partitionSpec(builder -> builder.hour("ts_nano_tz")));
assertParse("bucket(order_key, 42)", partitionSpec(builder -> builder.bucket("order_key", 42)));
assertParse("truncate(comment, 13)", partitionSpec(builder -> builder.truncate("comment", 13)));
assertParse("truncate(order_key, 88)", partitionSpec(builder -> builder.truncate("order_key", 88)));
@@ -220,7 +228,9 @@ private static PartitionSpec partitionSpec(Consumer consu
NestedField.required(19, "MixedTs", TimestampType.withoutZone()),
NestedField.optional(20, "MixedString", StringType.get()),
NestedField.required(21, "MixedNested", Types.StructType.of(
- NestedField.required(22, "MixedValue", StringType.get()))));
+ NestedField.required(22, "MixedValue", StringType.get()))),
+ NestedField.required(23, "ts_nano", Types.TimestampNanoType.withoutZone()),
+ NestedField.required(24, "ts_nano_tz", Types.TimestampNanoType.withZone()));
PartitionSpec.Builder builder = PartitionSpec.builderFor(schema);
consumer.accept(builder);
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestPartitionTransforms.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestPartitionTransforms.java
index affeeb9b0890..eaa8febe8a40 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestPartitionTransforms.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestPartitionTransforms.java
@@ -16,6 +16,7 @@
import org.apache.iceberg.transforms.Transforms;
import org.apache.iceberg.types.Types.DateType;
import org.apache.iceberg.types.Types.StringType;
+import org.apache.iceberg.types.Types.TimestampNanoType;
import org.apache.iceberg.types.Types.TimestampType;
import org.junit.jupiter.api.Test;
@@ -35,6 +36,9 @@ public class TestPartitionTransforms
{
private static final DateType ICEBERG_DATE = DateType.get();
private static final TimestampType ICEBERG_TIMESTAMP = TimestampType.withoutZone();
+ private static final TimestampType ICEBERG_TIMESTAMP_TZ = TimestampType.withZone();
+ private static final TimestampNanoType ICEBERG_TIMESTAMP_NANOS = TimestampNanoType.withoutZone();
+ private static final TimestampNanoType ICEBERG_TIMESTAMP_NANOS_TZ = TimestampNanoType.withZone();
@Test
public void testToStringMatchesSpecification()
@@ -77,6 +81,7 @@ public void testEpochTransforms()
}
long epochMicro = SECONDS.toMicros(epochSecond);
+ long epochNano = SECONDS.toNanos(epochSecond);
assertThat(actualYear)
.describedAs(time.toString())
.isEqualTo((int) Transforms.year().bind(ICEBERG_TIMESTAMP).apply(epochMicro));
@@ -89,6 +94,45 @@ public void testEpochTransforms()
assertThat(actualHour)
.describedAs(time.toString())
.isEqualTo((int) Transforms.hour().bind(ICEBERG_TIMESTAMP).apply(epochMicro));
+
+ assertThat(actualYear)
+ .describedAs(time.toString())
+ .isEqualTo((int) Transforms.year().bind(ICEBERG_TIMESTAMP_TZ).apply(epochMicro));
+ assertThat(actualMonth)
+ .describedAs(time.toString())
+ .isEqualTo((int) Transforms.month().bind(ICEBERG_TIMESTAMP_TZ).apply(epochMicro));
+ assertThat(actualDay)
+ .describedAs(time.toString())
+ .isEqualTo((int) Transforms.day().bind(ICEBERG_TIMESTAMP_TZ).apply(epochMicro));
+ assertThat(actualHour)
+ .describedAs(time.toString())
+ .isEqualTo((int) Transforms.hour().bind(ICEBERG_TIMESTAMP_TZ).apply(epochMicro));
+
+ assertThat(actualYear)
+ .describedAs(time.toString())
+ .isEqualTo((int) Transforms.year().bind(ICEBERG_TIMESTAMP_NANOS).apply(epochNano));
+ assertThat(actualMonth)
+ .describedAs(time.toString())
+ .isEqualTo((int) Transforms.month().bind(ICEBERG_TIMESTAMP_NANOS).apply(epochNano));
+ assertThat(actualDay)
+ .describedAs(time.toString())
+ .isEqualTo((int) Transforms.day().bind(ICEBERG_TIMESTAMP_NANOS).apply(epochNano));
+ assertThat(actualHour)
+ .describedAs(time.toString())
+ .isEqualTo((int) Transforms.hour().bind(ICEBERG_TIMESTAMP_NANOS).apply(epochNano));
+
+ assertThat(actualYear)
+ .describedAs(time.toString())
+ .isEqualTo((int) Transforms.year().bind(ICEBERG_TIMESTAMP_NANOS_TZ).apply(epochNano));
+ assertThat(actualMonth)
+ .describedAs(time.toString())
+ .isEqualTo((int) Transforms.month().bind(ICEBERG_TIMESTAMP_NANOS_TZ).apply(epochNano));
+ assertThat(actualDay)
+ .describedAs(time.toString())
+ .isEqualTo((int) Transforms.day().bind(ICEBERG_TIMESTAMP_NANOS_TZ).apply(epochNano));
+ assertThat(actualHour)
+ .describedAs(time.toString())
+ .isEqualTo((int) Transforms.hour().bind(ICEBERG_TIMESTAMP_NANOS_TZ).apply(epochNano));
}
}
}
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestGlueIcebergUtil.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestGlueIcebergUtil.java
new file mode 100644
index 000000000000..9f72cd4c11bc
--- /dev/null
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestGlueIcebergUtil.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.iceberg.catalog.glue;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.iceberg.PartitionSpec;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.SortOrder;
+import org.apache.iceberg.TableMetadata;
+import org.apache.iceberg.types.Type;
+import org.apache.iceberg.types.Types;
+import org.junit.jupiter.api.Test;
+import software.amazon.awssdk.services.glue.model.Column;
+import software.amazon.awssdk.services.glue.model.TableInput;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import static io.trino.plugin.iceberg.IcebergUtil.COLUMN_TRINO_TYPE_ID_PROPERTY;
+import static io.trino.plugin.iceberg.TypeConverter.toTrinoType;
+import static io.trino.plugin.iceberg.catalog.glue.GlueIcebergUtil.getTableInput;
+import static io.trino.type.InternalTypeManager.TESTING_TYPE_MANAGER;
+import static org.apache.iceberg.TableMetadata.newTableMetadata;
+import static org.apache.iceberg.TableProperties.FORMAT_VERSION;
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class TestGlueIcebergUtil
+{
+ @Test
+ public void testLossyGlueMappingsPreserveTrinoTypeId()
+ {
+ Map expectedGlueTypes = new LinkedHashMap<>();
+ expectedGlueTypes.put("time_value", Types.TimeType.get());
+ expectedGlueTypes.put("uuid_value", Types.UUIDType.get());
+ expectedGlueTypes.put("timestamp_value", Types.TimestampType.withoutZone());
+ expectedGlueTypes.put("timestamptz_value", Types.TimestampType.withZone());
+ expectedGlueTypes.put("ts_nano", Types.TimestampNanoType.withoutZone());
+ expectedGlueTypes.put("ts_nano_tz", Types.TimestampNanoType.withZone());
+ expectedGlueTypes.put("binary_value", Types.BinaryType.get());
+ expectedGlueTypes.put("fixed_value", Types.FixedType.ofLength(16));
+
+ Map expectedLossyGlueType = new LinkedHashMap<>();
+ expectedLossyGlueType.put("time_value", "string");
+ expectedLossyGlueType.put("uuid_value", "string");
+ expectedLossyGlueType.put("timestamp_value", "timestamp");
+ expectedLossyGlueType.put("timestamptz_value", "timestamp");
+ expectedLossyGlueType.put("ts_nano", "timestamp");
+ expectedLossyGlueType.put("ts_nano_tz", "timestamp");
+ expectedLossyGlueType.put("binary_value", "binary");
+ expectedLossyGlueType.put("fixed_value", "binary");
+
+ List schemaColumns = new ArrayList<>(expectedGlueTypes.size());
+ int fieldId = 1;
+ for (Map.Entry entry : expectedGlueTypes.entrySet()) {
+ schemaColumns.add(Types.NestedField.optional(fieldId++, entry.getKey(), entry.getValue()));
+ }
+
+ Schema schema = new Schema(schemaColumns);
+ TableMetadata metadata = newTableMetadata(
+ schema,
+ PartitionSpec.unpartitioned(),
+ SortOrder.unsorted(),
+ "s3://test-bucket/test-table",
+ ImmutableMap.of(FORMAT_VERSION, "3"));
+
+ TableInput tableInput = getTableInput(
+ TESTING_TYPE_MANAGER,
+ "test_table",
+ Optional.empty(),
+ metadata,
+ metadata.location(),
+ "s3://test-bucket/test-table/metadata/00001.metadata.json",
+ ImmutableMap.of(),
+ true);
+
+ List columns = tableInput.storageDescriptor().columns();
+ assertThat(columns).hasSize(expectedGlueTypes.size());
+
+ for (Column column : columns) {
+ assertThat(column.type()).isEqualTo(expectedLossyGlueType.get(column.name()));
+ assertThat(column.parameters())
+ .containsEntry(COLUMN_TRINO_TYPE_ID_PROPERTY, toTrinoType(expectedGlueTypes.get(column.name()), TESTING_TYPE_MANAGER).getTypeId().getId());
+ }
+ }
+}
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/util/TestOrcMetrics.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/util/TestOrcMetrics.java
new file mode 100644
index 000000000000..aaff0f5f0701
--- /dev/null
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/util/TestOrcMetrics.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.iceberg.util;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import io.trino.orc.metadata.ColumnMetadata;
+import io.trino.orc.metadata.OrcColumnId;
+import io.trino.orc.metadata.OrcType;
+import io.trino.orc.metadata.statistics.ColumnStatistics;
+import io.trino.orc.metadata.statistics.TimestampStatistics;
+import org.apache.iceberg.Metrics;
+import org.apache.iceberg.MetricsConfig;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.types.Types;
+import org.junit.jupiter.api.Test;
+
+import java.util.Optional;
+
+import static io.trino.orc.metadata.OrcType.OrcTypeKind.STRUCT;
+import static io.trino.orc.metadata.OrcType.OrcTypeKind.TIMESTAMP;
+import static io.trino.plugin.iceberg.util.OrcTypeConverter.ORC_ICEBERG_ID_KEY;
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class TestOrcMetrics
+{
+ @Test
+ public void testTimestampBoundsAreDroppedWhenScalingOverflows()
+ {
+ Schema schema = new Schema(Types.NestedField.optional(1, "ts_nano", Types.TimestampNanoType.withoutZone()));
+
+ ColumnMetadata orcColumns = new ColumnMetadata<>(ImmutableList.of(
+ new OrcType(STRUCT, ImmutableList.of(new OrcColumnId(1)), ImmutableList.of("ts_nano"), Optional.empty(), Optional.empty(), Optional.empty(), ImmutableMap.of()),
+ new OrcType(TIMESTAMP, ImmutableList.of(), ImmutableList.of(), Optional.empty(), Optional.empty(), Optional.empty(), ImmutableMap.of(ORC_ICEBERG_ID_KEY, "1"))));
+
+ ColumnMetadata columnStatistics = new ColumnMetadata<>(ImmutableList.of(
+ new ColumnStatistics(1L, 0, null, null, null, null, null, null, null, null, null, null),
+ new ColumnStatistics(1L, 0, null, null, null, null, null, null, new TimestampStatistics(0L, Long.MAX_VALUE), null, null, null)));
+
+ Metrics metrics = OrcMetrics.computeMetrics(MetricsConfig.getDefault(), schema, orcColumns, 1, Optional.of(columnStatistics));
+
+ assertThat(metrics.valueCounts()).containsEntry(1, 1L);
+ assertThat(metrics.nullValueCounts()).containsEntry(1, 0L);
+ assertThat(metrics.lowerBounds()).isNull();
+ assertThat(metrics.upperBounds()).isNull();
+ }
+}
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/util/TestOrcTypeConverter.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/util/TestOrcTypeConverter.java
new file mode 100644
index 000000000000..feaf9f60b7d1
--- /dev/null
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/util/TestOrcTypeConverter.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.iceberg.util;
+
+import io.trino.orc.metadata.ColumnMetadata;
+import io.trino.orc.metadata.OrcColumnId;
+import io.trino.orc.metadata.OrcType;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.types.Types;
+import org.junit.jupiter.api.Test;
+
+import static io.trino.orc.metadata.OrcType.OrcTypeKind.TIMESTAMP;
+import static io.trino.orc.metadata.OrcType.OrcTypeKind.TIMESTAMP_INSTANT;
+import static io.trino.plugin.iceberg.util.OrcTypeConverter.ICEBERG_TIMESTAMP_UNIT;
+import static io.trino.plugin.iceberg.util.OrcTypeConverter.ICEBERG_TIMESTAMP_UNIT_MICROS;
+import static io.trino.plugin.iceberg.util.OrcTypeConverter.ICEBERG_TIMESTAMP_UNIT_NANOS;
+import static io.trino.plugin.iceberg.util.OrcTypeConverter.toOrcType;
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class TestOrcTypeConverter
+{
+ @Test
+ public void testTimestampUnitsAreAnnotatedForOrc()
+ {
+ Schema schema = new Schema(
+ Types.NestedField.optional(1, "ts_micros", Types.TimestampType.withoutZone()),
+ Types.NestedField.optional(2, "ts_micros_tz", Types.TimestampType.withZone()),
+ Types.NestedField.optional(3, "ts_nanos", Types.TimestampNanoType.withoutZone()),
+ Types.NestedField.optional(4, "ts_nanos_tz", Types.TimestampNanoType.withZone()));
+
+ ColumnMetadata orcTypes = toOrcType(schema);
+
+ OrcType tsMicros = orcTypes.get(new OrcColumnId(1));
+ assertThat(tsMicros.getOrcTypeKind()).isEqualTo(TIMESTAMP);
+ assertThat(tsMicros.getAttributes()).containsEntry(ICEBERG_TIMESTAMP_UNIT, ICEBERG_TIMESTAMP_UNIT_MICROS);
+
+ OrcType tsMicrosTz = orcTypes.get(new OrcColumnId(2));
+ assertThat(tsMicrosTz.getOrcTypeKind()).isEqualTo(TIMESTAMP_INSTANT);
+ assertThat(tsMicrosTz.getAttributes()).containsEntry(ICEBERG_TIMESTAMP_UNIT, ICEBERG_TIMESTAMP_UNIT_MICROS);
+
+ OrcType tsNanos = orcTypes.get(new OrcColumnId(3));
+ assertThat(tsNanos.getOrcTypeKind()).isEqualTo(TIMESTAMP);
+ assertThat(tsNanos.getAttributes()).containsEntry(ICEBERG_TIMESTAMP_UNIT, ICEBERG_TIMESTAMP_UNIT_NANOS);
+
+ OrcType tsNanosTz = orcTypes.get(new OrcColumnId(4));
+ assertThat(tsNanosTz.getOrcTypeKind()).isEqualTo(TIMESTAMP_INSTANT);
+ assertThat(tsNanosTz.getAttributes()).containsEntry(ICEBERG_TIMESTAMP_UNIT, ICEBERG_TIMESTAMP_UNIT_NANOS);
+ }
+}
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/util/TestTimestamps.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/util/TestTimestamps.java
new file mode 100644
index 000000000000..95ee95b37f7f
--- /dev/null
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/util/TestTimestamps.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.iceberg.util;
+
+import io.trino.spi.TrinoException;
+import io.trino.spi.type.LongTimestamp;
+import io.trino.spi.type.LongTimestampWithTimeZone;
+import org.junit.jupiter.api.Test;
+
+import static io.trino.spi.type.TimeZoneKey.UTC_KEY;
+import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MICROSECOND;
+import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND;
+import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND;
+import static java.lang.Math.floorDiv;
+import static java.lang.Math.floorMod;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+public class TestTimestamps
+{
+ private static final long MIN_NANO_EPOCH_MICROS = floorDiv(Long.MIN_VALUE, NANOSECONDS_PER_MICROSECOND);
+ private static final int MIN_NANO_OF_MICRO = (int) floorMod(Long.MIN_VALUE, NANOSECONDS_PER_MICROSECOND);
+ private static final long MAX_NANO_EPOCH_MICROS = floorDiv(Long.MAX_VALUE, NANOSECONDS_PER_MICROSECOND);
+ private static final int MAX_NANO_OF_MICRO = (int) floorMod(Long.MAX_VALUE, NANOSECONDS_PER_MICROSECOND);
+
+ private static final long MIN_NANO_EPOCH_MILLIS = floorDiv(Long.MIN_VALUE, NANOSECONDS_PER_MILLISECOND);
+ private static final int MIN_NANO_OF_MILLI = (int) floorMod(Long.MIN_VALUE, NANOSECONDS_PER_MILLISECOND);
+ private static final long MAX_NANO_EPOCH_MILLIS = floorDiv(Long.MAX_VALUE, NANOSECONDS_PER_MILLISECOND);
+ private static final int MAX_NANO_OF_MILLI = (int) floorMod(Long.MAX_VALUE, NANOSECONDS_PER_MILLISECOND);
+
+ @Test
+ public void testTimestampNanosRoundTripAtLongBounds()
+ {
+ assertThat(Timestamps.timestampToNanos(Timestamps.timestampFromNanos(Long.MIN_VALUE))).isEqualTo(Long.MIN_VALUE);
+ assertThat(Timestamps.timestampToNanos(Timestamps.timestampFromNanos(Long.MAX_VALUE))).isEqualTo(Long.MAX_VALUE);
+ }
+
+ @Test
+ public void testTimestampTzNanosRoundTripAtLongBounds()
+ {
+ assertThat(Timestamps.timestampTzToNanos(Timestamps.timestampTzFromNanos(Long.MIN_VALUE))).isEqualTo(Long.MIN_VALUE);
+ assertThat(Timestamps.timestampTzToNanos(Timestamps.timestampTzFromNanos(Long.MAX_VALUE))).isEqualTo(Long.MAX_VALUE);
+ }
+
+ @Test
+ public void testTimestampNanosRejectsOutOfRangeBoundaryTuples()
+ {
+ assertThatThrownBy(() -> Timestamps.timestampToNanos(new LongTimestamp(MIN_NANO_EPOCH_MICROS, (MIN_NANO_OF_MICRO - 1) * PICOSECONDS_PER_NANOSECOND)))
+ .isInstanceOf(TrinoException.class)
+ .hasMessage("Timestamp value is outside the range supported by Iceberg nano timestamps");
+
+ assertThatThrownBy(() -> Timestamps.timestampToNanos(new LongTimestamp(MAX_NANO_EPOCH_MICROS, (MAX_NANO_OF_MICRO + 1) * PICOSECONDS_PER_NANOSECOND)))
+ .isInstanceOf(TrinoException.class)
+ .hasMessage("Timestamp value is outside the range supported by Iceberg nano timestamps");
+ }
+
+ @Test
+ public void testTimestampTzNanosRejectsOutOfRangeBoundaryTuples()
+ {
+ assertThatThrownBy(() -> Timestamps.timestampTzToNanos(LongTimestampWithTimeZone.fromEpochMillisAndFraction(MIN_NANO_EPOCH_MILLIS, (MIN_NANO_OF_MILLI - 1) * PICOSECONDS_PER_NANOSECOND, UTC_KEY)))
+ .isInstanceOf(TrinoException.class)
+ .hasMessage("Timestamp value is outside the range supported by Iceberg nano timestamps");
+
+ assertThatThrownBy(() -> Timestamps.timestampTzToNanos(LongTimestampWithTimeZone.fromEpochMillisAndFraction(MAX_NANO_EPOCH_MILLIS, (MAX_NANO_OF_MILLI + 1) * PICOSECONDS_PER_NANOSECOND, UTC_KEY)))
+ .isInstanceOf(TrinoException.class)
+ .hasMessage("Timestamp value is outside the range supported by Iceberg nano timestamps");
+ }
+}
diff --git a/plugin/trino-lakehouse/src/test/java/io/trino/plugin/lakehouse/TestLakehouseConnectorTest.java b/plugin/trino-lakehouse/src/test/java/io/trino/plugin/lakehouse/TestLakehouseConnectorTest.java
index ea96a5d578e5..9f83570222ce 100644
--- a/plugin/trino-lakehouse/src/test/java/io/trino/plugin/lakehouse/TestLakehouseConnectorTest.java
+++ b/plugin/trino-lakehouse/src/test/java/io/trino/plugin/lakehouse/TestLakehouseConnectorTest.java
@@ -204,7 +204,8 @@ protected void verifyConcurrentAddColumnFailurePermissible(Exception e)
protected void verifySetColumnTypeFailurePermissible(Throwable e)
{
assertThat(e).hasMessageMatching(".*(Failed to set column type: Cannot change (column type:|type from .* to )" +
- "|Time(stamp)? precision \\(3\\) not supported for Iceberg. Use \"time(stamp)?\\(6\\)\" instead" +
+ "|Time precision \\(3\\) not supported for Iceberg. Use \"time\\(6\\)\" instead" +
+ "|Timestamp precision \\(3\\) not supported for Iceberg. Use \"timestamp\\(6\\)\" or \"timestamp\\(9\\)\" instead" +
"|Type not supported for Iceberg: (tinyint|smallint|char\\(20\\))" +
"|Cannot update map keys).*");
}
@@ -213,7 +214,8 @@ protected void verifySetColumnTypeFailurePermissible(Throwable e)
protected void verifySetFieldTypeFailurePermissible(Throwable e)
{
assertThat(e).hasMessageMatching(".*(Failed to set field type: Cannot change (column type:|type from .* to )" +
- "|Time(stamp)? precision \\(3\\) not supported for Iceberg. Use \"time(stamp)?\\(6\\)\" instead" +
+ "|Time precision \\(3\\) not supported for Iceberg. Use \"time\\(6\\)\" instead" +
+ "|Timestamp precision \\(3\\) not supported for Iceberg. Use \"timestamp\\(6\\)\" or \"timestamp\\(9\\)\" instead" +
"|Type not supported for Iceberg: (tinyint|smallint|char\\(20\\))" +
"|Cannot update map keys).*");
}
diff --git a/plugin/trino-postgresql/pom.xml b/plugin/trino-postgresql/pom.xml
index 18005e7dd435..38a51a283e58 100644
--- a/plugin/trino-postgresql/pom.xml
+++ b/plugin/trino-postgresql/pom.xml
@@ -43,7 +43,7 @@
io.trino
- trino-geospatial
+ trino-geospatial-toolkit
@@ -71,6 +71,11 @@
jdbi3-core
+
+ org.locationtech.jts
+ jts-core
+
+
org.postgresql
postgresql
@@ -198,6 +203,12 @@
test
+
+ io.trino
+ trino-geospatial
+ test
+
+
io.trino
trino-jmx
diff --git a/plugin/trino-postgresql/src/main/java/io/trino/plugin/postgresql/PostgreSqlClient.java b/plugin/trino-postgresql/src/main/java/io/trino/plugin/postgresql/PostgreSqlClient.java
index fa07eb4a5d3c..15d37ba9762e 100644
--- a/plugin/trino-postgresql/src/main/java/io/trino/plugin/postgresql/PostgreSqlClient.java
+++ b/plugin/trino-postgresql/src/main/java/io/trino/plugin/postgresql/PostgreSqlClient.java
@@ -116,6 +116,7 @@
import io.trino.spi.type.VarcharType;
import org.jdbi.v3.core.Handle;
import org.jdbi.v3.core.Jdbi;
+import org.locationtech.jts.geom.Geometry;
import org.postgresql.core.TypeInfo;
import org.postgresql.jdbc.PgConnection;
@@ -157,10 +158,11 @@
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static io.airlift.slice.Slices.utf8Slice;
import static io.airlift.slice.Slices.wrappedBuffer;
+import static io.trino.geospatial.serde.JtsGeometrySerde.deserialize;
+import static io.trino.geospatial.serde.JtsGeometrySerde.serialize;
import static io.trino.plugin.base.util.JsonTypeUtil.jsonParse;
import static io.trino.plugin.base.util.JsonTypeUtil.toJsonValue;
-import static io.trino.plugin.geospatial.GeoFunctions.stAsBinary;
-import static io.trino.plugin.geospatial.GeoFunctions.stGeomFromBinary;
+import static io.trino.plugin.jdbc.DecimalConfig.DecimalMapping.ALLOW_OVERFLOW;
import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalDefaultScale;
import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalRounding;
import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalRoundingMode;
@@ -1918,33 +1920,39 @@ private static ObjectWriteFunction vectorWriteFunction()
private ColumnMapping geometryColumnMapping()
{
- return ColumnMapping.sliceMapping(
+ return ColumnMapping.objectMapping(
geometryType,
- (resultSet, columnIndex) -> {
+ ObjectReadFunction.of(Geometry.class, (resultSet, columnIndex) -> {
String hexWkb = resultSet.getString(columnIndex);
byte[] wkb = HexFormat.of().parseHex(hexWkb);
- return stGeomFromBinary(wrappedBuffer(wkb));
- },
+ return deserialize(wrappedBuffer(wkb));
+ }),
geometryWriteFunction(),
DISABLE_PUSHDOWN);
}
- private static SliceWriteFunction geometryWriteFunction()
+ private static ObjectWriteFunction geometryWriteFunction()
{
- return new SliceWriteFunction()
+ return new ObjectWriteFunction()
{
+ @Override
+ public Class> getJavaType()
+ {
+ return Geometry.class;
+ }
+
@Override
public String getBindExpression()
{
- return "ST_GeomFromWKB(?)";
+ return "ST_GeomFromEWKB(?)";
}
@Override
- public void set(PreparedStatement statement, int index, Slice slice)
+ public void set(PreparedStatement statement, int index, Object value)
throws SQLException
{
- byte[] bytes = stAsBinary(slice).getBytes();
- statement.setBytes(index, bytes);
+ Geometry geometry = (Geometry) value;
+ statement.setBytes(index, serialize(geometry).getBytes());
}
};
}
diff --git a/plugin/trino-postgresql/src/test/java/io/trino/plugin/postgresql/TestPostgreSqlGeometryType.java b/plugin/trino-postgresql/src/test/java/io/trino/plugin/postgresql/TestPostgreSqlGeometryType.java
index c8888cc6d503..17d591b28bd2 100644
--- a/plugin/trino-postgresql/src/test/java/io/trino/plugin/postgresql/TestPostgreSqlGeometryType.java
+++ b/plugin/trino-postgresql/src/test/java/io/trino/plugin/postgresql/TestPostgreSqlGeometryType.java
@@ -66,6 +66,30 @@ void testGeometryWrite()
}
}
+ @Test
+ void testGeometryWriteWithSrid()
+ {
+ try (TestTable table = new TestTable(postgreSqlServer::execute, "test_geometry_write", "(geom geometry)")) {
+ assertUpdate("INSERT INTO " + table.getName() + " VALUES (ST_SetSRID(ST_Point(1, 1), 4326))", 1);
+ assertThat(query("SELECT ST_SRID(geom) FROM " + table.getName()))
+ .matches("VALUES 4326");
+ assertThat(query("SELECT * FROM " + table.getName()))
+ .matches("VALUES ST_Point(1, 1)");
+ }
+ }
+
+ @Test
+ void testGeometryWriteWithSridIntoConstrainedColumn()
+ {
+ try (TestTable table = new TestTable(postgreSqlServer::execute, "test_geometry_write", "(geom geometry(point, 4326))")) {
+ assertUpdate("INSERT INTO " + table.getName() + " VALUES (ST_SetSRID(ST_Point(1, 1), 4326))", 1);
+ assertThat(query("SELECT ST_SRID(geom) FROM " + table.getName()))
+ .matches("VALUES 4326");
+ assertThat(query("SELECT * FROM " + table.getName()))
+ .matches("VALUES ST_Point(1, 1)");
+ }
+ }
+
@Test
void testGeometryNullRead()
{
diff --git a/plugin/trino-teradata/README.md b/plugin/trino-teradata/README.md
new file mode 100644
index 000000000000..e9e3817f684b
--- /dev/null
+++ b/plugin/trino-teradata/README.md
@@ -0,0 +1,42 @@
+# Teradata Connector Developer Notes
+
+The Teradata connector module has both unit tests and integration tests.
+The integration tests require access to a [Teradata ClearScape Analytics™ Experience](https://clearscape.teradata.com/sign-in).
+You can follow the steps below to run the integration tests locally.
+
+## Prerequisites
+
+#### 1. Create a new ClearScape Analytics™ Experience account
+
+If you don't already have one, sign up at:
+
+[Teradata ClearScape Analytics™ Experience](https://www.teradata.com/getting-started/demos/clearscape-analytics)
+
+#### 2. Login
+
+Sign in with your new account at:
+
+[ClearScape Analytics™ Experience Login](https://clearscape.teradata.com/sign-in)
+
+#### 3. Collect the API Token
+
+Use the **Copy API Token** button in the UI to retrieve your token.
+
+#### 4. Define the following environment variables
+
+⚠️ **Note:** The Teradata database password must be **at least 8 characters long**.
+
+```
+export CLEARSCAPE_TOKEN=
+export CLEARSCAPE_PASSWORD=
+```
+
+## Running Integration Tests
+
+Once the environment variables are set, run the integration tests with:
+
+⚠️ **Note:** Run the following command from the Trino parent directory.
+
+```
+./mvnw clean install -pl :trino-teradata
+```
diff --git a/plugin/trino-teradata/pom.xml b/plugin/trino-teradata/pom.xml
new file mode 100644
index 000000000000..809439a54f06
--- /dev/null
+++ b/plugin/trino-teradata/pom.xml
@@ -0,0 +1,342 @@
+
+
+ 4.0.0
+
+ io.trino
+ trino-root
+ 481-SNAPSHOT
+ ../../pom.xml
+
+
+ trino-teradata
+ trino-plugin
+ ${project.artifactId}
+ Trino - Teradata connector
+
+
+ true
+
+
+
+
+
+ com.google.guava
+ guava
+
+
+
+ com.google.inject
+ guice
+ classes
+
+
+
+ com.teradata.jdbc
+ terajdbc
+
+
+
+ io.airlift
+ configuration
+
+
+
+ io.airlift
+ log
+
+
+
+ io.trino
+ trino-base-jdbc
+
+
+
+ io.trino
+ trino-plugin-toolkit
+
+
+
+ jakarta.validation
+ jakarta.validation-api
+
+
+
+ com.fasterxml.jackson.core
+ jackson-annotations
+ provided
+
+
+
+ io.airlift
+ slice
+ provided
+
+
+
+ io.opentelemetry
+ opentelemetry-api
+ provided
+
+
+
+ io.opentelemetry
+ opentelemetry-api-incubator
+ provided
+
+
+
+ io.opentelemetry
+ opentelemetry-context
+ provided
+
+
+
+ io.trino
+ trino-spi
+ provided
+
+
+
+ com.fasterxml.jackson.core
+ jackson-core
+ runtime
+
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+ runtime
+
+
+
+ com.google.errorprone
+ error_prone_annotations
+ runtime
+ true
+
+
+
+ io.airlift
+ json
+ runtime
+
+
+
+ io.airlift
+ log-manager
+ runtime
+
+
+
+ io.airlift
+ units
+ runtime
+
+
+
+ io.airlift
+ configuration-testing
+ test
+
+
+
+ io.airlift
+ junit-extensions
+ test
+
+
+
+ io.airlift
+ testing
+ test
+
+
+
+ io.airlift
+ tracing
+ test
+
+
+
+ io.trino
+ trino-base-jdbc
+ test-jar
+ test
+
+
+
+ io.trino
+ trino-exchange-filesystem
+ test
+
+
+
+ io.trino
+ trino-exchange-filesystem
+ test-jar
+ test
+
+
+
+ io.trino
+ trino-jmx
+ test
+
+
+
+ io.trino
+ trino-main
+ test
+
+
+
+ io.trino
+ trino-main
+ test-jar
+ test
+
+
+
+ io.trino
+ trino-parser
+ test
+
+
+
+ io.trino
+ trino-testing
+ test
+
+
+
+ io.trino
+ trino-testing-containers
+ test
+
+
+
+ io.trino
+ trino-testing-services
+ test
+
+
+
+ io.trino
+ trino-tpch
+ test
+
+
+
+ io.trino.tpch
+ tpch
+ test
+
+
+
+ org.assertj
+ assertj-core
+ test
+
+
+
+ org.jetbrains
+ annotations
+ test
+
+
+
+ org.junit.jupiter
+ junit-jupiter-api
+ test
+
+
+
+ org.junit.jupiter
+ junit-jupiter-engine
+ test
+
+
+
+
+
+ default
+
+ true
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+
+
+ **/TestTeradataConnectorTest.java
+ **/TestTeradataTypeMapping.java
+
+
+
+
+
+
+
+
+ clearscape-tests
+
+ false
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+
+
+ **/TestTeradataConnectorTest.java
+ **/TestTeradataTypeMapping.java
+
+ long_run, data_mapping
+
+
+
+
+
+
+
+ run-only-long-tests
+
+ false
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+
+ long_run
+
+
+
+
+
+
+
+ data-mapping
+
+ false
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+
+ data_mapping
+
+
+
+
+
+
+
diff --git a/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClient.java b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClient.java
new file mode 100644
index 000000000000..167b2b1acc4b
--- /dev/null
+++ b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClient.java
@@ -0,0 +1,357 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.inject.Inject;
+import io.airlift.log.Logger;
+import io.trino.plugin.base.mapping.IdentifierMapping;
+import io.trino.plugin.jdbc.BaseJdbcClient;
+import io.trino.plugin.jdbc.BaseJdbcConfig;
+import io.trino.plugin.jdbc.CaseSensitivity;
+import io.trino.plugin.jdbc.ColumnMapping;
+import io.trino.plugin.jdbc.ConnectionFactory;
+import io.trino.plugin.jdbc.JdbcColumnHandle;
+import io.trino.plugin.jdbc.JdbcOutputTableHandle;
+import io.trino.plugin.jdbc.JdbcTableHandle;
+import io.trino.plugin.jdbc.JdbcTypeHandle;
+import io.trino.plugin.jdbc.QueryBuilder;
+import io.trino.plugin.jdbc.RemoteTableName;
+import io.trino.plugin.jdbc.WriteMapping;
+import io.trino.plugin.jdbc.logging.RemoteQueryModifier;
+import io.trino.spi.TrinoException;
+import io.trino.spi.connector.ColumnMetadata;
+import io.trino.spi.connector.ColumnPosition;
+import io.trino.spi.connector.ConnectorSession;
+import io.trino.spi.connector.SchemaTableName;
+import io.trino.spi.type.CharType;
+import io.trino.spi.type.DecimalType;
+import io.trino.spi.type.Decimals;
+import io.trino.spi.type.Type;
+import io.trino.spi.type.VarcharType;
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.OptionalLong;
+
+import static io.trino.plugin.jdbc.CaseSensitivity.CASE_INSENSITIVE;
+import static io.trino.plugin.jdbc.CaseSensitivity.CASE_SENSITIVE;
+import static io.trino.plugin.jdbc.PredicatePushdownController.CASE_INSENSITIVE_CHARACTER_PUSHDOWN;
+import static io.trino.plugin.jdbc.PredicatePushdownController.FULL_PUSHDOWN;
+import static io.trino.plugin.jdbc.StandardColumnMappings.bigintColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.bigintWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.charReadFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.charWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.dateColumnMappingUsingLocalDate;
+import static io.trino.plugin.jdbc.StandardColumnMappings.dateWriteFunctionUsingLocalDate;
+import static io.trino.plugin.jdbc.StandardColumnMappings.decimalColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.doubleColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.doubleWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.integerColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.integerWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.longDecimalWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.realWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.shortDecimalWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.smallintColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.smallintWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.varcharReadFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.varcharWriteFunction;
+import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.getUnsupportedTypeHandling;
+import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR;
+import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
+import static io.trino.spi.type.BigintType.BIGINT;
+import static io.trino.spi.type.CharType.createCharType;
+import static io.trino.spi.type.DateType.DATE;
+import static io.trino.spi.type.DecimalType.createDecimalType;
+import static io.trino.spi.type.DoubleType.DOUBLE;
+import static io.trino.spi.type.IntegerType.INTEGER;
+import static io.trino.spi.type.RealType.REAL;
+import static io.trino.spi.type.SmallintType.SMALLINT;
+import static io.trino.spi.type.TinyintType.TINYINT;
+import static io.trino.spi.type.VarcharType.createVarcharType;
+import static java.lang.String.format;
+
+/**
+ * {@link BaseJdbcClient} implementation for Teradata.
+ *
+ * <p>Supports reading the standard numeric, character and date types and creating/dropping
+ * schemas and tables; most other DDL/DML operations are rejected with {@code NOT_SUPPORTED}.
+ */
+public class TeradataClient
+        extends BaseJdbcClient
+{
+    private static final Logger log = Logger.get(TeradataClient.class);
+
+    // Bytes of permanent space allocated to each database created through this connector
+    private final long permanentSpace;
+
+    @Inject
+    public TeradataClient(
+            BaseJdbcConfig config,
+            TeradataConfig teradataConfig,
+            ConnectionFactory connectionFactory,
+            QueryBuilder queryBuilder,
+            IdentifierMapping identifierMapping,
+            RemoteQueryModifier remoteQueryModifier)
+    {
+        super("\"", connectionFactory, queryBuilder, config.getJdbcTypesMappedToVarchar(), identifierMapping, remoteQueryModifier, true);
+        this.permanentSpace = teradataConfig.getPermanentSpace();
+    }
+
+    @Override
+    protected void createSchema(ConnectorSession session, Connection connection, String remoteSchemaName)
+    {
+        // Teradata requires database sizing parameters on creation.
+        // The permanent space allocation can be configured using the teradata.permanent-space property.
+        execute(session, format("CREATE DATABASE %s AS PERMANENT = %d", quoted(remoteSchemaName), permanentSpace));
+    }
+
+    @Override
+    protected void verifySchemaName(DatabaseMetaData databaseMetadata, String schemaName)
+            throws SQLException
+    {
+        // Enforce the driver-reported identifier length limit before issuing DDL
+        int schemaNameLimit = databaseMetadata.getMaxSchemaNameLength();
+        if (schemaName.length() > schemaNameLimit) {
+            throw new TrinoException(
+                    NOT_SUPPORTED,
+                    format("Schema name must be shorter than or equal to '%s' characters but got '%s'", schemaNameLimit, schemaName.length()));
+        }
+    }
+
+    @Override
+    protected void verifyTableName(DatabaseMetaData databaseMetadata, String tableName)
+            throws SQLException
+    {
+        if (tableName.length() > databaseMetadata.getMaxTableNameLength()) {
+            throw new TrinoException(
+                    NOT_SUPPORTED,
+                    format("Table name must be shorter than or equal to '%s' characters but got '%s'", databaseMetadata.getMaxTableNameLength(), tableName.length()));
+        }
+    }
+
+    @Override
+    protected void verifyColumnName(DatabaseMetaData databaseMetadata, String columnName)
+            throws SQLException
+    {
+        if (columnName.length() > databaseMetadata.getMaxColumnNameLength()) {
+            throw new TrinoException(
+                    NOT_SUPPORTED,
+                    format("Column name must be shorter than or equal to '%s' characters but got '%s': '%s'", databaseMetadata.getMaxColumnNameLength(), columnName.length(), columnName));
+        }
+    }
+
+    @Override
+    protected void dropSchema(ConnectorSession session, Connection connection, String remoteSchemaName, boolean cascade)
+            throws SQLException
+    {
+        // Teradata DROP DATABASE has no CASCADE equivalent; the database must already be empty
+        if (cascade) {
+            throw new TrinoException(
+                    NOT_SUPPORTED,
+                    "This connector does not support dropping schemas with CASCADE option");
+        }
+        String dropSchema = "DROP DATABASE " + quoted(remoteSchemaName);
+        execute(session, connection, dropSchema);
+    }
+
+    @Override
+    public void renameSchema(ConnectorSession session, String schemaName, String newSchemaName)
+    {
+        throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming schemas");
+    }
+
+    @Override
+    public OptionalLong delete(ConnectorSession session, JdbcTableHandle handle)
+    {
+        throw new TrinoException(NOT_SUPPORTED, "This connector does not support modifying table rows");
+    }
+
+    @Override
+    public void truncateTable(ConnectorSession session, JdbcTableHandle handle)
+    {
+        throw new TrinoException(NOT_SUPPORTED, "This connector does not support truncating tables");
+    }
+
+    @Override
+    public void dropColumn(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column)
+    {
+        throw new TrinoException(NOT_SUPPORTED, "This connector does not support dropping columns");
+    }
+
+    @Override
+    public void renameColumn(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle jdbcColumn, String newColumnName)
+    {
+        throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming columns");
+    }
+
+    @Override
+    public void renameTable(ConnectorSession session, JdbcTableHandle handle, SchemaTableName newTableName)
+    {
+        throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming tables");
+    }
+
+    @Override
+    public JdbcOutputTableHandle beginInsertTable(ConnectorSession session, JdbcTableHandle tableHandle, List<JdbcColumnHandle> columns)
+    {
+        throw new TrinoException(NOT_SUPPORTED, "This connector does not support inserts");
+    }
+
+    @Override
+    public void setColumnType(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column, Type type)
+    {
+        throw new TrinoException(NOT_SUPPORTED, "This connector does not support setting column types");
+    }
+
+    @Override
+    public void addColumn(ConnectorSession session, JdbcTableHandle handle, ColumnMetadata column, ColumnPosition position)
+    {
+        throw new TrinoException(NOT_SUPPORTED, "This connector does not support adding columns");
+    }
+
+    @Override
+    public void dropNotNullConstraint(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column)
+    {
+        throw new TrinoException(NOT_SUPPORTED, "This connector does not support dropping a not null constraint");
+    }
+
+    @Override
+    protected Map<String, CaseSensitivity> getCaseSensitivityForColumns(ConnectorSession session, Connection connection, SchemaTableName schemaTableName, RemoteTableName remoteTableName)
+    {
+        // Prepare a zero-row query so the driver exposes per-column metadata without reading data
+        String sql = format("SELECT * FROM %s.%s WHERE 0=1", quoted(schemaTableName.getSchemaName()), quoted(schemaTableName.getTableName()));
+        try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) {
+            ImmutableMap.Builder<String, CaseSensitivity> columns = ImmutableMap.builder();
+            ResultSetMetaData metaData = preparedStatement.getMetaData();
+            int columnCount = metaData.getColumnCount();
+
+            for (int i = 1; i <= columnCount; i++) {
+                columns.put(
+                        metaData.getColumnName(i),
+                        metaData.isCaseSensitive(i) ? CASE_SENSITIVE : CASE_INSENSITIVE);
+            }
+
+            return columns.buildOrThrow();
+        }
+        catch (SQLException e) {
+            // Case sensitivity only affects pushdown; fall back to "unknown" rather than failing the query,
+            // but record the failure so it is diagnosable
+            log.debug(e, "Failed to determine column case sensitivity for %s", schemaTableName);
+            return ImmutableMap.of();
+        }
+    }
+
+    @Override
+    public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
+    {
+        // Honor jdbc-types-mapped-to-varchar before any standard mapping
+        Optional<ColumnMapping> mapping = getForcedMappingToVarchar(typeHandle);
+        if (mapping.isPresent()) {
+            return mapping;
+        }
+
+        switch (typeHandle.jdbcType()) {
+            case Types.TINYINT:
+                return Optional.of(tinyintColumnMapping());
+            case Types.SMALLINT:
+                return Optional.of(smallintColumnMapping());
+            case Types.INTEGER:
+                return Optional.of(integerColumnMapping());
+            case Types.BIGINT:
+                return Optional.of(bigintColumnMapping());
+            case Types.REAL:
+            case Types.DOUBLE:
+            case Types.FLOAT:
+                // FLOAT is a Teradata synonym for REAL and DOUBLE PRECISION
+                return Optional.of(doubleColumnMapping());
+            case Types.NUMERIC:
+            case Types.DECIMAL:
+                return numberMapping(typeHandle);
+            case Types.CHAR:
+                return Optional.of(charColumnMapping(typeHandle.requiredColumnSize(), deriveCaseSensitivity(typeHandle.caseSensitivity())));
+            case Types.VARCHAR:
+                return Optional.of(varcharColumnMapping(typeHandle.requiredColumnSize(), deriveCaseSensitivity(typeHandle.caseSensitivity())));
+            case Types.DATE:
+                return Optional.of(dateColumnMappingUsingLocalDate());
+        }
+
+        if (getUnsupportedTypeHandling(session) == CONVERT_TO_VARCHAR) {
+            log.debug("Mapping unsupported Teradata type %s to VARCHAR", typeHandle);
+            return mapToUnboundedVarchar(typeHandle);
+        }
+
+        return Optional.empty();
+    }
+
+    private static Optional<ColumnMapping> numberMapping(JdbcTypeHandle typeHandle)
+    {
+        int precision = typeHandle.requiredColumnSize();
+        int scale = typeHandle.requiredDecimalDigits();
+        if (precision > Decimals.MAX_PRECISION) {
+            // this will trigger for number(*) as precision is 40
+            return Optional.of(decimalColumnMapping(createDecimalType(Decimals.MAX_PRECISION, scale)));
+        }
+        return Optional.of(decimalColumnMapping(createDecimalType(precision, scale)));
+    }
+
+    private static ColumnMapping charColumnMapping(int charLength, boolean isCaseSensitive)
+    {
+        // Teradata supports max of 64k for char type
+        CharType charType = createCharType(charLength);
+        return ColumnMapping.sliceMapping(
+                charType,
+                charReadFunction(charType),
+                charWriteFunction(),
+                // Case-insensitive remote collation would make predicate pushdown return wrong results
+                isCaseSensitive ? FULL_PUSHDOWN : CASE_INSENSITIVE_CHARACTER_PUSHDOWN);
+    }
+
+    private static ColumnMapping varcharColumnMapping(int varcharLength, boolean isCaseSensitive)
+    {
+        // Teradata supports max of 64k for varchar type
+        VarcharType varcharType = createVarcharType(varcharLength);
+        return ColumnMapping.sliceMapping(
+                varcharType,
+                varcharReadFunction(varcharType),
+                varcharWriteFunction(),
+                isCaseSensitive ? FULL_PUSHDOWN : CASE_INSENSITIVE_CHARACTER_PUSHDOWN);
+    }
+
+    // Treat "unknown" case sensitivity as case-insensitive (the conservative choice for pushdown)
+    private static boolean deriveCaseSensitivity(Optional<CaseSensitivity> caseSensitivity)
+    {
+        return caseSensitivity.equals(Optional.of(CASE_SENSITIVE));
+    }
+
+    @Override
+    public WriteMapping toWriteMapping(ConnectorSession session, Type type)
+    {
+        return switch (type) {
+            // NOTE(review): TINYINT is written as Teradata "smallint" even though BYTEINT exists — confirm this is intentional
+            case Type typeInstance when typeInstance == TINYINT -> WriteMapping.longMapping("smallint", tinyintWriteFunction());
+            case Type typeInstance when typeInstance == SMALLINT -> WriteMapping.longMapping("smallint", smallintWriteFunction());
+            case Type typeInstance when typeInstance == INTEGER -> WriteMapping.longMapping("integer", integerWriteFunction());
+            case Type typeInstance when typeInstance == BIGINT -> WriteMapping.longMapping("bigint", bigintWriteFunction());
+            case Type typeInstance when typeInstance == REAL -> WriteMapping.longMapping("FLOAT", realWriteFunction());
+            case Type typeInstance when typeInstance == DOUBLE -> WriteMapping.doubleMapping("double precision", doubleWriteFunction());
+            case Type typeInstance when typeInstance == DATE -> WriteMapping.longMapping("date", dateWriteFunctionUsingLocalDate());
+            case DecimalType decimalTypeInstance -> {
+                String dataType = format("decimal(%s, %s)", decimalTypeInstance.getPrecision(), decimalTypeInstance.getScale());
+                if (decimalTypeInstance.isShort()) {
+                    yield WriteMapping.longMapping(dataType, shortDecimalWriteFunction(decimalTypeInstance));
+                }
+                yield WriteMapping.objectMapping(dataType, longDecimalWriteFunction(decimalTypeInstance));
+            }
+            case CharType charTypeInstance -> WriteMapping.sliceMapping("char(" + charTypeInstance.getLength() + ")", charWriteFunction());
+            case VarcharType varcharTypeInstance -> {
+                // Unbounded VARCHAR has no Teradata equivalent; fall back to CLOB
+                String dataType = varcharTypeInstance.isUnbounded()
+                        ? "clob"
+                        : "varchar(" + varcharTypeInstance.getBoundedLength() + ")";
+                yield WriteMapping.sliceMapping(dataType, varcharWriteFunction());
+            }
+            default -> throw new TrinoException(NOT_SUPPORTED, "Unsupported column type: " + type.getDisplayName());
+        };
+    }
+}
diff --git a/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClientModule.java b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClientModule.java
new file mode 100644
index 000000000000..44566c748bac
--- /dev/null
+++ b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClientModule.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata;
+
+import com.google.inject.Binder;
+import com.google.inject.Provides;
+import com.google.inject.Scopes;
+import com.google.inject.Singleton;
+import io.airlift.configuration.AbstractConfigurationAwareModule;
+import io.opentelemetry.api.OpenTelemetry;
+import io.trino.plugin.jdbc.BaseJdbcConfig;
+import io.trino.plugin.jdbc.ConnectionFactory;
+import io.trino.plugin.jdbc.DriverConnectionFactory;
+import io.trino.plugin.jdbc.ForBaseJdbc;
+import io.trino.plugin.jdbc.JdbcClient;
+import io.trino.plugin.jdbc.credential.CredentialProvider;
+
+import java.sql.Driver;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+
+import static io.airlift.configuration.ConfigBinder.configBinder;
+
+// Guice module for the Teradata connector: binds TeradataClient as the JDBC client,
+// registers TeradataConfig, and provides the JDBC ConnectionFactory.
+public class TeradataClientModule
+ extends AbstractConfigurationAwareModule
+{
+ @Override
+ public void setup(Binder binder)
+ {
+ binder.bind(JdbcClient.class).annotatedWith(ForBaseJdbc.class).to(TeradataClient.class).in(Scopes.SINGLETON);
+ configBinder(binder).bindConfig(TeradataConfig.class);
+ }
+
+ @Provides
+ @Singleton
+ @ForBaseJdbc
+ public static ConnectionFactory getConnectionFactory(BaseJdbcConfig config, CredentialProvider credentialProvider, OpenTelemetry openTelemetry)
+ throws SQLException
+ {
+ // Resolve whichever registered JDBC driver accepts the configured connection URL
+ Driver driver = DriverManager.getDriver(config.getConnectionUrl());
+ return DriverConnectionFactory.builder(driver, config.getConnectionUrl(), credentialProvider)
+ .setOpenTelemetry(openTelemetry).build();
+ }
+}
diff --git a/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataConfig.java b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataConfig.java
new file mode 100644
index 000000000000..93c9c119908e
--- /dev/null
+++ b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataConfig.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata;
+
+import io.airlift.configuration.Config;
+import io.airlift.configuration.ConfigDescription;
+import jakarta.validation.constraints.Min;
+
+// Connector configuration. permanentSpace is passed as the PERMANENT clause when this
+// connector issues CREATE DATABASE (see TeradataClient.createSchema).
+public class TeradataConfig
+{
+ // Default to 60MB if not specified by user
+ private long permanentSpace = 60000000;
+
+ @Min(1)
+ public long getPermanentSpace()
+ {
+ return permanentSpace;
+ }
+
+ @Config("teradata.permanent-space")
+ @ConfigDescription("Permanent space allocation in bytes for new databases")
+ public TeradataConfig setPermanentSpace(long permanentSpace)
+ {
+ this.permanentSpace = permanentSpace;
+ return this;
+ }
+}
diff --git a/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataPlugin.java b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataPlugin.java
new file mode 100644
index 000000000000..d11110edfbed
--- /dev/null
+++ b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataPlugin.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata;
+
+import io.trino.plugin.jdbc.JdbcPlugin;
+
+// Plugin entry point: registers the "teradata" connector backed by TeradataClientModule.
+public class TeradataPlugin
+ extends JdbcPlugin
+{
+ public TeradataPlugin()
+ {
+ super("teradata", TeradataClientModule::new);
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataPlugin.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataPlugin.java
new file mode 100644
index 000000000000..a52eb8909ade
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataPlugin.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata;
+
+import io.trino.plugin.jdbc.JdbcConnectorFactory;
+import io.trino.spi.connector.ConnectorFactory;
+import io.trino.testing.TestingConnectorContext;
+import org.junit.jupiter.api.Test;
+
+import java.util.Map;
+
+import static com.google.common.collect.Iterables.getOnlyElement;
+import static org.assertj.core.api.Assertions.assertThat;
+
+// Smoke test: the plugin's single factory must create and shut down a connector
+// from minimal configuration without contacting a real Teradata server.
+final class TestTeradataPlugin
+{
+ @Test
+ void testCreateConnector()
+ {
+ TeradataPlugin plugin = new TeradataPlugin();
+ ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories());
+ assertThat(factory).isInstanceOf(JdbcConnectorFactory.class);
+ factory.create("test",
+ Map.of(
+ "connection-url", "jdbc:teradata://test/"),
+ new TestingConnectorContext())
+ .shutdown();
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/AuthenticationConfig.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/AuthenticationConfig.java
new file mode 100644
index 000000000000..605faaf3d0cb
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/AuthenticationConfig.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+// Credentials holder for the integration tests.
+// The no-arg constructor produces an "unset" config with null fields —
+// callers are presumably expected to handle nulls; confirm at the usage sites.
+public record AuthenticationConfig(
+ String userName,
+ String password)
+{
+ public AuthenticationConfig()
+ {
+ this(null, null);
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfig.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfig.java
new file mode 100644
index 000000000000..c6f2d6818b1a
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfig.java
@@ -0,0 +1,158 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+import java.util.Map;
+
+import static java.util.Objects.requireNonNull;
+
+/**
+ * Immutable description of the Teradata database an integration-test run connects to.
+ * Built via {@link Builder}; targets either a directly addressed host or a
+ * ClearScape-provisioned environment (when {@code useClearScape} is true).
+ */
+public class DatabaseConfig
+{
+    private final String jdbcUrl;
+    private final String hostName;
+    private final String databaseName;
+    private final boolean useClearScape;
+    private final AuthenticationConfig authConfig;
+    private final String clearScapeEnvName;
+    private final Map<String, String> jdbcProperties;
+
+    private DatabaseConfig(Builder builder)
+    {
+        // jdbcUrl and hostName are intentionally nullable (ClearScape environments derive them later)
+        jdbcUrl = builder.jdbcUrl;
+        hostName = builder.hostName;
+        useClearScape = builder.useClearScape;
+        databaseName = requireNonNull(builder.databaseName, "databaseName is null");
+        authConfig = requireNonNull(builder.authConfig, "authConfig is null");
+        clearScapeEnvName = requireNonNull(builder.clearScapeEnvName, "clearScapeEnvName is null");
+        jdbcProperties = requireNonNull(builder.jdbcProperties, "jdbcProperties is null");
+    }
+
+    public static Builder builder()
+    {
+        return new Builder();
+    }
+
+    // Produces a builder pre-populated with this instance's values
+    public Builder toBuilder()
+    {
+        return builder()
+                .jdbcUrl(jdbcUrl)
+                .hostName(hostName)
+                .databaseName(databaseName)
+                .useClearScape(useClearScape)
+                .authConfig(authConfig)
+                .clearScapeEnvName(clearScapeEnvName)
+                .jdbcProperties(jdbcProperties);
+    }
+
+    public String getJdbcUrl()
+    {
+        return jdbcUrl;
+    }
+
+    public String getDatabaseName()
+    {
+        return databaseName;
+    }
+
+    public boolean isUseClearScape()
+    {
+        return useClearScape;
+    }
+
+    public AuthenticationConfig getAuthConfig()
+    {
+        return authConfig;
+    }
+
+    public String getClearScapeEnvName()
+    {
+        return clearScapeEnvName;
+    }
+
+    public Map<String, String> getJdbcProperties()
+    {
+        return jdbcProperties;
+    }
+
+    public String getHostName()
+    {
+        return hostName;
+    }
+
+    // Teradata transaction mode; jdbcProperties is never null (checked in the constructor),
+    // so default to ANSI when no TMODE property was supplied
+    public String getTMode()
+    {
+        return jdbcProperties.getOrDefault("TMODE", "ANSI");
+    }
+
+    public static class Builder
+    {
+        private String jdbcUrl;
+        private String hostName;
+        private String databaseName = "trino";
+        private boolean useClearScape;
+        private AuthenticationConfig authConfig = new AuthenticationConfig();
+        private String clearScapeEnvName;
+        private Map<String, String> jdbcProperties;
+
+        public Builder jdbcUrl(String jdbcUrl)
+        {
+            this.jdbcUrl = jdbcUrl;
+            return this;
+        }
+
+        public Builder databaseName(String databaseName)
+        {
+            this.databaseName = databaseName;
+            return this;
+        }
+
+        public Builder useClearScape(boolean useClearScape)
+        {
+            this.useClearScape = useClearScape;
+            return this;
+        }
+
+        public Builder authConfig(AuthenticationConfig authConfig)
+        {
+            this.authConfig = authConfig;
+            return this;
+        }
+
+        public Builder clearScapeEnvName(String clearScapeEnvName)
+        {
+            this.clearScapeEnvName = clearScapeEnvName;
+            return this;
+        }
+
+        public Builder jdbcProperties(Map<String, String> jdbcProperties)
+        {
+            this.jdbcProperties = jdbcProperties;
+            return this;
+        }
+
+        public Builder hostName(String hostName)
+        {
+            this.hostName = hostName;
+            return this;
+        }
+
+        public DatabaseConfig build()
+        {
+            return new DatabaseConfig(this);
+        }
+    }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfigFactory.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfigFactory.java
new file mode 100644
index 000000000000..4dbbb2d97742
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfigFactory.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static io.trino.testing.SystemEnvironmentUtils.isEnvSet;
+import static io.trino.testing.SystemEnvironmentUtils.requireEnv;
+
+/**
+ * Creates {@link DatabaseConfig} instances from environment variables, supporting both a
+ * hosted ClearScape Analytics environment (when {@code CLEARSCAPE_TOKEN} is set) and a
+ * directly addressed Teradata server.
+ */
+public final class DatabaseConfigFactory
+{
+    private DatabaseConfigFactory() {}
+
+    /**
+     * Builds a database configuration for the given environment name.
+     *
+     * @param envName logical environment name; dashes are replaced with underscores to form
+     *        the database name, since Teradata object names cannot contain dashes
+     * @throws RuntimeException if a required environment variable is not set (via {@code requireEnv})
+     */
+    public static DatabaseConfig create(String envName)
+    {
+        String userName;
+        String password;
+        String hostName = null;
+
+        if (isEnvSet("CLEARSCAPE_TOKEN")) {
+            // ClearScape-hosted environment: fixed demo user, password supplied via env var
+            userName = TeradataTestConstants.CLEARSCAPE_USERNAME;
+            password = requireEnv("CLEARSCAPE_PASSWORD");
+        }
+        else {
+            // Direct Teradata server: all connection details come from the environment
+            userName = requireEnv("TERADATA_USERNAME");
+            password = requireEnv("TERADATA_PASSWORD");
+            hostName = requireEnv("TERADATA_HOSTNAME");
+        }
+
+        String databaseName = envName.replace("-", "_");
+
+        AuthenticationConfig authConfig = createAuthConfig(userName, password);
+        // Restored generics: string-valued JDBC driver properties
+        Map<String, String> jdbcProperties = new HashMap<>();
+        jdbcProperties.put("TMODE", "ANSI");
+        jdbcProperties.put("CHARSET", "UTF8");
+
+        return DatabaseConfig.builder()
+                .hostName(hostName)
+                .databaseName(databaseName)
+                // no explicit host means the ClearScape-hosted path was taken above
+                .useClearScape(hostName == null)
+                .authConfig(authConfig)
+                .clearScapeEnvName(envName)
+                .jdbcProperties(jdbcProperties)
+                .build();
+    }
+
+    private static AuthenticationConfig createAuthConfig(String username, String password)
+    {
+        return new AuthenticationConfig(username, password);
+    }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataQueryRunner.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataQueryRunner.java
new file mode 100644
index 000000000000..64a5abe60d6e
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataQueryRunner.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+import com.google.common.collect.ImmutableList;
+import com.google.errorprone.annotations.CanIgnoreReturnValue;
+import io.airlift.log.Level;
+import io.airlift.log.Logger;
+import io.airlift.log.Logging;
+import io.trino.Session;
+import io.trino.metadata.QualifiedObjectName;
+import io.trino.plugin.teradata.TeradataPlugin;
+import io.trino.plugin.tpch.TpchPlugin;
+import io.trino.testing.DistributedQueryRunner;
+import io.trino.testing.QueryRunner;
+import io.trino.tpch.TpchTable;
+import org.intellij.lang.annotations.Language;
+
+import java.util.List;
+import java.util.Locale;
+
+import static io.trino.plugin.tpch.TpchMetadata.TINY_SCHEMA_NAME;
+import static io.trino.testing.TestingSession.testSessionBuilder;
+import static java.lang.String.format;
+import static java.util.Objects.requireNonNull;
+import static org.assertj.core.api.Assertions.assertThat;
+
+public final class TeradataQueryRunner
+{
+ private TeradataQueryRunner() {}
+
+ public static Builder builder(TestingTeradataServer server)
+ {
+ return new Builder(server);
+ }
+
+ public static class Builder
+ extends DistributedQueryRunner.Builder
+ {
+ private final TestingTeradataServer server;
+ private List> initialTables = ImmutableList.of();
+
+ protected Builder(TestingTeradataServer server)
+ {
+ super(testSessionBuilder().setCatalog("teradata").setSchema(server.getDatabaseName()).build());
+ this.server = requireNonNull(server, "server is null");
+ }
+
+ @CanIgnoreReturnValue
+ public Builder setInitialTables(Iterable> initialTables)
+ {
+ this.initialTables = ImmutableList.copyOf(requireNonNull(initialTables, "initialTables is null"));
+ return this;
+ }
+
+ @Override
+ public DistributedQueryRunner build()
+ throws Exception
+ {
+ super.setAdditionalSetup(runner -> {
+ runner.installPlugin(new TpchPlugin());
+ runner.createCatalog("tpch", "tpch");
+
+ runner.installPlugin(new TeradataPlugin());
+ runner.createCatalog("teradata", "teradata", server.fetchCatalogProperties());
+
+ copyTpchTables(runner, "tpch", TINY_SCHEMA_NAME, initialTables);
+ });
+ return super.build();
+ }
+
+ private void copyTpchTables(QueryRunner queryRunner, String sourceCatalog, String sourceSchema, Iterable> tables)
+ {
+ copyTpchTables(queryRunner, sourceCatalog, sourceSchema, queryRunner.getDefaultSession(), tables);
+ }
+
+ private void copyTpchTables(QueryRunner queryRunner, String sourceCatalog, String sourceSchema, Session session, Iterable> tables)
+ {
+ for (TpchTable> table : tables) {
+ copyTable(queryRunner, sourceCatalog, sourceSchema, table.getTableName().toLowerCase(Locale.ENGLISH), session);
+ }
+ }
+
+ private void copyTable(QueryRunner queryRunner, String sourceCatalog, String sourceSchema, String sourceTable, Session session)
+ {
+ QualifiedObjectName table = new QualifiedObjectName(sourceCatalog, sourceSchema, sourceTable);
+ if (!server.tableExists(sourceTable)) {
+ copyTable(queryRunner, table, session);
+ }
+ }
+
+ private void copyTable(QueryRunner queryRunner, QualifiedObjectName table, Session session)
+ {
+ @Language("SQL") String sql = format("CREATE TABLE %s AS SELECT * FROM %s", table.objectName(), table);
+ queryRunner.execute(session, sql);
+ assertThat(queryRunner.execute(session, "SELECT count(*) FROM " + table.objectName())
+ .getOnlyValue())
+ .as("Table is not loaded properly: %s", new Object[] {table.objectName()})
+ .isEqualTo(queryRunner.execute(session, "SELECT count(*) FROM " + table).getOnlyValue());
+ }
+
+ static void main()
+ throws Exception
+ {
+ Logging logger = Logging.initialize();
+ logger.setLevel("io.trino.plugin.teradata", Level.DEBUG);
+ logger.setLevel("io.trino", Level.INFO);
+ TestingTeradataServer server = new TestingTeradataServer("TeradataQueryRunner", false);
+ QueryRunner queryRunner = builder(server)
+ .addCoordinatorProperty("http-server.http.port", "8080")
+ .setInitialTables(TpchTable.getTables())
+ .build();
+
+ Logger log = Logger.get(TeradataQueryRunner.class);
+ log.info("======== SERVER STARTED ========");
+ log.info("\n====\n%s\n====", queryRunner.getCoordinator().getBaseUrl());
+ }
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataTestConstants.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataTestConstants.java
new file mode 100644
index 000000000000..df34c711ca95
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataTestConstants.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+/**
+ * Constants shared by the Teradata integration tests.
+ */
+public final class TeradataTestConstants
+{
+    // Base endpoint of the Teradata ClearScape Analytics Experience REST API.
+    public static final String CLEARSCAPE_URL = "https://api.clearscape.teradata.com";
+    // Default username provisioned in ClearScape demo environments.
+    public static final String CLEARSCAPE_USERNAME = "demo_user";
+
+    private TeradataTestConstants() {}
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataConnectorTest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataConnectorTest.java
new file mode 100644
index 000000000000..a293c12b7392
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataConnectorTest.java
@@ -0,0 +1,596 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+import com.google.common.collect.ImmutableList;
+import io.trino.Session;
+import io.trino.plugin.jdbc.BaseJdbcConnectorTest;
+import io.trino.sql.query.QueryAssertions;
+import io.trino.testing.QueryRunner;
+import io.trino.testing.TestingConnectorBehavior;
+import io.trino.testing.TestingNames;
+import io.trino.testing.assertions.TrinoExceptionAssert;
+import io.trino.testing.sql.SqlExecutor;
+import io.trino.testing.sql.TestTable;
+import org.assertj.core.api.AssertProvider;
+import org.intellij.lang.annotations.Language;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.parallel.ResourceAccessMode;
+import org.junit.jupiter.api.parallel.ResourceLock;
+
+import java.util.List;
+import java.util.Optional;
+import java.util.OptionalInt;
+import java.util.function.Consumer;
+
+import static io.trino.plugin.teradata.integration.clearscape.ClearScapeEnvironmentUtils.generateUniqueEnvName;
+import static io.trino.spi.connector.ConnectorMetadata.MODIFYING_ROWS_MESSAGE;
+import static io.trino.testing.TestingNames.randomNameSuffix;
+import static java.lang.String.format;
+import static java.util.Objects.requireNonNull;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assumptions.abort;
+
+final class TestTeradataConnectorTest
+ extends BaseJdbcConnectorTest
+{
+ private static final int TERADATA_OBJECT_NAME_LIMIT = 128;
+
+ private TestingTeradataServer database;
+
+ @Override
+ protected SqlExecutor onRemoteDatabase()
+ {
+ return database;
+ }
+
+ @Override
+ protected QueryRunner createQueryRunner()
+ throws Exception
+ {
+ database = closeAfterClass(new TestingTeradataServer(generateUniqueEnvName(getClass()), true));
+ return TeradataQueryRunner.builder(database).setInitialTables(REQUIRED_TPCH_TABLES).build();
+ }
+
+ @Override
+ protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
+ {
+ return switch (connectorBehavior) {
+ case SUPPORTS_ADD_COLUMN,
+ SUPPORTS_AGGREGATION_PUSHDOWN,
+ SUPPORTS_COMMENT_ON_COLUMN,
+ SUPPORTS_COMMENT_ON_TABLE,
+ SUPPORTS_CREATE_MATERIALIZED_VIEW,
+ SUPPORTS_CREATE_TABLE_WITH_COLUMN_COMMENT,
+ SUPPORTS_CREATE_TABLE_WITH_DATA,
+ SUPPORTS_CREATE_TABLE_WITH_TABLE_COMMENT,
+ SUPPORTS_CREATE_VIEW,
+ SUPPORTS_DELETE,
+ SUPPORTS_DEREFERENCE_PUSHDOWN,
+ SUPPORTS_DROP_COLUMN,
+ SUPPORTS_DROP_NOT_NULL_CONSTRAINT,
+ SUPPORTS_DROP_SCHEMA_CASCADE,
+ SUPPORTS_INSERT,
+ SUPPORTS_JOIN_PUSHDOWN,
+ SUPPORTS_JOIN_PUSHDOWN_WITH_DISTINCT_FROM,
+ SUPPORTS_JOIN_PUSHDOWN_WITH_VARCHAR_INEQUALITY,
+ SUPPORTS_LIMIT_PUSHDOWN,
+ SUPPORTS_MAP_TYPE,
+ SUPPORTS_MERGE,
+ SUPPORTS_NATIVE_QUERY,
+ SUPPORTS_NEGATIVE_DATE,
+ SUPPORTS_PREDICATE_ARITHMETIC_EXPRESSION_PUSHDOWN,
+ SUPPORTS_PREDICATE_EXPRESSION_PUSHDOWN,
+ SUPPORTS_PREDICATE_PUSHDOWN,
+ SUPPORTS_PREDICATE_PUSHDOWN_WITH_VARCHAR_INEQUALITY,
+ SUPPORTS_RENAME_COLUMN,
+ SUPPORTS_RENAME_SCHEMA,
+ SUPPORTS_RENAME_TABLE,
+ SUPPORTS_ROW_LEVEL_DELETE,
+ SUPPORTS_ROW_TYPE,
+ SUPPORTS_SET_COLUMN_TYPE,
+ SUPPORTS_TOPN_PUSHDOWN,
+ SUPPORTS_TOPN_PUSHDOWN_WITH_VARCHAR,
+ SUPPORTS_TRUNCATE,
+ SUPPORTS_UPDATE -> false;
+ case SUPPORTS_CREATE_SCHEMA,
+ SUPPORTS_CREATE_TABLE -> true;
+ default -> super.hasBehavior(connectorBehavior);
+ };
+ }
+
+ @AfterAll
+ public void cleanupTestDatabase()
+ {
+ database = null;
+ }
+
+ @Override
+ protected OptionalInt maxSchemaNameLength()
+ {
+ return OptionalInt.of(TERADATA_OBJECT_NAME_LIMIT);
+ }
+
+ @Override
+ protected void verifySchemaNameLengthFailurePermissible(Throwable e)
+ {
+ assertThat(e)
+ .hasMessage(format("Schema name must be shorter than or equal to '%s' characters but got '%s'", TERADATA_OBJECT_NAME_LIMIT, TERADATA_OBJECT_NAME_LIMIT + 1));
+ }
+
+ @Override
+ protected OptionalInt maxColumnNameLength()
+ {
+ return OptionalInt.of(TERADATA_OBJECT_NAME_LIMIT);
+ }
+
+ @Override
+ protected void verifyColumnNameLengthFailurePermissible(Throwable e)
+ {
+ assertThat(e)
+ .hasMessageMatching(format("Column name must be shorter than or equal to '%s' characters but got '%s': '.*'", TERADATA_OBJECT_NAME_LIMIT, TERADATA_OBJECT_NAME_LIMIT + 1));
+ }
+
+ @Override
+ @Test
+ @Tag("data_mapping")
+ public void testDataMappingSmokeTest()
+ {
+ super.testDataMappingSmokeTest();
+ }
+
+ @Override
+ protected OptionalInt maxTableNameLength()
+ {
+ return OptionalInt.of(TERADATA_OBJECT_NAME_LIMIT);
+ }
+
+ @Override
+ protected void verifyTableNameLengthFailurePermissible(Throwable e)
+ {
+ assertThat(e)
+ .hasMessageMatching(format("Table name must be shorter than or equal to '%s' characters but got '%s'", TERADATA_OBJECT_NAME_LIMIT, TERADATA_OBJECT_NAME_LIMIT + 1));
+ }
+
+ @Override // Overriding this test case as Teradata defines varchar with a length.
+ @Test
+ public void testVarcharCastToDateInPredicate()
+ {
+ String tableName = "varchar_as_date_pred";
+ try (TestTable table = newTrinoTable(
+ tableName,
+ "(a varchar(50))",
+ ImmutableList.of(
+ "'999-09-09'",
+ "'1005-09-09'",
+ "'2005-06-06'", "'2005-06-6'", "'2005-6-06'", "'2005-6-6'", "' 2005-06-06'", "'2005-06-06 '", "' +2005-06-06'", "'02005-06-06'",
+ "'2005-09-06'", "'2005-09-6'", "'2005-9-06'", "'2005-9-6'", "' 2005-09-06'", "'2005-09-06 '", "' +2005-09-06'", "'02005-09-06'",
+ "'2005-09-09'", "'2005-09-9'", "'2005-9-09'", "'2005-9-9'", "' 2005-09-09'", "'2005-09-09 '", "' +2005-09-09'", "'02005-09-09'",
+ "'2005-09-10'", "'2005-9-10'", "' 2005-09-10'", "'2005-09-10 '", "' +2005-09-10'", "'02005-09-10'",
+ "'2005-09-20'", "'2005-9-20'", "' 2005-09-20'", "'2005-09-20 '", "' +2005-09-20'", "'02005-09-20'",
+ "'9999-09-09'",
+ "'99999-09-09'"))) {
+ for (String date : ImmutableList.of("2005-09-06", "2005-09-09", "2005-09-10")) {
+ for (String operator : ImmutableList.of("=", "<=", "<", ">", ">=", "!=", "IS DISTINCT FROM", "IS NOT DISTINCT FROM")) {
+ assertThat(query("SELECT a FROM %s WHERE CAST(a AS date) %s DATE '%s'".formatted(table.getName(), operator, date)))
+ .hasCorrectResultsRegardlessOfPushdown();
+ }
+ }
+ }
+ try (TestTable table = newTrinoTable(
+ tableName,
+ "(a varchar(50))",
+ ImmutableList.of("'2005-06-bad-date'", "'2005-09-10'"))) {
+ assertThat(query("SELECT a FROM %s WHERE CAST(a AS date) < DATE '2005-09-10'".formatted(table.getName())))
+ .failure().hasMessage("Value cannot be cast to date: " + "2005-06-bad-date");
+ verifyResultOrFailure(
+ query("SELECT a FROM %s WHERE CAST(a AS date) = DATE '2005-09-10'".formatted(table.getName())),
+ queryAssert -> queryAssert.skippingTypesCheck().matches("VALUES '2005-09-10'"),
+ failureAssert -> failureAssert
+ .hasMessage("Value cannot be cast to date: " + "2005-06-bad-date"));
+ }
+ try (TestTable table = newTrinoTable(
+ tableName,
+ "(a varchar(50))",
+ ImmutableList.of("'2005-09-10'"))) {
+ // 2005-09-01, when written as 2005-09-1, is a prefix of an existing data point: 2005-09-10
+ assertThat(query("SELECT a FROM %s WHERE CAST(a AS date) != DATE '2005-09-01'".formatted(table.getName())))
+ .skippingTypesCheck().matches("VALUES '2005-09-10'");
+ }
+ }
+
+ @Override
+ // Overridden to handle Teradata specific WITH DATA syntax for table creation
+ @Test
+ public void testCreateTableAsSelect()
+ {
+ String tableName = "test_ctas" + randomNameSuffix();
+ assertUpdate(
+ "CREATE TABLE IF NOT EXISTS " + tableName + " AS SELECT name, regionkey FROM nation",
+ "SELECT count(*) FROM nation");
+ assertTableColumnNames(tableName, "name", "regionkey");
+ assertThat(getTableComment(tableName)).isNull();
+ assertUpdate("DROP TABLE " + tableName);
+
+ assertUpdate(
+ "CREATE TABLE IF NOT EXISTS nation AS SELECT nationkey, regionkey FROM nation",
+ 0);
+ assertTableColumnNames("nation", "nationkey", "name", "regionkey", "comment");
+
+ assertCreateTableAsSelect(
+ "SELECT nationkey, name, regionkey FROM nation",
+ "SELECT count(*) FROM nation");
+
+ assertCreateTableAsSelect(
+ "SELECT mktsegment, sum(acctbal) x FROM customer GROUP BY mktsegment",
+ "SELECT count(DISTINCT mktsegment) FROM customer");
+
+ assertCreateTableAsSelect(
+ "SELECT count(*) x FROM nation JOIN region ON nation.regionkey = region.regionkey",
+ "SELECT 1");
+
+ assertCreateTableAsSelect(
+ "SELECT nationkey FROM nation ORDER BY nationkey LIMIT 10",
+ "SELECT 10");
+
+ assertCreateTableAsSelect(
+ "SELECT name, nationkey, regionkey FROM nation WHERE nationkey % 2 = 0 UNION ALL " +
+ "SELECT name, nationkey, regionkey FROM nation WHERE nationkey % 2 = 1",
+ "SELECT name, nationkey, regionkey FROM nation",
+ "SELECT count(*) FROM nation");
+
+ assertCreateTableAsSelect(
+ Session.builder(getSession()).setSystemProperty("redistribute_writes", "true").build(),
+ "SELECT CAST(nationkey AS BIGINT) nationkey, regionkey FROM nation UNION ALL " +
+ "SELECT 1234567890, 123",
+ "SELECT nationkey, regionkey FROM nation UNION ALL " +
+ "SELECT 1234567890, 123",
+ "SELECT count(*) + 1 FROM nation");
+
+ assertCreateTableAsSelect(
+ Session.builder(getSession()).setSystemProperty("redistribute_writes", "false").build(),
+ "SELECT CAST(nationkey AS BIGINT) nationkey, regionkey FROM nation UNION ALL " +
+ "SELECT 1234567890, 123",
+ "SELECT nationkey, regionkey FROM nation UNION ALL " +
+ "SELECT 1234567890, 123",
+ "SELECT count(*) + 1 FROM nation");
+
+ tableName = "test_ctas" + randomNameSuffix();
+ assertThat(query("EXPLAIN ANALYZE CREATE TABLE " + tableName + " AS SELECT name FROM nation")).succeeds();
+ assertThat(query("SELECT * from " + tableName)).matches("SELECT name FROM nation");
+ assertUpdate("DROP TABLE " + tableName);
+ }
+
+ @Override
+ // Overriding this test case as Teradata does not support negative dates.
+ @Test
+ public void testDateYearOfEraPredicate()
+ {
+ assertQuery(
+ "SELECT orderdate FROM orders WHERE orderdate = DATE '1997-09-14'",
+ "VALUES DATE '1997-09-14'");
+ }
+
+ @Override
+ // Override this test case as Teradata has different syntax for creating tables with AS SELECT statement.
+ @Test
+ public void verifySupportsRowLevelUpdateDeclaration()
+ {
+ String testTableName = "test_supports_update";
+ try (TestTable table = newTrinoTable(testTableName, "AS ( SELECT * FROM nation) WITH DATA")) {
+ assertQueryFails(
+ "UPDATE " + table.getName() + " SET nationkey = nationkey * 100 WHERE regionkey = 2",
+ MODIFYING_ROWS_MESSAGE);
+ }
+ }
+
+ @Override
+ // Overriding this test case as Teradata doesn't have support to (k, v) AS VALUES in insert statement
+ @Test
+ public void testCharVarcharComparison()
+ {
+ String testTableName = "test_char_varchar";
+ try (TestTable table = newTrinoTable(
+ testTableName,
+ "(k int, v char(3))",
+ ImmutableList.of(
+ "-1, CAST(NULL AS char(3))",
+ "3, CAST(' ' AS char(3))",
+ "6, CAST('x ' AS char(3))"))) {
+ assertQuery(
+ "SELECT k, v FROM " + table.getName() + " WHERE v = CAST(' ' AS varchar(2))",
+ "VALUES (3, ' ')");
+ assertQuery(
+ "SELECT k, v FROM " + table.getName() + " WHERE v = CAST(' ' AS varchar(4))",
+ "VALUES (3, ' ')");
+ assertQuery(
+ "SELECT k, v FROM " + table.getName() + " WHERE v = CAST('x ' AS varchar(2))",
+ "VALUES (6, 'x ')");
+ }
+ }
+
+ @Override
+ // Overriding this test case as Teradata doesn't have support to (k, v) AS VALUES in insert statement
+ @Test
+ public void testVarcharCharComparison()
+ {
+ try (TestTable table = newTrinoTable(
+ "test_varchar_char",
+ "(k int, v char(3))",
+ ImmutableList.of(
+ "-1, CAST(NULL AS varchar(3))",
+ "0, CAST('' AS varchar(3))",
+ "1, CAST(' ' AS varchar(3))",
+ "2, CAST(' ' AS varchar(3))",
+ "3, CAST(' ' AS varchar(3))",
+ "4, CAST('x' AS varchar(3))",
+ "5, CAST('x ' AS varchar(3))",
+ "6, CAST('x ' AS " + "varchar(3))"))) {
+ assertQuery(
+ "SELECT k, v FROM " + table.getName() + " WHERE v = CAST(' ' AS char(2))",
+ "VALUES (0, ' '), (1, ' '), (2, ' '), (3, ' ')");
+ assertQuery(
+ "SELECT k, v FROM " + table.getName() + " WHERE v = CAST('x ' AS char(2))",
+ "VALUES (4, 'x '), (5, 'x '), (6, 'x ')");
+ }
+ }
+
+ @Override
+ // Overriding to add ResourceLock to run sequential this test along with other tests labeled with TERADATA_SCHEMA to avoid issue Concurrent change conflict on database
+ @Test
+ @ResourceLock(value = "TERADATA_SCHEMA", mode = ResourceAccessMode.READ_WRITE)
+ public void testShowCreateSchema()
+ {
+ super.testShowCreateSchema();
+ }
+
+ @Override
+ // Overriding to add ResourceLock to run sequential this test along with other tests labeled with TERADATA_SCHEMA to avoid issue Concurrent change conflict on database
+ @Test
+ @ResourceLock(value = "TERADATA_SCHEMA", mode = ResourceAccessMode.READ_WRITE)
+ public void testCreateSchema()
+ {
+ super.testCreateSchema();
+ }
+
+ @Override
+ // Overriding to add ResourceLock to run sequential this test along with other tests labeled with TERADATA_SCHEMA to avoid issue Concurrent change conflict on database
+ @Test
+ @ResourceLock(value = "TERADATA_SCHEMA", mode = ResourceAccessMode.READ_WRITE)
+ public void testCreateSchemaWithLongName()
+ {
+ super.testCreateSchemaWithLongName();
+ }
+
+ @Override
+ // Overriding as Teradata.query method allows SELECT statements
+ @Test
+ public void testExecuteProcedureWithInvalidQuery()
+ {
+ assertQuerySucceeds("CALL system.execute('SELECT 1')");
+ assertQueryFails(
+ "CALL system.execute('invalid')",
+ ".*Syntax error: expected something between the beginning of the request and the word 'invalid'.*");
+ }
+
+ @Override
+ // Overriding to add ResourceLock to run sequential this test along with other tests labeled with TERADATA_SCHEMA to avoid issue Concurrent change conflict on database
+ @Test
+ @ResourceLock(value = "TERADATA_SCHEMA", mode = ResourceAccessMode.READ_WRITE)
+ public void testRenameSchemaToLongName()
+ {
+ super.testRenameSchemaToLongName();
+ }
+
+ @Override
+ // Overriding to add ResourceLock to run sequential this test along with other tests labeled with TERADATA_SCHEMA to avoid issue Concurrent change conflict on database
+ @Test
+ @ResourceLock(value = "TERADATA_SCHEMA", mode = ResourceAccessMode.READ_WRITE)
+ public void testRenameTableAcrossSchema()
+ throws Exception
+ {
+ super.testRenameTableAcrossSchema();
+ }
+
+ @Override
+ // Overriding to add ResourceLock to run sequential this test along with other tests labeled with TERADATA_SCHEMA to avoid issue Concurrent change conflict on database
+ @Test
+ @ResourceLock(value = "TERADATA_SCHEMA", mode = ResourceAccessMode.READ_WRITE)
+ public void testRenameTableToUnqualifiedPreservesSchema()
+ throws Exception
+ {
+ super.testRenameTableToUnqualifiedPreservesSchema();
+ }
+
+ @Override
+ // Overriding to tag this test as long_run test case to avoid running in clearscape_tests profile
+ @Test
+ @Tag("long_run")
+ public void testSelectInformationSchemaColumns()
+ {
+ super.testSelectInformationSchemaColumns();
+ }
+
+ @Override
+ // Overriding to tag this test as long_run test case to avoid running in clearscape_tests profile
+ @Test
+ @Tag("long_run")
+ public void testCaseSensitiveDataMapping()
+ {
+ super.testCaseSensitiveDataMapping();
+ }
+
+ @Override
+ // Overriding as Teradata does not support insert operations. Base implementation does not have check insert support before running the test.
+ @Test
+ public void testInsertIntoNotNullColumn()
+ {
+ abort("Skipping as connector does not support insert operations");
+ }
+
+ @Override
+ // Overriding as Teradata does not support insert operations. Base implementation does not have check insert support before running the test.
+ @Test
+ public void testInsertWithoutTemporaryTable()
+ {
+ abort("Skipping as connector does not support insert operations");
+ }
+
+ @Override
+ // Overriding as base test tyring to insert data but this connector not support insert operations.
+ @Test
+ public void testColumnName()
+ {
+ abort("Skipping as connector does not support column level write operations");
+ }
+
+ @Override
+ // Overriding as this connector does not support creating table with UNICODE characters
+ @Test
+ public void testCreateTableAsSelectWithUnicode()
+ {
+ abort("Skipping as connector does not support creating table with UNICODE characters");
+ }
+
+ @Override
+ // Overriding as Teradata does not support insert operations. Base implementation does not have check insert support before running the test.
+ @Test
+ public void testUpdateNotNullColumn()
+ {
+ abort("Skipping as connector does not support insert operations");
+ }
+
+ @Override
+ // Overriding as Teradata does not support insert operations. Base implementation does not have check insert support before running the test.
+ @Test
+ public void testWriteBatchSizeSessionProperty()
+ {
+ abort("Skipping as connector does not support insert operations");
+ }
+
+ @Override
+ // Overriding as Teradata does not support insert operations. Base implementation does not have check insert support before running the test.
+ @Test
+ public void testWriteTaskParallelismSessionProperty()
+ {
+ abort("Skipping as connector does not support insert operations");
+ }
+
+ @Test
+ void testTeradataNumberDataType()
+ {
+ try (TestTable table = newTrinoTable(
+ "test_number",
+ "(id INTEGER, number_col NUMBER(10,2), number_default NUMBER, number_large NUMBER(38,10))",
+ ImmutableList.of(
+ "1, CAST(12345.67 AS NUMBER(10,2)), CAST(999999999999999 AS NUMBER), CAST(1234567890123456789012345678.1234567890 AS NUMBER(38,10))",
+ "2, CAST(-99999.99 AS NUMBER(10,2)), CAST(-123456789012345 AS NUMBER), CAST(-9999999999999999999999999999.9999999999 AS NUMBER(38,10))",
+ "3, CAST(0.00 AS NUMBER(10,2)), CAST" + "(0 AS NUMBER), CAST(0.0000000000 AS NUMBER(38,10))"))) {
+ assertThat(query(format("SELECT number_col FROM %s WHERE id = 1", table.getName())))
+ .matches("VALUES CAST(12345.67 AS DECIMAL(10,2))");
+ assertThat(query(format("SELECT number_default FROM %s WHERE id = 1", table.getName())))
+ .matches("VALUES CAST(999999999999999 AS DECIMAL(38,0))");
+ assertThat(query(format("SELECT number_large FROM %s WHERE id = 1", table.getName())))
+ .matches("VALUES CAST(1234567890123456789012345678.1234567890 AS DECIMAL(38,10))");
+ assertThat(query(format("SELECT number_col FROM %s WHERE id = 2", table.getName())))
+ .matches("VALUES CAST(-99999.99 AS DECIMAL(10,2))");
+ assertThat(query(format("SELECT number_col FROM %s WHERE id = 3", table.getName())))
+ .matches("VALUES CAST(0.00 AS DECIMAL(10,2))");
+ }
+ }
+
+ @Test
+ void testTeradataCharacterDataType()
+ {
+ try (TestTable table = newTrinoTable(
+ "test_character",
+ "(id INTEGER, char_col CHARACTER(5), char_default CHARACTER, char_large CHARACTER(100))",
+ ImmutableList.of(
+ "1, CAST('HELLO' AS CHARACTER(5)), CAST('A' AS CHARACTER), CAST('TERADATA' AS CHARACTER(100))",
+ "2, CAST('WORLD' AS CHARACTER(5)), CAST('B' AS CHARACTER), CAST('CHARACTER' AS CHARACTER(100))",
+ "3, CAST('' AS CHARACTER(5)), CAST('C' AS CHARACTER), CAST('' AS CHARACTER(100))"))) {
+ assertThat(query(format("SELECT char_col FROM %s WHERE id = 1", table.getName())))
+ .matches("VALUES CAST('HELLO' AS CHAR(5))");
+ assertThat(query(format("SELECT char_default FROM %s WHERE id = 1", table.getName())))
+ .matches("VALUES CAST('A' AS CHAR(1))");
+ assertThat(query(format("SELECT char_large FROM %s WHERE id = 1", table.getName())))
+ .matches("VALUES CAST('TERADATA' AS CHAR(100))");
+ assertThat(query(format("SELECT char_col FROM %s WHERE id = 3", table.getName())))
+ .matches("VALUES CAST('' AS CHAR(5))");
+ }
+ }
+
+ @Override
+ // Overridden to exclude data types that Teradata doesn't support or handles differently
+ protected Optional filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup)
+ {
+ String typeName = dataMappingTestSetup.getTrinoTypeName();
+ return switch (typeName) {
+ // skipping date as during julian->gregorian date is handled differently in Teradata.
+ // tinyint, double and varchar with unbounded (need to handle special characters) are skipped and will handle it while improving write functionalities.
+ case "boolean",
+ "tinyint",
+ "date",
+ "real",
+ "double",
+ "varchar",
+ "time",
+ "time(6)",
+ "timestamp",
+ "timestamp(6)",
+ "varbinary",
+ "timestamp(3) with time zone",
+ "timestamp(6) with time zone",
+ "U&'a \\000a newline'" -> Optional.empty();
+ default -> Optional.of(dataMappingTestSetup);
+ };
+ }
+
+ @Override
+ // Overridden to use Teradata WITH DATA syntax for CREATE TABLE AS SELECT statements
+ protected void assertCreateTableAsSelect(Session session, String query, String expectedQuery, String rowCountQuery)
+ {
+ String table = "test_ctas_" + TestingNames.randomNameSuffix();
+ assertUpdate(session, "CREATE TABLE " + table + " AS ( " + query + ") WITH DATA", rowCountQuery);
+ assertQuery(session, "SELECT * FROM " + table, expectedQuery);
+ assertUpdate(session, "DROP TABLE " + table);
+ assertThat(getQueryRunner().tableExists(session, table)).isFalse();
+ }
+
+ @Override
+ // Overridden to handle Teradata schema.table naming format and table creation syntax
+ protected TestTable newTrinoTable(String namePrefix, @Language("SQL") String tableDefinition, List rowsToInsert)
+ {
+ String tableName;
+ if (namePrefix.contains(".")) {
+ tableName = namePrefix;
+ }
+ else {
+ String schemaName = getSession().getSchema().orElseThrow();
+ tableName = schemaName + "." + namePrefix;
+ }
+ return new TestTable(database, tableName, tableDefinition, rowsToInsert);
+ }
+
+ private static void verifyResultOrFailure(AssertProvider queryAssertProvider, Consumer verifyResults,
+ Consumer verifyFailure)
+ {
+ requireNonNull(verifyResults, "verifyResults is null");
+ requireNonNull(verifyFailure, "verifyFailure is null");
+ QueryAssertions.QueryAssert queryAssert = assertThat(queryAssertProvider);
+ verifyResults.accept(queryAssert);
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataTypeMapping.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataTypeMapping.java
new file mode 100644
index 000000000000..8dc919c48759
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataTypeMapping.java
@@ -0,0 +1,281 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+import io.trino.testing.AbstractTestQueryFramework;
+import io.trino.testing.QueryRunner;
+import io.trino.testing.datatype.CreateAndInsertDataSetup;
+import io.trino.testing.datatype.DataSetup;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Test;
+
+import java.sql.SQLException;
+
+import static io.trino.plugin.teradata.integration.clearscape.ClearScapeEnvironmentUtils.generateUniqueEnvName;
+import static io.trino.spi.type.BigintType.BIGINT;
+import static io.trino.spi.type.CharType.createCharType;
+import static io.trino.spi.type.DateType.DATE;
+import static io.trino.spi.type.DecimalType.createDecimalType;
+import static io.trino.spi.type.DoubleType.DOUBLE;
+import static io.trino.spi.type.IntegerType.INTEGER;
+import static io.trino.spi.type.SmallintType.SMALLINT;
+import static io.trino.spi.type.TinyintType.TINYINT;
+import static io.trino.spi.type.VarcharType.createVarcharType;
+import static io.trino.testing.datatype.SqlDataTypeTest.create;
+import static java.lang.String.format;
+import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy;
+
+/**
+ * Type-mapping tests between Teradata column types and Trino types.
+ * Values are written through the Teradata JDBC connection (CreateAndInsert)
+ * and read back through Trino to verify the round trip.
+ */
+final class TestTeradataTypeMapping
+        extends AbstractTestQueryFramework
+{
+    private TestingTeradataServer database;
+
+    @Override
+    protected QueryRunner createQueryRunner()
+            throws Exception
+    {
+        // Provision (or reuse) a ClearScape-backed Teradata server scoped to this class.
+        database = closeAfterClass(new TestingTeradataServer(generateUniqueEnvName(getClass()), true));
+        return TeradataQueryRunner.builder(database).build();
+    }
+
+    @AfterAll
+    void cleanupTestClass()
+    {
+        // The server itself is closed by closeAfterClass(); just drop the reference.
+        database = null;
+    }
+
+    @Test
+    void testByteint()
+    {
+        // Teradata BYTEINT is a signed 8-bit integer -> Trino TINYINT.
+        create()
+                .addRoundTrip("byteint", "0", TINYINT, "CAST(0 AS TINYINT)")
+                .addRoundTrip("byteint", "127", TINYINT, "CAST(127 AS TINYINT)")
+                .addRoundTrip("byteint", "-128", TINYINT, "CAST(-128 AS TINYINT)")
+                .addRoundTrip("byteint", "null", TINYINT, "CAST(null AS TINYINT)")
+                .execute(getQueryRunner(), testInsertIntoNotNullColumn("byteint"));
+    }
+
+    @Test
+    void testSmallint()
+    {
+        // Boundary values of a signed 16-bit integer plus NULL.
+        create()
+                .addRoundTrip("smallint", "0", SMALLINT, "CAST(0 AS SMALLINT)")
+                .addRoundTrip("smallint", "32767", SMALLINT, "CAST(32767 AS SMALLINT)")
+                .addRoundTrip("smallint", "-32768", SMALLINT, "CAST(-32768 AS SMALLINT)")
+                .addRoundTrip("smallint", "null", SMALLINT, "CAST(null AS SMALLINT)")
+                .execute(getQueryRunner(), testInsertIntoNotNullColumn("smallint"));
+    }
+
+    @Test
+    void testInteger()
+    {
+        // Boundary values of a signed 32-bit integer plus NULL.
+        create()
+                .addRoundTrip("integer", "0", INTEGER, "0")
+                .addRoundTrip("integer", "2147483647", INTEGER, "2147483647")
+                .addRoundTrip("integer", "-2147483648", INTEGER, "-2147483648")
+                .addRoundTrip("integer", "NULL", INTEGER, "CAST(NULL AS INTEGER)")
+                .execute(getQueryRunner(), testInsertIntoNotNullColumn("integer"));
+    }
+
+    @Test
+    void testBigint()
+    {
+        // Boundary values of a signed 64-bit integer plus NULL.
+        create()
+                .addRoundTrip("bigint", "0", BIGINT, "CAST(0 AS BIGINT)")
+                .addRoundTrip("bigint", "9223372036854775807", BIGINT, "9223372036854775807")
+                .addRoundTrip("bigint", "-9223372036854775808", BIGINT, "-9223372036854775808")
+                .addRoundTrip("bigint", "NULL", BIGINT, "CAST(NULL AS BIGINT)")
+                .execute(getQueryRunner(), testInsertIntoNotNullColumn("bigint"));
+    }
+
+    @Test
+    void testFloat()
+    {
+        // FLOAT, REAL and DOUBLE PRECISION all map to Trino DOUBLE here;
+        // 1.797e308 / 2.226e-308 sit near the IEEE-754 double max/min-normal.
+        create()
+                .addRoundTrip("float", "0", DOUBLE, "CAST(0 AS DOUBLE)")
+                .addRoundTrip("real", "0", DOUBLE, "CAST(0 AS DOUBLE)")
+                .addRoundTrip("double precision", "0", DOUBLE, "CAST(0 AS DOUBLE)")
+                .addRoundTrip("float", "1.797e308", DOUBLE, "1.797e308")
+                .addRoundTrip("real", "1.797e308", DOUBLE, "1.797e308")
+                .addRoundTrip("double precision", "1.797e308", DOUBLE, "1.797e308")
+                .addRoundTrip("float", "2.226e-308", DOUBLE, "2.226e-308")
+                .addRoundTrip("real", "2.226e-308", DOUBLE, "2.226e-308")
+                .addRoundTrip("double precision", "2.226e-308", DOUBLE, "2.226e-308")
+                .addRoundTrip("float", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)")
+                .addRoundTrip("real", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)")
+                .addRoundTrip("double precision", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)")
+                .execute(getQueryRunner(), testInsertIntoNotNullColumn("float"));
+    }
+
+    @Test
+    void testDecimal()
+    {
+        // DECIMAL and NUMERIC are synonyms; exercise sign, scale, and the
+        // maximum precision of 38 digits.
+        create()
+                .addRoundTrip("decimal(3, 0)", "0", createDecimalType(3, 0), "CAST('0' AS decimal(3, 0))")
+                .addRoundTrip("numeric(3, 0)", "0", createDecimalType(3, 0), "CAST('0' AS decimal(3, 0))")
+                .addRoundTrip("decimal(3, 1)", "0.0", createDecimalType(3, 1), "CAST('0.0' AS decimal(3, 1))")
+                .addRoundTrip("numeric(3, 1)", "0.0", createDecimalType(3, 1), "CAST('0.0' AS decimal(3, 1))")
+                .addRoundTrip("decimal(1, 0)", "1", createDecimalType(1, 0), "CAST('1' AS decimal(1, 0))")
+                .addRoundTrip("numeric(1, 0)", "1", createDecimalType(1, 0), "CAST('1' AS decimal(1, 0))")
+                .addRoundTrip("decimal(1, 0)", "-1", createDecimalType(1, 0), "CAST('-1' AS decimal(1, 0))")
+                .addRoundTrip("numeric(1, 0)", "-1", createDecimalType(1, 0), "CAST('-1' AS decimal(1, 0))")
+                .addRoundTrip("decimal(3, 0)", "1", createDecimalType(3, 0), "CAST('1' AS decimal(3, 0))")
+                .addRoundTrip("numeric(3, 0)", "1", createDecimalType(3, 0), "CAST('1' AS decimal(3, 0))")
+                .addRoundTrip("decimal(3, 0)", "-1", createDecimalType(3, 0), "CAST('-1' AS decimal(3, 0))")
+                .addRoundTrip("numeric(3, 0)", "-1", createDecimalType(3, 0), "CAST('-1' AS decimal(3, 0))")
+                .addRoundTrip("decimal(3, 0)", "123", createDecimalType(3, 0), "CAST('123' AS decimal(3, 0))")
+                .addRoundTrip("numeric(3, 0)", "123", createDecimalType(3, 0), "CAST('123' AS decimal(3, 0))")
+                .addRoundTrip("decimal(3, 0)", "-123", createDecimalType(3, 0), "CAST('-123' AS decimal(3, 0))")
+                .addRoundTrip("numeric(3, 0)", "-123", createDecimalType(3, 0), "CAST('-123' AS decimal(3, 0))")
+                .addRoundTrip("decimal(3, 1)", "10.0", createDecimalType(3, 1), "CAST('10.0' AS decimal(3, 1))")
+                .addRoundTrip("numeric(3, 1)", "10.0", createDecimalType(3, 1), "CAST('10.0' AS decimal(3, 1))")
+                .addRoundTrip("decimal(3, 1)", "12.3", createDecimalType(3, 1), "CAST('12.3' AS decimal(3, 1))")
+                .addRoundTrip("numeric(3, 1)", "12.3", createDecimalType(3, 1), "CAST('12.3' AS decimal(3, 1))")
+                .addRoundTrip("decimal(3, 1)", "-12.3", createDecimalType(3, 1), "CAST('-12.3' AS decimal(3, 1))")
+                .addRoundTrip("numeric(3, 1)", "-12.3", createDecimalType(3, 1), "CAST('-12.3' AS decimal(3, 1))")
+                .addRoundTrip("decimal(38, 0)", "12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('12345678901234567890123456789012345678' AS decimal(38, 0))")
+                .addRoundTrip("numeric(38, 0)", "12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('12345678901234567890123456789012345678' AS decimal(38, 0))")
+                .addRoundTrip("decimal(38, 0)", "-12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('-12345678901234567890123456789012345678' AS decimal(38, 0))")
+                .addRoundTrip("numeric(38, 0)", "-12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('-12345678901234567890123456789012345678' AS decimal(38, 0))")
+                .addRoundTrip("decimal(1, 0)", "null", createDecimalType(1, 0), "CAST(null AS decimal(1, 0))")
+                .execute(getQueryRunner(), testInsertIntoNotNullColumn("decimal"));
+    }
+
+    @Test
+    void testNumber()
+    {
+        // NUMBER/NUMERIC with omitted scale defaults to scale 0.
+        create()
+                .addRoundTrip("numeric(3)", "0", createDecimalType(3, 0), "CAST('0' AS decimal(3, 0))")
+                .addRoundTrip("number(5,2)", "0", createDecimalType(5, 2), "CAST('0' AS decimal(5, 2))")
+                .addRoundTrip("number(38)", "0", createDecimalType(38, 0), "CAST('0' AS decimal(38, 0))")
+                .addRoundTrip("number(38,2)", "123456789012345678901234567890123456.78", createDecimalType(38, 2), "CAST('123456789012345678901234567890123456.78' AS decimal(38, 2))")
+                .addRoundTrip("numeric(38)", "12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('12345678901234567890123456789012345678' AS decimal(38, 0))")
+                .addRoundTrip("numeric(3)", "null", createDecimalType(3, 0), "CAST(null AS decimal(3, 0))")
+                .execute(getQueryRunner(), testInsertIntoNotNullColumn("number"));
+    }
+
+    @Test
+    void testChar()
+    {
+        // CHAR is space-padded; exercise leading/trailing/embedded blanks.
+        create()
+                .addRoundTrip("char(3)", "''", createCharType(3), "CAST('' AS char(3))")
+                .addRoundTrip("char(3)", "' '", createCharType(3), "CAST(' ' AS char(3))")
+                .addRoundTrip("char(3)", "' '", createCharType(3), "CAST(' ' AS char(3))")
+                .addRoundTrip("char(3)", "' '", createCharType(3), "CAST(' ' AS char(3))")
+                .addRoundTrip("char(3)", "'A'", createCharType(3), "CAST('A' AS char(3))")
+                .addRoundTrip("char(3)", "'A '", createCharType(3), "CAST('A ' AS char(3))")
+                .addRoundTrip("char(3)", "' B '", createCharType(3), "CAST(' B ' AS char(3))")
+                .addRoundTrip("char(3)", "' C'", createCharType(3), "CAST(' C' AS char(3))")
+                .addRoundTrip("char(3)", "'AB'", createCharType(3), "CAST('AB' AS char(3))")
+                .addRoundTrip("char(3)", "'ABC'", createCharType(3), "CAST('ABC' AS char(3))")
+                .addRoundTrip("char(3)", "'A C'", createCharType(3), "CAST('A C' AS char(3))")
+                .addRoundTrip("char(3)", "' BC'", createCharType(3), "CAST(' BC' AS char(3))")
+                .addRoundTrip("char(3)", "null", createCharType(3), "CAST(null AS char(3))")
+                .execute(getQueryRunner(), testInsertIntoNotNullColumn("char"));
+        // Over-length inserts behave differently per session transaction mode:
+        // TERA mode silently truncates, ANSI mode raises an error.
+        String tmode = database.getTMode();
+        if (tmode.equals("TERA")) {
+            // truncation
+            create()
+                    .addRoundTrip("char(3)", "'ABCD'", createCharType(3), "CAST('ABCD' AS char(3))")
+                    .execute(getQueryRunner(), testInsertIntoNotNullColumn("chart"));
+        }
+        else {
+            // Error on truncation
+            assertThatThrownBy(() ->
+                    create()
+                            .addRoundTrip("char(3)", "'ABCD'", createCharType(3), "CAST('ABCD' AS char(3))")
+                            .execute(getQueryRunner(), testInsertIntoNotNullColumn("chart")))
+                    .isInstanceOf(RuntimeException.class)
+                    .hasCauseInstanceOf(SQLException.class)
+                    .cause()
+                    .hasMessageContaining("Right truncation of string data");
+        }
+        // max-size
+        create()
+                .addRoundTrip("char(64000)", "'max'", createCharType(64000), "CAST('max' AS char(64000))")
+                .execute(getQueryRunner(), testInsertIntoNotNullColumn("charl"));
+    }
+
+    @Test
+    void testVarchar()
+    {
+        // VARCHAR preserves trailing blanks as stored; same blank-handling
+        // matrix as testChar.
+        create()
+                .addRoundTrip("varchar(32)", "''", createVarcharType(32), "CAST('' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "' '", createVarcharType(32), "CAST(' ' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "' '", createVarcharType(32), "CAST(' ' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "' '", createVarcharType(32), "CAST(' ' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "' '", createVarcharType(32), "CAST(' ' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "'A'", createVarcharType(32), "CAST('A' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "'A '", createVarcharType(32), "CAST('A ' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "' B '", createVarcharType(32), "CAST(' B ' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "' C'", createVarcharType(32), "CAST(' C' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "'AB'", createVarcharType(32), "CAST('AB' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "'ABC'", createVarcharType(32), "CAST('ABC' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "'A C'", createVarcharType(32), "CAST('A C' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "' BC'", createVarcharType(32), "CAST(' BC' AS varchar(32))")
+                .addRoundTrip("varchar(32)", "null", createVarcharType(32), "CAST(null AS varchar(32))")
+                .execute(getQueryRunner(), testInsertIntoNotNullColumn("varchar"));
+        // TERA mode silently truncates over-length values; ANSI mode errors.
+        String teraMode = database.getTMode();
+        if (teraMode.equals("TERA")) {
+            // truncation
+            create()
+                    .addRoundTrip("varchar(3)", "'ABCD'", createVarcharType(3), "CAST('ABCD' AS varchar(3))")
+                    .execute(getQueryRunner(), testInsertIntoNotNullColumn("varchart"));
+        }
+        else {
+            // Error on truncation
+            assertThatThrownBy(() ->
+                    create()
+                            .addRoundTrip("varchar(3)", "'ABCD'", createVarcharType(3), "CAST('ABCD' AS varchar(3))")
+                            .execute(getQueryRunner(), testInsertIntoNotNullColumn("varchart")))
+                    .isInstanceOf(RuntimeException.class)
+                    .hasCauseInstanceOf(SQLException.class)
+                    .cause()
+                    .hasMessageContaining("Right truncation of string data");
+        }
+        // max-size: LONG VARCHAR is mapped to varchar(64000)
+        create()
+                .addRoundTrip("long varchar", "'max'", createVarcharType(64000), "CAST('max' AS varchar(64000))")
+                .execute(getQueryRunner(), testInsertIntoNotNullColumn("varcharl"));
+    }
+
+    @Test
+    void testDate()
+    {
+        // Covers the Julian->Gregorian switch (1582-10-04 / 1582-10-15),
+        // epoch, leap day, and min/max-ish boundary dates.
+        create()
+                .addRoundTrip("date", "DATE '0001-01-01'", DATE, "DATE '0001-01-01'")
+                .addRoundTrip("date", "DATE '0012-12-12'", DATE, "DATE '0012-12-12'")
+                .addRoundTrip("date", "DATE '1500-01-01'", DATE, "DATE '1500-01-01'")
+                .addRoundTrip("date", "DATE '1582-10-04'", DATE, "DATE '1582-10-04'")
+                .addRoundTrip("date", "DATE '1582-10-15'", DATE, "DATE '1582-10-15'")
+                .addRoundTrip("date", "DATE '1952-04-03'", DATE, "DATE '1952-04-03'")
+                .addRoundTrip("date", "DATE '1970-01-01'", DATE, "DATE '1970-01-01'")
+                .addRoundTrip("date", "DATE '1970-02-03'", DATE, "DATE '1970-02-03'")
+                .addRoundTrip("date", "DATE '1970-01-01'", DATE, "DATE '1970-01-01'")
+                .addRoundTrip("date", "DATE '1983-04-01'", DATE, "DATE '1983-04-01'")
+                .addRoundTrip("date", "DATE '1983-10-01'", DATE, "DATE '1983-10-01'")
+                .addRoundTrip("date", "DATE '2017-07-01'", DATE, "DATE '2017-07-01'")
+                .addRoundTrip("date", "DATE '2017-01-01'", DATE, "DATE '2017-01-01'")
+                .addRoundTrip("date", "DATE '2024-02-29'", DATE, "DATE '2024-02-29'")
+                .addRoundTrip("date", "DATE '9999-12-30'", DATE, "DATE '9999-12-30'")
+                .addRoundTrip("date", "NULL", DATE, "CAST(NULL AS DATE)")
+                .execute(getQueryRunner(), testInsertIntoNotNullColumn("date"));
+    }
+
+    // Returns a data setup that creates "<database>.<prefix>_<suffix>" tables
+    // directly through the Teradata JDBC connection and inserts via JDBC.
+    private DataSetup testInsertIntoNotNullColumn(String tableNamePrefix)
+    {
+        String prefix = format("%s.%s", database.getDatabaseName(), tableNamePrefix);
+        return new CreateAndInsertDataSetup(database, prefix);
+    }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestingTeradataServer.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestingTeradataServer.java
new file mode 100644
index 000000000000..a4b859b4c499
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestingTeradataServer.java
@@ -0,0 +1,403 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+import io.trino.plugin.teradata.integration.clearscape.ClearScapeSetup;
+import io.trino.plugin.teradata.integration.clearscape.EnvironmentResponse;
+import io.trino.plugin.teradata.integration.clearscape.Model;
+import io.trino.testing.sql.SqlExecutor;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Random;
+import java.util.stream.Collectors;
+
+import static io.trino.testing.SystemEnvironmentUtils.isEnvSet;
+import static io.trino.testing.SystemEnvironmentUtils.requireEnv;
+import static java.lang.String.format;
+import static java.util.Objects.requireNonNull;
+
+/**
+ * Test harness around a Teradata server (optionally a ClearScape-provisioned
+ * environment). Owns a single JDBC connection, creates/drops the test
+ * database, and retries operations that fail with transient Teradata errors.
+ *
+ * Changes from the original: raw {@code Map} types restored to
+ * {@code Map<String, String>} (generics were lost in transit — the stream
+ * code in buildPropertiesString does not compile against a raw Map),
+ * no-op try/catch around {@code getErrorCode()} removed (it never throws),
+ * and the duplicated probe/reconnect logic of tableExists/schemaExists
+ * extracted into shared helpers.
+ */
+public final class TestingTeradataServer
+        implements AutoCloseable, SqlExecutor
+{
+    private static final int MAX_RETRIES = 5;
+    private static final long BASE_RETRY_DELAY_MS = 1500L;
+    private static final long MAX_RETRY_DELAY_MS = 10_000L;
+    private static final Random RANDOM = new Random();
+    // 3598: transient concurrent-DDL conflict — safe to retry after backoff.
+    private static final int TERADATA_TRANSIENT_CONCURRENCY_ERROR_CODE = 3598;
+    private static final int TERADATA_CLOSED_CONNECTION_ERROR_CODE = 1095;
+    private static final int TERADATA_SOCKET_COMMUNICATION_FAILURE_ERROR_CODE = 804;
+
+    private volatile Connection connection;
+    private DatabaseConfig config;
+    private ClearScapeSetup clearScapeSetup;
+
+    public TestingTeradataServer(String envName, boolean destroyEnv)
+    {
+        requireNonNull(envName, "envName should not be null");
+        config = DatabaseConfigFactory.create(envName);
+        String hostName = config.getHostName();
+
+        // Initialize ClearScape instance and get hostname from ClearScape API when used
+        if (config.isUseClearScape()) {
+            // CLEARSCAPE_DESTROY_ENV (when set) overrides the constructor argument.
+            if (isEnvSet("CLEARSCAPE_DESTROY_ENV")) {
+                destroyEnv = Boolean.parseBoolean(requireEnv("CLEARSCAPE_DESTROY_ENV"));
+            }
+            clearScapeSetup = new ClearScapeSetup(
+                    requireEnv("CLEARSCAPE_TOKEN"),
+                    requireEnv("CLEARSCAPE_PASSWORD"),
+                    config.getClearScapeEnvName(),
+                    destroyEnv,
+                    requireEnv("CLEARSCAPE_REGION"));
+            Model model = clearScapeSetup.initialize();
+            hostName = model.getHostName();
+        }
+        String jdbcUrl = buildJdbcUrl(hostName);
+        config = config.toBuilder()
+                .hostName(hostName)
+                .jdbcUrl(jdbcUrl)
+                .build();
+        // Create the connection with retries to handle transient ClearScape socket or connection closure issues.
+        connection = createConnectionWithRetries();
+        createTestDatabaseIfAbsent();
+    }
+
+    /**
+     * Returns the Trino catalog properties needed to point the connector at
+     * this server (connection URL and credentials).
+     */
+    public Map<String, String> fetchCatalogProperties()
+    {
+        Map<String, String> properties = new HashMap<>();
+        properties.put("connection-url", config.getJdbcUrl());
+
+        AuthenticationConfig auth = config.getAuthConfig();
+        properties.put("connection-user", auth.userName());
+        properties.put("connection-password", auth.password());
+
+        return properties;
+    }
+
+    public void createTestDatabaseIfAbsent()
+    {
+        executeWithRetry(() -> {
+            if (!schemaExists(config.getDatabaseName())) {
+                // PERM=100e6 allocates ~100MB of permanent space for test tables.
+                execute(format("CREATE DATABASE \"%s\" AS PERM=100e6;", config.getDatabaseName()));
+            }
+        });
+    }
+
+    public void dropTestDatabaseIfExists()
+    {
+        executeWithRetry(() -> {
+            if (schemaExists(config.getDatabaseName())) {
+                // Teradata requires DELETE DATABASE (drop all objects) before DROP DATABASE.
+                execute(format("DELETE DATABASE \"%s\"", config.getDatabaseName()));
+                execute(format("DROP DATABASE \"%s\"", config.getDatabaseName()));
+            }
+        });
+    }
+
+    /**
+     * Returns whether the table exists in the test database, reconnecting
+     * once if the connection was dropped.
+     */
+    public boolean tableExists(String tableName)
+    {
+        return countProbe(
+                "SELECT count(1) FROM DBC.TablesV WHERE DataBaseName = ? AND TableName = ?",
+                "Failed to check table existence",
+                config.getDatabaseName(),
+                tableName);
+    }
+
+    @Override
+    public void execute(String sql)
+    {
+        executeWithRetry(() -> doExecute(sql));
+    }
+
+    public String getDatabaseName()
+    {
+        return config.getDatabaseName();
+    }
+
+    public String getTMode()
+    {
+        return config.getTMode();
+    }
+
+    @Override
+    public void close()
+    {
+        try {
+            if (config.isUseClearScape()) {
+                // Only attempt cleanup SQL when the environment is still running.
+                EnvironmentResponse.State state = clearScapeSetup.status();
+                if (state == EnvironmentResponse.State.RUNNING) {
+                    dropTestDatabaseIfExists();
+                }
+            }
+            else {
+                dropTestDatabaseIfExists();
+            }
+        }
+        finally {
+            try {
+                if (connection != null && !connection.isClosed()) {
+                    connection.close();
+                }
+            }
+            catch (SQLException ignored) {
+                // Best-effort close; the environment is being torn down anyway.
+            }
+            connection = null;
+            if (clearScapeSetup != null) {
+                try {
+                    clearScapeSetup.cleanup();
+                }
+                catch (Exception ignored) {
+                    // Best-effort ClearScape teardown; failures must not mask test results.
+                }
+            }
+        }
+    }
+
+    @Override
+    public boolean supportsMultiRowInsert()
+    {
+        return false;
+    }
+
+    private String buildJdbcUrl(String hostName)
+    {
+        String baseUrl = format("jdbc:teradata://%s/", hostName);
+        String propertiesString = buildPropertiesString();
+        return propertiesString.isEmpty() ? baseUrl : baseUrl + propertiesString;
+    }
+
+    // Renders configured JDBC properties as the comma-separated
+    // "key=value,key=value" string the Teradata JDBC URL expects.
+    private String buildPropertiesString()
+    {
+        Map<String, String> properties = config.getJdbcProperties();
+        if (properties == null || properties.isEmpty()) {
+            return "";
+        }
+        return properties.entrySet()
+                .stream()
+                .map(entry -> entry.getKey() + "=" + entry.getValue())
+                .collect(Collectors.joining(","));
+    }
+
+    private void doExecute(String sql)
+    {
+        ensureConnection();
+        try (Statement stmt = connection.createStatement()) {
+            // Set the default database for the session so unqualified names resolve.
+            if (config.getDatabaseName() != null && schemaExists(config.getDatabaseName())) {
+                stmt.execute(format("DATABASE \"%s\"", config.getDatabaseName()));
+            }
+            stmt.execute(sql);
+        }
+        catch (SQLException e) {
+            throw new RuntimeException("SQL execution failed: " + sql, e);
+        }
+    }
+
+    private boolean schemaExists(String schemaName)
+    {
+        return countProbe(
+                "SELECT COUNT(1) FROM DBC.DatabasesV WHERE DatabaseName = ?",
+                "Failed to check schema existence",
+                schemaName);
+    }
+
+    // Runs a parameterized COUNT(*) probe, reconnecting once on a dropped
+    // connection. Shared by tableExists() and schemaExists().
+    private boolean countProbe(String query, String errorMessage, String... parameters)
+    {
+        ensureConnection();
+        try {
+            return queryCountPositive(query, parameters);
+        }
+        catch (SQLException e) {
+            if (isConnectionException(e)) {
+                connection = createConnectionWithRetries();
+                try {
+                    return queryCountPositive(query, parameters);
+                }
+                catch (SQLException ex) {
+                    throw new RuntimeException(errorMessage + ": " + ex.getMessage(), ex);
+                }
+            }
+            throw new RuntimeException(errorMessage + ": " + e.getMessage(), e);
+        }
+    }
+
+    // Executes the count query with the given string parameters and returns
+    // whether the first column of the first row is positive.
+    private boolean queryCountPositive(String query, String... parameters)
+            throws SQLException
+    {
+        try (PreparedStatement stmt = connection.prepareStatement(query)) {
+            for (int i = 0; i < parameters.length; i++) {
+                stmt.setString(i + 1, parameters[i]);
+            }
+            try (ResultSet rs = stmt.executeQuery()) {
+                return rs.next() && rs.getInt(1) > 0;
+            }
+        }
+    }
+
+    private synchronized void ensureConnection()
+    {
+        try {
+            if (connection == null || connection.isClosed()) {
+                connection = createConnectionWithRetries();
+            }
+        }
+        catch (SQLException e) {
+            // isClosed() failed — assume the connection is unusable and rebuild it.
+            connection = createConnectionWithRetries();
+        }
+    }
+
+    // Runs the operation, retrying up to MAX_RETRIES times on dropped
+    // connections (after reconnecting) and on Teradata error 3598.
+    private void executeWithRetry(Runnable operation)
+    {
+        int attempt = 0;
+
+        while (true) {
+            try {
+                operation.run();
+                return;
+            }
+            catch (RuntimeException e) {
+                attempt++;
+                Throwable cause = e.getCause();
+
+                // Connection-related: recreate connection and retry
+                if (cause instanceof SQLException sqlEx && isConnectionException(sqlEx) && attempt < MAX_RETRIES) {
+                    connection = createConnectionWithRetries();
+                    sleepUnchecked(computeBackoffDelay(attempt));
+                    continue;
+                }
+
+                // Teradata transient concurrency error 3598: backoff & retry
+                if (isTeradataError3598(e) && attempt < MAX_RETRIES) {
+                    sleepUnchecked(computeBackoffDelay(attempt));
+                    continue;
+                }
+                throw e;
+            }
+        }
+    }
+
+    private Connection createConnectionWithRetries()
+    {
+        int attempt = 0;
+        while (true) {
+            try {
+                return createConnection();
+            }
+            catch (RuntimeException e) {
+                attempt++;
+                if (attempt >= MAX_RETRIES) {
+                    throw new RuntimeException("Failed to create database connection after retries", e);
+                }
+                sleepUnchecked(computeBackoffDelay(attempt));
+            }
+        }
+    }
+
+    private Connection createConnection()
+    {
+        try {
+            Class.forName("com.teradata.jdbc.TeraDriver");
+            Properties props = buildConnectionProperties(config.getAuthConfig());
+            return DriverManager.getConnection(config.getJdbcUrl(), props);
+        }
+        catch (SQLException | ClassNotFoundException e) {
+            throw new RuntimeException("Failed to create database connection", e);
+        }
+    }
+
+    // Returns true when the first SQLException in the cause chain carries
+    // Teradata's transient concurrency error code 3598.
+    private boolean isTeradataError3598(Throwable t)
+    {
+        if (t == null) {
+            return false;
+        }
+        Throwable root = t;
+        while (root.getCause() != null && !(root instanceof SQLException)) {
+            root = root.getCause();
+        }
+        return root instanceof SQLException sqlEx
+                && sqlEx.getErrorCode() == TERADATA_TRANSIENT_CONCURRENCY_ERROR_CODE;
+    }
+
+    // Heuristic: the exception is "connection-related" when it carries a known
+    // connection error code, or when the shared connection is gone/closed.
+    private boolean isConnectionException(SQLException e)
+    {
+        if (e == null) {
+            return false;
+        }
+        int code = e.getErrorCode();
+        if (code == TERADATA_CLOSED_CONNECTION_ERROR_CODE || code == TERADATA_SOCKET_COMMUNICATION_FAILURE_ERROR_CODE) {
+            return true;
+        }
+
+        try {
+            return connection == null || connection.isClosed();
+        }
+        catch (SQLException ignored) {
+            // isClosed() itself failed; preserve original behavior of treating
+            // the error as non-connection-related.
+        }
+
+        return false;
+    }
+
+    private static Properties buildConnectionProperties(AuthenticationConfig auth)
+    {
+        Properties props = new Properties();
+        props.setProperty("logmech", "TD2");
+        props.setProperty("username", auth.userName());
+        props.setProperty("password", auth.password());
+        return props;
+    }
+
+    // Exponential backoff with jitter, clamped to [BASE_RETRY_DELAY_MS, MAX_RETRY_DELAY_MS].
+    private static long computeBackoffDelay(int attempt)
+    {
+        long base = BASE_RETRY_DELAY_MS * (1L << Math.max(0, attempt - 1));
+        long jitter = (long) (RANDOM.nextDouble() * BASE_RETRY_DELAY_MS);
+        long delay = Math.min(base + jitter, MAX_RETRY_DELAY_MS);
+        return Math.max(delay, BASE_RETRY_DELAY_MS);
+    }
+
+    private static void sleepUnchecked(long millis)
+    {
+        try {
+            Thread.sleep(millis);
+        }
+        catch (InterruptedException ie) {
+            // Restore the interrupt flag before surfacing the failure.
+            Thread.currentThread().interrupt();
+            throw new RuntimeException("Interrupted during retry wait", ie);
+        }
+    }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeEnvironmentUtils.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeEnvironmentUtils.java
new file mode 100644
index 000000000000..b6174bd31309
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeEnvironmentUtils.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import java.util.concurrent.ThreadLocalRandom;
+
+import static java.util.Locale.ENGLISH;
+
+public final class ClearScapeEnvironmentUtils
+{
+    // ClearScape environment names are length-limited; generated names fit
+    // within this bound (prefix + '-' + suffix).
+    private static final int MAX_ENV_NAME_LENGTH = 20;
+    private static final int SUFFIX_LENGTH = 6;
+
+    private ClearScapeEnvironmentUtils() {}
+
+    /**
+     * Generates a quasi-unique, length-bounded environment name: the test
+     * class's lower-cased simple name plus a random base-36 suffix.
+     * (Parameter restored to {@code Class<?>} — the wildcard was garbled to
+     * {@code Class>} in transit, which does not compile.)
+     */
+    public static String generateUniqueEnvName(Class<?> testClass)
+    {
+        String prefix = testClass.getSimpleName().toLowerCase(ENGLISH);
+        String suffix = Long.toString(ThreadLocalRandom.current().nextLong(Long.MAX_VALUE), 36);
+        if (suffix.length() > SUFFIX_LENGTH) {
+            suffix = suffix.substring(0, SUFFIX_LENGTH);
+        }
+        // Reserve room for the '-' separator and the suffix.
+        int prefixLength = MAX_ENV_NAME_LENGTH - SUFFIX_LENGTH - 1;
+        if (prefix.length() > prefixLength) {
+            prefix = prefix.substring(0, prefixLength);
+        }
+        return prefix + "-" + suffix;
+    }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeManager.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeManager.java
new file mode 100644
index 000000000000..34f8d6cf407c
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeManager.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import io.airlift.log.Logger;
+import io.trino.plugin.teradata.integration.TeradataTestConstants;
+
+import java.net.URISyntaxException;
+import java.util.regex.Pattern;
+
+import static java.util.Objects.requireNonNull;
+
+/**
+ * Drives the lifecycle of a ClearScape Analytics environment (create/start,
+ * stop, status, destroy) through the ClearScape REST API.
+ */
+public class ClearScapeManager
+{
+    private static final Logger log = Logger.get(ClearScapeManager.class);
+    // Only the official ClearScape API host is accepted. All dots in the host
+    // are escaped — the original pattern left "api.clearscape.teradata"
+    // unescaped, so hosts like "apiXclearscapeXteradata.com" would validate.
+    private static final Pattern ALLOWED_URL_PATTERN = Pattern.compile("^(https?://)(www\\.)?api\\.clearscape\\.teradata\\.com.*");
+    private final Model model;
+
+    public ClearScapeManager(Model model)
+    {
+        requireNonNull(model, "model is null");
+        this.model = model;
+    }
+
+    public void setup()
+    {
+        createAndStartClearScapeInstance();
+    }
+
+    public void stop()
+    {
+        stopClearScapeInstance();
+    }
+
+    public EnvironmentResponse.State status()
+    {
+        return getClearScapeInstanceStatus();
+    }
+
+    public void teardown()
+    {
+        shutdownAndDestroyClearScapeInstance();
+    }
+
+    // Returns the environment state; an API error or missing environment is
+    // reported as TERMINATED so callers can treat "gone" uniformly.
+    private EnvironmentResponse.State getClearScapeInstanceStatus()
+    {
+        try {
+            TeradataHttpClient teradataHttpClient = getTeradataHttpClient();
+
+            String token = model.getToken();
+            String name = model.getEnvName();
+            EnvironmentResponse response;
+            try {
+                response = teradataHttpClient.fetchEnvironment(new GetEnvironmentRequest(name), token);
+            }
+            catch (ClearScapeServiceException be) {
+                return EnvironmentResponse.State.TERMINATED;
+            }
+
+            if (response != null) {
+                return response.state();
+            }
+            return EnvironmentResponse.State.TERMINATED;
+        }
+        catch (Exception e) {
+            throw new RuntimeException("Failed to get status of ClearScape instance", e);
+        }
+    }
+
+    // Creates the environment if it does not exist, starts it if stopped,
+    // and records the resulting host name on the model.
+    private void createAndStartClearScapeInstance()
+    {
+        try {
+            TeradataHttpClient teradataHttpClient = getTeradataHttpClient();
+
+            String token = model.getToken();
+            String name = model.getEnvName();
+            EnvironmentResponse response = null;
+            try {
+                response = teradataHttpClient.fetchEnvironment(new GetEnvironmentRequest(name), token);
+            }
+            catch (ClearScapeServiceException be) {
+                log.info("Environment %s is not available. %s", name, be.getMessage());
+            }
+
+            if (response == null || response.ip() == null) {
+                CreateEnvironmentRequest request = new CreateEnvironmentRequest(
+                        name,
+                        model.getRegion(),
+                        model.getPassword());
+                response = teradataHttpClient.createEnvironment(request, token).get();
+            }
+            else if (response.state() == EnvironmentResponse.State.STOPPED) {
+                // NOTE(review): after starting a stopped environment, the host
+                // recorded below comes from the pre-start fetch — confirm the
+                // IP is stable across stop/start cycles.
+                EnvironmentRequest request = new EnvironmentRequest(name, new OperationRequest("start"));
+                teradataHttpClient.startEnvironment(request, token);
+            }
+            if (response != null) {
+                model.setHostName(response.ip());
+            }
+        }
+        catch (Exception e) {
+            throw new RuntimeException("Failed to create and start ClearScape instance", e);
+        }
+    }
+
+    // Stops the environment only when it is reachable and currently running.
+    private void stopClearScapeInstance()
+    {
+        try {
+            TeradataHttpClient teradataHttpClient = getTeradataHttpClient();
+            String token = model.getToken();
+            String name = model.getEnvName();
+
+            EnvironmentResponse response = null;
+            try {
+                response = teradataHttpClient.fetchEnvironment(new GetEnvironmentRequest(name), token);
+            }
+            catch (ClearScapeServiceException be) {
+                log.info("Environment %s is not available. %s", name, be.getMessage());
+            }
+            if (response != null &&
+                    response.ip() != null &&
+                    response.state() == EnvironmentResponse.State.RUNNING) {
+                EnvironmentRequest request = new EnvironmentRequest(name, new OperationRequest("stop"));
+                teradataHttpClient.stopEnvironment(request, token);
+            }
+        }
+        catch (Exception e) {
+            throw new RuntimeException("Failed to stop ClearScape instance", e);
+        }
+    }
+
+    // Deletes the environment; a service error (e.g. already gone) is logged
+    // rather than rethrown so teardown stays best-effort.
+    private void shutdownAndDestroyClearScapeInstance()
+    {
+        try {
+            TeradataHttpClient teradataHttpClient = getTeradataHttpClient();
+            String token = model.getToken();
+            DeleteEnvironmentRequest request = new DeleteEnvironmentRequest(model.getEnvName());
+            teradataHttpClient.deleteEnvironment(request, token).get();
+        }
+        catch (ClearScapeServiceException be) {
+            log.info("Environment %s is not available. Error - %s",
+                    model.getEnvName(), be.getMessage());
+        }
+        catch (Exception e) {
+            throw new RuntimeException("Failed to shutdown and destroy ClearScape instance", e);
+        }
+    }
+
+    private TeradataHttpClient getTeradataHttpClient()
+            throws URISyntaxException
+    {
+        String envUrl = TeradataTestConstants.CLEARSCAPE_URL;
+        if (isValidUrl(envUrl)) {
+            return new TeradataHttpClient(envUrl);
+        }
+        throw new URISyntaxException(envUrl, "Provide valid environment URL");
+    }
+
+    private static boolean isValidUrl(String url)
+    {
+        return ALLOWED_URL_PATTERN.matcher(url).matches();
+    }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeServiceException.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeServiceException.java
new file mode 100644
index 000000000000..1d4ee10dfd83
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeServiceException.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Objects.requireNonNull;
+
+public class ClearScapeServiceException
+ extends RuntimeException
+{
+ public ClearScapeServiceException(int statusCode, String body)
+ {
+ super(buildMessage(statusCode, requireNonNull(body, "body should not be null")));
+ }
+
+ private static String buildMessage(int statusCode, String body)
+ {
+ if (statusCode >= 400 && statusCode <= 499) {
+ return "Client error - " + statusCode + " " + body;
+ }
+ if (statusCode >= 500 && statusCode <= 599) {
+ return "Server error - " + statusCode + " " + body;
+ }
+ return "Unexpected error - " + statusCode + " " + body;
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeSetup.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeSetup.java
new file mode 100644
index 000000000000..b5151cdadff9
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeSetup.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import io.trino.plugin.teradata.integration.TeradataTestConstants;
+
+import static java.util.Objects.requireNonNull;
+
+public class ClearScapeSetup
+{
+ private final String token;
+ private final String password;
+ private final String envName;
+ private final String region;
+ private final boolean destroyEnv;
+ private ClearScapeManager manager;
+
+ public ClearScapeSetup(
+ String token,
+ String password,
+ String envName,
+ boolean destroyEnv,
+ String region)
+ {
+ this.token = requireNonNull(token, "token is null");
+ this.password = requireNonNull(password, "password is null");
+ this.envName = requireNonNull(envName, "envName is null");
+ this.region = requireNonNull(region, "region is null");
+ this.destroyEnv = destroyEnv;
+ }
+
+ public Model initialize()
+ {
+ try {
+ Model model = createModel();
+ manager = new ClearScapeManager(model);
+ manager.setup();
+ return model;
+ }
+ catch (Exception e) {
+ throw new RuntimeException("Failed to initialize ClearScape environment: " + envName, e);
+ }
+ }
+
+ private Model createModel()
+ {
+ return new Model(
+ envName,
+ null,
+ TeradataTestConstants.CLEARSCAPE_USERNAME,
+ password,
+ TeradataTestConstants.CLEARSCAPE_USERNAME,
+ token,
+ region);
+ }
+
+ public void cleanup()
+ {
+ if (manager == null) {
+ return;
+ }
+ if (destroyEnv) {
+ manager.teardown();
+ return;
+ }
+ manager.stop();
+ }
+
+ public EnvironmentResponse.State status()
+ {
+ if (manager == null) {
+ throw new IllegalStateException("ClearScape manager is not initialized");
+ }
+ return manager.status();
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/CreateEnvironmentRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/CreateEnvironmentRequest.java
new file mode 100644
index 000000000000..8ac35a095493
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/CreateEnvironmentRequest.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Objects.requireNonNull;
+
+public record CreateEnvironmentRequest(
+ String name,
+ String region,
+ String password)
+{
+ public CreateEnvironmentRequest
+ {
+ requireNonNull(name, "name should not be null");
+ requireNonNull(region, "region should not be null");
+ requireNonNull(password, "password should not be null");
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/DeleteEnvironmentRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/DeleteEnvironmentRequest.java
new file mode 100644
index 000000000000..12f3c311c169
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/DeleteEnvironmentRequest.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Objects.requireNonNull;
+
+public record DeleteEnvironmentRequest(String name)
+{
+ public DeleteEnvironmentRequest
+ {
+ requireNonNull(name, "name should not be null");
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentRequest.java
new file mode 100644
index 000000000000..a83993f4c24a
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentRequest.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Objects.requireNonNull;
+
+public record EnvironmentRequest(
+ String name,
+ OperationRequest request)
+{
+ public EnvironmentRequest
+ {
+ requireNonNull(name, "name must not be null");
+ requireNonNull(request, "request must not be null");
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentResponse.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentResponse.java
new file mode 100644
index 000000000000..0ed7109f86e9
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentResponse.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Locale.ENGLISH;
+import static java.util.Objects.requireNonNull;
+
+public record EnvironmentResponse(
+ State state,
+ String region,
+ String ip)
+{
+ public EnvironmentResponse
+ {
+ requireNonNull(state, "state must not be null");
+ requireNonNull(region, "region must not be null");
+ region = region.toUpperCase(ENGLISH);
+ }
+
+ public enum State
+ {
+ RUNNING,
+ STOPPED,
+ TERMINATED,
+ STOPPING
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/GetEnvironmentRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/GetEnvironmentRequest.java
new file mode 100644
index 000000000000..8eac68e2ffea
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/GetEnvironmentRequest.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Objects.requireNonNull;
+
+public record GetEnvironmentRequest(String name)
+{
+ public GetEnvironmentRequest
+ {
+ requireNonNull(name, "name should not be null");
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Model.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Model.java
new file mode 100644
index 000000000000..57fefef17f88
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Model.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Objects.requireNonNull;
+
+public class Model
+{
+ final String envName;
+ final String userName;
+ final String password;
+ final String databaseName;
+ final String token;
+ final String region;
+ String hostName;
+
+ public Model(
+ String envName,
+ String hostName,
+ String userName,
+ String password,
+ String databaseName,
+ String token,
+ String region)
+ {
+ this.envName = requireNonNull(envName, "envName is null");
+ this.userName = requireNonNull(userName, "userName is null");
+ this.password = requireNonNull(password, "password is null");
+ this.databaseName = requireNonNull(databaseName, "databaseName is null");
+ this.token = requireNonNull(token, "token is null");
+ this.region = requireNonNull(region, "region is null");
+ this.hostName = hostName;
+ }
+
+ public String getEnvName()
+ {
+ return envName;
+ }
+
+ public String getHostName()
+ {
+ return hostName;
+ }
+
+ public void setHostName(String hostName)
+ {
+ this.hostName = hostName;
+ }
+
+ public String getPassword()
+ {
+ return password;
+ }
+
+ public String getToken()
+ {
+ return token;
+ }
+
+ public String getRegion()
+ {
+ return region;
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/OperationRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/OperationRequest.java
new file mode 100644
index 000000000000..61a4a2b273b8
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/OperationRequest.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Objects.requireNonNull;
+
+public record OperationRequest(String operation)
+{
+ public OperationRequest
+ {
+ requireNonNull(operation, "operation should not be null");
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/TeradataHttpClient.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/TeradataHttpClient.java
new file mode 100644
index 000000000000..29ed4851c3cf
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/TeradataHttpClient.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.MapperFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.net.URI;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.util.concurrent.CompletableFuture;
+
+import static com.google.common.net.HttpHeaders.AUTHORIZATION;
+import static com.google.common.net.HttpHeaders.CONTENT_TYPE;
+import static java.util.Objects.requireNonNull;
+
+public class TeradataHttpClient
+{
+ private static final String APPLICATION_JSON = "application/json";
+ private static final String BEARER = "Bearer ";
+
+ private final String baseUrl;
+ private final HttpClient httpClient;
+ private final ObjectMapper objectMapper;
+
+ public TeradataHttpClient(String baseUrl)
+ {
+ requireNonNull(baseUrl, "baseUrl should not be null");
+ this.baseUrl = baseUrl;
+ httpClient = HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build();
+ objectMapper = JsonMapper.builder()
+ .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
+ .configure(MapperFeature.ALLOW_FINAL_FIELDS_AS_MUTATORS, false)
+ .build();
+ }
+
+ public CompletableFuture createEnvironment(CreateEnvironmentRequest createEnvironmentRequest, String token)
+ {
+ String requestBody = handleCheckedException(() -> objectMapper.writeValueAsString(createEnvironmentRequest));
+ HttpRequest httpRequest = HttpRequest.newBuilder(URI.create(baseUrl.concat("/environments")))
+ .headers(
+ AUTHORIZATION, BEARER + token,
+ CONTENT_TYPE, APPLICATION_JSON)
+ .POST(HttpRequest.BodyPublishers.ofString(requestBody))
+ .build();
+ return httpClient.sendAsync(httpRequest, HttpResponse.BodyHandlers.ofString())
+ .thenApply(httpResponse -> handleHttpResponse(httpResponse, new TypeReference<>() {}));
+ }
+
+ public EnvironmentResponse fetchEnvironment(GetEnvironmentRequest getEnvironmentRequest, String token)
+ {
+ HttpRequest httpRequest = HttpRequest.newBuilder(URI.create(baseUrl
+ .concat("/environments/")
+ .concat(getEnvironmentRequest.name())))
+ .headers(AUTHORIZATION, BEARER + token)
+ .GET()
+ .build();
+ HttpResponse httpResponse = handleCheckedException(() -> httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()));
+ return handleHttpResponse(httpResponse, new TypeReference<>() {});
+ }
+
+ public CompletableFuture deleteEnvironment(DeleteEnvironmentRequest deleteEnvironmentRequest, String token)
+ {
+ HttpRequest httpRequest = HttpRequest.newBuilder(URI.create(baseUrl + "/environments/" + deleteEnvironmentRequest.name()))
+ .headers(AUTHORIZATION, BEARER + token)
+ .DELETE()
+ .build();
+
+ return httpClient.sendAsync(httpRequest, HttpResponse.BodyHandlers.ofString())
+ .thenApply(httpResponse -> handleHttpResponse(httpResponse, new TypeReference<Void>() {}));
+ }
+
+ public void startEnvironment(EnvironmentRequest environmentRequest, String token)
+ {
+ String requestBody = handleCheckedException(() -> objectMapper.writeValueAsString(environmentRequest.request()));
+ getVoidCompletableFuture(environmentRequest.name(), token, requestBody);
+ }
+
+ public void stopEnvironment(EnvironmentRequest environmentRequest, String token)
+ {
+ String requestBody = handleCheckedException(() -> objectMapper.writeValueAsString(environmentRequest.request()));
+ getVoidCompletableFuture(environmentRequest.name(), token, requestBody);
+ }
+
+ private void getVoidCompletableFuture(String name, String token, String jsonPayLoadString)
+ {
+ HttpRequest.BodyPublisher publisher = HttpRequest.BodyPublishers.ofString(jsonPayLoadString);
+ HttpRequest httpRequest = HttpRequest.newBuilder(URI.create(baseUrl + "/environments/" + name))
+ .headers(AUTHORIZATION, BEARER + token, CONTENT_TYPE, APPLICATION_JSON)
+ .method("PATCH", publisher)
+ .build();
+
+ handleHttpResponse(handleCheckedException(() -> httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString())), new TypeReference<Void>() {});
+ }
+
+ private T handleHttpResponse(HttpResponse httpResponse, TypeReference typeReference)
+ {
+ String body = httpResponse.body();
+ if (httpResponse.statusCode() >= 200 && httpResponse.statusCode() <= 299) {
+ return handleCheckedException(() -> {
+ if (typeReference.getType().getTypeName().equals(Void.class.getTypeName())) {
+ return null;
+ }
+ return objectMapper.readValue(body, typeReference);
+ });
+ }
+ throw new ClearScapeServiceException(httpResponse.statusCode(), body);
+ }
+
+ private static T handleCheckedException(CheckedSupplier checkedSupplier)
+ {
+ try {
+ return checkedSupplier.get();
+ }
+ catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new RuntimeException(e);
+ }
+ }
+
+ @FunctionalInterface
+ private interface CheckedSupplier
+ {
+ T get()
+ throws IOException, InterruptedException;
+ }
+}
diff --git a/pom.xml b/pom.xml
index 8c05e46c871f..9c16b70fd604 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,7 +5,7 @@
io.airlift
airbase
- 364
+ 365
io.trino
@@ -114,6 +114,7 @@
plugin/trino-snowflake
plugin/trino-spooling-filesystem
plugin/trino-sqlserver
+ plugin/trino-teradata
plugin/trino-teradata-functions
plugin/trino-thrift
plugin/trino-thrift-api
@@ -404,12 +405,6 @@
${dep.cassandra.version}
-
- com.esri.geometry
- esri-geometry-api
- 2.2.4
-
-
com.exasol
exasol-jdbc
@@ -2342,6 +2337,13 @@
provided
+
+ com.teradata.jdbc
+ terajdbc
+ 20.00.00.54
+ runtime
+
+
io.confluent
kafka-json-schema-serializer
diff --git a/testing/trino-product-tests-groups/src/main/java/io/trino/tests/product/TestGroups.java b/testing/trino-product-tests-groups/src/main/java/io/trino/tests/product/TestGroups.java
index 54cec1cb7271..a38ba4ecd1bf 100644
--- a/testing/trino-product-tests-groups/src/main/java/io/trino/tests/product/TestGroups.java
+++ b/testing/trino-product-tests-groups/src/main/java/io/trino/tests/product/TestGroups.java
@@ -107,6 +107,7 @@ public final class TestGroups
public static final String PARQUET = "parquet";
public static final String IGNITE = "ignite";
public static final String FAULT_TOLERANT = "fault-tolerant";
+ public static final String TERADATA = "teradata";
private TestGroups() {}
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java
index c15fbdf8bb52..ed917a5219a8 100644
--- a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java
@@ -75,6 +75,7 @@ public void extendEnvironment(Environment.Builder builder)
"singlestore",
"snowflake",
"sqlserver",
+ "teradata",
"tpcds",
"trino_thrift")
.forEach(connector -> builder.addConnector(
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeTeradata.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeTeradata.java
new file mode 100644
index 000000000000..5a729b7a54b4
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeTeradata.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.tests.product.launcher.env.environment;
+
+import com.google.inject.Inject;
+import io.trino.tests.product.launcher.docker.DockerFiles;
+import io.trino.tests.product.launcher.env.Environment.Builder;
+import io.trino.tests.product.launcher.env.EnvironmentProvider;
+import io.trino.tests.product.launcher.env.common.StandardMultinode;
+import io.trino.tests.product.launcher.env.common.TestsEnvironment;
+
+import static org.testcontainers.utility.MountableFile.forHostPath;
+
+@TestsEnvironment
+public class EnvMultinodeTeradata
+ extends EnvironmentProvider
+{
+ private final DockerFiles.ResourceProvider configDir;
+
+ @Inject
+ public EnvMultinodeTeradata(StandardMultinode standardMultinode, DockerFiles dockerFiles)
+ {
+ super(standardMultinode);
+ this.configDir = dockerFiles.getDockerFilesHostDirectory("conf/environment/multinode-teradata");
+ }
+
+ @Override
+ public void extendEnvironment(Builder builder)
+ {
+ builder.addConnector("teradata", forHostPath(configDir.getPath("teradata.properties")));
+ }
+}
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteTeradata.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteTeradata.java
new file mode 100644
index 000000000000..634cd5f92d16
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteTeradata.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.tests.product.launcher.suite.suites;
+
+import com.google.common.collect.ImmutableList;
+import io.trino.tests.product.launcher.env.EnvironmentConfig;
+import io.trino.tests.product.launcher.env.environment.EnvMultinodeTeradata;
+import io.trino.tests.product.launcher.suite.Suite;
+import io.trino.tests.product.launcher.suite.SuiteTestRun;
+
+import java.util.List;
+
+import static io.trino.tests.product.TestGroups.CONFIGURED_FEATURES;
+import static io.trino.tests.product.TestGroups.TERADATA;
+import static io.trino.tests.product.launcher.suite.SuiteTestRun.testOnEnvironment;
+
+public class SuiteTeradata
+ extends Suite
+{
+ @Override
+ public List getTestRuns(EnvironmentConfig config)
+ {
+ return ImmutableList.of(
+ testOnEnvironment(EnvMultinodeTeradata.class)
+ .withGroups(CONFIGURED_FEATURES, TERADATA)
+ .build());
+ }
+}
diff --git a/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/multinode-all/teradata.properties b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/multinode-all/teradata.properties
new file mode 100644
index 000000000000..ede4d6db0d63
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/multinode-all/teradata.properties
@@ -0,0 +1,4 @@
+connector.name=teradata
+connection-url=jdbc:teradata://host.invalid/
+connection-user=root
+connection-password=secret
diff --git a/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/multinode-teradata/teradata.properties b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/multinode-teradata/teradata.properties
new file mode 100644
index 000000000000..65d022bccb23
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/multinode-teradata/teradata.properties
@@ -0,0 +1,4 @@
+connector.name=teradata
+connection-url=jdbc:teradata://${ENV:TERADATA_HOSTNAME}/
+connection-user=${ENV:TERADATA_USERNAME}
+connection-password=${ENV:TERADATA_PASSWORD}
diff --git a/testing/trino-product-tests/pom.xml b/testing/trino-product-tests/pom.xml
index 88eac80ccf98..df163f275829 100644
--- a/testing/trino-product-tests/pom.xml
+++ b/testing/trino-product-tests/pom.xml
@@ -272,6 +272,12 @@
runtime
+
+ com.teradata.jdbc
+ terajdbc
+ runtime
+
+
io.confluent
kafka-protobuf-types
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/teradata/TestTeradata.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/teradata/TestTeradata.java
new file mode 100644
index 000000000000..bc0316a21155
--- /dev/null
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/teradata/TestTeradata.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.tests.product.teradata;
+
+import io.trino.tempto.ProductTest;
+import io.trino.tempto.query.QueryResult;
+import org.testng.annotations.Test;
+
+import static io.trino.tempto.assertions.QueryAssert.Row.row;
+import static io.trino.testing.TestingNames.randomNameSuffix;
+import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
+import static io.trino.tests.product.TestGroups.TERADATA;
+import static io.trino.tests.product.utils.QueryExecutors.onTrino;
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class TestTeradata
+ extends ProductTest
+{
+ @Test(groups = {TERADATA, PROFILE_SPECIFIC_TESTS})
+ public void testCreateTableAsSelect()
+ {
+ String databaseName = "teradata.test_" + randomNameSuffix();
+ String tableName = databaseName + ".nation_" + randomNameSuffix();
+ onTrino().executeQuery("CREATE SCHEMA " + databaseName);
+ QueryResult result = onTrino().executeQuery("CREATE TABLE " + tableName + " AS SELECT * FROM tpch.tiny.nation");
+ try {
+ assertThat(result).updatedRowsCountIsEqualTo(25);
+ assertThat(onTrino().executeQuery("SELECT COUNT(*) FROM " + tableName)).containsOnly(row(25));
+ }
+ finally {
+ onTrino().executeQuery("DROP TABLE " + tableName);
+ onTrino().executeQuery("DROP SCHEMA " + databaseName);
+ }
+ }
+}