diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index afbfeb41e482..1645a8e0b1a4 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -516,6 +516,9 @@ jobs:
- { modules: plugin/trino-snowflake }
- { modules: plugin/trino-snowflake, profile: cloud-tests }
- { modules: plugin/trino-sqlserver }
+ - { modules: plugin/trino-teradata }
+ - { modules: plugin/trino-teradata, profile: clearscape-tests }
+ - { modules: plugin/trino-teradata, profile: run-only-long-tests }
- { modules: plugin/trino-vertica }
- { modules: testing/trino-faulttolerant-tests, profile: default }
- { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-delta }
@@ -566,6 +569,8 @@ jobs:
&& ! (contains(matrix.modules, 'trino-filesystem-gcs') && contains(matrix.profile, 'cloud-tests'))
&& ! (contains(matrix.modules, 'trino-filesystem-s3') && contains(matrix.profile, 'cloud-tests'))
&& ! (contains(matrix.modules, 'trino-hdfs') && contains(matrix.profile, 'cloud-tests'))
+ && ! (contains(matrix.modules, 'trino-teradata') && contains(matrix.profile, 'clearscape-tests'))
+ && ! (contains(matrix.modules, 'trino-teradata') && contains(matrix.profile, 'run-only-long-tests'))
run: $MAVEN test ${MAVEN_TEST} -pl ${{ matrix.modules }} ${{ matrix.profile != '' && format('-P {0}', matrix.profile) || '' }}
# Additional tests for selected modules
- name: HDFS file system cache isolated JVM tests
@@ -791,6 +796,25 @@ jobs:
# Cancelled workflows may have left the ephemeral cluster running
if: always()
run: .github/bin/redshift/delete-aws-redshift.sh
+ - name: Teradata Tests
+ id: tests-teradata
+ env:
+ CLEARSCAPE_TOKEN: ${{ secrets.CLEARSCAPE_TOKEN }}
+ CLEARSCAPE_PASSWORD: ${{ secrets.CLEARSCAPE_PASSWORD }}
+ CLEARSCAPE_REGION: ${{ vars.CLEARSCAPE_REGION }}
+ if: matrix.modules == 'plugin/trino-teradata' && contains(matrix.profile, 'clearscape-tests') && env.CLEARSCAPE_TOKEN != '' && env.CLEARSCAPE_PASSWORD != '' && env.CLEARSCAPE_REGION != ''
+ run: |
+ $MAVEN test ${MAVEN_TEST} -pl :trino-teradata -Pclearscape-tests
+ - name: Teradata Long running Tests
+ id: tests-long-run-teradata
+ env:
+ CLEARSCAPE_TOKEN: ${{ secrets.CLEARSCAPE_TOKEN }}
+ CLEARSCAPE_PASSWORD: ${{ secrets.CLEARSCAPE_PASSWORD }}
+ CLEARSCAPE_REGION: ${{ vars.CLEARSCAPE_REGION }}
+ TERADATA_LONG_TESTS: ${{ vars.TERADATA_LONG_TESTS }}
+ if: matrix.modules == 'plugin/trino-teradata' && contains(matrix.profile, 'run-only-long-tests') && env.CLEARSCAPE_TOKEN != '' && env.CLEARSCAPE_PASSWORD != '' && env.CLEARSCAPE_REGION != '' && env.TERADATA_LONG_TESTS == 'true'
+ run: |
+ $MAVEN test ${MAVEN_TEST} -pl :trino-teradata -Prun-only-long-tests
- name: Sanitize artifact name
if: always()
run: |
@@ -818,6 +842,8 @@ jobs:
|| steps.tests-bq-smoke.outcome == 'failure'
|| steps.tests-iceberg.outcome == 'failure'
|| steps.tests-redshift.outcome == 'failure'
+ || steps.tests-teradata.outcome == 'failure'
+ || steps.tests-long-run-teradata.outcome == 'failure'
|| steps.tests-snowflake.outcome == 'failure'
}}
upload-heap-dump: ${{ env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository }}
diff --git a/core/trino-server/src/main/provisio/trino.xml b/core/trino-server/src/main/provisio/trino.xml
index 97a366b39290..e4a8e7bc8e4c 100644
--- a/core/trino-server/src/main/provisio/trino.xml
+++ b/core/trino-server/src/main/provisio/trino.xml
@@ -284,6 +284,12 @@
+
+
+
+
+
+
diff --git a/docs/src/main/sphinx/connector.md b/docs/src/main/sphinx/connector.md
index a954b30cf059..1a77556400c3 100644
--- a/docs/src/main/sphinx/connector.md
+++ b/docs/src/main/sphinx/connector.md
@@ -42,6 +42,7 @@ SingleStore
Snowflake
SQL Server
System
+Teradata
Thrift
TPC-DS
TPC-H
diff --git a/docs/src/main/sphinx/connector/teradata.md b/docs/src/main/sphinx/connector/teradata.md
new file mode 100644
index 000000000000..d0c73f4a2688
--- /dev/null
+++ b/docs/src/main/sphinx/connector/teradata.md
@@ -0,0 +1,214 @@
+# Teradata connector
+
+```{raw} html
+<img src="../_static/img/teradata.png" class="connector-logo">
+```
+
+The Teradata connector allows querying and creating tables in an external
+[Teradata](https://www.teradata.com/) database. This can be used to join
+data between different systems like Teradata and Hive, or between different Teradata instances.
+
+## Requirements
+
+To connect to Teradata, you need:
+
+- Teradata Database
+- Network access from the Trino coordinator and workers to Teradata. Port
+ 1025 is the default port
+
+## Configuration
+
+To configure the Teradata connector, create a catalog properties file in
+`etc/catalog` named, for example, `example.properties`, to mount the Teradata
+connector as the `teradata` catalog. Create the file with the following
+contents, replacing the connection properties as appropriate for your setup:
+
+```properties
+connector.name=teradata
+connection-url=jdbc:teradata://example.teradata.com/CHARSET=UTF8,TMODE=ANSI,LOGMECH=TD2
+connection-user=***
+connection-password=***
+```
+
+The `connection-url` defines the connection information and parameters to pass
+to the Teradata JDBC driver. The supported parameters for the URL are
+available in the
+[Teradata JDBC documentation](https://teradata-docs.s3.amazonaws.com/doc/connectivity/jdbc/reference/current/jdbcug_chapter_2.html#BABJIHBJ).
+For example, the following `connection-url` configures character encoding,
+transaction mode, and authentication.
+
+```properties
+connection-url=jdbc:teradata://example.teradata.com/CHARSET=UTF8,TMODE=ANSI,LOGMECH=TD2
+```
+
+The `connection-user` and `connection-password` are typically required and
+determine the user credentials for the connection, often a service user.
+
+### Connection security
+
+If you have TLS configured with a globally-trusted certificate installed on
+your data source, you can enable TLS between your cluster and the data
+source by appending parameters to the JDBC connection string set in the
+`connection-url` catalog configuration property.
+
+For example, to specify `SSLMODE`:
+
+```properties
+connection-url=jdbc:teradata://example.teradata.com/SSLMODE=REQUIRED
+```
+
+For more information on TLS configuration options, see the
+Teradata [JDBC documentation](https://teradata-docs.s3.amazonaws.com/doc/connectivity/jdbc/reference/current/jdbcug_chapter_2.html#URL_SSLMODE).
+
+```{include} jdbc-authentication.fragment
+```
+
+### Multiple Teradata databases
+
+You can have as many catalogs as you need, so if you have additional Teradata
+databases, simply add another properties file to `etc/catalog` with a different
+name, making sure it ends in `.properties`.
+For example, if you name the property file `sales.properties`, Trino creates a
+catalog named `sales` using the configured connector.
+
+## Type mapping
+
+Because Trino and Teradata each support types that the other does not, this
+connector {ref}`modifies some types <type-mapping-overview>` when reading data.
+Refer to the following sections for type mapping when reading data from
+Teradata to Trino.
+
+### Teradata type to Trino type mapping
+
+The connector maps Teradata types to the corresponding Trino types following
+this table:
+
+:::{list-table} Teradata type to Trino type mapping
+:widths: 40, 40, 20
+:header-rows: 1
+
+* - Teradata type
+ - Trino type
+ - Notes
+* - `TINYINT`
+ - `TINYINT`
+ -
+* - `SMALLINT`
+ - `SMALLINT`
+ -
+* - `INTEGER`
+ - `INTEGER`
+ -
+* - `BIGINT`
+ - `BIGINT`
+ -
+* - `REAL`
+ - `DOUBLE`
+ -
+* - `DOUBLE`
+ - `DOUBLE`
+ -
+* - `FLOAT`
+ - `DOUBLE`
+ -
+* - `NUMBER(p, s)`
+ - `DECIMAL(p, s)`
+ -
+* - `NUMERIC(p, s)`
+ - `DECIMAL(p, s)`
+ -
+* - `DECIMAL(p, s)`
+ - `DECIMAL(p, s)`
+ -
+* - `CHAR(n)`
+ - `CHAR(n)`
+ -
+* - `CHARACTER(n)`
+ - `CHAR(n)`
+ -
+* - `VARCHAR(n)`
+ - `VARCHAR(n)`
+ -
+* - `DATE`
+ - `DATE`
+ -
+:::
+
+No other types are supported.
+
+### Trino type to Teradata type mapping
+
+The connector maps Trino types to the corresponding Teradata types following
+this table:
+
+:::{list-table} Trino type to Teradata type mapping
+:widths: 40, 40, 20
+:header-rows: 1
+
+* - Trino type
+ - Teradata type
+ - Notes
+* - `TINYINT`
+ - `SMALLINT`
+ -
+* - `SMALLINT`
+ - `SMALLINT`
+ -
+* - `INTEGER`
+ - `INTEGER`
+ -
+* - `BIGINT`
+ - `BIGINT`
+ -
+* - `REAL`
+ - `FLOAT`
+ -
+* - `DOUBLE`
+ - `DOUBLE`
+ -
+* - `DATE`
+ - `DATE`
+ -
+:::
+
+No other types are supported.
+
+```{include} jdbc-type-mapping.fragment
+```
+
+## Querying Teradata
+
+The Teradata connector provides a schema for every Teradata database. You can
+see the available Teradata databases by running SHOW SCHEMAS:
+
+```sql
+SHOW SCHEMAS FROM teradata;
+```
+
+If you have a Teradata database named sales, you can view the tables in this
+database by running SHOW TABLES:
+
+```sql
+SHOW TABLES FROM teradata.sales;
+```
+
+You can see a list of the columns in the orders table in the sales database
+using either of the following:
+
+```sql
+DESCRIBE teradata.sales.orders;
+SHOW COLUMNS FROM teradata.sales.orders;
+```
+
+Finally, you can access the orders table in the sales database:
+
+```sql
+SELECT * FROM teradata.sales.orders;
+```
+
+## SQL support
+
+The connector provides read access to data and metadata in
+a Teradata database. The connector supports the {ref}`globally available
+<sql-globally-available>` and {ref}`read operation <sql-read-operations>`
+statements.
diff --git a/plugin/trino-teradata/README.md b/plugin/trino-teradata/README.md
new file mode 100644
index 000000000000..e9e3817f684b
--- /dev/null
+++ b/plugin/trino-teradata/README.md
@@ -0,0 +1,42 @@
+# Teradata Connector Developer Notes
+
+The Teradata connector module has both unit tests and integration tests.
+The integration tests require access to a [Teradata ClearScape Analytics™ Experience](https://clearscape.teradata.com/sign-in).
+You can follow the steps below to run the integration tests locally.
+
+## Prerequisites
+
+#### 1. Create a new ClearScape Analytics™ Experience account
+
+If you don't already have one, sign up at:
+
+[Teradata ClearScape Analytics™ Experience](https://www.teradata.com/getting-started/demos/clearscape-analytics)
+
+#### 2. Login
+
+Sign in with your new account at:
+
+[ClearScape Analytics™ Experience Login](https://clearscape.teradata.com/sign-in)
+
+#### 3. Collect the API Token
+
+Use the **Copy API Token** button in the UI to retrieve your token.
+
+#### 4. Define the following environment variables
+
+⚠️ **Note:** The Teradata database password must be **at least 8 characters long**.
+
+```
+export CLEARSCAPE_TOKEN=
+export CLEARSCAPE_PASSWORD=
+```
+
+## Running Integration Tests
+
+Once the environment variables are set, run the integration tests with:
+
+⚠️ **Note:** Run the following command from the Trino parent directory.
+
+```
+ ./mvnw clean install -pl :trino-teradata
+```
diff --git a/plugin/trino-teradata/pom.xml b/plugin/trino-teradata/pom.xml
new file mode 100644
index 000000000000..4a466d369b6d
--- /dev/null
+++ b/plugin/trino-teradata/pom.xml
@@ -0,0 +1,346 @@
+
+
+ 4.0.0
+
+ io.trino
+ trino-root
+ 479-SNAPSHOT
+ ../../pom.xml
+
+
+ trino-teradata
+ trino-plugin
+ ${project.artifactId}
+ Trino - Teradata connector
+
+
+ true
+ true
+
+
+
+
+ com.google.inject
+ guice
+ classes
+
+
+
+ io.airlift
+ configuration
+
+
+
+ io.trino
+ trino-base-jdbc
+
+
+
+ io.trino
+ trino-plugin-toolkit
+
+
+
+ jakarta.validation
+ jakarta.validation-api
+
+
+
+ com.fasterxml.jackson.core
+ jackson-annotations
+ provided
+
+
+
+ io.airlift
+ slice
+ provided
+
+
+
+ io.opentelemetry
+ opentelemetry-api
+ provided
+
+
+
+ io.opentelemetry
+ opentelemetry-api-incubator
+ provided
+
+
+
+ io.opentelemetry
+ opentelemetry-context
+ provided
+
+
+
+ io.trino
+ trino-spi
+ provided
+
+
+
+ org.openjdk.jol
+ jol-core
+ provided
+
+
+
+ com.teradata.jdbc
+ terajdbc
+ 20.00.00.49
+ runtime
+
+
+
+ com.fasterxml.jackson.core
+ jackson-core
+ test
+
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+ test
+
+
+
+ com.google.errorprone
+ error_prone_annotations
+ test
+ true
+
+
+
+ com.google.guava
+ guava
+ test
+
+
+
+ io.airlift
+ concurrent
+ test
+
+
+
+ io.airlift
+ configuration-testing
+ test
+
+
+
+ io.airlift
+ json
+ test
+
+
+
+ io.airlift
+ junit-extensions
+ test
+
+
+
+ io.airlift
+ log
+ test
+
+
+
+ io.airlift
+ log-manager
+ test
+
+
+
+ io.airlift
+ testing
+ test
+
+
+
+ io.airlift
+ tracing
+ test
+
+
+
+ io.airlift
+ units
+ test
+
+
+
+ io.trino
+ trino-base-jdbc
+ test-jar
+ test
+
+
+
+ io.trino
+ trino-exchange-filesystem
+ test
+
+
+
+ io.trino
+ trino-exchange-filesystem
+ test-jar
+ test
+
+
+
+ io.trino
+ trino-jmx
+ test
+
+
+
+ io.trino
+ trino-main
+ test
+
+
+
+ io.trino
+ trino-main
+ test-jar
+ test
+
+
+
+ io.trino
+ trino-parser
+ test
+
+
+
+ io.trino
+ trino-plugin-toolkit
+ test-jar
+ test
+
+
+
+ io.trino
+ trino-testing
+ test
+
+
+
+ io.trino
+ trino-testing-containers
+ test
+
+
+
+ io.trino
+ trino-testing-services
+ test
+
+
+
+ io.trino
+ trino-tpch
+ test
+
+
+
+ io.trino.tpch
+ tpch
+ test
+
+
+
+ org.assertj
+ assertj-core
+ test
+
+
+
+ org.jetbrains
+ annotations
+ test
+
+
+
+ org.junit.jupiter
+ junit-jupiter-api
+ test
+
+
+
+ org.junit.jupiter
+ junit-jupiter-engine
+ test
+
+
+
+
+
+ default
+
+ true
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+
+
+ **/TestTeradataConnectorTest.java
+ **/TestTeradataTypeMapping.java
+
+
+
+
+
+
+
+
+ clearscape-tests
+
+ false
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+
+
+ **/TestTeradataConnectorTest.java
+ **/TestTeradataTypeMapping.java
+
+ long_run
+
+
+
+
+
+
+
+ run-only-long-tests
+
+ false
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+
+ long_run
+
+
+
+
+
+
+
diff --git a/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClient.java b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClient.java
new file mode 100644
index 000000000000..8347ba4ac0a3
--- /dev/null
+++ b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClient.java
@@ -0,0 +1,382 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata;
+
+import com.google.inject.Inject;
+import io.trino.plugin.base.mapping.IdentifierMapping;
+import io.trino.plugin.jdbc.BaseJdbcClient;
+import io.trino.plugin.jdbc.BaseJdbcConfig;
+import io.trino.plugin.jdbc.CaseSensitivity;
+import io.trino.plugin.jdbc.ColumnMapping;
+import io.trino.plugin.jdbc.ConnectionFactory;
+import io.trino.plugin.jdbc.JdbcColumnHandle;
+import io.trino.plugin.jdbc.JdbcOutputTableHandle;
+import io.trino.plugin.jdbc.JdbcTableHandle;
+import io.trino.plugin.jdbc.JdbcTypeHandle;
+import io.trino.plugin.jdbc.PredicatePushdownController;
+import io.trino.plugin.jdbc.QueryBuilder;
+import io.trino.plugin.jdbc.RemoteTableName;
+import io.trino.plugin.jdbc.WriteMapping;
+import io.trino.plugin.jdbc.logging.RemoteQueryModifier;
+import io.trino.spi.TrinoException;
+import io.trino.spi.connector.ColumnMetadata;
+import io.trino.spi.connector.ColumnPosition;
+import io.trino.spi.connector.ConnectorSession;
+import io.trino.spi.connector.SchemaTableName;
+import io.trino.spi.type.CharType;
+import io.trino.spi.type.DecimalType;
+import io.trino.spi.type.Decimals;
+import io.trino.spi.type.Type;
+import io.trino.spi.type.VarcharType;
+import com.google.common.collect.ImmutableMap;
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.OptionalLong;
+
+import static io.trino.plugin.jdbc.CaseSensitivity.CASE_INSENSITIVE;
+import static io.trino.plugin.jdbc.CaseSensitivity.CASE_SENSITIVE;
+import static io.trino.plugin.jdbc.JdbcErrorCode.JDBC_ERROR;
+import static io.trino.plugin.jdbc.PredicatePushdownController.CASE_INSENSITIVE_CHARACTER_PUSHDOWN;
+import static io.trino.plugin.jdbc.PredicatePushdownController.FULL_PUSHDOWN;
+import static io.trino.plugin.jdbc.StandardColumnMappings.bigintColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.bigintWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.charReadFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.charWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.dateColumnMappingUsingLocalDate;
+import static io.trino.plugin.jdbc.StandardColumnMappings.dateWriteFunctionUsingLocalDate;
+import static io.trino.plugin.jdbc.StandardColumnMappings.decimalColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.doubleColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.doubleWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.integerColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.integerWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.longDecimalWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.realWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.shortDecimalWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.smallintColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.smallintWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.varcharReadFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.varcharWriteFunction;
+import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.getUnsupportedTypeHandling;
+import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR;
+import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
+import static io.trino.spi.type.BigintType.BIGINT;
+import static io.trino.spi.type.CharType.createCharType;
+import static io.trino.spi.type.DateType.DATE;
+import static io.trino.spi.type.DecimalType.createDecimalType;
+import static io.trino.spi.type.DoubleType.DOUBLE;
+import static io.trino.spi.type.IntegerType.INTEGER;
+import static io.trino.spi.type.RealType.REAL;
+import static io.trino.spi.type.SmallintType.SMALLINT;
+import static io.trino.spi.type.TinyintType.TINYINT;
+import static io.trino.spi.type.VarcharType.createUnboundedVarcharType;
+import static io.trino.spi.type.VarcharType.createVarcharType;
+import static java.lang.String.format;
+
+public class TeradataClient
+ extends BaseJdbcClient
+{
+ private static final PredicatePushdownController TERADATA_STRING_PUSHDOWN = FULL_PUSHDOWN;
+ private final TeradataConfig.TeradataCaseSensitivity teradataJDBCCaseSensitivity;
+
+ @Inject
+ public TeradataClient(
+ BaseJdbcConfig config,
+ TeradataConfig teradataConfig,
+ ConnectionFactory connectionFactory,
+ QueryBuilder queryBuilder,
+ IdentifierMapping identifierMapping,
+ RemoteQueryModifier remoteQueryModifier)
+ {
+ super("\"", connectionFactory, queryBuilder, config.getJdbcTypesMappedToVarchar(), identifierMapping, remoteQueryModifier, true);
+ this.teradataJDBCCaseSensitivity = teradataConfig.getTeradataCaseSensitivity();
+ }
+
+ @Override
+ protected void createSchema(ConnectorSession session, Connection connection, String remoteSchemaName)
+ {
+ execute(session, format(
+ "CREATE DATABASE %s AS PERMANENT = 60000000, SPOOL = 120000000",
+ quoted(remoteSchemaName)));
+ }
+
+ @Override
+ protected void copyTableSchema(ConnectorSession session, Connection connection, String catalogName, String schemaName, String tableName, String newTableName,
+ List columnNames)
+ {
+ String tableCopyFormat = "CREATE TABLE %s AS ( SELECT * FROM %s ) WITH DATA";
+ String sql = format(
+ tableCopyFormat,
+ quoted(catalogName, schemaName, newTableName),
+ quoted(catalogName, schemaName, tableName));
+ try {
+ execute(session, connection, sql);
+ }
+ catch (SQLException e) {
+ throw new TrinoException(JDBC_ERROR, e);
+ }
+ }
+
+ @Override
+ protected void verifySchemaName(DatabaseMetaData databaseMetadata, String schemaName)
+ throws SQLException
+ {
+ int schemaNameLimit = databaseMetadata.getMaxSchemaNameLength();
+ if (schemaName.length() > schemaNameLimit) {
+ throw new TrinoException(NOT_SUPPORTED, format("Schema name must be shorter than or equal to '%s' characters but got '%s'", schemaNameLimit, schemaName.length()));
+ }
+ }
+
+ @Override
+ protected void verifyTableName(DatabaseMetaData databaseMetadata, String tableName)
+ throws SQLException
+ {
+ if (tableName.length() > databaseMetadata.getMaxTableNameLength()) {
+ throw new TrinoException(NOT_SUPPORTED, format("Table name must be shorter than or equal to '%s' characters but got '%s'", databaseMetadata.getMaxTableNameLength(),
+ tableName.length()));
+ }
+ }
+
+ @Override
+ protected void verifyColumnName(DatabaseMetaData databaseMetadata, String columnName)
+ throws SQLException
+ {
+ if (columnName.length() > databaseMetadata.getMaxColumnNameLength()) {
+ throw new TrinoException(NOT_SUPPORTED, format("Column name must be shorter than or equal to '%s' characters but got '%s': '%s'",
+ databaseMetadata.getMaxColumnNameLength(), columnName.length(), columnName));
+ }
+ }
+
+ @Override
+ protected void dropSchema(ConnectorSession session, Connection connection, String remoteSchemaName, boolean cascade)
+ throws SQLException
+ {
+ if (cascade) {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support dropping schemas with CASCADE option");
+ }
+ String dropSchema = "DROP DATABASE " + quoted(remoteSchemaName);
+ execute(session, connection, dropSchema);
+ }
+
+ @Override
+ public void renameSchema(ConnectorSession session, String schemaName, String newSchemaName)
+ {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming schema");
+ }
+
+ @Override
+ public OptionalLong delete(ConnectorSession session, JdbcTableHandle handle)
+ {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support modifying table rows");
+ }
+
+ @Override
+ public void truncateTable(ConnectorSession session, JdbcTableHandle handle)
+ {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support truncating tables");
+ }
+
+ @Override
+ public void dropColumn(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column)
+ {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support dropping columns");
+ }
+
+ @Override
+ public void renameColumn(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle jdbcColumn, String newColumnName)
+ {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming columns");
+ }
+
+ @Override
+ public void renameTable(ConnectorSession session, JdbcTableHandle handle, SchemaTableName newTableName)
+ {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming tables");
+ }
+
+ @Override
+ public JdbcOutputTableHandle beginInsertTable(ConnectorSession session, JdbcTableHandle tableHandle, List columns)
+ {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support inserts");
+ }
+
+ @Override
+ public void setColumnType(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column, Type type)
+ {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support setting column types");
+ }
+
+ @Override
+ public void addColumn(ConnectorSession session, JdbcTableHandle handle, ColumnMetadata column, ColumnPosition position)
+ {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support add column operations");
+ }
+
+ @Override
+ public void dropNotNullConstraint(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column)
+ {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support dropping a not null constraint");
+ }
+
+ @Override
+ protected Map getCaseSensitivityForColumns(ConnectorSession session, Connection connection, SchemaTableName schemaTableName,
+ RemoteTableName remoteTableName)
+ {
+ // try to use result set metadata from select * from table to populate the mapping
+ try {
+ HashMap caseMap = new HashMap<>();
+ String sql = format("SELECT * FROM %s WHERE 0=1", quoted(remoteTableName));
+ PreparedStatement pstmt = connection.prepareStatement(sql);
+ ResultSetMetaData rsmd = pstmt.getMetaData();
+ int columnCount = rsmd.getColumnCount();
+ for (int i = 1; i <= columnCount; i++) {
+ caseMap.put(rsmd.getColumnName(i), rsmd.isCaseSensitive(i) ? CASE_SENSITIVE : CASE_INSENSITIVE);
+ }
+ pstmt.close();
+ return caseMap;
+ }
+ catch (SQLException e) {
+ // behavior of base jdbc
+ return ImmutableMap.of();
+ }
+ }
+
+ @Override
+ public Optional toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
+ {
+ // this method should ultimately encompass all the expected teradata data types
+ Optional mapping = getForcedMappingToVarchar(typeHandle);
+ if (mapping.isPresent()) {
+ return mapping;
+ }
+
+ switch (typeHandle.jdbcType()) {
+ case Types.TINYINT:
+ return Optional.of(tinyintColumnMapping());
+ case Types.SMALLINT:
+ return Optional.of(smallintColumnMapping());
+ case Types.INTEGER:
+ return Optional.of(integerColumnMapping());
+ case Types.BIGINT:
+ return Optional.of(bigintColumnMapping());
+ case Types.REAL:
+ case Types.DOUBLE:
+ case Types.FLOAT:
+ // teradata float is 64 bit
+ // trino double is 64 bit
+ // teradata float / real / double precision all map to jdbc type float
+ return Optional.of(doubleColumnMapping());
+ case Types.NUMERIC:
+ case Types.DECIMAL:
+ return numberMapping(typeHandle);
+ case Types.CHAR:
+ return Optional.of(charColumnMapping(typeHandle.requiredColumnSize(), deriveCaseSensitivity(typeHandle.caseSensitivity().orElse(null))));
+ case Types.VARCHAR:
+ // see prior note on trino case sensitivity
+ return Optional.of(varcharColumnMapping(typeHandle.requiredColumnSize(), deriveCaseSensitivity(typeHandle.caseSensitivity().orElse(null))));
+ case Types.DATE:
+ return Optional.of(dateColumnMappingUsingLocalDate());
+ }
+
+ if (getUnsupportedTypeHandling(session) == CONVERT_TO_VARCHAR) {
+ return mapToUnboundedVarchar(typeHandle);
+ }
+
+ return Optional.empty();
+ }
+
+ private static Optional numberMapping(JdbcTypeHandle typeHandle)
+ {
+ int precision = typeHandle.requiredColumnSize();
+ int scale = typeHandle.requiredDecimalDigits();
+ if (precision > Decimals.MAX_PRECISION) {
+ // this will trigger for number(*) as precision is 40
+ return Optional.of(decimalColumnMapping(createDecimalType(Decimals.MAX_PRECISION, scale)));
+ }
+ return Optional.of(decimalColumnMapping(createDecimalType(precision, scale)));
+ }
+
+ private static ColumnMapping charColumnMapping(int charLength, boolean isCaseSensitive)
+ {
+ if (charLength > CharType.MAX_LENGTH) {
+ return varcharColumnMapping(charLength, isCaseSensitive);
+ }
+ CharType charType = createCharType(charLength);
+ return ColumnMapping.sliceMapping(
+ charType,
+ charReadFunction(charType),
+ charWriteFunction(),
+ isCaseSensitive ? TERADATA_STRING_PUSHDOWN : CASE_INSENSITIVE_CHARACTER_PUSHDOWN);
+ }
+
+ private static ColumnMapping varcharColumnMapping(int varcharLength, boolean isCaseSensitive)
+ {
+ VarcharType varcharType = varcharLength <= VarcharType.MAX_LENGTH
+ ? createVarcharType(varcharLength)
+ : createUnboundedVarcharType();
+ return ColumnMapping.sliceMapping(
+ varcharType,
+ varcharReadFunction(varcharType),
+ varcharWriteFunction(),
+ isCaseSensitive ? TERADATA_STRING_PUSHDOWN : CASE_INSENSITIVE_CHARACTER_PUSHDOWN);
+ }
+
+ private boolean deriveCaseSensitivity(CaseSensitivity caseSensitivity)
+ {
+ return switch (teradataJDBCCaseSensitivity) {
+ case CASE_INSENSITIVE -> false;
+ case CASE_SENSITIVE -> true;
+ default -> caseSensitivity == CASE_SENSITIVE;
+ };
+ }
+
+ @Override
+ public WriteMapping toWriteMapping(ConnectorSession session, Type type)
+ {
+ return switch (type) {
+ case Type typeInstance when typeInstance == TINYINT -> WriteMapping.longMapping("smallint", tinyintWriteFunction());
+ case Type typeInstance when typeInstance == SMALLINT -> WriteMapping.longMapping("smallint", smallintWriteFunction());
+ case Type typeInstance when typeInstance == INTEGER -> WriteMapping.longMapping("integer", integerWriteFunction());
+ case Type typeInstance when typeInstance == BIGINT -> WriteMapping.longMapping("bigint", bigintWriteFunction());
+ case Type typeInstance when typeInstance == REAL -> WriteMapping.longMapping("FLOAT", realWriteFunction());
+ case Type typeInstance when typeInstance == DOUBLE -> WriteMapping.doubleMapping("double precision", doubleWriteFunction());
+ case Type typeInstance when typeInstance == DATE -> WriteMapping.longMapping("date", dateWriteFunctionUsingLocalDate());
+ case DecimalType decimalTypeInstance -> {
+ String dataType = String.format("decimal(%s, %s)", decimalTypeInstance.getPrecision(), decimalTypeInstance.getScale());
+ if (decimalTypeInstance.isShort()) {
+ yield WriteMapping.longMapping(dataType, shortDecimalWriteFunction(decimalTypeInstance));
+ }
+ yield WriteMapping.objectMapping(dataType, longDecimalWriteFunction(decimalTypeInstance));
+ }
+ case CharType charTypeInstance -> WriteMapping.sliceMapping("char(" + charTypeInstance.getLength() + ")", charWriteFunction());
+ case VarcharType varcharTypeInstance -> {
+ String dataType = varcharTypeInstance.isUnbounded()
+ ? "clob"
+ : "varchar(" + varcharTypeInstance.getBoundedLength() + ")";
+ yield WriteMapping.sliceMapping(dataType, varcharWriteFunction());
+ }
+ default -> throw new TrinoException(NOT_SUPPORTED, "Unsupported column type: " + type.getDisplayName());
+ };
+ }
+}
diff --git a/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClientModule.java b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClientModule.java
new file mode 100644
index 000000000000..081f9807b59b
--- /dev/null
+++ b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataClientModule.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata;
+
+import com.google.inject.Binder;
+import com.google.inject.Provides;
+import com.google.inject.Scopes;
+import com.google.inject.Singleton;
+import io.airlift.configuration.AbstractConfigurationAwareModule;
+import io.opentelemetry.api.OpenTelemetry;
+import io.trino.plugin.jdbc.BaseJdbcConfig;
+import io.trino.plugin.jdbc.ConnectionFactory;
+import io.trino.plugin.jdbc.DriverConnectionFactory;
+import io.trino.plugin.jdbc.ForBaseJdbc;
+import io.trino.plugin.jdbc.JdbcClient;
+import io.trino.plugin.jdbc.JdbcJoinPushdownSupportModule;
+import io.trino.plugin.jdbc.JdbcStatisticsConfig;
+import io.trino.plugin.jdbc.credential.CredentialProvider;
+
+import java.sql.Driver;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.util.Properties;
+
+import static io.airlift.configuration.ConfigBinder.configBinder;
+
+public class TeradataClientModule
+ extends AbstractConfigurationAwareModule
+{
+ @Provides
+ @Singleton
+ @ForBaseJdbc
+ public static ConnectionFactory getConnectionFactory(BaseJdbcConfig config, CredentialProvider credentialProvider, OpenTelemetry openTelemetry)
+ throws SQLException
+ {
+ Driver driver = DriverManager.getDriver(config.getConnectionUrl());
+ Properties connectionProperties = new Properties();
+ connectionProperties.setProperty("LOGMECH", "TD2");
+ return DriverConnectionFactory.builder(driver, config.getConnectionUrl(), credentialProvider)
+ .setConnectionProperties(connectionProperties)
+ .setOpenTelemetry(openTelemetry).build();
+ }
+
+ @Override
+ public void setup(Binder binder)
+ {
+ configBinder(binder).bindConfig(TeradataConfig.class);
+ binder.bind(JdbcClient.class).annotatedWith(ForBaseJdbc.class).to(TeradataClient.class).in(Scopes.SINGLETON);
+ configBinder(binder).bindConfig(JdbcStatisticsConfig.class);
+ install(new JdbcJoinPushdownSupportModule());
+ }
+}
diff --git a/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataConfig.java b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataConfig.java
new file mode 100644
index 000000000000..e0450da835ab
--- /dev/null
+++ b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataConfig.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata;
+
+import io.airlift.configuration.Config;
+import io.airlift.configuration.ConfigDescription;
+import jakarta.validation.constraints.NotNull;
+
+public class TeradataConfig
+{
+ private TeradataCaseSensitivity teradataCaseSensitivity = TeradataCaseSensitivity.CASE_SENSITIVE; // default matches Trino's usual varchar semantics
+
+ @NotNull
+ public TeradataCaseSensitivity getTeradataCaseSensitivity()
+ {
+ return teradataCaseSensitivity;
+ }
+
+ @Config("teradata.case-sensitivity")
+ @ConfigDescription("How char/varchar columns' case sensitivity will be exposed to Trino (default: CASE_SENSITIVE). Possible values: CASE_INSENSITIVE, CASE_SENSITIVE, AS_DEFINED.")
+ public TeradataConfig setTeradataCaseSensitivity(TeradataCaseSensitivity teradataCaseSensitivity)
+ {
+ this.teradataCaseSensitivity = teradataCaseSensitivity;
+ return this;
+ }
+
+ public enum TeradataCaseSensitivity
+ {
+ CASE_INSENSITIVE, CASE_SENSITIVE, AS_DEFINED // AS_DEFINED presumably mirrors each column's declaration in Teradata — confirm against TeradataClient usage
+ }
+}
diff --git a/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataPlugin.java b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataPlugin.java
new file mode 100644
index 000000000000..d11110edfbed
--- /dev/null
+++ b/plugin/trino-teradata/src/main/java/io/trino/plugin/teradata/TeradataPlugin.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata;
+
+import io.trino.plugin.jdbc.JdbcPlugin;
+
+public class TeradataPlugin
+ extends JdbcPlugin
+{
+ public TeradataPlugin()
+ {
+ super("teradata", TeradataClientModule::new); // registers the "teradata" connector name backed by TeradataClientModule
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataConfig.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataConfig.java
new file mode 100644
index 000000000000..a9a57054ef96
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataConfig.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata;
+
+import com.google.common.collect.ImmutableMap;
+import io.trino.plugin.teradata.TeradataConfig.TeradataCaseSensitivity;
+import org.junit.jupiter.api.Test;
+
+import java.util.Map;
+
+import static io.airlift.configuration.testing.ConfigAssertions.assertFullMapping;
+import static io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults;
+import static io.airlift.configuration.testing.ConfigAssertions.recordDefaults;
+
+public class TestTeradataConfig
+{
+ @Test
+ public void testDefaults()
+ {
+ assertRecordedDefaults(recordDefaults(TeradataConfig.class)
+ .setTeradataCaseSensitivity(TeradataCaseSensitivity.CASE_SENSITIVE));
+ }
+
+ @Test
+ public void testExplicitPropertyMappings()
+ {
+ Map<String, String> properties = ImmutableMap.<String, String>builder() // typed builder: generics were lost in the patch; Map<String, String> is what assertFullMapping expects
+ .put("teradata.case-sensitivity", "as-defined")
+ .buildOrThrow();
+
+ TeradataConfig expected = new TeradataConfig()
+ .setTeradataCaseSensitivity(TeradataCaseSensitivity.AS_DEFINED);
+
+ assertFullMapping(properties, expected);
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataPlugin.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataPlugin.java
new file mode 100644
index 000000000000..857b215ebf28
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/TestTeradataPlugin.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata;
+
+import io.trino.plugin.jdbc.JdbcConnectorFactory;
+import io.trino.spi.connector.ConnectorFactory;
+import io.trino.testing.TestingConnectorContext;
+import org.junit.jupiter.api.Test;
+
+import java.util.Map;
+
+import static com.google.common.collect.Iterables.getOnlyElement;
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class TestTeradataPlugin
+{
+ @Test
+ public void testCreateConnector()
+ {
+ TeradataPlugin plugin = new TeradataPlugin(); // smoke test: connector must instantiate without contacting a real server
+ ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories());
+ assertThat(factory).isInstanceOf(JdbcConnectorFactory.class);
+ factory.create("test",
+ Map.of(
+ "connection-url", "jdbc:teradata://test/"),
+ new TestingConnectorContext())
+ .shutdown(); // release any resources the connector allocated during creation
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/AuthenticationConfig.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/AuthenticationConfig.java
new file mode 100644
index 000000000000..605faaf3d0cb
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/AuthenticationConfig.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+public record AuthenticationConfig(
+ String userName,
+ String password)
+{
+ public AuthenticationConfig()
+ {
+ this(null, null); // convenience constructor meaning "no credentials configured"
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfig.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfig.java
new file mode 100644
index 000000000000..0cc9e8ed1168
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfig.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+import java.util.Map;
+
+public class DatabaseConfig
+{
+ private final String jdbcUrl;
+ private final String hostName;
+ private final String databaseName;
+ private final boolean useClearScape;
+ private final AuthenticationConfig authConfig;
+ private final String clearScapeEnvName;
+ private final Map<String, String> jdbcProperties; // restored generics: getTMode() reads String values from this map
+
+ private DatabaseConfig(Builder builder)
+ {
+ this.jdbcUrl = builder.jdbcUrl;
+ this.hostName = builder.hostName;
+ this.databaseName = builder.databaseName;
+ this.useClearScape = builder.useClearScape;
+ this.authConfig = builder.authConfig;
+ this.clearScapeEnvName = builder.clearScapeEnvName;
+ this.jdbcProperties = builder.jdbcProperties;
+ }
+
+ public static Builder builder()
+ {
+ return new Builder();
+ }
+
+ public Builder toBuilder()
+ {
+ return builder()
+ .jdbcUrl(this.jdbcUrl)
+ .hostName(this.hostName)
+ .databaseName(this.databaseName)
+ .useClearScape(this.useClearScape)
+ .authConfig(this.authConfig)
+ .clearScapeEnvName(this.clearScapeEnvName)
+ .jdbcProperties(this.jdbcProperties);
+ }
+
+ public String getJdbcUrl()
+ {
+ return jdbcUrl;
+ }
+
+ public String getDatabaseName()
+ {
+ return databaseName;
+ }
+
+ public boolean isUseClearScape()
+ {
+ return useClearScape;
+ }
+
+ public AuthenticationConfig getAuthConfig()
+ {
+ return authConfig;
+ }
+
+ public String getClearScapeEnvName()
+ {
+ return clearScapeEnvName;
+ }
+
+ public Map<String, String> getJdbcProperties()
+ {
+ return jdbcProperties;
+ }
+
+ public String getHostName()
+ {
+ return hostName;
+ }
+
+ public String getTMode()
+ {
+ if (jdbcProperties != null && jdbcProperties.containsKey("TMODE")) {
+ return jdbcProperties.get("TMODE");
+ }
+ return "ANSI"; // Teradata transaction mode defaults to ANSI when not overridden
+ }
+
+ public static class Builder
+ {
+ private String jdbcUrl;
+ private String hostName;
+ private String databaseName = "trino";
+ private boolean useClearScape;
+ private AuthenticationConfig authConfig = new AuthenticationConfig(); // defaults to "no credentials"
+ private String clearScapeEnvName;
+ private Map<String, String> jdbcProperties;
+
+ public Builder jdbcUrl(String jdbcUrl)
+ {
+ this.jdbcUrl = jdbcUrl;
+ return this;
+ }
+
+ public Builder databaseName(String databaseName)
+ {
+ this.databaseName = databaseName;
+ return this;
+ }
+
+ public Builder useClearScape(boolean useClearScape)
+ {
+ this.useClearScape = useClearScape;
+ return this;
+ }
+
+ public Builder authConfig(AuthenticationConfig authConfig)
+ {
+ this.authConfig = authConfig;
+ return this;
+ }
+
+ public Builder clearScapeEnvName(String clearScapeEnvName)
+ {
+ this.clearScapeEnvName = clearScapeEnvName;
+ return this;
+ }
+
+ public Builder jdbcProperties(Map<String, String> jdbcProperties)
+ {
+ this.jdbcProperties = jdbcProperties;
+ return this;
+ }
+
+ public Builder hostName(String hostName)
+ {
+ this.hostName = hostName;
+ return this;
+ }
+
+ public DatabaseConfig build()
+ {
+ return new DatabaseConfig(this);
+ }
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfigFactory.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfigFactory.java
new file mode 100644
index 000000000000..24be04ec2d81
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/DatabaseConfigFactory.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static io.trino.testing.SystemEnvironmentUtils.isEnvSet;
+import static io.trino.testing.SystemEnvironmentUtils.requireEnv;
+
+public class DatabaseConfigFactory
+{
+ private DatabaseConfigFactory() {}
+
+ public static DatabaseConfig create(String envName)
+ {
+ String userName;
+ String password;
+ String hostName = null;
+
+ if (isEnvSet("CLEARSCAPE_TOKEN")) {
+ userName = TeradataTestConstants.ENV_CLEARSCAPE_USERNAME; // ClearScape-hosted environments use the fixed demo user
+ password = requireEnv("CLEARSCAPE_PASSWORD");
+ }
+ else {
+ userName = requireEnv("TERADATA_USERNAME");
+ password = requireEnv("TERADATA_PASSWORD");
+ hostName = requireEnv("TERADATA_HOSTNAME"); // only self-managed servers provide a host name; ClearScape leaves it null
+ }
+
+ String databaseName = envName.replace("-", "_"); // env names may contain dashes, which are not valid in the database name
+
+ AuthenticationConfig authConfig = createAuthConfig(userName, password);
+ return DatabaseConfig.builder()
+ .hostName(hostName)
+ .databaseName(databaseName)
+ .useClearScape(hostName == null)
+ .authConfig(authConfig)
+ .clearScapeEnvName(envName)
+ .jdbcProperties(getJdbcProperties())
+ .build();
+ }
+
+ public static Map<String, String> getJdbcProperties()
+ {
+ Map<String, String> propsMap = new HashMap<>(); // restored generics: DatabaseConfig.getTMode() reads String values from this map
+ propsMap.put("TMODE", "ANSI");
+ propsMap.put("CHARSET", "UTF8");
+ return propsMap;
+ }
+
+ private static AuthenticationConfig createAuthConfig(String username, String password)
+ {
+ return new AuthenticationConfig(username, password);
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataQueryRunner.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataQueryRunner.java
new file mode 100644
index 000000000000..661538739395
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataQueryRunner.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+import com.google.common.collect.ImmutableList;
+import com.google.errorprone.annotations.CanIgnoreReturnValue;
+import io.airlift.log.Level;
+import io.airlift.log.Logger;
+import io.airlift.log.Logging;
+import io.trino.Session;
+import io.trino.metadata.QualifiedObjectName;
+import io.trino.plugin.teradata.TeradataPlugin;
+import io.trino.plugin.tpch.TpchPlugin;
+import io.trino.testing.DistributedQueryRunner;
+import io.trino.testing.QueryRunner;
+import io.trino.tpch.TpchTable;
+import org.intellij.lang.annotations.Language;
+
+import java.util.List;
+import java.util.Locale;
+
+import static io.trino.plugin.tpch.TpchMetadata.TINY_SCHEMA_NAME;
+import static io.trino.testing.TestingSession.testSessionBuilder;
+import static java.util.Objects.requireNonNull;
+import static org.assertj.core.api.Assertions.assertThat;
+
+public final class TeradataQueryRunner
+{
+ private TeradataQueryRunner() {}
+
+ public static Builder builder(TestingTeradataServer server)
+ {
+ return new Builder(server);
+ }
+
+ public static class Builder
+ extends DistributedQueryRunner.Builder<Builder>
+ {
+ private final TestingTeradataServer server;
+ private List<TpchTable<?>> initialTables = ImmutableList.of();
+
+ protected Builder(TestingTeradataServer server)
+ {
+ super(testSessionBuilder().setCatalog("teradata").setSchema(server.getDatabaseName()).build());
+ this.server = requireNonNull(server, "server is null");
+ }
+
+ public void copyTable(QueryRunner queryRunner, QualifiedObjectName table, Session session)
+ {
+ @Language("SQL") String sql = String.format("CREATE TABLE %s AS SELECT * FROM %s", table.objectName(), table);
+ queryRunner.execute(session, sql);
+ assertThat(queryRunner.execute(session, "SELECT count(*) FROM " + table.objectName()).getOnlyValue()).as("Table is not loaded properly: %s", new Object[] {
+ table.objectName()}).isEqualTo(queryRunner.execute(session, "SELECT count(*) FROM " + table).getOnlyValue());
+ }
+
+ public void copyTpchTables(QueryRunner queryRunner, String sourceCatalog, String sourceSchema, Session session, Iterable<TpchTable<?>> tables)
+ {
+ for (TpchTable<?> table : tables) {
+ copyTable(queryRunner, sourceCatalog, sourceSchema, table.getTableName().toLowerCase(Locale.ENGLISH), session);
+ }
+ }
+
+ public void copyTpchTables(QueryRunner queryRunner, String sourceCatalog, String sourceSchema, Iterable<TpchTable<?>> tables)
+ {
+ copyTpchTables(queryRunner, sourceCatalog, sourceSchema, queryRunner.getDefaultSession(), tables);
+ }
+
+ public void copyTable(QueryRunner queryRunner, String sourceCatalog, String sourceSchema, String sourceTable, Session session)
+ {
+ QualifiedObjectName table = new QualifiedObjectName(sourceCatalog, sourceSchema, sourceTable);
+ if (!server.isTableExists(sourceTable)) { // skip tables already loaded in a reused environment
+ copyTable(queryRunner, table, session);
+ }
+ }
+
+ @CanIgnoreReturnValue
+ public Builder setInitialTables(Iterable<TpchTable<?>> initialTables)
+ {
+ this.initialTables = ImmutableList.copyOf(requireNonNull(initialTables, "initialTables is null"));
+ return this;
+ }
+
+ @Override
+ public DistributedQueryRunner build()
+ throws Exception
+ {
+ super.setAdditionalSetup(runner -> {
+ runner.installPlugin(new TpchPlugin());
+ runner.createCatalog("tpch", "tpch");
+
+ runner.installPlugin(new TeradataPlugin());
+ runner.createCatalog("teradata", "teradata", server.getCatalogProperties());
+
+ copyTpchTables(runner, "tpch", TINY_SCHEMA_NAME, initialTables);
+ });
+ return super.build();
+ }
+
+ public static void main(String[] args) // NOTE(review): conventionally main lives on the outer TeradataQueryRunner class, not on Builder
+ throws Exception
+ {
+ Logging logger = Logging.initialize();
+ logger.setLevel("io.trino.plugin.teradata", Level.DEBUG);
+ logger.setLevel("io.trino", Level.INFO);
+ TestingTeradataServer server = new TestingTeradataServer("TeradataQueryRunner", false);
+ QueryRunner queryRunner = builder(server).addCoordinatorProperty("http-server.http.port", "8080").setInitialTables(TpchTable.getTables()).build();
+
+ Logger log = Logger.get(TeradataQueryRunner.class);
+ log.info("======== SERVER STARTED ========");
+ log.info("\n====\n%s\n====", queryRunner.getCoordinator().getBaseUrl());
+ }
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataTestConstants.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataTestConstants.java
new file mode 100644
index 000000000000..45d0232bb3e0
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TeradataTestConstants.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+public interface TeradataTestConstants
+{
+ String ENV_CLEARSCAPE_URL = "https://api.clearscape.teradata.com";
+ String ENV_CLEARSCAPE_USERNAME = "demo_user";
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataConnectorTest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataConnectorTest.java
new file mode 100644
index 000000000000..41861ccc07aa
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataConnectorTest.java
@@ -0,0 +1,554 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+import io.trino.Session;
+import io.trino.plugin.jdbc.BaseJdbcConnectorTest;
+import io.trino.sql.query.QueryAssertions;
+import io.trino.testing.QueryRunner;
+import io.trino.testing.TestingConnectorBehavior;
+import io.trino.testing.TestingNames;
+import io.trino.testing.assertions.TrinoExceptionAssert;
+import io.trino.testing.sql.SqlExecutor;
+import io.trino.testing.sql.TestTable;
+import org.assertj.core.api.AssertProvider;
+import org.intellij.lang.annotations.Language;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.parallel.ResourceAccessMode;
+import org.junit.jupiter.api.parallel.ResourceLock;
+
+import java.util.List;
+import java.util.Optional;
+import java.util.OptionalInt;
+import java.util.function.Consumer;
+
+import static io.trino.plugin.teradata.integration.clearscape.ClearScapeEnvironmentUtils.generateUniqueEnvName;
+import static io.trino.testing.TestingNames.randomNameSuffix;
+import static java.lang.String.format;
+import static java.util.Objects.requireNonNull;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assumptions.abort;
+
+final class TestTeradataConnectorTest
+ extends BaseJdbcConnectorTest
+{
+ private static final int TERADATA_OBJECT_NAME_LIMIT = 128;
+
+ private TestingTeradataServer database;
+
+ private static void verifyResultOrFailure(AssertProvider<QueryAssertions.QueryAssert> queryAssertProvider, Consumer<QueryAssertions.QueryAssert> verifyResults,
+ Consumer<TrinoExceptionAssert> verifyFailure)
+ {
+ requireNonNull(verifyResults, "verifyResults is null");
+ requireNonNull(verifyFailure, "verifyFailure is null");
+ QueryAssertions.QueryAssert queryAssert = assertThat(queryAssertProvider);
+ verifyResults.accept(queryAssert); // FIXME(review): verifyFailure is null-checked but never invoked — wrap the result check in try/catch and delegate to verifyFailure on query failure, otherwise callers expecting the failure path get a raw assertion error
+ }
+
+ @Override
+ protected SqlExecutor onRemoteDatabase()
+ {
+ return database;
+ }
+
+ @Override
+ protected QueryRunner createQueryRunner()
+ throws Exception
+ {
+ database = closeAfterClass(new TestingTeradataServer(generateUniqueEnvName(getClass()), true));
+ // Register this specific instance for this test class
+ return TeradataQueryRunner.builder(database).setInitialTables(REQUIRED_TPCH_TABLES).build();
+ }
+
+ @Override
+ protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
+ {
+ return switch (connectorBehavior) {
+ case SUPPORTS_ADD_COLUMN,
+ SUPPORTS_AGGREGATION_PUSHDOWN,
+ SUPPORTS_COMMENT_ON_COLUMN,
+ SUPPORTS_COMMENT_ON_TABLE,
+ SUPPORTS_CREATE_MATERIALIZED_VIEW,
+ SUPPORTS_CREATE_TABLE_WITH_COLUMN_COMMENT,
+ SUPPORTS_CREATE_TABLE_WITH_TABLE_COMMENT,
+ SUPPORTS_CREATE_VIEW,
+ SUPPORTS_DELETE,
+ SUPPORTS_DEREFERENCE_PUSHDOWN,
+ SUPPORTS_DROP_COLUMN,
+ SUPPORTS_DROP_SCHEMA_CASCADE,
+ SUPPORTS_INSERT,
+ SUPPORTS_JOIN_PUSHDOWN,
+ SUPPORTS_JOIN_PUSHDOWN_WITH_DISTINCT_FROM,
+ SUPPORTS_JOIN_PUSHDOWN_WITH_VARCHAR_INEQUALITY,
+ SUPPORTS_LIMIT_PUSHDOWN,
+ SUPPORTS_MAP_TYPE,
+ SUPPORTS_MERGE,
+ SUPPORTS_NATIVE_QUERY,
+ SUPPORTS_NEGATIVE_DATE,
+ SUPPORTS_PREDICATE_ARITHMETIC_EXPRESSION_PUSHDOWN,
+ SUPPORTS_PREDICATE_EXPRESSION_PUSHDOWN,
+ SUPPORTS_PREDICATE_PUSHDOWN,
+ SUPPORTS_PREDICATE_PUSHDOWN_WITH_VARCHAR_INEQUALITY,
+ SUPPORTS_RENAME_COLUMN,
+ SUPPORTS_RENAME_SCHEMA,
+ SUPPORTS_RENAME_TABLE,
+ SUPPORTS_ROW_LEVEL_DELETE,
+ SUPPORTS_ROW_TYPE,
+ SUPPORTS_SET_COLUMN_TYPE,
+ SUPPORTS_TOPN_PUSHDOWN,
+ SUPPORTS_TOPN_PUSHDOWN_WITH_VARCHAR,
+ SUPPORTS_TRUNCATE,
+ SUPPORTS_UPDATE -> false;
+ case SUPPORTS_CREATE_SCHEMA,
+ SUPPORTS_CREATE_TABLE -> true;
+ default -> super.hasBehavior(connectorBehavior);
+ };
+ }
+
+ @AfterAll
+ public void cleanupTestDatabase()
+ {
+ database = null;
+ }
+
+ @Override
+ protected OptionalInt maxSchemaNameLength()
+ {
+ return OptionalInt.of(TERADATA_OBJECT_NAME_LIMIT);
+ }
+
+ @Override // Override because the expected error message is different
+ protected void verifySchemaNameLengthFailurePermissible(Throwable e)
+ {
+ assertThat(e).hasMessage(format("Schema name must be shorter than or equal to '%s' characters but got '%s'", TERADATA_OBJECT_NAME_LIMIT, TERADATA_OBJECT_NAME_LIMIT + 1));
+ }
+
+ @Override // Override because Teradata Object name limit is 128 characters
+ protected OptionalInt maxColumnNameLength()
+ {
+ return OptionalInt.of(TERADATA_OBJECT_NAME_LIMIT);
+ }
+
+ @Override // Override because the expected error message is different
+ protected void verifyColumnNameLengthFailurePermissible(Throwable e)
+ {
+ assertThat(e).hasMessageMatching(format("Column name must be shorter than or equal to '%s' characters but got '%s': '.*'", TERADATA_OBJECT_NAME_LIMIT,
+ TERADATA_OBJECT_NAME_LIMIT + 1));
+ }
+
+ @Override // Override to skip the data mapping smoke test
+ @Test
+ public void testDataMappingSmokeTest()
+ {
+ skipTestUnless(false);
+ }
+
+ @Override // Override because Teradata Table name limit is 128 characters
+ protected OptionalInt maxTableNameLength()
+ {
+ return OptionalInt.of(TERADATA_OBJECT_NAME_LIMIT);
+ }
+
+ @Override // Override because the expected error message is different
+ protected void verifyTableNameLengthFailurePermissible(Throwable e)
+ {
+ assertThat(e).hasMessageMatching(format("Table name must be shorter than or equal to '%s' characters but got '%s'", TERADATA_OBJECT_NAME_LIMIT,
+ TERADATA_OBJECT_NAME_LIMIT + 1));
+ }
+
+ @Override // Overriding this test case as Teradata defines varchar with a length.
+ @Test
+ public void testVarcharCastToDateInPredicate()
+ {
+ String tableName = "varchar_as_date_pred";
+ try (TestTable table = newTrinoTable(tableName, "(a varchar(50))", List.of("'999-09-09'", "'1005-09-09'", "'2005-06-06'", "'2005-06-6'", "'2005-6-06'", "'2005-6-6'", "' " +
+ "2005-06-06'", "'2005-06-06 '", "' +2005-06-06'", "'02005-06-06'", "'2005-09-06'", "'2005-09-6'", "'2005-9-06'", "'2005-9-6'", "' 2005-09-06'", "'2005-09-06 '",
+ "' +2005-09-06'", "'02005-09-06'", "'2005-09-09'", "'2005-09-9'", "'2005-9-09'", "'2005-9-9'", "' 2005-09-09'", "'2005-09-09 '", "' +2005-09-09'", "'02005-09-09" +
+ "'", "'2005-09-10'", "'2005-9-10'", "' 2005-09-10'", "'2005-09-10 '", "' +2005-09-10'", "'02005-09-10'", "'2005-09-20'", "'2005-9-20'", "' 2005-09-20'",
+ "'2005-09-20 '", "' +2005-09-20'", "'02005-09-20'", "'9999-09-09'", "'99999-09-09'"))) {
+ for (String date : List.of("2005-09-06", "2005-09-09", "2005-09-10")) {
+ for (String operator : List.of("=", "<=", "<", ">", ">=", "!=", "IS DISTINCT FROM", "IS NOT DISTINCT FROM")) {
+ assertThat(query("SELECT a FROM %s WHERE CAST(a AS date) %s DATE '%s'".formatted(table.getName(), operator, date))).hasCorrectResultsRegardlessOfPushdown();
+ }
+ }
+ }
+ try (TestTable table = newTrinoTable(tableName, "(a varchar(50))", List.of("'2005-06-bad-date'", "'2005-09-10'"))) {
+ assertThat(query("SELECT a FROM %s WHERE CAST(a AS date) < DATE '2005-09-10'".formatted(table.getName()))).failure().hasMessage("Value cannot be cast to date: " +
+ "2005-06-bad-date");
+ verifyResultOrFailure(query("SELECT a FROM %s WHERE CAST(a AS date) = DATE '2005-09-10'".formatted(table.getName())),
+ queryAssert -> queryAssert.skippingTypesCheck().matches("VALUES '2005-09-10'"), failureAssert -> failureAssert.hasMessage("Value cannot be cast to date: " +
+ "2005-06-bad-date"));
+ }
+ try (TestTable table = newTrinoTable(tableName, "(a varchar(50))", List.of("'2005-09-10'"))) {
+ // 2005-09-01, when written as 2005-09-1, is a prefix of an existing data point: 2005-09-10
+ assertThat(query("SELECT a FROM %s WHERE CAST(a AS date) != DATE '2005-09-01'".formatted(table.getName()))).skippingTypesCheck().matches("VALUES '2005-09-10'");
+ }
+ }
+
+ // Tests CREATE TABLE AS SELECT functionality with Teradata syntax
+ // Overridden to handle Teradata's specific "WITH DATA" syntax for table creation
+ @Override
+ @Test
+ public void testCreateTableAsSelect()
+ {
+ String tableName = "test_ctas" + randomNameSuffix();
+ assertUpdate("CREATE TABLE IF NOT EXISTS " + tableName + " AS SELECT name, regionkey FROM nation", "SELECT count(*) FROM nation");
+ assertTableColumnNames(tableName, "name", "regionkey");
+ assertThat(getTableComment(tableName)).isNull();
+ assertUpdate("DROP TABLE " + tableName);
+
+ // Some connectors support CREATE TABLE AS but not the ordinary CREATE TABLE. Let's test CTAS IF NOT EXISTS with a table that is guaranteed to exist.
+ assertUpdate("CREATE TABLE IF NOT EXISTS nation AS SELECT nationkey, regionkey FROM nation", 0);
+ assertTableColumnNames("nation", "nationkey", "name", "regionkey", "comment");
+
+ assertCreateTableAsSelect("SELECT nationkey, name, regionkey FROM nation", "SELECT count(*) FROM nation");
+
+ assertCreateTableAsSelect("SELECT mktsegment, sum(acctbal) x FROM customer GROUP BY mktsegment", "SELECT count(DISTINCT mktsegment) FROM customer");
+
+ assertCreateTableAsSelect("SELECT count(*) x FROM nation JOIN region ON nation.regionkey = region.regionkey", "SELECT 1");
+
+ assertCreateTableAsSelect("SELECT nationkey FROM nation ORDER BY nationkey LIMIT 10", "SELECT 10");
+
+ // Tests for CREATE TABLE with UNION ALL: exercises PushTableWriteThroughUnion optimizer
+
+ assertCreateTableAsSelect("SELECT name, nationkey, regionkey FROM nation WHERE nationkey % 2 = 0 UNION ALL " + "SELECT name, nationkey, regionkey FROM nation WHERE " +
+ "nationkey % 2 = 1", "SELECT name, nationkey, regionkey FROM nation", "SELECT count(*) FROM nation");
+
+ assertCreateTableAsSelect(Session.builder(getSession()).setSystemProperty("redistribute_writes", "true").build(), "SELECT CAST(nationkey AS BIGINT) nationkey, regionkey " +
+ "FROM nation UNION ALL " + "SELECT 1234567890, 123", "SELECT nationkey, regionkey FROM nation UNION ALL " + "SELECT 1234567890, 123", "SELECT count(*) + 1 FROM " +
+ "nation");
+
+ assertCreateTableAsSelect(Session.builder(getSession()).setSystemProperty("redistribute_writes", "false").build(), "SELECT CAST(nationkey AS BIGINT) nationkey, regionkey" +
+ " FROM nation UNION ALL " + "SELECT 1234567890, 123", "SELECT nationkey, regionkey FROM nation UNION ALL " + "SELECT 1234567890, 123", "SELECT count(*) + 1 FROM " +
+ "nation");
+
+ tableName = "test_ctas" + randomNameSuffix();
+ assertThat(query("EXPLAIN ANALYZE CREATE TABLE " + tableName + " AS SELECT name FROM nation")).succeeds();
+ assertThat(query("SELECT * from " + tableName)).matches("SELECT name FROM nation");
+ assertUpdate("DROP TABLE " + tableName);
+ }
+
+ @Override // Overriding this test case as Teradata does not support negative dates.
+ @Test
+ public void testDateYearOfEraPredicate()
+ {
+ assertQuery("SELECT orderdate FROM orders WHERE orderdate = DATE '1997-09-14'", "VALUES DATE '1997-09-14'");
+ }
+
+ @Override // Overridden because Teradata CTAS requires the "AS (...) WITH DATA" form.
+ @Test
+ public void verifySupportsRowLevelUpdateDeclaration()
+ {
+ // UPDATE must be rejected since the connector declares no row-level update support.
+ String expectedFailure = "This connector does not support modifying table rows";
+ try (TestTable table = newTrinoTable("test_supports_update", "AS ( SELECT * FROM nation) WITH DATA")) {
+ assertQueryFails("UPDATE " + table.getName() + " SET nationkey = nationkey * 100 WHERE regionkey = 2", expectedFailure);
+ }
+ }
+
+ @Override // Overridden because Teradata does not accept the (k, v) AS VALUES insert form; rows are supplied directly.
+ @Test
+ public void testCharVarcharComparison()
+ {
+ try (TestTable table = newTrinoTable(
+ "test_char_varchar",
+ "(k int, v char(3))",
+ List.of("-1, CAST(NULL AS char(3))", "3, CAST(' ' AS char(3))", "6, CAST('x ' AS char(3))"))) {
+ // All three predicates compare a CHAR(3) column against varchar literals of differing declared lengths.
+ String selectWhere = "SELECT k, v FROM " + table.getName() + " WHERE v = ";
+ assertQuery(selectWhere + "CAST(' ' AS varchar(2))", "VALUES (3, ' ')");
+ assertQuery(selectWhere + "CAST(' ' AS varchar(4))", "VALUES (3, ' ')");
+ assertQuery(selectWhere + "CAST('x ' AS varchar(2))", "VALUES (6, 'x ')");
+ }
+ }
+
+ @Override // Overridden because Teradata does not accept the (k, v) AS VALUES insert form; rows are supplied directly.
+ @Test
+ public void testVarcharCharComparison()
+ {
+ try (TestTable table = newTrinoTable(
+ "test_varchar_char",
+ "(k int, v char(3))",
+ List.of(
+ "-1, CAST(NULL AS varchar(3))",
+ "0, CAST('' AS varchar(3))",
+ "1, CAST(' ' AS varchar(3))",
+ "2, CAST(' ' AS varchar(3))",
+ "3, CAST(' ' AS varchar(3))",
+ "4, CAST('x' AS varchar(3))",
+ "5, CAST('x ' AS varchar(3))",
+ "6, CAST('x ' AS varchar(3))"))) {
+ // Teradata's CHAR type automatically pads values with spaces to the defined length
+ assertQuery("SELECT k, v FROM " + table.getName() + " WHERE v = CAST(' ' AS char(2))", "VALUES (0, ' '), (1, ' '), (2, ' '), (3, ' ')");
+ assertQuery("SELECT k, v FROM " + table.getName() + " WHERE v = CAST('x ' AS char(2))", "VALUES (4, 'x '), (5, 'x '), (6, 'x ')");
+ }
+ }
+
+ // Filters the inherited data-mapping smoke-test cases down to types Teradata supports.
+ // Returns Optional.empty() to skip a type; otherwise the setup is passed through unchanged.
+ @Override
+ protected Optional<DataMappingTestSetup> filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup)
+ {
+ String typeName = dataMappingTestSetup.getTrinoTypeName();
+ return switch (typeName) {
+ // skipping date as during julian->gregorian date is handled differently in Teradata. tinyint, double and varchar with unbounded (need to handle special characters)
+ // is skipped and will handle it while improving
+ // write functionalities.
+ case "boolean", "tinyint", "date", "real", "double", "varchar", "time", "time(6)", "timestamp", "timestamp(6)", "varbinary", "timestamp(3) with time zone",
+ "timestamp(6) with time zone", "U&'a \\000a newline'" -> Optional.empty();
+ default -> Optional.of(dataMappingTestSetup);
+ };
+ }
+
+ // Overridden to skip: the connector does not support the TIMESTAMP WITH TIME ZONE type.
+ @Override
+ @Test
+ public void testTimestampWithTimeZoneCastToDatePredicate()
+ {
+ abort("Skipping as connector does not support Timestamp with Time Zone data type");
+ }
+
+ // Overridden to skip: the connector does not support the TIMESTAMP WITH TIME ZONE type.
+ @Override
+ @Test
+ public void testTimestampWithTimeZoneCastToTimestampPredicate()
+ {
+ abort("Skipping as connector does not support Timestamp with Time Zone data type");
+ }
+
+ // Overridden to skip: the connector does not support RENAME SCHEMA.
+ @Override
+ @Test
+ public void testRenameSchema()
+ {
+ abort("Skipping as connector does not support RENAME SCHEMA");
+ }
+
+ // Overridden to skip: the connector does not support column-level write operations.
+ @Override
+ @Test
+ public void testColumnName()
+ {
+ abort("Skipping as connector does not support column level write operations");
+ }
+
+ // Overridden to skip: the connector does not support CTAS with Unicode characters.
+ @Override
+ @Test
+ public void testCreateTableAsSelectWithUnicode()
+ {
+ abort("Skipping as connector does not support creating table with UNICODE characters");
+ }
+
+ // Overridden to skip: the connector does not support insert operations, which this test requires.
+ @Override
+ @Test
+ public void testUpdateNotNullColumn()
+ {
+ abort("Skipping as connector does not support insert operations");
+ }
+
+ // Overridden to skip: the connector does not support insert operations, which this test requires.
+ @Override
+ @Test
+ public void testWriteBatchSizeSessionProperty()
+ {
+ abort("Skipping as connector does not support insert operations");
+ }
+
+ // Overridden to skip: the connector does not support insert operations.
+ @Override
+ @Test
+ public void testInsertWithoutTemporaryTable()
+ {
+ abort("Skipping as connector does not support insert operations");
+ }
+
+ // Overridden to skip: the connector does not support insert operations, which this test requires.
+ @Override
+ @Test
+ public void testWriteTaskParallelismSessionProperty()
+ {
+ abort("Skipping as connector does not support insert operations");
+ }
+
+ // Overridden to skip: the connector does not support insert operations.
+ @Override
+ @Test
+ public void testInsertIntoNotNullColumn()
+ {
+ abort("Skipping as connector does not support insert operations");
+ }
+
+ // Overridden to skip: the connector does not support DROP SCHEMA ... CASCADE.
+ @Override
+ @Test
+ public void testDropSchemaCascade()
+ {
+ abort("Skipping as connector does not support dropping schemas with CASCADE option");
+ }
+
+ // Overridden to skip: the connector does not support column-level write operations.
+ @Override
+ @Test
+ public void testAddColumn()
+ {
+ abort("Skipping as connector does not support column level write operations");
+ }
+
+ // Overridden to skip: the connector does not support dropping schemas.
+ @Override
+ @Test
+ public void testDropNonEmptySchemaWithTable()
+ {
+ abort("Skipping as connector does not support drop schemas");
+ }
+
+ // Overridden to skip: the connector does not support update operations.
+ @Override
+ @Test
+ public void verifySupportsUpdateDeclaration()
+ {
+ abort("Skipping as connector does not support update operations");
+ }
+
+ // Overridden to skip: the connector does not support dropping a NOT NULL constraint.
+ @Override
+ @Test
+ public void testDropNotNullConstraint()
+ {
+ abort("Skipping as connector does not support dropping a not null constraint");
+ }
+
+ // Overridden to skip: the connector does not support the execute procedure.
+ @Override
+ @Test
+ public void testExecuteProcedureWithInvalidQuery()
+ {
+ abort("Skipping as connector does not support execute procedure");
+ }
+
+ // Overridden to skip: Teradata does not support negative dates, so CTAS with one cannot be tested.
+ @Override
+ @Test
+ public void testCreateTableAsSelectNegativeDate()
+ {
+ abort("Skipping as connector does not support creating table with negative date");
+ }
+
+ // Runs a CTAS round trip using Teradata's "CREATE TABLE ... AS (...) WITH DATA" syntax:
+ // creates the table, checks contents against expectedQuery and row count against rowCountQuery,
+ // then drops the table and verifies it no longer exists.
+ @Override
+ protected void assertCreateTableAsSelect(Session session, String query, String expectedQuery, String rowCountQuery)
+ {
+ String tableName = "test_ctas_" + TestingNames.randomNameSuffix();
+ assertUpdate(session, "CREATE TABLE " + tableName + " AS ( " + query + ") WITH DATA", rowCountQuery);
+ assertQuery(session, "SELECT * FROM " + tableName, expectedQuery);
+ assertUpdate(session, "DROP TABLE " + tableName);
+ assertThat(getQueryRunner().tableExists(session, tableName)).isFalse();
+ }
+
+ // Creates a TestTable whose name is qualified with the current session schema,
+ // since Teradata requires the schema.table naming format.
+ // If namePrefix already contains a dot it is assumed to be schema-qualified and used as-is.
+ @Override
+ protected TestTable newTrinoTable(String namePrefix, @Language("SQL") String tableDefinition, List<String> rowsToInsert)
+ {
+ // Qualify with the session schema unless the caller already supplied schema.table
+ String tableName = namePrefix.contains(".")
+ ? namePrefix
+ : getSession().getSchema().orElseThrow() + "." + namePrefix;
+ return new TestTable(database, tableName, tableDefinition, rowsToInsert);
+ }
+
+ // Verifies that Teradata NUMBER columns (bounded, default, and maximum precision) map to Trino DECIMAL.
+ @Test
+ public void testTeradataNumberDataType()
+ {
+ try (TestTable table = newTrinoTable("test_number", "(id INTEGER, " + "number_col NUMBER(10,2), " + "number_default NUMBER, " + "number_large NUMBER(38,10))", List.of(
+ "1, CAST(12345.67 AS NUMBER(10,2)), CAST(999999999999999 AS NUMBER), CAST(1234567890123456789012345678.1234567890 AS NUMBER(38,10))", "2, CAST(-99999.99 AS " +
+ "NUMBER(10,2)), CAST(-123456789012345 AS NUMBER), CAST(-9999999999999999999999999999.9999999999 AS NUMBER(38,10))",
+ "3, CAST(0.00 AS NUMBER(10,2)), CAST" + "(0 AS NUMBER), CAST(0.0000000000 AS NUMBER(38,10))"))) {
+ // Positive, negative, and zero values round-trip with the expected DECIMAL precision/scale
+ assertThat(query(format("SELECT number_col FROM %s WHERE id = 1", table.getName()))).matches("VALUES CAST(12345.67 AS DECIMAL(10,2))");
+ assertThat(query(format("SELECT number_default FROM %s WHERE id = 1", table.getName()))).matches("VALUES CAST(999999999999999 AS DECIMAL(38,0))");
+ assertThat(query(format("SELECT number_large FROM %s WHERE id = 1", table.getName()))).matches("VALUES CAST(1234567890123456789012345678.1234567890 AS DECIMAL(38,10)"
+ + ")");
+ assertThat(query(format("SELECT number_col FROM %s WHERE id = 2", table.getName()))).matches("VALUES CAST(-99999.99 AS DECIMAL(10,2))");
+ assertThat(query(format("SELECT number_col FROM %s WHERE id = 3", table.getName()))).matches("VALUES CAST(0.00 AS DECIMAL(10,2))");
+ }
+ }
+
+ // Verifies that Teradata CHARACTER columns (fixed-length, default length 1, and wide) map to Trino CHAR.
+ @Test
+ public void testTeradataCharacterDataType()
+ {
+ try (TestTable table = newTrinoTable(
+ "test_character",
+ "(id INTEGER, char_col CHARACTER(5), char_default CHARACTER, char_large CHARACTER(100))",
+ List.of(
+ "1, CAST('HELLO' AS CHARACTER(5)), CAST('A' AS CHARACTER), CAST('TERADATA' AS CHARACTER(100))",
+ "2, CAST('WORLD' AS CHARACTER(5)), CAST('B' AS CHARACTER), CAST('CHARACTER' AS CHARACTER(100))",
+ "3, CAST('' AS CHARACTER(5)), CAST('C' AS CHARACTER), CAST('' AS CHARACTER(100))"))) {
+ String name = table.getName();
+ assertThat(query("SELECT char_col FROM " + name + " WHERE id = 1")).matches("VALUES CAST('HELLO' AS CHAR(5))");
+ assertThat(query("SELECT char_default FROM " + name + " WHERE id = 1")).matches("VALUES CAST('A' AS CHAR(1))");
+ assertThat(query("SELECT char_large FROM " + name + " WHERE id = 1")).matches("VALUES CAST('TERADATA' AS CHAR(100))");
+ assertThat(query("SELECT char_col FROM " + name + " WHERE id = 3")).matches("VALUES CAST('' AS CHAR(5))");
+ }
+ }
+
+ @Override
+ // Overridden to add @ResourceLock so this test runs sequentially with other TERADATA_SCHEMA tests, avoiding "Concurrent change conflict on database" errors.
+ @Test
+ @ResourceLock(value = "TERADATA_SCHEMA", mode = ResourceAccessMode.READ_WRITE)
+ public void testShowCreateSchema()
+ {
+ super.testShowCreateSchema();
+ }
+
+ @Override
+ // Overridden to add @ResourceLock so this test runs sequentially with other TERADATA_SCHEMA tests, avoiding "Concurrent change conflict on database" errors.
+ @Test
+ @ResourceLock(value = "TERADATA_SCHEMA", mode = ResourceAccessMode.READ_WRITE)
+ public void testCreateSchema()
+ {
+ super.testCreateSchema();
+ }
+
+ @Override
+ // Overridden to add @ResourceLock so this test runs sequentially with other TERADATA_SCHEMA tests, avoiding "Concurrent change conflict on database" errors.
+ @Test
+ @ResourceLock(value = "TERADATA_SCHEMA", mode = ResourceAccessMode.READ_WRITE)
+ public void testCreateSchemaWithLongName()
+ {
+ super.testCreateSchemaWithLongName();
+ }
+
+ @Override
+ // Overridden to add @ResourceLock so this test runs sequentially with other TERADATA_SCHEMA tests, avoiding "Concurrent change conflict on database" errors.
+ @Test
+ @ResourceLock(value = "TERADATA_SCHEMA", mode = ResourceAccessMode.READ_WRITE)
+ public void testRenameSchemaToLongName()
+ {
+ super.testRenameSchemaToLongName();
+ }
+
+ @Override
+ // Overridden to add @ResourceLock so this test runs sequentially with other TERADATA_SCHEMA tests, avoiding "Concurrent change conflict on database" errors.
+ @Test
+ @ResourceLock(value = "TERADATA_SCHEMA", mode = ResourceAccessMode.READ_WRITE)
+ public void testRenameTableAcrossSchema()
+ throws Exception
+ {
+ super.testRenameTableAcrossSchema();
+ }
+
+ @Override
+ // Overridden to add @ResourceLock so this test runs sequentially with other TERADATA_SCHEMA tests, avoiding "Concurrent change conflict on database" errors.
+ @Test
+ @ResourceLock(value = "TERADATA_SCHEMA", mode = ResourceAccessMode.READ_WRITE)
+ public void testRenameTableToUnqualifiedPreservesSchema()
+ throws Exception
+ {
+ super.testRenameTableToUnqualifiedPreservesSchema();
+ }
+
+ @Override // Overridden to tag as long_run so it is excluded from the clearscape-tests profile and runs only in the long-tests profile.
+ @Test
+ @Tag("long_run")
+ public void testSelectInformationSchemaColumns()
+ {
+ super.testSelectInformationSchemaColumns();
+ }
+
+ @Override // Overridden to tag as long_run so it is excluded from the clearscape-tests profile and runs only in the long-tests profile.
+ @Test
+ @Tag("long_run")
+ public void testCaseSensitiveDataMapping()
+ {
+ super.testCaseSensitiveDataMapping();
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataTypeMapping.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataTypeMapping.java
new file mode 100644
index 000000000000..157f21ef8d62
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestTeradataTypeMapping.java
@@ -0,0 +1,282 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+import io.trino.testing.AbstractTestQueryFramework;
+import io.trino.testing.QueryRunner;
+import io.trino.testing.datatype.CreateAndInsertDataSetup;
+import io.trino.testing.datatype.DataSetup;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Test;
+
+import java.sql.SQLException;
+
+import static io.trino.plugin.teradata.integration.clearscape.ClearScapeEnvironmentUtils.generateUniqueEnvName;
+import static io.trino.spi.type.BigintType.BIGINT;
+import static io.trino.spi.type.CharType.createCharType;
+import static io.trino.spi.type.DateType.DATE;
+import static io.trino.spi.type.DecimalType.createDecimalType;
+import static io.trino.spi.type.DoubleType.DOUBLE;
+import static io.trino.spi.type.IntegerType.INTEGER;
+import static io.trino.spi.type.SmallintType.SMALLINT;
+import static io.trino.spi.type.TinyintType.TINYINT;
+import static io.trino.spi.type.VarcharType.createVarcharType;
+import static io.trino.testing.datatype.SqlDataTypeTest.create;
+import static java.lang.String.format;
+import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy;
+
+// Type-mapping tests that create tables directly in Teradata via JDBC and read them back
+// through Trino, verifying the Teradata -> Trino type translation for each data type.
+final class TestTeradataTypeMapping
+ extends AbstractTestQueryFramework
+{
+ // Backing Teradata server (ClearScape environment); closed automatically via closeAfterClass
+ private TestingTeradataServer database;
+
+ @Override
+ protected QueryRunner createQueryRunner()
+ throws Exception
+ {
+ database = closeAfterClass(new TestingTeradataServer(generateUniqueEnvName(getClass()), true));
+ // Register this specific instance for this test class
+ return TeradataQueryRunner.builder(database).build();
+ }
+
+ @AfterAll
+ void cleanupTestClass()
+ {
+ // closeAfterClass() handles closing the server; drop the reference so it cannot be reused
+ database = null;
+ }
+
+ @Test
+ void testByteint()
+ {
+ // Teradata BYTEINT maps to Trino TINYINT; exercise zero, bounds, and NULL
+ create()
+ .addRoundTrip("byteint", "0", TINYINT, "CAST(0 AS TINYINT)")
+ .addRoundTrip("byteint", "127", TINYINT, "CAST(127 AS TINYINT)")
+ .addRoundTrip("byteint", "-128", TINYINT, "CAST(-128 AS TINYINT)")
+ .addRoundTrip("byteint", "null", TINYINT, "CAST(null AS TINYINT)")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("byteint"));
+ }
+
+ @Test
+ void testSmallint()
+ {
+ // SMALLINT: zero, bounds, and NULL
+ create()
+ .addRoundTrip("smallint", "0", SMALLINT, "CAST(0 AS SMALLINT)")
+ .addRoundTrip("smallint", "32767", SMALLINT, "CAST(32767 AS SMALLINT)")
+ .addRoundTrip("smallint", "-32768", SMALLINT, "CAST(-32768 AS SMALLINT)")
+ .addRoundTrip("smallint", "null", SMALLINT, "CAST(null AS SMALLINT)")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("smallint"));
+ }
+
+ @Test
+ void testInteger()
+ {
+ // INTEGER: zero, bounds, and NULL
+ create()
+ .addRoundTrip("integer", "0", INTEGER, "0")
+ .addRoundTrip("integer", "2147483647", INTEGER, "2147483647")
+ .addRoundTrip("integer", "-2147483648", INTEGER, "-2147483648")
+ .addRoundTrip("integer", "NULL", INTEGER, "CAST(NULL AS INTEGER)")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("integer"));
+ }
+
+ @Test
+ void testBigint()
+ {
+ // BIGINT: zero, bounds, and NULL
+ create()
+ .addRoundTrip("bigint", "0", BIGINT, "CAST(0 AS BIGINT)")
+ .addRoundTrip("bigint", "9223372036854775807", BIGINT, "9223372036854775807")
+ .addRoundTrip("bigint", "-9223372036854775808", BIGINT, "-9223372036854775808")
+ .addRoundTrip("bigint", "NULL", BIGINT, "CAST(NULL AS BIGINT)")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("bigint"));
+ }
+
+ @Test
+ void testFloat()
+ {
+ // FLOAT, REAL and DOUBLE PRECISION all map to Trino DOUBLE; exercise zero, extreme magnitudes, and NULL
+ create()
+ .addRoundTrip("float", "0", DOUBLE, "CAST(0 AS DOUBLE)")
+ .addRoundTrip("real", "0", DOUBLE, "CAST(0 AS DOUBLE)")
+ .addRoundTrip("double precision", "0", DOUBLE, "CAST(0 AS DOUBLE)")
+ .addRoundTrip("float", "1.797e308", DOUBLE, "1.797e308")
+ .addRoundTrip("real", "1.797e308", DOUBLE, "1.797e308")
+ .addRoundTrip("double precision", "1.797e308", DOUBLE, "1.797e308")
+ .addRoundTrip("float", "2.226e-308", DOUBLE, "2.226e-308")
+ .addRoundTrip("real", "2.226e-308", DOUBLE, "2.226e-308")
+ .addRoundTrip("double precision", "2.226e-308", DOUBLE, "2.226e-308")
+ .addRoundTrip("float", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)")
+ .addRoundTrip("real", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)")
+ .addRoundTrip("double precision", "NULL", DOUBLE, "CAST(NULL AS DOUBLE)")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("float"));
+ }
+
+ @Test
+ void testDecimal()
+ {
+ // DECIMAL/NUMERIC with assorted precision/scale, signs, maximum precision (38), and NULL
+ create()
+ .addRoundTrip("decimal(3, 0)", "0", createDecimalType(3, 0), "CAST('0' AS decimal(3, 0))")
+ .addRoundTrip("numeric(3, 0)", "0", createDecimalType(3, 0), "CAST('0' AS decimal(3, 0))")
+ .addRoundTrip("decimal(3, 1)", "0.0", createDecimalType(3, 1), "CAST('0.0' AS decimal(3, 1))")
+ .addRoundTrip("numeric(3, 1)", "0.0", createDecimalType(3, 1), "CAST('0.0' AS decimal(3, 1))")
+ .addRoundTrip("decimal(1, 0)", "1", createDecimalType(1, 0), "CAST('1' AS decimal(1, 0))")
+ .addRoundTrip("numeric(1, 0)", "1", createDecimalType(1, 0), "CAST('1' AS decimal(1, 0))")
+ .addRoundTrip("decimal(1, 0)", "-1", createDecimalType(1, 0), "CAST('-1' AS decimal(1, 0))")
+ .addRoundTrip("numeric(1, 0)", "-1", createDecimalType(1, 0), "CAST('-1' AS decimal(1, 0))")
+ .addRoundTrip("decimal(3, 0)", "1", createDecimalType(3, 0), "CAST('1' AS decimal(3, 0))")
+ .addRoundTrip("numeric(3, 0)", "1", createDecimalType(3, 0), "CAST('1' AS decimal(3, 0))")
+ .addRoundTrip("decimal(3, 0)", "-1", createDecimalType(3, 0), "CAST('-1' AS decimal(3, 0))")
+ .addRoundTrip("numeric(3, 0)", "-1", createDecimalType(3, 0), "CAST('-1' AS decimal(3, 0))")
+ .addRoundTrip("decimal(3, 0)", "123", createDecimalType(3, 0), "CAST('123' AS decimal(3, 0))")
+ .addRoundTrip("numeric(3, 0)", "123", createDecimalType(3, 0), "CAST('123' AS decimal(3, 0))")
+ .addRoundTrip("decimal(3, 0)", "-123", createDecimalType(3, 0), "CAST('-123' AS decimal(3, 0))")
+ .addRoundTrip("numeric(3, 0)", "-123", createDecimalType(3, 0), "CAST('-123' AS decimal(3, 0))")
+ .addRoundTrip("decimal(3, 1)", "10.0", createDecimalType(3, 1), "CAST('10.0' AS decimal(3, 1))")
+ .addRoundTrip("numeric(3, 1)", "10.0", createDecimalType(3, 1), "CAST('10.0' AS decimal(3, 1))")
+ .addRoundTrip("decimal(3, 1)", "12.3", createDecimalType(3, 1), "CAST('12.3' AS decimal(3, 1))")
+ .addRoundTrip("numeric(3, 1)", "12.3", createDecimalType(3, 1), "CAST('12.3' AS decimal(3, 1))")
+ .addRoundTrip("decimal(3, 1)", "-12.3", createDecimalType(3, 1), "CAST('-12.3' AS decimal(3, 1))")
+ .addRoundTrip("numeric(3, 1)", "-12.3", createDecimalType(3, 1), "CAST('-12.3' AS decimal(3, 1))")
+ .addRoundTrip("decimal(38, 0)", "12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('12345678901234567890123456789012345678' AS decimal(38, 0))")
+ .addRoundTrip("numeric(38, 0)", "12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('12345678901234567890123456789012345678' AS decimal(38, 0))")
+ .addRoundTrip("decimal(38, 0)", "-12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('-12345678901234567890123456789012345678' AS decimal(38, 0))")
+ .addRoundTrip("numeric(38, 0)", "-12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('-12345678901234567890123456789012345678' AS decimal(38, 0))")
+ .addRoundTrip("decimal(1, 0)", "null", createDecimalType(1, 0), "CAST(null AS decimal(1, 0))")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("decimal"));
+ }
+
+ @Test
+ void testNumber()
+ {
+ // NUMBER/NUMERIC with implicit scale and maximum precision map to DECIMAL
+ create()
+ .addRoundTrip("numeric(3)", "0", createDecimalType(3, 0), "CAST('0' AS decimal(3, 0))")
+ .addRoundTrip("number(5,2)", "0", createDecimalType(5, 2), "CAST('0' AS decimal(5, 2))")
+ .addRoundTrip("number(38)", "0", createDecimalType(38, 0), "CAST('0' AS decimal(38, 0))")
+ .addRoundTrip("number(38,2)", "123456789012345678901234567890123456.78", createDecimalType(38, 2), "CAST('123456789012345678901234567890123456.78' AS decimal(38, 2))")
+ .addRoundTrip("numeric(38)", "12345678901234567890123456789012345678", createDecimalType(38, 0), "CAST('12345678901234567890123456789012345678' AS decimal(38, 0))")
+ .addRoundTrip("numeric(3)", "null", createDecimalType(3, 0), "CAST(null AS decimal(3, 0))")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("number"));
+ }
+
+ @Test
+ void testChar()
+ {
+ // CHAR round trips: empty, whitespace variations, padding, and NULL
+ create()
+ .addRoundTrip("char(3)", "''", createCharType(3), "CAST('' AS char(3))")
+ .addRoundTrip("char(3)", "' '", createCharType(3), "CAST(' ' AS char(3))")
+ .addRoundTrip("char(3)", "' '", createCharType(3), "CAST(' ' AS char(3))")
+ .addRoundTrip("char(3)", "' '", createCharType(3), "CAST(' ' AS char(3))")
+ .addRoundTrip("char(3)", "'A'", createCharType(3), "CAST('A' AS char(3))")
+ .addRoundTrip("char(3)", "'A '", createCharType(3), "CAST('A ' AS char(3))")
+ .addRoundTrip("char(3)", "' B '", createCharType(3), "CAST(' B ' AS char(3))")
+ .addRoundTrip("char(3)", "' C'", createCharType(3), "CAST(' C' AS char(3))")
+ .addRoundTrip("char(3)", "'AB'", createCharType(3), "CAST('AB' AS char(3))")
+ .addRoundTrip("char(3)", "'ABC'", createCharType(3), "CAST('ABC' AS char(3))")
+ .addRoundTrip("char(3)", "'A C'", createCharType(3), "CAST('A C' AS char(3))")
+ .addRoundTrip("char(3)", "' BC'", createCharType(3), "CAST(' BC' AS char(3))")
+ .addRoundTrip("char(3)", "null", createCharType(3), "CAST(null AS char(3))")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("char"));
+ // Oversized values behave differently per session transaction mode:
+ // TERA mode silently truncates, ANSI mode raises "Right truncation of string data".
+ // Use constant-first equals to stay null-safe if the mode cannot be determined.
+ String transactionMode = database.getTMode();
+ if ("TERA".equals(transactionMode)) {
+ // truncation
+ create()
+ .addRoundTrip("char(3)", "'ABCD'", createCharType(3), "CAST('ABCD' AS char(3))")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("chart"));
+ }
+ else {
+ // Error on truncation
+ assertThatThrownBy(() ->
+ create()
+ .addRoundTrip("char(3)", "'ABCD'", createCharType(3), "CAST('ABCD' AS char(3))")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("chart")))
+ .isInstanceOf(RuntimeException.class)
+ .hasCauseInstanceOf(SQLException.class)
+ .cause()
+ .hasMessageContaining("Right truncation of string data");
+ }
+ // max-size
+ create()
+ .addRoundTrip("char(64000)", "'max'", createCharType(64000), "CAST('max' AS char(64000))")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("charl"));
+ }
+
+ @Test
+ void testVarchar()
+ {
+ // VARCHAR round trips: empty, whitespace variations, padding, and NULL
+ create()
+ .addRoundTrip("varchar(32)", "''", createVarcharType(32), "CAST('' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "' '", createVarcharType(32), "CAST(' ' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "' '", createVarcharType(32), "CAST(' ' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "' '", createVarcharType(32), "CAST(' ' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "' '", createVarcharType(32), "CAST(' ' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "'A'", createVarcharType(32), "CAST('A' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "'A '", createVarcharType(32), "CAST('A ' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "' B '", createVarcharType(32), "CAST(' B ' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "' C'", createVarcharType(32), "CAST(' C' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "'AB'", createVarcharType(32), "CAST('AB' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "'ABC'", createVarcharType(32), "CAST('ABC' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "'A C'", createVarcharType(32), "CAST('A C' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "' BC'", createVarcharType(32), "CAST(' BC' AS varchar(32))")
+ .addRoundTrip("varchar(32)", "null", createVarcharType(32), "CAST(null AS varchar(32))")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("varchar"));
+ // Oversized values behave differently per session transaction mode (see testChar);
+ // local name matches testChar for consistency, constant-first equals is null-safe.
+ String transactionMode = database.getTMode();
+ if ("TERA".equals(transactionMode)) {
+ // truncation
+ create()
+ .addRoundTrip("varchar(3)", "'ABCD'", createVarcharType(3), "CAST('ABCD' AS varchar(3))")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("varchart"));
+ }
+ else {
+ // Error on truncation
+ assertThatThrownBy(() ->
+ create()
+ .addRoundTrip("varchar(3)", "'ABCD'", createVarcharType(3), "CAST('ABCD' AS varchar(3))")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("varchart")))
+ .isInstanceOf(RuntimeException.class)
+ .hasCauseInstanceOf(SQLException.class)
+ .cause()
+ .hasMessageContaining("Right truncation of string data");
+ }
+ // max-size
+ create()
+ .addRoundTrip("long varchar", "'max'", createVarcharType(64000), "CAST('max' AS varchar(64000))")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("varcharl"));
+ }
+
+ @Test
+ void testDate()
+ {
+ // DATE round trips including Julian->Gregorian cutover dates (1582-10-04 / 1582-10-15),
+ // a leap day, boundary years, and NULL. Duplicate 1970-01-01 entry removed.
+ create()
+ .addRoundTrip("date", "DATE '0001-01-01'", DATE, "DATE '0001-01-01'")
+ .addRoundTrip("date", "DATE '0012-12-12'", DATE, "DATE '0012-12-12'")
+ .addRoundTrip("date", "DATE '1500-01-01'", DATE, "DATE '1500-01-01'")
+ .addRoundTrip("date", "DATE '1582-10-04'", DATE, "DATE '1582-10-04'")
+ .addRoundTrip("date", "DATE '1582-10-15'", DATE, "DATE '1582-10-15'")
+ .addRoundTrip("date", "DATE '1952-04-03'", DATE, "DATE '1952-04-03'")
+ .addRoundTrip("date", "DATE '1970-01-01'", DATE, "DATE '1970-01-01'")
+ .addRoundTrip("date", "DATE '1970-02-03'", DATE, "DATE '1970-02-03'")
+ .addRoundTrip("date", "DATE '1983-04-01'", DATE, "DATE '1983-04-01'")
+ .addRoundTrip("date", "DATE '1983-10-01'", DATE, "DATE '1983-10-01'")
+ .addRoundTrip("date", "DATE '2017-07-01'", DATE, "DATE '2017-07-01'")
+ .addRoundTrip("date", "DATE '2017-01-01'", DATE, "DATE '2017-01-01'")
+ .addRoundTrip("date", "DATE '2024-02-29'", DATE, "DATE '2024-02-29'")
+ .addRoundTrip("date", "DATE '9999-12-30'", DATE, "DATE '9999-12-30'")
+ .addRoundTrip("date", "NULL", DATE, "CAST(NULL AS DATE)")
+ .execute(getQueryRunner(), teradataJDBCCreateAndInsert("date"));
+ }
+
+ // Builds a DataSetup that creates and populates tables directly in Teradata over JDBC,
+ // qualifying the table-name prefix with the test database name.
+ private DataSetup teradataJDBCCreateAndInsert(String tableNamePrefix)
+ {
+ String prefix = format("%s.%s", database.getDatabaseName(), tableNamePrefix);
+ return new CreateAndInsertDataSetup(database, prefix);
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestingTeradataServer.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestingTeradataServer.java
new file mode 100644
index 000000000000..defad5ff9128
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/TestingTeradataServer.java
@@ -0,0 +1,396 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration;
+
+import io.trino.plugin.teradata.integration.clearscape.ClearScapeSetup;
+import io.trino.plugin.teradata.integration.clearscape.EnvironmentResponse;
+import io.trino.plugin.teradata.integration.clearscape.Model;
+import io.trino.testing.sql.SqlExecutor;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Random;
+
+import static io.trino.testing.SystemEnvironmentUtils.isEnvSet;
+import static io.trino.testing.SystemEnvironmentUtils.requireEnv;
+
/**
 * Test harness around a Teradata server used by integration tests.
 * <p>
 * Depending on {@code DatabaseConfig}, the server is either a pre-existing host
 * or a Teradata ClearScape environment that is provisioned on demand via
 * {@link ClearScapeSetup}. A dedicated test database is created on construction
 * and dropped on {@link #close()}.
 * <p>
 * All SQL access goes through a single JDBC {@link Connection} that is
 * transparently re-created with exponential backoff when it drops — ClearScape
 * instances are known to close idle sockets.
 */
public final class TestingTeradataServer
        implements AutoCloseable, SqlExecutor
{
    // Retry policy for connection creation and SQL execution.
    private static final int MAX_RETRIES = 5;
    private static final long BASE_RETRY_DELAY_MS = 1500L;
    private static final long MAX_RETRY_DELAY_MS = 10_000L;
    // Shared jitter source for backoff delays.
    private static final Random RANDOM = new Random();

    // Single shared connection; volatile so reconnects are visible across threads.
    // NOTE(review): writes outside ensureConnection() are not synchronized, so
    // concurrent callers may race on reconnection — presumably tests are
    // single-threaded; confirm before relying on this from multiple threads.
    private volatile Connection connection;
    private DatabaseConfig config;
    private ClearScapeSetup clearScapeSetup;

    /**
     * @param envName logical environment name used to look up/create the configuration
     * @param destroyEnv whether to destroy the ClearScape environment on close;
     *         overridden by the {@code CLEARSCAPE_DESTROY_ENV} environment variable when set
     */
    public TestingTeradataServer(String envName, boolean destroyEnv)
    {
        config = DatabaseConfigFactory.create(envName);
        String hostName = config.getHostName();

        // Initialize ClearScape Instance and get hostname from ClearScape API when used
        if (config.isUseClearScape()) {
            if (isEnvSet("CLEARSCAPE_DESTROY_ENV")) {
                destroyEnv = Boolean.parseBoolean(requireEnv("CLEARSCAPE_DESTROY_ENV"));
            }
            clearScapeSetup = new ClearScapeSetup(
                    requireEnv("CLEARSCAPE_TOKEN"),
                    requireEnv("CLEARSCAPE_PASSWORD"),
                    config.getClearScapeEnvName(),
                    destroyEnv,
                    requireEnv("CLEARSCAPE_REGION"));
            Model model = clearScapeSetup.initialize();
            hostName = model.getHostName();
        }
        String jdbcUrl = buildJdbcUrl(hostName);
        // Rebuild the config so it carries the resolved host and JDBC URL.
        config = config.toBuilder()
                .hostName(hostName)
                .jdbcUrl(jdbcUrl)
                .build();
        // Recreate the connection with retries to handle transient ClearScape socket or connection closure issues.
        connection = createConnectionWithRetries();
        createTestDatabaseIfAbsent();
    }

    // Builds JDBC connection properties; authentication always uses the TD2 mechanism.
    private static Properties buildConnectionProperties(AuthenticationConfig auth)
    {
        Properties props = new Properties();
        props.setProperty("logmech", "TD2");
        props.setProperty("username", auth.userName());
        props.setProperty("password", auth.password());
        return props;
    }

    // Exponential backoff with jitter, clamped to [BASE_RETRY_DELAY_MS, MAX_RETRY_DELAY_MS].
    private static long computeBackoffDelay(int attempt)
    {
        // Calculates how long to wait before retrying an operation that failed
        long base = BASE_RETRY_DELAY_MS * (1L << Math.max(0, attempt - 1));
        long jitter = (long) (RANDOM.nextDouble() * BASE_RETRY_DELAY_MS);
        long delay = Math.min(base + jitter, MAX_RETRY_DELAY_MS);
        return Math.max(delay, BASE_RETRY_DELAY_MS);
    }

    // Sleeps, converting interruption into a RuntimeException after restoring the interrupt flag.
    private static void sleepUnchecked(long millis)
    {
        try {
            Thread.sleep(millis);
        }
        catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted during retry wait", ie);
        }
    }

    /**
     * Returns the Trino catalog properties (connection URL and credentials) for
     * a connector pointing at this server.
     */
    // NOTE(review): raw Map type — presumably Map<String, String>; the generic
    // parameters may have been lost in transport. Verify against callers.
    public Map getCatalogProperties()
    {
        Map properties = new HashMap<>();
        properties.put("connection-url", config.getJdbcUrl());

        AuthenticationConfig auth = config.getAuthConfig();
        properties.put("connection-user", auth.userName());
        properties.put("connection-password", auth.password());

        return properties;
    }

    /**
     * Creates the test database (with 100 MB of permanent space) if it does not already exist.
     */
    public void createTestDatabaseIfAbsent()
    {
        executeWithRetry(() -> {
            if (!schemaExists(config.getDatabaseName())) {
                execute(String.format("CREATE DATABASE \"%s\" AS PERM=100e6;", config.getDatabaseName()));
            }
        });
    }

    /**
     * Drops the test database if present. Teradata requires DELETE DATABASE
     * (removes all objects) before DROP DATABASE can succeed.
     */
    public void dropTestDatabaseIfExists()
    {
        executeWithRetry(() -> {
            if (schemaExists(config.getDatabaseName())) {
                execute(String.format("DELETE DATABASE \"%s\"", config.getDatabaseName()));
                execute(String.format("DROP DATABASE \"%s\"", config.getDatabaseName()));
            }
        });
    }

    /**
     * Returns whether {@code tableName} exists in the test database, consulting
     * the DBC.TablesV dictionary view. Retries once on a connection failure.
     */
    public boolean isTableExists(String tableName)
    {
        ensureConnection();
        String query = "SELECT count(1) FROM DBC.TablesV WHERE DataBaseName = ? AND TableName = ?";
        try (PreparedStatement stmt = connection.prepareStatement(query)) {
            stmt.setString(1, config.getDatabaseName());
            stmt.setString(2, tableName);
            try (ResultSet rs = stmt.executeQuery()) {
                return rs.next() && rs.getInt(1) > 0;
            }
        }
        catch (SQLException e) {
            // One-shot recovery: rebuild the connection and repeat the query verbatim.
            if (isConnectionException(e)) {
                connection = createConnectionWithRetries();
                try (PreparedStatement stmt = connection.prepareStatement(query)) {
                    stmt.setString(1, config.getDatabaseName());
                    stmt.setString(2, tableName);
                    try (ResultSet rs = stmt.executeQuery()) {
                        return rs.next() && rs.getInt(1) > 0;
                    }
                }
                catch (SQLException ex) {
                    throw new RuntimeException("Failed to check table existence: " + ex.getMessage(), ex);
                }
            }
            throw new RuntimeException("Failed to check table existence: " + e.getMessage(), e);
        }
    }

    /**
     * Executes {@code sql} against the test database, retrying on connection
     * failures and on Teradata transient error 3598.
     */
    @Override
    public void execute(String sql)
    {
        executeWithRetry(() -> doExecute(sql));
    }

    public String getDatabaseName()
    {
        return config.getDatabaseName();
    }

    // Teradata session transaction mode (e.g. ANSI vs Teradata mode) from the config.
    public String getTMode()
    {
        return config.getTMode();
    }

    /**
     * Drops the test database (only when the backing instance is reachable),
     * closes the JDBC connection, and stops or destroys the ClearScape
     * environment. Cleanup failures are swallowed so close() never throws
     * from the finally block.
     */
    @Override
    public void close()
    {
        try {
            if (config.isUseClearScape()) {
                // Only attempt the drop if the environment is actually running;
                // a stopped/terminated environment cannot accept SQL.
                EnvironmentResponse.State state = clearScapeSetup.status();
                if (state == EnvironmentResponse.State.RUNNING) {
                    dropTestDatabaseIfExists();
                }
            }
            else {
                dropTestDatabaseIfExists();
            }
        }
        finally {
            try {
                if (connection != null && !connection.isClosed()) {
                    connection.close();
                }
            }
            catch (SQLException ignored) {
            }
            connection = null;
            if (clearScapeSetup != null) {
                try {
                    clearScapeSetup.cleanup();
                }
                catch (Exception ignored) {
                }
            }
        }
    }

    // Teradata's plain INSERT does not accept multi-row VALUES lists the way
    // the shared test infrastructure emits them, so force row-at-a-time inserts.
    @Override
    public boolean supportsMultiRowInsert()
    {
        return false;
    }

    // Builds "jdbc:teradata://<host>/<comma-separated properties>".
    private String buildJdbcUrl(String hostName)
    {
        String baseUrl = String.format("jdbc:teradata://%s/", hostName);
        String propertiesString = buildPropertiesString();
        return propertiesString.isEmpty() ? baseUrl : baseUrl + propertiesString;
    }

    // Renders configured JDBC properties as "k1=v1,k2=v2"; empty string when none.
    // NOTE(review): raw Map type here as well — presumably Map<String, String>.
    private String buildPropertiesString()
    {
        Map properties = config.getJdbcProperties();
        if (properties == null || properties.isEmpty()) {
            return "";
        }
        return properties.entrySet()
                .stream()
                .map(entry -> entry.getKey() + "=" + entry.getValue())
                .collect(java.util.stream.Collectors.joining(","));
    }

    // Single connection attempt; explicitly loads the Teradata driver class first.
    private Connection createConnection()
    {
        try {
            Class.forName("com.teradata.jdbc.TeraDriver");
            Properties props = buildConnectionProperties(config.getAuthConfig());
            return DriverManager.getConnection(config.getJdbcUrl(), props);
        }
        catch (SQLException | ClassNotFoundException e) {
            throw new RuntimeException("Failed to create database connection", e);
        }
    }

    // Connection attempt wrapped in the standard backoff/retry loop (MAX_RETRIES attempts).
    private Connection createConnectionWithRetries()
    {
        int attempt = 0;
        while (true) {
            try {
                return createConnection();
            }
            catch (RuntimeException e) {
                attempt++;
                if (attempt >= MAX_RETRIES) {
                    throw new RuntimeException("Failed to create database connection after retries", e);
                }
                long delay = computeBackoffDelay(attempt);
                sleepUnchecked(delay);
            }
        }
    }

    // Executes one statement, first switching the session's default database to
    // the test database when it exists (Teradata "DATABASE <name>" statement).
    private void doExecute(String sql)
    {
        ensureConnection();
        try (Statement stmt = connection.createStatement()) {
            if (config.getDatabaseName() != null && schemaExists(config.getDatabaseName())) {
                stmt.execute(String.format("DATABASE \"%s\"", config.getDatabaseName()));
            }
            stmt.execute(sql);
        }
        catch (SQLException e) {
            throw new RuntimeException("SQL execution failed: " + sql, e);
        }
    }

    // Lazily (re)establishes the connection; synchronized so concurrent callers
    // don't both reconnect. If even isClosed() fails, reconnect unconditionally.
    private synchronized void ensureConnection()
    {
        try {
            if (connection == null || connection.isClosed()) {
                connection = createConnectionWithRetries();
            }
        }
        catch (SQLException e) {
            connection = createConnectionWithRetries();
        }
    }

    // Returns whether a database (Teradata's analogue of a schema) exists,
    // consulting DBC.DatabasesV. Same one-shot reconnect pattern as isTableExists.
    private boolean schemaExists(String schemaName)
    {
        ensureConnection();
        String query = "SELECT COUNT(1) FROM DBC.DatabasesV WHERE DatabaseName = ?";
        try (PreparedStatement stmt = connection.prepareStatement(query)) {
            stmt.setString(1, schemaName);
            try (ResultSet rs = stmt.executeQuery()) {
                return rs.next() && rs.getInt(1) > 0;
            }
        }
        catch (SQLException e) {
            if (isConnectionException(e)) {
                connection = createConnectionWithRetries();
                try (PreparedStatement stmt = connection.prepareStatement(query)) {
                    stmt.setString(1, schemaName);
                    try (ResultSet rs = stmt.executeQuery()) {
                        return rs.next() && rs.getInt(1) > 0;
                    }
                }
                catch (SQLException ex) {
                    throw new RuntimeException("Failed to check schema existence", ex);
                }
            }
            throw new RuntimeException("Failed to check schema existence", e);
        }
    }

    // Detects Teradata error 3598 ("Concurrent change conflict on data base")
    // anywhere in the cause chain; treated as transient and retried.
    private boolean isTeradataError3598(Throwable t)
    {
        if (t == null) {
            return false;
        }
        // Walk to the first SQLException (or the root cause) in the chain.
        Throwable root = t;
        while (root.getCause() != null && !(root instanceof SQLException)) {
            root = root.getCause();
        }
        if (root instanceof SQLException sqlEx) {
            try {
                if (sqlEx.getErrorCode() == 3598) {
                    return true;
                }
            }
            catch (Exception ignored) {
            }
        }
        return false;
    }

    // Heuristic for "the connection itself is broken": known Teradata error
    // codes, or the current connection reports itself closed.
    private boolean isConnectionException(SQLException e)
    {
        if (e == null) {
            return false;
        }
        try {
            int code = e.getErrorCode();
            if (code == 1095 || code == 804) { // 1095 == closed connection, 804 socket communication failure
                return true;
            }
        }
        catch (Exception ignored) {
        }

        try {
            return connection == null || connection.isClosed();
        }
        catch (SQLException ignored) {
        }

        return false;
    }

    // Runs an operation with up to MAX_RETRIES attempts, reconnecting on
    // connection failures and backing off on Teradata transient error 3598;
    // any other RuntimeException propagates immediately.
    private void executeWithRetry(Runnable operation)
    {
        int attempt = 0;

        while (true) {
            try {
                operation.run();
                return;
            }
            catch (RuntimeException e) {
                attempt++;
                Throwable cause = e.getCause();

                // Connection-related: recreate connection and retry
                if (cause instanceof SQLException sqlEx && isConnectionException(sqlEx) && attempt < MAX_RETRIES) {
                    connection = createConnectionWithRetries();
                    sleepUnchecked(computeBackoffDelay(attempt));
                    continue;
                }

                // Teradata transient concurrency error 3598: backoff & retry
                if (isTeradataError3598(e) && attempt < MAX_RETRIES) {
                    long delay = computeBackoffDelay(attempt);
                    sleepUnchecked(delay);
                    continue;
                }
                throw e;
            }
        }
    }
}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/BaseException.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/BaseException.java
new file mode 100644
index 000000000000..7ef38ef96e0f
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/BaseException.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
/**
 * Base runtime exception for ClearScape REST API failures, carrying the HTTP
 * status code of the failed request. Specialized by Error4xxException and
 * Error5xxException.
 */
public class BaseException
        extends RuntimeException
{
    protected final int statusCode;

    /**
     * @param statusCode HTTP status code returned by the ClearScape API
     * @param body raw response body, used as the exception message
     */
    public BaseException(int statusCode, String body)
    {
        super(body);
        this.statusCode = statusCode;
    }

    /**
     * Returns the HTTP status code of the failed request. Added so callers can
     * inspect the code without subclassing (the field was previously write-only
     * outside subclasses).
     */
    public int getStatusCode()
    {
        return statusCode;
    }
}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeEnvironmentUtils.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeEnvironmentUtils.java
new file mode 100644
index 000000000000..94d9c0b9e1e5
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeEnvironmentUtils.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import java.util.concurrent.ThreadLocalRandom;
+
+import static java.util.Locale.ENGLISH;
+
/**
 * Helpers for naming ClearScape test environments.
 */
public final class ClearScapeEnvironmentUtils
{
    private static final int MAX_ENV_NAME_LENGTH = 40; // Adjust based on ClearScape limits

    private ClearScapeEnvironmentUtils() {}

    /**
     * Generates a unique environment name of the form
     * {@code <lowercase-simple-class-name>-<random-base36-suffix>}, truncated to
     * {@link #MAX_ENV_NAME_LENGTH} characters.
     *
     * @param testClass test class whose simple name seeds the environment name
     */
    public static String generateUniqueEnvName(Class<?> testClass)
    {
        // Fixed: parameter type was garbled as "Class>" — restored to Class<?>.
        String className = testClass.getSimpleName().toLowerCase(ENGLISH);
        String suffix = Long.toString(ThreadLocalRandom.current().nextLong(Long.MAX_VALUE), 36);
        String envName = className + "-" + suffix;
        // Truncate if too long
        if (envName.length() > MAX_ENV_NAME_LENGTH) {
            envName = envName.substring(0, MAX_ENV_NAME_LENGTH);
        }
        return envName;
    }
}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeManager.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeManager.java
new file mode 100644
index 000000000000..99cf05c6cfbe
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeManager.java
@@ -0,0 +1,175 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import io.airlift.log.Logger;
+import io.trino.plugin.teradata.integration.TeradataTestConstants;
+
+import java.net.URISyntaxException;
+import java.util.regex.Pattern;
+
/**
 * Orchestrates the lifecycle of a ClearScape environment over the ClearScape
 * REST API: create/start on {@link #setup()}, stop on {@link #stop()}, and
 * delete on {@link #teardown()}. Connection details (token, env name, region,
 * password) come from the {@link Model} supplied to {@link #init(Model)}.
 */
public class ClearScapeManager
{
    private static final Logger log = Logger.get(ClearScapeManager.class);
    // Allow-list for the API base URL.
    // NOTE(review): the dots in "api.clearscape" are unescaped regex
    // metacharacters, so this also matches e.g. "apiXclearscape" — consider
    // escaping them like the other dots in this pattern.
    private static final Pattern ALLOWED_URL_PATTERN =
            Pattern.compile("^(https?://)(www\\.)?api.clearscape.teradata\\.com.*");
    // Environment parameters; must be populated via init() before any lifecycle call.
    private Model model;

    private boolean isValidUrl(String url)
    {
        return ALLOWED_URL_PATTERN.matcher(url).matches();
    }

    // Builds an HTTP client for the configured API URL, rejecting URLs outside the allow-list.
    private TeradataHttpClient getTeradataHttpClient()
            throws URISyntaxException
    {
        String envUrl = TeradataTestConstants.ENV_CLEARSCAPE_URL;
        if (isValidUrl(envUrl)) {
            return new TeradataHttpClient(envUrl);
        }
        else {
            throw new URISyntaxException(envUrl, "Provide valid environment URL");
        }
    }

    public void init(Model model)
    {
        this.model = model;
    }

    public void setup()
    {
        createAndStartClearScapeInstance();
    }

    public void stop()
    {
        stopClearScapeInstance();
    }

    public EnvironmentResponse.State status()
    {
        return getClearScapeInstanceStatus();
    }

    public void teardown()
    {
        shutdownAndDestroyClearScapeInstance();
    }

    /**
     * Queries the environment's current state. An API error or a missing
     * environment is reported as TERMINATED rather than an exception, so
     * callers can treat "gone" uniformly.
     */
    private EnvironmentResponse.State getClearScapeInstanceStatus()
    {
        try {
            TeradataHttpClient teradataHttpClient = getTeradataHttpClient();

            String token = this.model.getToken();
            String name = this.model.getEnvName();
            EnvironmentResponse response = null;
            try {
                response = teradataHttpClient.getEnvironment(new GetEnvironmentRequest(name), token);
            }
            catch (BaseException be) {
                // API error (e.g. 404 for a deleted environment) => treat as terminated.
                return EnvironmentResponse.State.TERMINATED;
            }

            if (response != null) {
                return response.state();
            }
            else {
                return EnvironmentResponse.State.TERMINATED;
            }
        }
        catch (Exception e) {
            throw new RuntimeException("Failed to get status of ClearScape instance", e);
        }
    }

    /**
     * Ensures a running environment: creates one when none exists (or it has no
     * IP yet), starts it when stopped, and records the resulting host IP in the
     * model.
     */
    private void createAndStartClearScapeInstance()
    {
        try {
            TeradataHttpClient teradataHttpClient = getTeradataHttpClient();

            String token = this.model.getToken();
            String name = this.model.getEnvName();
            EnvironmentResponse response = null;
            try {
                response = teradataHttpClient.getEnvironment(new GetEnvironmentRequest(name), token);
            }
            catch (BaseException be) {
                // Not fatal: a missing environment just means we need to create it below.
                log.info("Environment %s is not available. %s", name, be.getMessage());
            }

            if (response == null || response.ip() == null) {
                CreateEnvironmentRequest request = new CreateEnvironmentRequest(
                        name,
                        model.getRegion(),
                        model.getPassword());
                response = teradataHttpClient.createEnvironment(request, token).get();
            }
            else if (response.state() == EnvironmentResponse.State.STOPPED) {
                // NOTE(review): the start response is not re-fetched here, so the
                // IP recorded below is the one from the initial GET — presumably
                // stable across stop/start; confirm with the API contract.
                EnvironmentRequest request = new EnvironmentRequest(name, new OperationRequest("start"));
                teradataHttpClient.startEnvironment(request, token);
            }
            if (response != null) {
                model.setHostName(response.ip());
            }
        }
        catch (Exception e) {
            throw new RuntimeException("Failed to create and start ClearScape instance", e);
        }
    }

    /**
     * Stops the environment if it exists, has an IP, and is currently RUNNING;
     * otherwise does nothing.
     */
    private void stopClearScapeInstance()
    {
        try {
            TeradataHttpClient teradataHttpClient = getTeradataHttpClient();
            String token = this.model.getToken();
            String name = this.model.getEnvName();

            EnvironmentResponse response = null;
            try {
                response = teradataHttpClient.getEnvironment(new GetEnvironmentRequest(name), token);
            }
            catch (BaseException be) {
                log.info("Environment %s is not available. %s", name, be.getMessage());
            }
            if (response != null &&
                    response.ip() != null &&
                    response.state() == EnvironmentResponse.State.RUNNING) {
                EnvironmentRequest request = new EnvironmentRequest(name, new OperationRequest("stop"));
                teradataHttpClient.stopEnvironment(request, token);
            }
        }
        catch (Exception e) {
            throw new RuntimeException("Failed to stop ClearScape instance", e);
        }
    }

    /**
     * Deletes the environment. An API-level failure (environment already gone)
     * is logged and swallowed; any other failure is rethrown.
     */
    private void shutdownAndDestroyClearScapeInstance()
    {
        try {
            TeradataHttpClient teradataHttpClient = getTeradataHttpClient();
            String token = this.model.getToken();
            DeleteEnvironmentRequest request = new DeleteEnvironmentRequest(this.model.getEnvName());
            teradataHttpClient.deleteEnvironment(request, token).get();
        }
        catch (BaseException be) {
            log.info("Environment %s is not available. Error - %s",
                    this.model.getEnvName(), be.getMessage());
        }
        catch (Exception e) {
            throw new RuntimeException("Failed to shutdown and destroy ClearScape instance", e);
        }
    }
}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeSetup.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeSetup.java
new file mode 100644
index 000000000000..0af3a09727bf
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/ClearScapeSetup.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import io.trino.plugin.teradata.integration.TeradataTestConstants;
+
+import static java.util.Objects.requireNonNull;
+
+public class ClearScapeSetup
+{
+ private final String token;
+ private final String password;
+ private final String envName;
+ private final String region;
+ private final boolean destroyEnv;
+ private ClearScapeManager manager;
+
+ public ClearScapeSetup(
+ String token,
+ String password,
+ String envName,
+ boolean destroyEnv,
+ String region)
+ {
+ requireNonNull(token, "token is null");
+ requireNonNull(password, "password is null");
+ requireNonNull(envName, "envName is null");
+ requireNonNull(region, "region is null");
+ this.token = token;
+ this.password = password;
+ this.envName = envName;
+ this.region = region;
+ this.destroyEnv = destroyEnv;
+ }
+
+ public Model initialize()
+ {
+ try {
+ manager = new ClearScapeManager();
+ Model model = createModel();
+ manager.init(model);
+ manager.setup();
+ return model;
+ }
+ catch (Exception e) {
+ throw new RuntimeException("Failed to initialize ClearScape environment: " + envName, e);
+ }
+ }
+
+ private Model createModel()
+ {
+ Model model = new Model();
+ model.setEnvName(envName);
+ model.setUserName(TeradataTestConstants.ENV_CLEARSCAPE_USERNAME);
+ model.setPassword(password);
+ model.setDatabaseName(TeradataTestConstants.ENV_CLEARSCAPE_USERNAME);
+ model.setToken(token);
+ model.setRegion(region);
+ return model;
+ }
+
+ public void cleanup()
+ {
+ if (manager == null) {
+ return;
+ }
+ if (destroyEnv) {
+ manager.teardown();
+ return;
+ }
+ manager.stop();
+ }
+
+ public EnvironmentResponse.State status()
+ {
+ if (manager == null) {
+ throw new IllegalStateException("ClearScape manager is not initialized");
+ }
+ return manager.status();
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/CreateEnvironmentRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/CreateEnvironmentRequest.java
new file mode 100644
index 000000000000..8ac35a095493
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/CreateEnvironmentRequest.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Objects.requireNonNull;
+
/**
 * Request body for creating a ClearScape environment.
 *
 * @param name environment name
 * @param region ClearScape region to provision in
 * @param password database password for the new environment
 */
public record CreateEnvironmentRequest(
        String name,
        String region,
        String password)
{
    public CreateEnvironmentRequest
    {
        requireNonNull(name, "name should not be null");
        requireNonNull(region, "region should not be null");
        requireNonNull(password, "password should not be null");
    }
}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/DeleteEnvironmentRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/DeleteEnvironmentRequest.java
new file mode 100644
index 000000000000..38320c87e226
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/DeleteEnvironmentRequest.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Objects.requireNonNull;
+
/**
 * Request for deleting a ClearScape environment.
 *
 * @param name environment name to delete
 */
public record DeleteEnvironmentRequest(
        String name)
{
    public DeleteEnvironmentRequest
    {
        requireNonNull(name, "name should not be null");
    }
}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentRequest.java
new file mode 100644
index 000000000000..a83993f4c24a
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentRequest.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Objects.requireNonNull;
+
+public record EnvironmentRequest(
+ String name,
+ OperationRequest request)
+{
+ public EnvironmentRequest
+ {
+ requireNonNull(name, "name must not be null");
+ }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentResponse.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentResponse.java
new file mode 100644
index 000000000000..0ed7109f86e9
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/EnvironmentResponse.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Locale.ENGLISH;
+import static java.util.Objects.requireNonNull;
+
/**
 * API response describing a ClearScape environment.
 *
 * @param state lifecycle state of the environment; never null
 * @param region region identifier; normalized to upper case in the compact constructor
 * @param ip host address of the environment; may be null (no null check) —
 *         callers treat a missing IP as "not yet provisioned"
 */
public record EnvironmentResponse(
        State state,
        String region,
        String ip)
{
    public EnvironmentResponse
    {
        requireNonNull(state, "state must not be null");
        requireNonNull(region, "region must not be null");
        // Normalize so region comparisons are case-insensitive downstream.
        region = region.toUpperCase(ENGLISH);
    }

    /** Lifecycle states reported by the ClearScape API. */
    public enum State
    {
        RUNNING,
        STOPPED,
        TERMINATED,
        STOPPING
    }
}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Error4xxException.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Error4xxException.java
new file mode 100644
index 000000000000..be90c6f28e6b
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Error4xxException.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
/**
 * Thrown when the ClearScape API returns a 4xx (client error) response.
 */
public class Error4xxException
        extends BaseException
{
    public Error4xxException(int statusCode, String body)
    {
        super(statusCode, body);
    }
}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Error5xxException.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Error5xxException.java
new file mode 100644
index 000000000000..20d4afb8b441
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Error5xxException.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
/**
 * Thrown when the ClearScape API returns a 5xx (server error) response.
 */
public class Error5xxException
        extends BaseException
{
    public Error5xxException(int statusCode, String body)
    {
        super(statusCode, body);
    }
}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/GetEnvironmentRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/GetEnvironmentRequest.java
new file mode 100644
index 000000000000..9f2ba3971ab2
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/GetEnvironmentRequest.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Objects.requireNonNull;
+
/**
 * Request for fetching the current state of a ClearScape environment.
 *
 * @param name environment name to look up
 */
public record GetEnvironmentRequest(
        String name)
{
    public GetEnvironmentRequest
    {
        requireNonNull(name, "name should not be null");
    }
}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Model.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Model.java
new file mode 100644
index 000000000000..605c60c3a191
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/Model.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+/**
+ * Mutable holder for the connection details of a ClearScape test environment:
+ * environment name, host, credentials, database, API token and region.
+ */
+public class Model
+{
+    String envName;
+    String hostName;
+    String userName;
+    String password;
+    String databaseName;
+    String token;
+    String region;
+
+    public String getEnvName()
+    {
+        return envName;
+    }
+
+    public void setEnvName(String envName)
+    {
+        this.envName = envName;
+    }
+
+    public String getHostName()
+    {
+        return hostName;
+    }
+
+    public void setHostName(String hostName)
+    {
+        this.hostName = hostName;
+    }
+
+    public String getUserName()
+    {
+        return userName;
+    }
+
+    public void setUserName(String userName)
+    {
+        this.userName = userName;
+    }
+
+    public String getPassword()
+    {
+        return password;
+    }
+
+    public void setPassword(String password)
+    {
+        this.password = password;
+    }
+
+    public String getDatabaseName()
+    {
+        return databaseName;
+    }
+
+    public void setDatabaseName(String databaseName)
+    {
+        this.databaseName = databaseName;
+    }
+
+    public String getToken()
+    {
+        return token;
+    }
+
+    public void setToken(String token)
+    {
+        this.token = token;
+    }
+
+    public String getRegion()
+    {
+        return region;
+    }
+
+    public void setRegion(String region)
+    {
+        this.region = region;
+    }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/OperationRequest.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/OperationRequest.java
new file mode 100644
index 000000000000..88e1e80d1530
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/OperationRequest.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import static java.util.Objects.requireNonNull;
+
+public record OperationRequest( // names an operation to apply to an environment (presumably the start/stop PATCH payload — verify with callers)
+        String operation)
+{
+    public OperationRequest
+    {
+        requireNonNull(operation, "operation should not be null"); // fail fast in the compact constructor
+    }
+}
diff --git a/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/TeradataHttpClient.java b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/TeradataHttpClient.java
new file mode 100644
index 000000000000..d356104d8749
--- /dev/null
+++ b/plugin/trino-teradata/src/test/java/io/trino/plugin/teradata/integration/clearscape/TeradataHttpClient.java
@@ -0,0 +1,166 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.teradata.integration.clearscape;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.MapperFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.net.URI;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.util.concurrent.CompletableFuture;
+
+import static com.google.common.net.HttpHeaders.AUTHORIZATION;
+import static com.google.common.net.HttpHeaders.CONTENT_TYPE;
+
+public class TeradataHttpClient // minimal REST client for the ClearScape environments API used by integration tests
+{
+    private static final String APPLICATION_JSON = "application/json";
+    private static final String BEARER = "Bearer ";
+
+    private final String baseUrl;
+    private final HttpClient httpClient;
+    private final ObjectMapper objectMapper;
+
+    public TeradataHttpClient(String baseUrl)
+    {
+        this(HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(), baseUrl);
+    }
+
+    public TeradataHttpClient(
+            HttpClient httpClient,
+            String baseUrl)
+    {
+        this.httpClient = httpClient;
+        this.baseUrl = baseUrl;
+        this.objectMapper = JsonMapper.builder() // lenient mapper: unknown response fields are ignored
+                .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
+                .configure(MapperFeature.ALLOW_FINAL_FIELDS_AS_MUTATORS, false)
+                .build();
+    }
+
+    // Creating an environment is a blocking operation by default, and it takes ~1.5min to finish
+    public CompletableFuture<EnvironmentResponse> createEnvironment(CreateEnvironmentRequest createEnvironmentRequest,
+            String token)
+    {
+        var requestBody = handleCheckedException(() -> objectMapper.writeValueAsString(createEnvironmentRequest));
+        var httpRequest = HttpRequest.newBuilder(URI.create(baseUrl.concat("/environments")))
+                .headers(
+                        AUTHORIZATION, BEARER + token,
+                        CONTENT_TYPE, APPLICATION_JSON)
+                .POST(HttpRequest.BodyPublishers.ofString(requestBody))
+                .build();
+        return httpClient.sendAsync(httpRequest, HttpResponse.BodyHandlers.ofString())
+                .thenApply(httpResponse -> handleHttpResponse(httpResponse, new TypeReference<EnvironmentResponse>() {}));
+    }
+
+    public EnvironmentResponse getEnvironment(GetEnvironmentRequest getEnvironmentRequest, String token) // blocking GET by name
+    {
+        var httpRequest = HttpRequest.newBuilder(URI.create(baseUrl
+                        .concat("/environments/")
+                        .concat(getEnvironmentRequest.name())))
+                .headers(AUTHORIZATION, BEARER + token)
+                .GET()
+                .build();
+        var httpResponse =
+                handleCheckedException(() -> httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString()));
+        return handleHttpResponse(httpResponse, new TypeReference<EnvironmentResponse>() {});
+    }
+
+    public CompletableFuture<Void> deleteEnvironment(DeleteEnvironmentRequest deleteEnvironmentRequest, String token)
+    {
+        var httpRequest = HttpRequest.newBuilder(URI.create(baseUrl + "/environments/" + deleteEnvironmentRequest.name()))
+                .headers(AUTHORIZATION, BEARER + token)
+                .DELETE()
+                .build();
+
+        // start async and ignore returned future; the future handed back completes immediately
+        httpClient.sendAsync(httpRequest, HttpResponse.BodyHandlers.ofString());
+        return CompletableFuture.completedFuture(null);
+    }
+
+    public void startEnvironment(EnvironmentRequest environmentRequest, String token) // fire-and-forget PATCH
+    {
+        var requestBody = handleCheckedException(() -> objectMapper.writeValueAsString(environmentRequest.request()));
+        sendPatchAsync(environmentRequest.name(), token, requestBody);
+    }
+
+    public void stopEnvironment(EnvironmentRequest environmentRequest, String token) // fire-and-forget PATCH
+    {
+        var requestBody = handleCheckedException(() -> objectMapper.writeValueAsString(environmentRequest.request()));
+        sendPatchAsync(environmentRequest.name(), token, requestBody);
+    }
+
+    private void sendPatchAsync(String name, String token, String jsonPayLoadString) // response is intentionally ignored
+    {
+        HttpRequest.BodyPublisher publisher = HttpRequest.BodyPublishers.ofString(jsonPayLoadString);
+        var httpRequest = HttpRequest.newBuilder(URI.create(baseUrl + "/environments/" + name))
+                .headers(AUTHORIZATION, BEARER + token, CONTENT_TYPE, APPLICATION_JSON)
+                .method("PATCH", publisher) // HttpRequest.Builder has no PATCH shortcut
+                .build();
+
+        httpClient.sendAsync(httpRequest, HttpResponse.BodyHandlers.ofString());
+    }
+
+    private <T> T handleHttpResponse(HttpResponse<String> httpResponse, TypeReference<T> typeReference) // maps status to result or exception
+    {
+        var body = httpResponse.body();
+        if (httpResponse.statusCode() >= 200 && httpResponse.statusCode() <= 299) {
+            return handleCheckedException(() -> {
+                if (typeReference.getType().getTypeName().equals(Void.class.getTypeName())) {
+                    return null;
+                }
+                else {
+                    return objectMapper.readValue(body, typeReference);
+                }
+            });
+        }
+        else if (httpResponse.statusCode() >= 400 && httpResponse.statusCode() <= 499) {
+            throw new Error4xxException(httpResponse.statusCode(), body);
+        }
+        else if (httpResponse.statusCode() >= 500 && httpResponse.statusCode() <= 599) {
+            throw new Error5xxException(httpResponse.statusCode(), body);
+        }
+        else {
+            throw new BaseException(httpResponse.statusCode(), body);
+        }
+    }
+
+    private static <T> T handleCheckedException(CheckedSupplier<T> checkedSupplier) // converts checked exceptions to unchecked
+    {
+        try {
+            return checkedSupplier.get();
+        }
+        catch (IOException e) {
+            throw new UncheckedIOException(e);
+        }
+        catch (InterruptedException e) {
+            Thread.currentThread().interrupt(); // restore the interrupt flag before rethrowing
+            throw new RuntimeException(e);
+        }
+    }
+
+    @FunctionalInterface
+    private interface CheckedSupplier<T>
+    {
+        T get()
+                throws IOException, InterruptedException;
+    }
+}
diff --git a/pom.xml b/pom.xml
index 66a3d0988f74..be9a35a563b7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -113,6 +113,7 @@
plugin/trino-snowflake
plugin/trino-spooling-filesystem
plugin/trino-sqlserver
+ plugin/trino-teradata
plugin/trino-teradata-functions
plugin/trino-thrift
plugin/trino-thrift-api