diff --git a/integ-test/build.gradle b/integ-test/build.gradle
index 1fb0c3b9140..62e1b9861af 100644
--- a/integ-test/build.gradle
+++ b/integ-test/build.gradle
@@ -557,6 +557,40 @@ integTest {
   // Exclude this IT, because they executed in another task (:integTestWithSecurity)
   exclude 'org/opensearch/sql/security/**'
 
+  // Workaround for Gradle 9.4.1 ClassCastException in TestEventReporterAsListener.started
+  // (line 58) — the bridge casts a parent test descriptor's reporter to
+  // GroupTestEventReporterInternal but a class-level @Ignore produces a non-composite parent
+  // descriptor with a leaf reporter, so the cast fails and aborts the entire integTest task
+  // even though the tests would have been skipped anyway. The bridge is registered by Gradle's
+  // own AbstractTestTask (we can't bypass it from user code), so the only available remedy is
+  // to keep these classes off the test runner's input set. Net behaviour for CI: still
+  // skipped, just at the build layer instead of inside JUnit. Remove once Gradle ships a fix.
+  // OrderIT is already excluded above.
+  exclude 'org/opensearch/sql/calcite/remote/CalciteInformationSchemaCommandIT.class'
+  exclude 'org/opensearch/sql/calcite/remote/CalciteJsonFunctionsIT.class'
+  exclude 'org/opensearch/sql/calcite/remote/CalcitePrometheusDataSourceCommandsIT.class'
+  exclude 'org/opensearch/sql/calcite/remote/CalciteShowDataSourcesCommandIT.class'
+  exclude 'org/opensearch/sql/legacy/AggregationIT.class'
+  exclude 'org/opensearch/sql/legacy/DateFormatIT.class'
+  exclude 'org/opensearch/sql/legacy/DateFunctionsIT.class'
+  exclude 'org/opensearch/sql/legacy/HashJoinIT.class'
+  exclude 'org/opensearch/sql/legacy/HavingIT.class'
+  exclude 'org/opensearch/sql/legacy/JSONRequestIT.class'
+  exclude 'org/opensearch/sql/legacy/JoinIT.class'
+  exclude 'org/opensearch/sql/legacy/MathFunctionsIT.class'
+  exclude 'org/opensearch/sql/legacy/MetricsIT.class'
+  exclude 'org/opensearch/sql/legacy/MultiQueryIT.class'
+  exclude 'org/opensearch/sql/legacy/NestedFieldQueryIT.class'
+  exclude 'org/opensearch/sql/legacy/PreparedStatementIT.class'
+  exclude 'org/opensearch/sql/legacy/QueryFunctionsIT.class'
+  exclude 'org/opensearch/sql/legacy/QueryIT.class'
+  exclude 'org/opensearch/sql/legacy/SQLFunctionsIT.class'
+  exclude 'org/opensearch/sql/legacy/ShowIT.class'
+  exclude 'org/opensearch/sql/legacy/SourceFieldIT.class'
+  exclude 'org/opensearch/sql/legacy/SubqueryIT.class'
+  exclude 'org/opensearch/sql/ppl/JsonFunctionsIT.class'
+  exclude 'org/opensearch/sql/sql/ExpressionIT.class'
+
   finalizedBy 'printIntegTestPaths'
 }
 
diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/CalciteEvalCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/CalciteEvalCommandIT.java
index 588a4a784f9..9782ec06287 100644
--- a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/CalciteEvalCommandIT.java
+++ b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/CalciteEvalCommandIT.java
@@ -15,6 +15,7 @@
 import org.json.JSONObject;
 import org.junit.jupiter.api.Test;
 import org.opensearch.client.Request;
+import org.opensearch.sql.legacy.TestUtils;
 import org.opensearch.sql.ppl.PPLIntegTestCase;
 
 public class CalciteEvalCommandIT extends PPLIntegTestCase {
@@ -26,23 +27,46 @@ public void init() throws Exception {
     loadIndex(Index.BANK);
 
-    // Create test data for string concatenation
-    Request request1 = new Request("PUT", "/test_eval/_doc/1?refresh=true");
-    request1.setJsonEntity("{\"name\": \"Alice\", \"age\": 25, \"title\": \"Engineer\"}");
-    client().performRequest(request1);
+    // Pre-create test_eval through the helper so the analytics-engine compatibility run
+    // (tests.analytics.parquet_indices=true) provisions it as a parquet-backed composite
+    // index. Plain auto-mapping via the doc PUTs would create a Lucene-backed index, which
+    // the analytics-engine planner cannot scan ("No backend can scan all requested fields").
+    // Explicit mapping pins types so both v2 (verifySchema "string"/"bigint") and analytics
+    // paths see the same shape regardless of dynamic-mapping behavior on the parquet engine.
+    // Guarded by isIndexExist for idempotency — init() runs before each @Test method.
+    if (!TestUtils.isIndexExist(client(), "test_eval")) {
+      String testEvalMapping =
+          "{\"mappings\":{\"properties\":{"
+              + "\"name\":{\"type\":\"keyword\"},"
+              + "\"age\":{\"type\":\"long\"},"
+              + "\"title\":{\"type\":\"keyword\"}}}}";
+      TestUtils.createIndexByRestClient(client(), "test_eval", testEvalMapping);
 
-    Request request2 = new Request("PUT", "/test_eval/_doc/2?refresh=true");
-    request2.setJsonEntity("{\"name\": \"Bob\", \"age\": 30, \"title\": \"Manager\"}");
-    client().performRequest(request2);
+      // Create test data for string concatenation
+      Request request1 = new Request("PUT", "/test_eval/_doc/1?refresh=true");
+      request1.setJsonEntity("{\"name\": \"Alice\", \"age\": 25, \"title\": \"Engineer\"}");
+      client().performRequest(request1);
 
-    Request request3 = new Request("PUT", "/test_eval/_doc/3?refresh=true");
-    request3.setJsonEntity("{\"name\": \"Charlie\", \"age\": null, \"title\": \"Analyst\"}");
-    client().performRequest(request3);
+      Request request2 = new Request("PUT", "/test_eval/_doc/2?refresh=true");
+      request2.setJsonEntity("{\"name\": \"Bob\", \"age\": 30, \"title\": \"Manager\"}");
+      client().performRequest(request2);
+
+      Request request3 = new Request("PUT", "/test_eval/_doc/3?refresh=true");
+      request3.setJsonEntity("{\"name\": \"Charlie\", \"age\": null, \"title\": \"Analyst\"}");
+      client().performRequest(request3);
+    }
   }
 
   @Test
   public void testEvalStringConcatenation() throws IOException {
-    JSONObject result = executeQuery("source=test_eval | eval greeting = 'Hello ' + name");
+    // Pin the projection so column order is deterministic across execution paths — the
+    // analytics-engine route reads parquet schema in storage order, which can differ from the
+    // v2 / Lucene path's _source-iteration order. Adding an explicit | fields makes the test
+    // a strict assertion on the eval expression rather than a coincidence of projection order.
+    JSONObject result =
+        executeQuery(
+            "source=test_eval | eval greeting = 'Hello ' + name | fields name, title, age,"
+                + " greeting");
     verifySchema(
         result,
         schema("name", "string"),