diff --git a/integration-test/src/main/java/org/apache/iotdb/it/env/cluster/config/MppCommonConfig.java b/integration-test/src/main/java/org/apache/iotdb/it/env/cluster/config/MppCommonConfig.java index e7f4b228d4f1..746f13f04c0d 100644 --- a/integration-test/src/main/java/org/apache/iotdb/it/env/cluster/config/MppCommonConfig.java +++ b/integration-test/src/main/java/org/apache/iotdb/it/env/cluster/config/MppCommonConfig.java @@ -657,6 +657,18 @@ public CommonConfig setRestrictObjectLimit(boolean restrictObjectLimit) { return this; } + @Override + public CommonConfig setCteBufferSize(long cteBufferSize) { + setProperty("cte_buffer_size_in_bytes", String.valueOf(cteBufferSize)); + return this; + } + + @Override + public CommonConfig setMaxRowsInCteBuffer(int maxRows) { + setProperty("max_rows_in_cte_buffer", String.valueOf(maxRows)); + return this; + } + // For part of the log directory public String getClusterConfigStr() { return fromConsensusFullNameToAbbr(properties.getProperty(CONFIG_NODE_CONSENSUS_PROTOCOL_CLASS)) diff --git a/integration-test/src/main/java/org/apache/iotdb/it/env/cluster/config/MppSharedCommonConfig.java b/integration-test/src/main/java/org/apache/iotdb/it/env/cluster/config/MppSharedCommonConfig.java index abb0f8bf8bb0..de15176c5f1c 100644 --- a/integration-test/src/main/java/org/apache/iotdb/it/env/cluster/config/MppSharedCommonConfig.java +++ b/integration-test/src/main/java/org/apache/iotdb/it/env/cluster/config/MppSharedCommonConfig.java @@ -691,4 +691,18 @@ public CommonConfig setRestrictObjectLimit(boolean restrictObjectLimit) { dnConfig.setRestrictObjectLimit(restrictObjectLimit); return this; } + + @Override + public CommonConfig setCteBufferSize(long cteBufferSize) { + dnConfig.setCteBufferSize(cteBufferSize); + cnConfig.setCteBufferSize(cteBufferSize); + return this; + } + + @Override + public CommonConfig setMaxRowsInCteBuffer(int maxRows) { + dnConfig.setMaxRowsInCteBuffer(maxRows); + cnConfig.setMaxRowsInCteBuffer(maxRows); + return this; + } } diff --git a/integration-test/src/main/java/org/apache/iotdb/it/env/remote/config/RemoteCommonConfig.java b/integration-test/src/main/java/org/apache/iotdb/it/env/remote/config/RemoteCommonConfig.java index 148046423cd0..807660330a30 100644 --- a/integration-test/src/main/java/org/apache/iotdb/it/env/remote/config/RemoteCommonConfig.java +++ b/integration-test/src/main/java/org/apache/iotdb/it/env/remote/config/RemoteCommonConfig.java @@ -482,4 +482,14 @@ public CommonConfig setAuditableOperationResult(String auditableOperationResult) public CommonConfig setRestrictObjectLimit(boolean restrictObjectLimit) { return this; } + + @Override + public CommonConfig setCteBufferSize(long cteBufferSize) { + return this; + } + + @Override + public CommonConfig setMaxRowsInCteBuffer(int maxRows) { + return this; + } } diff --git a/integration-test/src/main/java/org/apache/iotdb/itbase/env/CommonConfig.java b/integration-test/src/main/java/org/apache/iotdb/itbase/env/CommonConfig.java index 531f94eec4b1..4cda485a6cbf 100644 --- a/integration-test/src/main/java/org/apache/iotdb/itbase/env/CommonConfig.java +++ b/integration-test/src/main/java/org/apache/iotdb/itbase/env/CommonConfig.java @@ -213,4 +213,8 @@ default CommonConfig setDefaultDatabaseLevel(int defaultDatabaseLevel) { CommonConfig setAuditableOperationResult(String auditableOperationResult); CommonConfig setRestrictObjectLimit(boolean restrictObjectLimit); + + CommonConfig setCteBufferSize(long cteBufferSize); + + CommonConfig setMaxRowsInCteBuffer(int maxRows); } 
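For orientation, here is a minimal sketch of how an integration test could drive the two new knobs plumbed through `CommonConfig` above. The `setCteBufferSize`/`setMaxRowsInCteBuffer` calls and the `EnvFactory` setup pattern come from this patch; the class name and the concrete values are illustrative only.

```java
import org.apache.iotdb.it.env.EnvFactory;
import org.junit.BeforeClass;

public class CteConfigUsageExampleIT { // hypothetical test class, not part of this patch
  @BeforeClass
  public static void setUpClass() {
    // The two setters map to cte_buffer_size_in_bytes and max_rows_in_cte_buffer;
    // the values below are examples, not recommended defaults.
    EnvFactory.getEnv()
        .getConfig()
        .getCommonConfig()
        .setCteBufferSize(128 * 1024L)
        .setMaxRowsInCteBuffer(100);
    EnvFactory.getEnv().initClusterEnvironment();
  }
}
```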
diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/IoTDBCteIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/IoTDBCteIT.java index e3100e6ee5c4..13a2b0dd28f2 100644 --- a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/IoTDBCteIT.java +++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/IoTDBCteIT.java @@ -45,11 +45,21 @@ import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; +import java.util.Arrays; import java.util.Locale; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; import static org.apache.iotdb.db.it.utils.TestUtils.tableAssertTestFail; import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @RunWith(IoTDBTestRunner.class) @@ -61,14 +71,16 @@ public class IoTDBCteIT { new String[] { "CREATE DATABASE IF NOT EXISTS testdb", "USE testdb", - "CREATE TABLE IF NOT EXISTS testtb(deviceid STRING TAG, voltage FLOAT FIELD)", - "INSERT INTO testtb VALUES(1000, 'd1', 100.0)", - "INSERT INTO testtb VALUES(2000, 'd1', 200.0)", - "INSERT INTO testtb VALUES(1000, 'd2', 300.0)", + "CREATE TABLE IF NOT EXISTS testtb(voltage FLOAT FIELD, manufacturer STRING FIELD, deviceid STRING TAG)", + "INSERT INTO testtb VALUES(1000, 100.0, 'a', 'd1')", + "INSERT INTO testtb VALUES(2000, 200.0, 'b', 'd1')", + "INSERT INTO testtb VALUES(1000, 300.0, 'c', 'd2')", }; private static final String dropDbSqls = "DROP DATABASE IF EXISTS testdb"; + private static final String[] cteKeywords = {"", "materialized"}; + @BeforeClass public static void setUpClass() { Locale.setDefault(Locale.ENGLISH); @@ -102,51 +114,107 @@ public void tearDown() { } @Test - public void testQuery() { - String[] expectedHeader = new String[] {"time", "deviceid", "voltage"}; + public void testMultipleWith() { + String mainQuery = + "select * from cte1 where voltage > " + + "(with cte2 as materialized (select avg(voltage) as avg_voltage from testtb) select avg_voltage from cte2)"; + String[] expectedHeader = new String[] {"time", "voltage", "manufacturer", "deviceid"}; String[] retArray = new String[] { - "1970-01-01T00:00:01.000Z,d1,100.0,", - "1970-01-01T00:00:02.000Z,d1,200.0,", - "1970-01-01T00:00:01.000Z,d2,300.0," + "1970-01-01T00:00:01.000Z,300.0,c,d2,", }; - tableResultSetEqualTest( - "with cte as (select * from testtb) select * from cte order by deviceid", - expectedHeader, - retArray, - DATABASE_NAME); + String[] cteTemplateQueries = new String[] {"cte1 as %s (select * from testtb)"}; + testCteSuccessWithVariants(cteTemplateQueries, mainQuery, expectedHeader, retArray); + } - expectedHeader = new String[] {"deviceid", "voltage"}; - retArray = new String[] {"d1,100.0,", "d1,200.0,", "d2,300.0,"}; - tableResultSetEqualTest( - "with cte as (select deviceid, voltage from testtb) select * from cte order by deviceid", - expectedHeader, - retArray, - DATABASE_NAME); - - expectedHeader = new String[] {"deviceid", "avg_voltage"}; - retArray = new String[] {"d1,150.0,", "d2,300.0,"}; - tableResultSetEqualTest( - "with cte as (select deviceid, 
avg(voltage) as avg_voltage from testtb group by deviceid) select * from cte order by deviceid", - expectedHeader, - retArray, - DATABASE_NAME); + @Test + public void testFilterQuery() { + // case 1 + String mainQuery = "select * from cte where time > 1000 order by deviceid"; + String[] expectedHeader = new String[] {"time", "voltage", "manufacturer", "deviceid"}; + String[] retArray = + new String[] { + "1970-01-01T00:00:02.000Z,200.0,b,d1,", + }; + String[] cteTemplateQueries = new String[] {"cte as %s (select * from testtb)"}; + testCteSuccessWithVariants(cteTemplateQueries, mainQuery, expectedHeader, retArray); + + // case 2 + mainQuery = "select * from cte where voltage > 200 order by deviceid"; + expectedHeader = new String[] {"time", "voltage", "manufacturer", "deviceid"}; + retArray = new String[] {"1970-01-01T00:00:01.000Z,300.0,c,d2,"}; + testCteSuccessWithVariants(cteTemplateQueries, mainQuery, expectedHeader, retArray); + } + + @Test + public void testSortQuery() { + final String mainQuery = "select * from cte order by deviceid, voltage desc"; + + String[] expectedHeader = new String[] {"time", "voltage", "manufacturer", "deviceid"}; + String[] retArray = + new String[] { + "1970-01-01T00:00:02.000Z,200.0,b,d1,", + "1970-01-01T00:00:01.000Z,100.0,a,d1,", + "1970-01-01T00:00:01.000Z,300.0,c,d2," + }; + String[] cteTemplateQueries = new String[] {"cte as %s (select * from testtb)"}; + testCteSuccessWithVariants(cteTemplateQueries, mainQuery, expectedHeader, retArray); + } + + @Test + public void testLimitOffsetQuery() { + final String mainQuery = "select * from cte limit 1 offset 1"; + + String[] expectedHeader = new String[] {"time", "voltage", "manufacturer", "deviceid"}; + String[] retArray = + new String[] { + "1970-01-01T00:00:02.000Z,200.0,b,d1,", + }; + String[] cteTemplateQueries = + new String[] {"cte as %s (select * from testtb where deviceid = 'd1') "}; + testCteSuccessWithVariants(cteTemplateQueries, mainQuery, expectedHeader, retArray); + } + + @Test + public void testAggQuery() { + // case 1 + String mainQuery = "select * from cte order by deviceid"; + String[] expectedHeader = new String[] {"deviceid", "avg_voltage"}; + String[] retArray = new String[] {"d1,150.0,", "d2,300.0,"}; + String[] cteTemplateQueries = + new String[] { + "cte as %s (select deviceid, avg(voltage) as avg_voltage from testtb group by deviceid)" + }; + testCteSuccessWithVariants(cteTemplateQueries, mainQuery, expectedHeader, retArray); + + // case 2 + mainQuery = + "select deviceid, avg(voltage) as avg_voltage from cte group by deviceid order by deviceid"; + cteTemplateQueries = new String[] {"cte as %s (select deviceid, voltage from testtb)"}; + testCteSuccessWithVariants(cteTemplateQueries, mainQuery, expectedHeader, retArray); } @Test public void testPartialColumn() { - String[] expectedHeader = new String[] {"id", "v"}; + // case 1 + String mainQuery = "select * from cte order by deviceid"; + String[] expectedHeader = new String[] {"deviceid", "voltage"}; String[] retArray = new String[] {"d1,100.0,", "d1,200.0,", "d2,300.0,"}; - tableResultSetEqualTest( - "with cte(id, v) as (select deviceid, voltage from testtb) select * from cte order by id", - expectedHeader, - retArray, - DATABASE_NAME); - - tableAssertTestFail( - "with cte(v) as (select deviceid, voltage from testtb) select * from cte order by id", - "701: Column alias list has 1 entries but relation has 2 columns", - DATABASE_NAME); + String[] cteTemplateQueries = new String[] {"cte as %s (select deviceid, voltage from testtb)"}; + 
testCteSuccessWithVariants(cteTemplateQueries, mainQuery, expectedHeader, retArray); + + mainQuery = "select * from cte order by id"; + expectedHeader = new String[] {"id", "v"}; + retArray = new String[] {"d1,100.0,", "d1,200.0,", "d2,300.0,"}; + + // case 2 + cteTemplateQueries = new String[] {"cte(id, v) as %s (select deviceid, voltage from testtb)"}; + testCteSuccessWithVariants(cteTemplateQueries, mainQuery, expectedHeader, retArray); + + // case 3 + cteTemplateQueries = new String[] {"cte(v) as %s (select deviceid, voltage from testtb)"}; + String errMsg = "701: Column alias list has 1 entries but relation has 2 columns"; + testCteFailureWithVariants(cteTemplateQueries, mainQuery, errMsg); } @Test @@ -154,68 +222,76 @@ public void testExplain() throws SQLException { try (Connection connection = EnvFactory.getEnv().getConnection(BaseEnv.TABLE_SQL_DIALECT); Statement statement = connection.createStatement()) { statement.execute("USE testdb"); - // explain - ResultSet resultSet = - statement.executeQuery( - "explain with cte as (select * from testtb) select * from cte order by deviceid"); - ResultSetMetaData metaData = resultSet.getMetaData(); - assertEquals(metaData.getColumnCount(), 1); - assertEquals(metaData.getColumnName(1), "distribution plan"); - - // explain analyze - resultSet = - statement.executeQuery( - "explain analyze with cte as (select * from testtb) select * from cte order by deviceid"); - metaData = resultSet.getMetaData(); - assertEquals(metaData.getColumnCount(), 1); - assertEquals(metaData.getColumnName(1), "Explain Analyze"); + for (String keyword : cteKeywords) { + // explain + ResultSet resultSet = + statement.executeQuery( + String.format( + "explain with cte as %s (select * from testtb) select * from cte order by deviceid", + keyword)); + ResultSetMetaData metaData = resultSet.getMetaData(); + assertEquals(metaData.getColumnCount(), 1); + assertEquals(metaData.getColumnName(1), "distribution plan"); + + // explain analyze + resultSet = + statement.executeQuery( + String.format( + "explain analyze with cte as %s (select * from testtb) select * from cte order by deviceid", + keyword)); + metaData = resultSet.getMetaData(); + assertEquals(metaData.getColumnCount(), 1); + assertEquals(metaData.getColumnName(1), "Explain Analyze"); + } } } @Test public void testMultiReference() { - String[] expectedHeader = new String[] {"time", "deviceid", "voltage"}; - String[] retArray = new String[] {"1970-01-01T00:00:01.000Z,d2,300.0,"}; - tableResultSetEqualTest( - "with cte as (select * from testtb) select * from cte where voltage > (select avg(voltage) from cte)", - expectedHeader, - retArray, - DATABASE_NAME); + String[] expectedHeader = new String[] {"time", "voltage", "manufacturer", "deviceid"}; + String[] retArray = new String[] {"1970-01-01T00:00:01.000Z,300.0,c,d2,"}; + String[] cteTemplateQueries = new String[] {"cte as %s (select * from testtb)"}; + String mainQuery = "select * from cte where voltage > (select avg(voltage) from cte)"; + testCteSuccessWithVariants(cteTemplateQueries, mainQuery, expectedHeader, retArray); } @Test public void testDomain() { + final String mainQuery = "select * from testtb order by deviceid"; + String[] expectedHeader = new String[] {"deviceid", "voltage"}; String[] retArray = new String[] {"d1,100.0,", "d1,200.0,", "d2,300.0,"}; - tableResultSetEqualTest( - "with testtb as (select deviceid, voltage from testtb) select * from testtb order by deviceid", - expectedHeader, - retArray, - DATABASE_NAME); - - tableAssertTestFail( - "with 
testtb as (select voltage from testtb) select * from testtb order by deviceid", - "616: Column 'deviceid' cannot be resolved", - DATABASE_NAME); + String[] cteTemplateQueries = + new String[] {"testtb as %s (select deviceid, voltage from testtb)"}; + + testCteSuccessWithVariants(cteTemplateQueries, mainQuery, expectedHeader, retArray); + + cteTemplateQueries = new String[] {"testtb as %s (select voltage from testtb)"}; + String errMsg = "616: Column 'deviceid' cannot be resolved"; + testCteFailureWithVariants(cteTemplateQueries, mainQuery, errMsg); } @Test public void testSession() throws IoTDBConnectionException, StatementExecutionException { try (ITableSession session = EnvFactory.getEnv().getTableSessionConnection()) { session.executeNonQueryStatement("use testdb"); - SessionDataSet dataSet = - session.executeQueryStatement("with cte as (select * from testtb) select * from cte"); - - assertEquals(dataSet.getColumnNames().size(), 3); - assertEquals(dataSet.getColumnNames().get(0), "time"); - assertEquals(dataSet.getColumnNames().get(1), "deviceid"); - assertEquals(dataSet.getColumnNames().get(2), "voltage"); - int cnt = 0; - while (dataSet.hasNext()) { - dataSet.next(); - cnt++; + for (String keyword : cteKeywords) { + SessionDataSet dataSet = + session.executeQueryStatement( + String.format("with cte as %s (select * from testtb) select * from cte", keyword)); + + assertEquals(dataSet.getColumnNames().size(), 4); + assertEquals(dataSet.getColumnNames().get(0), "time"); + assertEquals(dataSet.getColumnNames().get(1), "voltage"); + assertEquals(dataSet.getColumnNames().get(2), "manufacturer"); + assertEquals(dataSet.getColumnNames().get(3), "deviceid"); + int cnt = 0; + while (dataSet.hasNext()) { + dataSet.next(); + cnt++; + } + Assert.assertEquals(3, cnt); } - Assert.assertEquals(3, cnt); } } @@ -229,54 +305,118 @@ public void testJdbc() throws ClassNotFoundException, SQLException { uri, SessionConfig.DEFAULT_USER, SessionConfig.DEFAULT_PASSWORD); Statement statement = connection.createStatement()) { statement.executeUpdate("use testdb"); - ResultSet resultSet = - statement.executeQuery("with cte as (select * from testtb) select * from cte"); - - final ResultSetMetaData metaData = resultSet.getMetaData(); - assertEquals(metaData.getColumnCount(), 3); - assertEquals(metaData.getColumnLabel(1), "time"); - assertEquals(metaData.getColumnLabel(2), "deviceid"); - assertEquals(metaData.getColumnLabel(3), "voltage"); - - int cnt = 0; - while (resultSet.next()) { - cnt++; + for (String keyword : cteKeywords) { + ResultSet resultSet = + statement.executeQuery( + String.format("with cte as %s (select * from testtb) select * from cte", keyword)); + + final ResultSetMetaData metaData = resultSet.getMetaData(); + assertEquals(metaData.getColumnCount(), 4); + assertEquals(metaData.getColumnLabel(1), "time"); + assertEquals(metaData.getColumnLabel(2), "voltage"); + assertEquals(metaData.getColumnLabel(3), "manufacturer"); + assertEquals(metaData.getColumnLabel(4), "deviceid"); + + int cnt = 0; + while (resultSet.next()) { + cnt++; + } + Assert.assertEquals(3, cnt); } - Assert.assertEquals(3, cnt); } } @Test public void testNest() { - String sql1 = - "WITH" - + " cte1 AS (select deviceid, voltage from testtb where voltage > 200)," - + " cte2 AS (SELECT voltage FROM cte1)" - + " SELECT * FROM cte2"; - - String sql2 = - "WITH" - + " cte2 AS (SELECT voltage FROM cte1)," - + " cte1 AS (select deviceid, voltage from testtb where voltage > 200)" - + " SELECT * FROM cte2"; + final String mainQuery = "select * 
from cte2"; + String[] cteTemplateQueries = + new String[] { + "cte1 as %s (select deviceid, voltage from testtb where voltage > 200)", + "cte2 as %s (select voltage from cte1)" + }; String[] expectedHeader = new String[] {"voltage"}; String[] retArray = new String[] {"300.0,"}; - tableResultSetEqualTest(sql1, expectedHeader, retArray, DATABASE_NAME); + testCteSuccessWithVariants(cteTemplateQueries, mainQuery, expectedHeader, retArray); - tableAssertTestFail(sql2, "550: Table 'testdb.cte1' does not exist.", DATABASE_NAME); + cteTemplateQueries = + new String[] { + "cte2 as %s (select voltage from cte1)", + "cte1 as %s (select deviceid, voltage from testtb where voltage > 200)" + }; + String errMsg = "550: Table 'testdb.cte1' does not exist."; + testCteFailureWithVariants(cteTemplateQueries, mainQuery, errMsg); } @Test - public void testRecursive() { + public void testNestExplain1() throws SQLException { String sql = - "WITH RECURSIVE t(n) AS (" + "explain with cte1 as (select * from testtb), " + + "cte2 as materialized (select time, voltage from cte1) " + + "select * from cte2"; + try (Connection connection = EnvFactory.getEnv().getConnection(BaseEnv.TABLE_SQL_DIALECT); + Statement statement = connection.createStatement()) { + statement.execute("USE testdb"); + + // explain + ResultSet resultSet = statement.executeQuery(sql); + ResultSetMetaData metaData = resultSet.getMetaData(); + assertEquals(metaData.getColumnCount(), 1); + assertEquals(metaData.getColumnName(1), "distribution plan"); + + StringBuilder sb = new StringBuilder(); + while (resultSet.next()) { + sb.append(resultSet.getString(1)).append(System.lineSeparator()); + } + String result = sb.toString(); + assertFalse(result.contains("CTE Query : 'cte1'")); + assertTrue(result.contains("CTE Query : 'cte2'")); + assertTrue(result.contains("Main Query")); + } + } + + @Test + public void testNestExplain2() throws SQLException { + String sql = + "explain with cte1 as materialized (select * from testtb), " + + "cte2 as materialized (select time, voltage from cte1) " + + "select * from cte2"; + try (Connection connection = EnvFactory.getEnv().getConnection(BaseEnv.TABLE_SQL_DIALECT); + Statement statement = connection.createStatement()) { + statement.execute("USE testdb"); + + // explain + ResultSet resultSet = statement.executeQuery(sql); + ResultSetMetaData metaData = resultSet.getMetaData(); + assertEquals(metaData.getColumnCount(), 1); + assertEquals(metaData.getColumnName(1), "distribution plan"); + + StringBuilder sb = new StringBuilder(); + while (resultSet.next()) { + sb.append(resultSet.getString(1)).append(System.lineSeparator()); + } + String result = sb.toString(); + assertTrue(result.contains("CTE Query : 'cte1'")); + assertTrue(result.contains("CTE Query : 'cte2'")); + assertTrue(result.contains("Main Query")); + } + } + + @Test + public void testRecursive() { + String sqlTemplate = + "WITH RECURSIVE t(n) AS %s (" + " VALUES (1)" + " UNION ALL" - + " SELECT n+1 FROM t WHERE n < 100)" - + " SELECT sum(n) FROM t"; - - tableAssertTestFail(sql, "701: recursive cte is not supported yet", DATABASE_NAME); + + " select n+1 from t WHERE n < 100)" + + " select sum(n) from t"; + + for (String keyword : cteKeywords) { + tableAssertTestFail( + String.format(sqlTemplate, keyword), + "701: recursive cte is not supported yet", + DATABASE_NAME); + } } @Test @@ -288,14 +428,17 @@ public void testPrivileges() throws SQLException { adminStmt.execute("USE testdb"); adminStmt.execute( "CREATE TABLE IF NOT EXISTS testtb1(deviceid STRING TAG, 
voltage FLOAT FIELD)"); - adminStmt.execute("GRANT SELECT ON testdb.testtb TO USER tmpuser"); + adminStmt.execute("GRANT select ON testdb.testtb TO USER tmpuser"); try (Connection connection = EnvFactory.getEnv() .getConnection("tmpuser", "tmppw123456789", BaseEnv.TABLE_SQL_DIALECT); Statement statement = connection.createStatement()) { statement.execute("USE testdb"); - statement.execute("with cte as (select * from testtb) select * from cte"); + for (String keyword : cteKeywords) { + statement.execute( + String.format("with cte as %s (select * from testtb) select * from cte", keyword)); + } } try (Connection connection = @@ -303,10 +446,14 @@ public void testPrivileges() throws SQLException { .getConnection("tmpuser", "tmppw123456789", BaseEnv.TABLE_SQL_DIALECT); Statement statement = connection.createStatement()) { statement.execute("USE testdb"); - statement.execute("with cte as (select * from testtb1) select * from testtb"); + for (String keyword : cteKeywords) { + statement.execute( + String.format( + "with cte as %s (select * from testtb1) select * from testtb", keyword)); + } fail("No exception!"); } catch (Exception e) { - Assert.assertTrue( + assertTrue( e.getMessage(), e.getMessage() .contains( @@ -318,6 +465,97 @@ public void testPrivileges() throws SQLException { } } + @Test + public void testConcurrentCteQueries() throws Exception { + final int threadCount = 10; + final int queriesPerThread = 20; + final AtomicInteger successCount = new AtomicInteger(0); + final AtomicInteger failureCount = new AtomicInteger(0); + final AtomicInteger totalCount = new AtomicInteger(0); + final CountDownLatch startLatch = new CountDownLatch(1); + final CountDownLatch finishLatch = new CountDownLatch(threadCount); + + ExecutorService executorService = Executors.newFixedThreadPool(threadCount); + + // Create CTE query tasks + Future[] futures = new Future[threadCount]; + for (int i = 0; i < threadCount; i++) { + final int threadId = i; + futures[i] = + executorService.submit( + () -> { + try { + startLatch.await(); // Wait for all threads to be ready + + try (Connection connection = + EnvFactory.getEnv().getConnection(BaseEnv.TABLE_SQL_DIALECT); + Statement statement = connection.createStatement()) { + statement.execute("USE testdb"); + + // Execute multiple CTE queries in each thread + for (int j = 0; j < queriesPerThread; j++) { + try { + // Test different types of CTE queries + String[] queries = { + String.format( + "WITH cte as %s (select * from testtb WHERE voltage > 150) select * from cte ORDER BY deviceid", + cteKeywords[j % cteKeywords.length]), + String.format( + "WITH cte as %s (select deviceid, avg(voltage) as avg_v from testtb GROUP BY deviceid) select * from cte", + cteKeywords[j % cteKeywords.length]), + String.format( + "WITH cte as %s (select * from testtb WHERE time > 1000) select count(*) as cnt from cte", + cteKeywords[j % cteKeywords.length]) + }; + + String query = queries[j % queries.length]; + ResultSet resultSet = statement.executeQuery(query); + + // Verify results + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + } + totalCount.getAndAdd(rowCount); + + successCount.incrementAndGet(); + + } catch (SQLException e) { + failureCount.incrementAndGet(); + System.err.println( + "Thread " + threadId + " query " + j + " failed: " + e.getMessage()); + } + } + } + } catch (Exception e) { + failureCount.incrementAndGet(); + System.err.println("Thread " + threadId + " failed: " + e.getMessage()); + } finally { + finishLatch.countDown(); + } + }); + } + + // Start all 
threads at once + startLatch.countDown(); + + // Wait for all threads to complete + finishLatch.await(60, TimeUnit.SECONDS); + + // Shutdown executor + executorService.shutdown(); + boolean terminated = executorService.awaitTermination(10, TimeUnit.SECONDS); + if (!terminated) { + executorService.shutdownNow(); + } + + // Verify results + int totalQueries = threadCount * queriesPerThread; + assertEquals("All queries should succeed", totalQueries, successCount.get()); + assertEquals("No queries should fail", 0, failureCount.get()); + assertEquals("Total query count should match", 340, totalCount.get()); + } + private static void prepareData() { try (Connection connection = EnvFactory.getEnv().getConnection(BaseEnv.TABLE_SQL_DIALECT); Statement statement = connection.createStatement()) { @@ -329,4 +567,28 @@ private static void prepareData() { fail(e.getMessage()); } } + + private void testCteSuccessWithVariants( + String[] cteTemplateQueries, String mainQuery, String[] expectedHeader, String[] retArray) { + for (String keyword : cteKeywords) { + String cteQueries = + Arrays.stream(cteTemplateQueries) + .map(s -> String.format(s, keyword)) + .collect(Collectors.joining(", ")); + String query = String.format("with %s %s", cteQueries, mainQuery); + tableResultSetEqualTest(query, expectedHeader, retArray, DATABASE_NAME); + } + } + + private void testCteFailureWithVariants( + String[] cteTemplateQueries, String mainQuery, String expectedErrMsg) { + for (String keyword : cteKeywords) { + String cteQueries = + Arrays.stream(cteTemplateQueries) + .map(s -> String.format(s, keyword)) + .collect(Collectors.joining(", ")); + String query = String.format("with %s %s", cteQueries, mainQuery); + tableAssertTestFail(query, expectedErrMsg, DATABASE_NAME); + } + } } diff --git a/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/IoTExplainAnalyzeIT.java b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/IoTExplainAnalyzeIT.java new file mode 100644 index 000000000000..4c252dbf3ca1 --- /dev/null +++ b/integration-test/src/test/java/org/apache/iotdb/relational/it/query/recent/IoTExplainAnalyzeIT.java @@ -0,0 +1,264 @@ +/* + * + * * Licensed to the Apache Software Foundation (ASF) under one + * * or more contributor license agreements. See the NOTICE file + * * distributed with this work for additional information + * * regarding copyright ownership. The ASF licenses this file + * * to you under the Apache License, Version 2.0 (the + * * "License"); you may not use this file except in compliance + * * with the License. You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, + * * software distributed under the License is distributed on an + * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * * KIND, either express or implied. See the License for the + * * specific language governing permissions and limitations + * * under the License. 
+ * + */ + +package org.apache.iotdb.relational.it.query.recent; + +import org.apache.iotdb.it.env.EnvFactory; +import org.apache.iotdb.it.framework.IoTDBTestRunner; +import org.apache.iotdb.itbase.category.TableLocalStandaloneIT; +import org.apache.iotdb.itbase.env.BaseEnv; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Arrays; +import java.util.Locale; +import java.util.function.ToLongFunction; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +@RunWith(IoTDBTestRunner.class) +@Category({TableLocalStandaloneIT.class}) +public class IoTExplainAnalyzeIT { + private static final String DATABASE_NAME = "testdb"; + + private static final String[] creationSqls = + new String[] { + "CREATE DATABASE IF NOT EXISTS " + DATABASE_NAME, + "USE " + DATABASE_NAME, + "CREATE TABLE IF NOT EXISTS testtb(deviceid STRING TAG, voltage FLOAT FIELD)", + "INSERT INTO testtb VALUES(1000, 'd1', 100.0)", + "INSERT INTO testtb VALUES(2000, 'd1', 200.0)", + "INSERT INTO testtb VALUES(1000, 'd2', 300.0)", + }; + + private static final String dropDbSqls = "DROP DATABASE IF EXISTS " + DATABASE_NAME; + + @BeforeClass + public static void setUpClass() { + Locale.setDefault(Locale.ENGLISH); + + EnvFactory.getEnv() + .getConfig() + .getCommonConfig() + .setPartitionInterval(1000) + .setMemtableSizeThreshold(10000) + .setMaxRowsInCteBuffer(100); + EnvFactory.getEnv().initClusterEnvironment(); + } + + @AfterClass + public static void tearDownClass() { + EnvFactory.getEnv().cleanClusterEnvironment(); + } + + @Before + public void setUp() throws SQLException { + prepareData(); + } + + @After + public void tearDown() { + try (Connection connection = EnvFactory.getEnv().getConnection(BaseEnv.TABLE_SQL_DIALECT); + Statement statement = connection.createStatement()) { + statement.execute(dropDbSqls); + } catch (Exception e) { + fail(e.getMessage()); + } + } + + @Test + public void testEmptyCteQuery() { + String sql = + "explain analyze with cte1 as materialized (select * from testtb1) select * from testtb, cte1 where testtb.deviceid = cte1.deviceid"; + try (Connection conn = EnvFactory.getEnv().getConnection(BaseEnv.TABLE_SQL_DIALECT); + Statement statement = conn.createStatement()) { + statement.execute("Use " + DATABASE_NAME); + statement.execute( + "CREATE TABLE IF NOT EXISTS testtb1(deviceid STRING TAG, voltage FLOAT FIELD)"); + ResultSet resultSet = statement.executeQuery(sql); + StringBuilder sb = new StringBuilder(); + while (resultSet.next()) { + System.out.println(resultSet.getString(1)); + sb.append(resultSet.getString(1)).append(System.lineSeparator()); + } + resultSet.close(); + + String result = sb.toString(); + Assert.assertFalse( + "Explain Analyze should not contain ExplainAnalyze node.", + result.contains("ExplainAnalyzeNode")); + + String[] lines = result.split(System.lineSeparator()); + Assert.assertTrue(lines.length > 3); + Assert.assertEquals("CTE Query : 'cte1'", lines[0]); + Assert.assertEquals("", lines[1]); + Assert.assertEquals("Main Query", lines[2]); + statement.execute("DROP TABLE testtb1"); + + } catch (SQLException e) { + fail(e.getMessage()); + } + } + + @Test + 
public void testCteQueryExceedsThreshold() { + String sql = + "explain analyze with cte1 as materialized (select * from testtb2) select * from testtb where testtb.deviceid in (select deviceid from cte1)"; + try (Connection conn = EnvFactory.getEnv().getConnection(BaseEnv.TABLE_SQL_DIALECT); + Statement statement = conn.createStatement()) { + statement.execute("Use " + DATABASE_NAME); + statement.execute( + "CREATE TABLE IF NOT EXISTS testtb2(deviceid STRING TAG, voltage FLOAT FIELD)"); + for (int i = 0; i < 100; i++) { + statement.addBatch( + String.format("insert into testtb2(deviceid, voltage) values('d%d', %d)", i, i)); + } + statement.executeBatch(); + ResultSet resultSet = statement.executeQuery(sql); + StringBuilder sb = new StringBuilder(); + while (resultSet.next()) { + sb.append(resultSet.getString(1)).append(System.lineSeparator()); + } + resultSet.close(); + + String result = sb.toString(); + Assert.assertFalse( + "Main Query should not contain CteScan node when the CTE query's result set exceeds threshold.", + result.contains("CteScanNode(CteScanOperator)")); + Assert.assertTrue( + "CTE Query should contain warning message when CTE query's result set exceeds threshold.", + result.contains("Failed to materialize CTE")); + Assert.assertFalse( + "Explain Analyze should not contain ExplainAnalyze node.", + result.contains("ExplainAnalyzeNode")); + + String[] plans = result.split("Main Query"); + for (String plan : plans) { + String[] lines = plan.split(System.lineSeparator()); + long[] instanceCount = + Arrays.stream(lines) + .filter(line -> line.contains("Fragment Instances Count:")) + .mapToLong(extractNumber("Fragment Instances Count:\\s(\\d+)")) + .toArray(); + assertEquals(instanceCount.length, 1); + + long totalInstances = + Arrays.stream(lines).filter(line -> line.contains("FRAGMENT-INSTANCE")).count(); + assertEquals(totalInstances, instanceCount[0]); + } + + statement.execute("DROP TABLE testtb2"); + } catch (SQLException e) { + fail(e.getMessage()); + } + } + + @Test + public void testCteQuerySuccess() { + String sql = + "explain analyze with cte1 as materialized (select voltage, deviceid from testtb3) select * from testtb where testtb.deviceid in (select deviceid from cte1)"; + try (Connection conn = EnvFactory.getEnv().getConnection(BaseEnv.TABLE_SQL_DIALECT); + Statement statement = conn.createStatement()) { + statement.execute("Use " + DATABASE_NAME); + statement.execute( + "CREATE TABLE IF NOT EXISTS testtb3(deviceid STRING TAG, voltage FLOAT FIELD)"); + for (int i = 0; i < 50; i++) { + statement.addBatch( + String.format("insert into testtb3(deviceid, voltage) values('d%d', %d)", i, i)); + } + statement.executeBatch(); + ResultSet resultSet = statement.executeQuery(sql); + StringBuilder sb = new StringBuilder(); + while (resultSet.next()) { + sb.append(resultSet.getString(1)).append(System.lineSeparator()); + } + resultSet.close(); + + String result = sb.toString(); + Assert.assertTrue( + "Main Query should contain CteScan node when the CTE query's result set does not exceeds threshold.", + result.contains("CteScanNode(CteScanOperator)")); + Assert.assertFalse( + "Explain Analyze should not contain ExplainAnalyze node.", + result.contains("ExplainAnalyzeNode")); + + String[] plans = result.split("Main Query"); + for (String plan : plans) { + String[] lines = plan.split(System.lineSeparator()); + long[] instanceCount = + Arrays.stream(lines) + .filter(line -> line.contains("Fragment Instances Count:")) + .mapToLong(extractNumber("Fragment Instances Count:\\s(\\d+)")) 
+ .toArray(); + assertEquals(instanceCount.length, 1); + + long totalInstances = + Arrays.stream(lines).filter(line -> line.contains("FRAGMENT-INSTANCE")).count(); + assertEquals(totalInstances, instanceCount[0]); + } + + statement.execute("DROP TABLE testtb3"); + } catch (SQLException e) { + fail(e.getMessage()); + } + } + + private static ToLongFunction extractNumber(String regex) { + return line -> { + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(line); + if (matcher.find()) { + try { + return Long.parseLong(matcher.group(1)); + } catch (NumberFormatException e) { + return 0; + } + } + return 0; + }; + } + + private static void prepareData() { + try (Connection connection = EnvFactory.getEnv().getConnection(BaseEnv.TABLE_SQL_DIALECT); + Statement statement = connection.createStatement()) { + + for (String sql : creationSqls) { + statement.execute(sql); + } + } catch (Exception e) { + fail(e.getMessage()); + } + } +} diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/conf/IoTDBConfig.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/conf/IoTDBConfig.java index b2c808654677..35d56ecc3c22 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/conf/IoTDBConfig.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/conf/IoTDBConfig.java @@ -421,6 +421,12 @@ public class IoTDBConfig { /** The buffer for sort operation */ private long sortBufferSize = 32 * 1024 * 1024L; + /** The buffer for cte scan operation */ + private long cteBufferSize = 128 * 1024L; + + /** Max number of rows for cte materialization */ + private int maxRowsInCteBuffer = 1000; + /** Mods cache size limit per fi */ private long modsCacheSizeLimitPerFI = 32 * 1024 * 1024; @@ -4173,6 +4179,22 @@ public long getSortBufferSize() { return sortBufferSize; } + public void setCteBufferSize(long cteBufferSize) { + this.cteBufferSize = cteBufferSize; + } + + public long getCteBufferSize() { + return cteBufferSize; + } + + public void setMaxRowsInCteBuffer(int maxRowsInCteBuffer) { + this.maxRowsInCteBuffer = maxRowsInCteBuffer; + } + + public int getMaxRowsInCteBuffer() { + return maxRowsInCteBuffer; + } + public void setModsCacheSizeLimitPerFI(long modsCacheSizeLimitPerFI) { this.modsCacheSizeLimitPerFI = modsCacheSizeLimitPerFI; } diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/conf/IoTDBDescriptor.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/conf/IoTDBDescriptor.java index 2915365262db..73b8764994c7 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/conf/IoTDBDescriptor.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/conf/IoTDBDescriptor.java @@ -1057,6 +1057,24 @@ public void loadProperties(TrimProperties properties) throws BadNodeUrlException // The buffer for sort operator to calculate loadFixedSizeLimitForQuery(properties, "sort_buffer_size_in_bytes", conf::setSortBufferSize); + // The buffer for cte materialization. 
+ long cteBufferSizeInBytes = + Long.parseLong( + properties.getProperty( + "cte_buffer_size_in_bytes", Long.toString(conf.getCteBufferSize()))); + if (cteBufferSizeInBytes > 0) { + conf.setCteBufferSize(cteBufferSizeInBytes); + } + + // max number of rows for cte materialization + int maxRowsInCteBuffer = + Integer.parseInt( + properties.getProperty( + "max_rows_in_cte_buffer", Integer.toString(conf.getMaxRowsInCteBuffer()))); + if (maxRowsInCteBuffer > 0) { + conf.setMaxRowsInCteBuffer(maxRowsInCteBuffer); + } + loadFixedSizeLimitForQuery( properties, "mods_cache_size_limit_per_fi_in_bytes", conf::setModsCacheSizeLimitPerFI); @@ -2181,6 +2199,24 @@ public synchronized void loadHotModifiedProps(TrimProperties properties) Long.parseLong( properties.getProperty( "max_object_file_size_in_byte", String.valueOf(conf.getMaxObjectSizeInByte())))); + + // The buffer for cte materialization. + long cteBufferSizeInBytes = + Long.parseLong( + properties.getProperty( + "cte_buffer_size_in_bytes", Long.toString(conf.getCteBufferSize()))); + if (cteBufferSizeInBytes > 0) { + conf.setCteBufferSize(cteBufferSizeInBytes); + } + // max number of rows for cte materialization + int maxRowsInCteBuffer = + Integer.parseInt( + properties.getProperty( + "max_rows_in_cte_buffer", Integer.toString(conf.getMaxRowsInCteBuffer()))); + if (maxRowsInCteBuffer > 0) { + conf.setMaxRowsInCteBuffer(maxRowsInCteBuffer); + } + } catch (Exception e) { if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/common/MPPQueryContext.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/common/MPPQueryContext.java index 27a1beba195a..fac3afff8b0b 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/common/MPPQueryContext.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/common/MPPQueryContext.java @@ -33,14 +33,24 @@ import org.apache.iotdb.db.queryengine.plan.analyze.lock.SchemaLockType; import org.apache.iotdb.db.queryengine.plan.planner.memory.MemoryReservationManager; import org.apache.iotdb.db.queryengine.plan.planner.memory.NotThreadSafeMemoryReservationManager; +import org.apache.iotdb.db.queryengine.plan.relational.analyzer.NodeRef; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Identifier; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Query; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Table; import org.apache.iotdb.db.queryengine.statistics.QueryPlanStatistics; +import org.apache.iotdb.db.utils.cte.CteDataStore; +import com.google.common.collect.ImmutableList; import org.apache.tsfile.read.filter.basic.Filter; +import org.apache.tsfile.utils.Pair; import java.time.ZoneId; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -54,6 +64,13 @@ public class MPPQueryContext implements IAuditEntity { private String sql; private final QueryId queryId; + /** The type of explanation for a query. */ + public enum ExplainType { + NONE, + EXPLAIN, + EXPLAIN_ANALYZE, + } + // LocalQueryId is kept to adapt to the old client, it's unique in current datanode. // Now it's only be used by EXPLAIN ANALYZE to get queryExecution. 
private long localQueryId; @@ -82,7 +99,12 @@ public class MPPQueryContext implements IAuditEntity { private final Set acquiredLocks = new HashSet<>(); - private boolean isExplainAnalyze = false; + // Determines the explanation mode for the query: + // - NONE: Normal query execution without explanation + // - EXPLAIN: Show the logical and physical query plan without execution + // - EXPLAIN_ANALYZE: Execute the query and collect detailed execution statistics + private ExplainType explainType = ExplainType.NONE; + private boolean isVerbose = false; private QueryPlanStatistics queryPlanStatistics = null; @@ -103,6 +125,26 @@ public class MPPQueryContext implements IAuditEntity { private boolean userQuery = false; + private Map, Query> cteQueries = new HashMap<>(); + + // Stores the EXPLAIN/EXPLAIN ANALYZE results for Common Table Expressions (CTEs) + // Key: CTE table reference + // Value: Pair containing (max line length of the explain output, list of formatted explain lines) + // This ensures consistent formatting between the main query and its CTE sub-queries + private final Map, Pair>> cteExplainResults = + new LinkedHashMap<>(); + // Tracks the materialization time cost (in nanoseconds) for each CTE to help optimize query + // planning + private final Map, Long> cteMaterializationCosts = new HashMap<>(); + + // Indicates whether this query context is for a sub-query triggered by the main query. + // Sub-queries are independent queries spawned from the main query (e.g., CTE sub-queries). + // When true, CTE materialization is skipped as it's handled by the main query context. + private boolean innerTriggeredQuery = false; + + // Tables in the subquery + private final Map, List> subQueryTables = new HashMap<>(); + public MPPQueryContext(QueryId queryId) { this.queryId = queryId; this.endPointBlackList = ConcurrentHashMap.newKeySet(); @@ -170,10 +212,20 @@ public void releaseMemoryForSchemaTree() { } public void prepareForRetry() { + if (!isInnerTriggeredQuery()) { + cleanUpCte(); + } this.initResultNodeContext(); this.releaseAllMemoryReservedForFrontEnd(); } + private void cleanUpCte() { + cteQueries.clear(); + cteExplainResults.clear(); + cteMaterializationCosts.clear(); + subQueryTables.clear(); + } + private void initResultNodeContext() { this.resultNodeContext = new ResultNodeContext(queryId); } @@ -282,12 +334,28 @@ public ZoneId getZoneId() { return session.getZoneId(); } - public void setExplainAnalyze(boolean explainAnalyze) { - isExplainAnalyze = explainAnalyze; + public void setExplainType(ExplainType explainType) { + this.explainType = explainType; + } + + public ExplainType getExplainType() { + return explainType; } public boolean isExplainAnalyze() { - return isExplainAnalyze; + return explainType == ExplainType.EXPLAIN_ANALYZE; + } + + public boolean isExplain() { + return explainType == ExplainType.EXPLAIN; + } + + public void setVerbose(boolean verbose) { + isVerbose = verbose; + } + + public boolean isVerbose() { + return isVerbose; } public long getAnalyzeCost() { @@ -435,6 +503,58 @@ public void setUserQuery(boolean userQuery) { this.userQuery = userQuery; } + public boolean isInnerTriggeredQuery() { + return innerTriggeredQuery; + } + + public void setInnerTriggeredQuery(boolean innerTriggeredQuery) { + this.innerTriggeredQuery = innerTriggeredQuery; + } + + public void addCteMaterializationCost(Table table, long cost) { + cteMaterializationCosts.put(NodeRef.of(table), cost); + } + + public Map, Long> getCteMaterializationCosts() { + return cteMaterializationCosts; + } 
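To make the new CTE bookkeeping above easier to follow, here is a hedged caller-side sketch of how the registry methods might be used when a CTE is materialized. Only `addCteQuery`, `addCteMaterializationCost`, and `getCteDataStore` are taken from this hunk; the helper name and the surrounding flow are assumptions for illustration.

```java
// Hypothetical helper, not part of this patch.
private void registerAndMaterializeCte(MPPQueryContext context, Table cteTable, Query cteQuery) {
  context.addCteQuery(cteTable, cteQuery);

  long startNanos = System.nanoTime();
  // ... execute the CTE sub-query here so that cteQuery's CteDataStore gets populated ...
  context.addCteMaterializationCost(cteTable, System.nanoTime() - startNanos);

  // May return null (e.g. if no Query was registered for this table), in which case
  // the planner would fall back to inlining the CTE instead of scanning the buffer.
  CteDataStore store = context.getCteDataStore(cteTable);
}
```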
+ + public void addCteQuery(Table table, Query query) { + cteQueries.put(NodeRef.of(table), query); + } + + public Map, Query> getCteQueries() { + return cteQueries; + } + + public CteDataStore getCteDataStore(Table table) { + Query query = cteQueries.get(NodeRef.of(table)); + if (query == null) { + return null; + } + return query.getCteDataStore(); + } + + public void setCteQueries(Map, Query> cteQueries) { + this.cteQueries = cteQueries; + } + + public void addSubQueryTables(Query query, List tables) { + subQueryTables.put(NodeRef.of(query), tables); + } + + public List getTables(Query query) { + return subQueryTables.getOrDefault(NodeRef.of(query), ImmutableList.of()); + } + + public void addCteExplainResult(Table table, Pair> cteExplainResult) { + cteExplainResults.put(NodeRef.of(table), cteExplainResult); + } + + public Map, Pair>> getCteExplainResults() { + return cteExplainResults; + } + // ================= Authentication Interfaces ========================= private AuditEventType auditEventType; diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/fragment/FragmentInstanceExecution.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/fragment/FragmentInstanceExecution.java index 08991ae7f7a3..9d099859e37d 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/fragment/FragmentInstanceExecution.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/fragment/FragmentInstanceExecution.java @@ -28,10 +28,9 @@ import org.apache.iotdb.db.queryengine.execution.driver.IDriver; import org.apache.iotdb.db.queryengine.execution.exchange.MPPDataExchangeManager; import org.apache.iotdb.db.queryengine.execution.exchange.sink.ISink; +import org.apache.iotdb.db.queryengine.execution.operator.ExplainAnalyzeOperator; import org.apache.iotdb.db.queryengine.execution.operator.OperatorContext; import org.apache.iotdb.db.queryengine.execution.schedule.IDriverScheduler; -import org.apache.iotdb.db.storageengine.dataregion.IDataRegionForQuery; -import org.apache.iotdb.db.storageengine.dataregion.VirtualDataRegion; import org.apache.iotdb.db.utils.SetThreadName; import org.apache.iotdb.mpp.rpc.thrift.TFetchFragmentInstanceStatisticsResp; import org.apache.iotdb.mpp.rpc.thrift.TOperatorStatistics; @@ -60,6 +59,11 @@ public class FragmentInstanceExecution { // It will be set to null while this FI is FINISHED private List drivers; + // Indicates whether this fragment instance should be ignored for statistics collection. + // This is true when the fragment instance contains ExplainAnalyzeOperator, which is + // a virtual fragment used for EXPLAIN ANALYZE and should not be included in query statistics. 
+ boolean shouldIgnoreForStatistics; + // It will be set to null while this FI is FINISHED private ISink sink; @@ -111,6 +115,7 @@ private FragmentInstanceExecution( this.stateMachine = stateMachine; this.timeoutInMs = timeoutInMs; this.exchangeManager = exchangeManager; + this.shouldIgnoreForStatistics = shouldIgnoreForStatistics(); } public FragmentInstanceState getInstanceState() { @@ -141,17 +146,37 @@ public FragmentInstanceStateMachine getStateMachine() { return stateMachine; } + // Check if this fragment instance should be ignored for statistics + // (i.e., it contains ExplainAnalyzeOperator) + private boolean shouldIgnoreForStatistics() { + if (drivers == null || drivers.isEmpty()) { + return false; + } + // Check if any driver contains ExplainAnalyzeOperator + return drivers.stream() + .anyMatch( + driver -> + driver.getDriverContext().getOperatorContexts().stream() + .anyMatch( + operatorContext -> + ExplainAnalyzeOperator.class + .getSimpleName() + .equals(operatorContext.getOperatorType()))); + } + // Fill Fragment-Level info for statistics private boolean fillFragmentInstanceStatistics( FragmentInstanceContext context, TFetchFragmentInstanceStatisticsResp statistics) { statistics.setFragmentInstanceId(context.getId().toThrift()); statistics.setQueryStatistics(context.getQueryStatistics().toThrift()); statistics.setState(getInstanceState().toString()); - IDataRegionForQuery dataRegionForQuery = context.getDataRegion(); - if (dataRegionForQuery instanceof VirtualDataRegion) { + // Previously we ignore statistics when current data region is instance of + // VirtualDataRegion. Now data region of a CteScanNode is also virtual. + if (shouldIgnoreForStatistics) { // We don't need to output the region having ExplainAnalyzeOperator only. return false; } + statistics.setDataRegion(context.getDataRegion().getDataRegionIdString()); statistics.setIp(CONFIG.getInternalAddress() + ":" + CONFIG.getInternalPort()); statistics.setStartTimeInMS(context.getStartTime()); diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/ExplainAnalyzeOperator.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/ExplainAnalyzeOperator.java index 13adf9b1d65a..b4f1a5c7261b 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/ExplainAnalyzeOperator.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/ExplainAnalyzeOperator.java @@ -32,6 +32,8 @@ import org.apache.iotdb.db.queryengine.plan.Coordinator; import org.apache.iotdb.db.queryengine.plan.execution.QueryExecution; import org.apache.iotdb.db.queryengine.plan.planner.plan.FragmentInstance; +import org.apache.iotdb.db.queryengine.plan.relational.analyzer.NodeRef; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Table; import org.apache.iotdb.db.queryengine.statistics.FragmentInstanceStatisticsDrawer; import org.apache.iotdb.db.queryengine.statistics.QueryStatisticsFetcher; import org.apache.iotdb.db.queryengine.statistics.StatisticLine; @@ -45,6 +47,7 @@ import org.apache.tsfile.read.common.block.TsBlockBuilder; import org.apache.tsfile.read.common.block.column.TimeColumnBuilder; import org.apache.tsfile.utils.Binary; +import org.apache.tsfile.utils.Pair; import org.apache.tsfile.utils.RamUsageEstimator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -56,6 +59,11 @@ import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; +import static 
org.apache.iotdb.commons.conf.IoTDBConstant.BLANK; +import static org.apache.iotdb.commons.conf.IoTDBConstant.CTE_QUERY; +import static org.apache.iotdb.commons.conf.IoTDBConstant.MAIN_QUERY; +import static org.apache.iotdb.commons.conf.IoTDBConstant.SPACE; + public class ExplainAnalyzeOperator implements ProcessOperator { private static final Logger logger = LoggerFactory.getLogger(IoTDBConstant.EXPLAIN_ANALYZE_LOGGER_NAME); @@ -63,6 +71,8 @@ public class ExplainAnalyzeOperator implements ProcessOperator { RamUsageEstimator.shallowSizeOfInstance(ExplainAnalyzeOperator.class); private static final String LOG_TITLE = "---------------------Intermediate Results of EXPLAIN ANALYZE---------------------:"; + private static final double NS_TO_MS_FACTOR = 1.0 / 1000000; + private final OperatorContext operatorContext; private final Operator child; private final boolean verbose; @@ -142,7 +152,7 @@ private List buildFragmentInstanceStatistics( for (int i = 0; i < fragmentInstanceStatisticsDrawer.getMaxLineLength() - line.getValue().length(); i++) { - sb.append(" "); + sb.append(SPACE); } analyzeResult.add(sb.toString()); } @@ -172,8 +182,10 @@ private void logIntermediateResultIfTimeout() { } private TsBlock buildResult() throws FragmentInstanceFetchException { - - List analyzeResult = buildFragmentInstanceStatistics(instances, verbose); + Map, Pair>> cteAnalyzeResults = + mppQueryContext.getCteExplainResults(); + List mainAnalyzeResult = buildFragmentInstanceStatistics(instances, verbose); + List analyzeResult = mergeAnalyzeResults(cteAnalyzeResults, mainAnalyzeResult); TsBlockBuilder builder = new TsBlockBuilder(Collections.singletonList(TSDataType.TEXT)); TimeColumnBuilder timeColumnBuilder = builder.getTimeColumnBuilder(); @@ -187,6 +199,64 @@ private TsBlock buildResult() throws FragmentInstanceFetchException { return builder.build(); } + private List mergeAnalyzeResults( + Map, Pair>> cteAnalyzeResults, + List mainAnalyzeResult) { + if (cteAnalyzeResults.isEmpty()) { + return mainAnalyzeResult; + } + + final int maxLineLength = + Math.max( + cteAnalyzeResults.values().stream().mapToInt(p -> p.left).max().orElse(0), + fragmentInstanceStatisticsDrawer.getMaxLineLength()); + + List analyzeResult = new ArrayList<>(); + cteAnalyzeResults.forEach( + (table, pair) -> { + analyzeResult.add(String.format("%s : '%s'", CTE_QUERY, table.getNode().getName())); + for (String line : pair.right) { + StringBuilder sb = new StringBuilder(); + sb.append(line); + for (int i = 0; i < maxLineLength - line.length(); i++) { + sb.append(SPACE); + } + analyzeResult.add(sb.toString()); + } + analyzeResult.add(BLANK); + }); + + analyzeResult.add(MAIN_QUERY); + mainAnalyzeResult.forEach( + line -> { + StringBuilder sb = new StringBuilder(); + sb.append(line); + for (int i = 0; i < maxLineLength - line.length(); i++) { + sb.append(SPACE); + } + analyzeResult.add(sb.toString()); + if (line.contains("Logical Plan Cost:")) { + mppQueryContext + .getCteMaterializationCosts() + .forEach( + (tableRef, cost) -> { + sb.setLength(0); + sb.append( + String.format( + " %s Materialization Total Cost: %.3f ms", + tableRef.getNode().getName().toString(), cost * NS_TO_MS_FACTOR)); + int currentLength = sb.length(); + for (int i = 0; i < maxLineLength - currentLength; i++) { + sb.append(SPACE); + } + analyzeResult.add(sb.toString()); + }); + } + }); + + return analyzeResult; + } + @Override public boolean hasNext() throws Exception { return child.hasNext() || !outputResult; diff --git 
a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/source/relational/CteScanOperator.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/source/relational/CteScanOperator.java new file mode 100644 index 000000000000..dc2cafab1791 --- /dev/null +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/source/relational/CteScanOperator.java @@ -0,0 +1,119 @@ +/* + * + * * Licensed to the Apache Software Foundation (ASF) under one + * * or more contributor license agreements. See the NOTICE file + * * distributed with this work for additional information + * * regarding copyright ownership. The ASF licenses this file + * * to you under the Apache License, Version 2.0 (the + * * "License"); you may not use this file except in compliance + * * with the License. You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, + * * software distributed under the License is distributed on an + * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * * KIND, either express or implied. See the License for the + * * specific language governing permissions and limitations + * * under the License. + * + */ + +package org.apache.iotdb.db.queryengine.execution.operator.source.relational; + +import org.apache.iotdb.db.queryengine.execution.MemoryEstimationHelper; +import org.apache.iotdb.db.queryengine.execution.operator.OperatorContext; +import org.apache.iotdb.db.queryengine.execution.operator.source.SourceOperator; +import org.apache.iotdb.db.queryengine.plan.planner.memory.MemoryReservationManager; +import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNodeId; +import org.apache.iotdb.db.utils.cte.CteDataReader; +import org.apache.iotdb.db.utils.cte.CteDataStore; +import org.apache.iotdb.db.utils.cte.MemoryReader; + +import org.apache.tsfile.common.conf.TSFileDescriptor; +import org.apache.tsfile.read.common.block.TsBlock; +import org.apache.tsfile.utils.RamUsageEstimator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static java.util.Objects.requireNonNull; + +public class CteScanOperator implements SourceOperator { + private static final Logger LOGGER = LoggerFactory.getLogger(CteScanOperator.class); + private static final long INSTANCE_SIZE = + RamUsageEstimator.shallowSizeOfInstance(CteScanOperator.class); + + private final long maxReturnSize = + TSFileDescriptor.getInstance().getConfig().getMaxTsBlockSizeInBytes(); + + private final OperatorContext operatorContext; + private final PlanNodeId sourceId; + private final CteDataReader dataReader; + + public CteScanOperator( + OperatorContext operatorContext, + PlanNodeId sourceId, + CteDataStore dataStore, + MemoryReservationManager memoryReservationManager) { + requireNonNull(dataStore, "dataStore is null"); + this.operatorContext = operatorContext; + this.sourceId = sourceId; + this.dataReader = new MemoryReader(dataStore, memoryReservationManager); + } + + @Override + public TsBlock next() throws Exception { + return dataReader.next(); + } + + @Override + public boolean hasNext() throws Exception { + return dataReader.hasNext(); + } + + @Override + public void close() throws Exception { + try { + dataReader.close(); + } catch (Exception e) { + LOGGER.error("Fail to close CteDataReader", e); + } + } + + @Override + public boolean isFinished() throws Exception { + return !hasNextWithTimer(); + } + + 
@Override + public long calculateMaxPeekMemory() { + return calculateRetainedSizeAfterCallingNext() + calculateMaxReturnSize(); + } + + @Override + public long calculateMaxReturnSize() { + return maxReturnSize; + } + + @Override + public long calculateRetainedSizeAfterCallingNext() { + return 0L; + } + + @Override + public long ramBytesUsed() { + return INSTANCE_SIZE + + MemoryEstimationHelper.getEstimatedSizeOfAccountableObject(operatorContext) + + dataReader.ramBytesUsed(); + } + + @Override + public OperatorContext getOperatorContext() { + return operatorContext; + } + + @Override + public PlanNodeId getSourceId() { + return sourceId; + } +} diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/Coordinator.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/Coordinator.java index 3210d277d861..5d8d0e03a4a4 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/Coordinator.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/Coordinator.java @@ -40,6 +40,7 @@ import org.apache.iotdb.db.protocol.session.PreparedStatementInfo; import org.apache.iotdb.db.queryengine.common.DataNodeEndPoints; import org.apache.iotdb.db.queryengine.common.MPPQueryContext; +import org.apache.iotdb.db.queryengine.common.MPPQueryContext.ExplainType; import org.apache.iotdb.db.queryengine.common.QueryId; import org.apache.iotdb.db.queryengine.common.SessionInfo; import org.apache.iotdb.db.queryengine.execution.QueryIdGenerator; @@ -93,6 +94,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Parameter; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.PipeStatement; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Prepare; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Query; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.ReconstructRegion; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.RelationalAuthorStatement; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.RemoveAINode; @@ -130,6 +132,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.StartRepairData; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.StopRepairData; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.SubscriptionStatement; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Table; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.UnloadModel; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Use; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.WrappedInsertStatement; @@ -394,6 +397,50 @@ private IQueryExecution createQueryExecutionForTreeModel( return new QueryExecution(treeModelPlanner, queryContext, executor); } + /** + * This method is specifically used following subquery: + * + *

1. When an uncorrelated scalar subquery is handled + * (fetchUncorrelatedSubqueryResultForPredicate), we try to fold it into a constant value. Since + * a CTE might be referenced, we need to add the CTE materialization result into the subquery's + * MPPQueryContext. + * + *

2. When CTE subquery is handled (fetchCteQueryResult), the main query, however, might be + * 'Explain' or 'Explain Analyze' statement. So we need to keep explain/explain analyze results + * along with CTE query dataset. + */ + public ExecutionResult executeForTableModel( + org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Statement statement, + SqlParser sqlParser, + IClientSession clientSession, + long queryId, + SessionInfo session, + String sql, + Metadata metadata, + Map, Query> cteQueries, + ExplainType explainType, + long timeOut, + boolean userQuery) { + return execution( + queryId, + session, + sql, + userQuery, + ((queryContext, startTime) -> { + queryContext.setInnerTriggeredQuery(true); + queryContext.setCteQueries(cteQueries); + queryContext.setExplainType(explainType); + return createQueryExecutionForTableModel( + statement, + sqlParser, + clientSession, + queryContext, + metadata, + timeOut > 0 ? timeOut : CONFIG.getQueryTimeoutThreshold(), + startTime); + })); + } + public ExecutionResult executeForTableModel( org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Statement statement, SqlParser sqlParser, diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/analyze/AnalyzeVisitor.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/analyze/AnalyzeVisitor.java index 8abc92e9d4ab..4cae2c0f2884 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/analyze/AnalyzeVisitor.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/analyze/AnalyzeVisitor.java @@ -48,6 +48,7 @@ import org.apache.iotdb.db.exception.sql.StatementAnalyzeException; import org.apache.iotdb.db.queryengine.common.DeviceContext; import org.apache.iotdb.db.queryengine.common.MPPQueryContext; +import org.apache.iotdb.db.queryengine.common.MPPQueryContext.ExplainType; import org.apache.iotdb.db.queryengine.common.TimeseriesContext; import org.apache.iotdb.db.queryengine.common.header.DatasetHeader; import org.apache.iotdb.db.queryengine.common.header.DatasetHeaderFactory; @@ -240,6 +241,7 @@ public Analysis visitNode(StatementNode node, MPPQueryContext context) { @Override public Analysis visitExplain(ExplainStatement explainStatement, MPPQueryContext context) { Analysis analysis = visitQuery(explainStatement.getQueryStatement(), context); + context.setExplainType(ExplainType.EXPLAIN); analysis.setRealStatement(explainStatement); analysis.setFinishQueryAfterAnalyze(true); analysis.setDatabaseName(context.getDatabaseName().orElse(null)); @@ -250,7 +252,7 @@ public Analysis visitExplain(ExplainStatement explainStatement, MPPQueryContext public Analysis visitExplainAnalyze( ExplainAnalyzeStatement explainAnalyzeStatement, MPPQueryContext context) { Analysis analysis = visitQuery(explainAnalyzeStatement.getQueryStatement(), context); - context.setExplainAnalyze(true); + context.setExplainType(ExplainType.EXPLAIN_ANALYZE); analysis.setRealStatement(explainAnalyzeStatement); analysis.setRespDatasetHeader( new DatasetHeader( diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/execution/memory/TableModelStatementMemorySourceVisitor.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/execution/memory/TableModelStatementMemorySourceVisitor.java index 4016bd3885dd..932d94197922 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/execution/memory/TableModelStatementMemorySourceVisitor.java +++ 
b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/execution/memory/TableModelStatementMemorySourceVisitor.java @@ -27,6 +27,7 @@ import org.apache.iotdb.db.queryengine.plan.planner.plan.LogicalQueryPlan; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanGraphPrinter; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNode; +import org.apache.iotdb.db.queryengine.plan.relational.analyzer.NodeRef; import org.apache.iotdb.db.queryengine.plan.relational.planner.SymbolAllocator; import org.apache.iotdb.db.queryengine.plan.relational.planner.TableLogicalPlanner; import org.apache.iotdb.db.queryengine.plan.relational.planner.distribute.TableDistributedPlanGenerator; @@ -36,13 +37,20 @@ import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Explain; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Node; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.ShowDevice; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Table; import org.apache.tsfile.enums.TSDataType; import org.apache.tsfile.read.common.block.TsBlock; +import org.apache.tsfile.utils.Pair; +import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; +import static org.apache.iotdb.commons.conf.IoTDBConstant.BLANK; +import static org.apache.iotdb.commons.conf.IoTDBConstant.CTE_QUERY; +import static org.apache.iotdb.commons.conf.IoTDBConstant.MAIN_QUERY; import static org.apache.iotdb.db.queryengine.common.header.DatasetHeader.EMPTY_HEADER; import static org.apache.iotdb.db.queryengine.execution.warnings.WarningCollector.NOOP; import static org.apache.iotdb.db.queryengine.plan.execution.memory.StatementMemorySourceVisitor.getStatementMemorySource; @@ -95,12 +103,16 @@ public StatementMemorySource visitExplain( Coordinator.getInstance().getDataNodeLocationSupplier()) .generateDistributedPlanWithOptimize(planContext); - final List lines = + List mainExplainResult = outputNodeWithExchange.accept( new PlanGraphPrinter(), new PlanGraphPrinter.GraphContext( context.getQueryContext().getTypeProvider().getTemplatedInfo())); + Map, Pair>> cteExplainResults = + context.getQueryContext().getCteExplainResults(); + List lines = mergeExplainResults(cteExplainResults, mainExplainResult); + return getStatementMemorySource(header, lines); } @@ -117,4 +129,24 @@ public StatementMemorySource visitCountDevice( return new StatementMemorySource( node.getTsBlock(context.getAnalysis()), node.getDataSetHeader()); } + + private List mergeExplainResults( + Map, Pair>> cteExplainResults, + List mainExplainResult) { + if (cteExplainResults.isEmpty()) { + return mainExplainResult; + } + + List analyzeResult = new ArrayList<>(); + cteExplainResults.forEach( + (table, pair) -> { + analyzeResult.add(String.format("%s : '%s'", CTE_QUERY, table.getNode().getName())); + analyzeResult.addAll(pair.getRight()); + analyzeResult.add(BLANK); + }); + analyzeResult.add(MAIN_QUERY); + analyzeResult.addAll(mainExplainResult); + + return analyzeResult; + } } diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/TableOperatorGenerator.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/TableOperatorGenerator.java index e699ee417deb..cd88853793b9 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/TableOperatorGenerator.java +++ 
b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/TableOperatorGenerator.java @@ -119,6 +119,7 @@ import org.apache.iotdb.db.queryengine.execution.operator.source.relational.AbstractTableScanOperator; import org.apache.iotdb.db.queryengine.execution.operator.source.relational.AsofMergeSortInnerJoinOperator; import org.apache.iotdb.db.queryengine.execution.operator.source.relational.AsofMergeSortLeftJoinOperator; +import org.apache.iotdb.db.queryengine.execution.operator.source.relational.CteScanOperator; import org.apache.iotdb.db.queryengine.execution.operator.source.relational.DefaultAggTableScanOperator; import org.apache.iotdb.db.queryengine.execution.operator.source.relational.DeviceIteratorScanOperator; import org.apache.iotdb.db.queryengine.execution.operator.source.relational.InformationSchemaTableScanOperator; @@ -183,6 +184,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationTreeDeviceViewScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AssignUniqueId; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CollectNode; +import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CteScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.DeviceTableScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.EnforceSingleRowNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.ExchangeNode; @@ -1191,6 +1193,22 @@ private void addSource( Long.MAX_VALUE); } + @Override + public Operator visitCteScan(CteScanNode node, LocalExecutionPlanContext context) { + OperatorContext operatorContext = + context + .getDriverContext() + .addOperatorContext( + context.getNextOperatorId(), + node.getPlanNodeId(), + CteScanOperator.class.getSimpleName()); + return new CteScanOperator( + operatorContext, + node.getPlanNodeId(), + node.getDataStore(), + context.getInstanceContext().getMemoryReservationContext()); + } + @Override public Operator visitTreeDeviceViewScan( TreeDeviceViewScanNode node, LocalExecutionPlanContext context) { diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/DistributedQueryPlan.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/DistributedQueryPlan.java index 91cbe9ee97e3..d9970103512a 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/DistributedQueryPlan.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/DistributedQueryPlan.java @@ -21,16 +21,19 @@ import org.apache.iotdb.commons.utils.TestOnly; +import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; public class DistributedQueryPlan { private final SubPlan rootSubPlan; private final List instances; + private final List planText; public DistributedQueryPlan(SubPlan rootSubPlan, List instances) { this.rootSubPlan = rootSubPlan; this.instances = instances; + this.planText = new ArrayList<>(); } @TestOnly @@ -45,4 +48,12 @@ public SubPlan getRootSubPlan() { public List getInstances() { return instances; } + + public List getPlanText() { + return planText; + } + + public void addPlanText(List plan) { + planText.addAll(plan); + } } diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/node/PlanGraphPrinter.java 
b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/node/PlanGraphPrinter.java index bd4dd912e4c1..61eec1fae4ef 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/node/PlanGraphPrinter.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/node/PlanGraphPrinter.java @@ -69,6 +69,8 @@ import org.apache.iotdb.db.queryengine.plan.relational.planner.Symbol; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationTreeDeviceViewScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AssignUniqueId; +import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CorrelatedJoinNode; +import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CteScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.DeviceTableScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.EnforceSingleRowNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.ExceptNode; @@ -623,6 +625,14 @@ public List visitColumnInject(ColumnInjectNode node, GraphContext contex } // =============== Methods below are used for table model ================ + @Override + public List visitCteScan(CteScanNode node, GraphContext context) { + List boxValue = new ArrayList<>(); + boxValue.add(String.format("CteScan-%s", node.getPlanNodeId().getId())); + boxValue.add(String.format("OutputSymbols: %s", node.getOutputSymbols())); + return render(node, boxValue, context); + } + @Override public List visitTableScan(TableScanNode node, GraphContext context) { DeviceTableScanNode deviceTableScanNode = null; @@ -990,6 +1000,16 @@ public List visitTopK( return render(node, boxValue, context); } + @Override + public List visitCorrelatedJoin(CorrelatedJoinNode node, GraphContext context) { + List boxValue = new ArrayList<>(); + boxValue.add(String.format("CorrelatedJoin-%s", node.getPlanNodeId().getId())); + boxValue.add(String.format("JoinType: %s", node.getJoinType())); + boxValue.add(String.format("Correlation: %s", node.getCorrelation())); + boxValue.add(String.format("Filter: %s", node.getFilter())); + return render(node, boxValue, context); + } + @Override public List visitJoin( org.apache.iotdb.db.queryengine.plan.relational.planner.node.JoinNode node, diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/node/PlanVisitor.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/node/PlanVisitor.java index 4bebb692254a..e4ed7e342314 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/node/PlanVisitor.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/node/PlanVisitor.java @@ -123,6 +123,7 @@ import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.RelationalInsertTabletNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.iterative.GroupReference; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationTreeDeviceViewScanNode; +import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CteScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.DeviceTableScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.ExceptNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.GapFillNode; @@ -215,6 +216,10 @@ public R 
visitTimeSeriesRegionScan(TimeseriesRegionScanNode node, C context) { return visitRegionScan(node, context); } + public R visitCteScan(CteScanNode node, C context) { + return visitSourceNode(node, context); + } + // single child -------------------------------------------------------------------------------- public R visitSingleChildProcess(SingleChildProcessNode node, C context) { diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/Analysis.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/Analysis.java index b42559710d61..4a0fe9daa570 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/Analysis.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/Analysis.java @@ -69,6 +69,8 @@ import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Table; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.TableFunctionInvocation; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.WindowFrame; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.With; +import org.apache.iotdb.db.queryengine.plan.relational.sql.parser.SqlParser; import org.apache.iotdb.db.queryengine.plan.statement.component.FillPolicy; import com.google.common.collect.ArrayListMultimap; @@ -123,6 +125,10 @@ public class Analysis implements IAnalysis { private final Map, Query> namedQueries = new LinkedHashMap<>(); + // WITH clause stored during analyze phase. Required for constant folding and CTE materialization + // subqueries, which cannot directly access the WITH clause + private With with; + // map expandable query to the node being the inner recursive reference private final Map, Node> expandableNamedQueries = new LinkedHashMap<>(); @@ -252,6 +258,11 @@ public class Analysis implements IAnalysis { private boolean isQuery = false; + // SqlParser is needed during query planning phase for executing uncorrelated scalar subqueries + // in advance (predicate folding). The planner needs to parse and execute these subqueries + // independently to utilize predicate pushdown optimization. 
+ private SqlParser sqlParser; + public Analysis(@Nullable Statement root, Map, Expression> parameters) { this.root = root; this.parameters = ImmutableMap.copyOf(requireNonNull(parameters, "parameters is null")); @@ -274,10 +285,30 @@ public void setUpdateType(String updateType) { this.updateType = updateType; } + public SqlParser getSqlParser() { + return sqlParser; + } + + public void setSqlParser(SqlParser sqlParser) { + this.sqlParser = sqlParser; + } + public Query getNamedQuery(Table table) { return namedQueries.get(NodeRef.of(table)); } + public Map, Query> getNamedQueries() { + return namedQueries; + } + + public With getWith() { + return with; + } + + public void setWith(With with) { + this.with = with; + } + public boolean isAnalyzed(Expression expression) { return expression instanceof DataType || types.containsKey(NodeRef.of(expression)); } diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/Analyzer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/Analyzer.java index 3f7c5322c8e6..cab532fd2d38 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/Analyzer.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/Analyzer.java @@ -80,6 +80,12 @@ public Analysis analyze(Statement statement) { warningCollector); Analysis analysis = new Analysis(rewrittenStatement, parameterLookup); + analysis.setSqlParser(statementAnalyzerFactory.getSqlParser()); + // Register CTE passed by parent query. + context + .getCteQueries() + .forEach((tableRef, query) -> analysis.registerNamedQuery(tableRef.getNode(), query)); + Statement innerStatement = rewrittenStatement instanceof PipeEnriched ? 
((PipeEnriched) rewrittenStatement).getInnerStatement() diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/ExpressionAnalyzer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/ExpressionAnalyzer.java index 6157612b4238..663c92ad83e9 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/ExpressionAnalyzer.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/ExpressionAnalyzer.java @@ -1717,12 +1717,12 @@ private Type analyzePredicateWithSubquery( return subqueryType; } - private Type analyzeSubquery( - SubqueryExpression node, StackableAstVisitorContext context) { + private Type analyzeSubquery(SubqueryExpression node, StackableAstVisitorContext ctx) { StatementAnalyzer analyzer = - statementAnalyzerFactory.apply(node, context.getContext().getCorrelationSupport()); - Scope subqueryScope = Scope.builder().withParent(context.getContext().getScope()).build(); + statementAnalyzerFactory.apply(node, ctx.getContext().getCorrelationSupport()); + Scope subqueryScope = Scope.builder().withParent(ctx.getContext().getScope()).build(); Scope queryScope = analyzer.analyze(node.getQuery(), subqueryScope); + context.addSubQueryTables(node.getQuery(), queryScope.getTables()); ImmutableList.Builder fields = ImmutableList.builder(); for (int i = 0; i < queryScope.getRelationType().getAllFieldCount(); i++) { diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/Scope.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/Scope.java index 0e7588a59f49..e2fd0cc72767 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/Scope.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/Scope.java @@ -23,12 +23,16 @@ import org.apache.iotdb.db.exception.sql.SemanticException; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.AllColumns; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Expression; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Identifier; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.QualifiedName; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Table; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.WithQuery; import org.apache.iotdb.rpc.TSStatusCode; import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -51,6 +55,11 @@ public class Scope { private final RelationType relation; private final Map namedQueries; + // Tables to access for the current relation. For CTE materialization and constant folding + // subqueries, non-materialized CTEs in tables must be identified, and their definitions + // attached to the subquery context. 
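Reviewer note (illustrative, not part of the patch): the new table tracking lets the analyzer record which table identifiers a (sub)query scope has seen, so that the matching non-materialized CTE definitions can later be attached to the subquery's context. Using only the accessors added in this hunk, the intended flow is roughly:

// Hypothetical usage of the new Scope table tracking (types from this patch assumed on classpath).
Scope parent = Scope.builder().build();
Scope child =
    Scope.builder()
        .withParent(parent)
        .withTables(parent.getTables()) // inherit table identifiers already seen by the parent
        .build();

// visitTable() registers each unqualified table reference it encounters:
// child.addTable(table);              // stores new Identifier(table.getName().getSuffix())

// later, e.g. in analyzeWith()/analyzeSubquery(), the collected names are attached to the query:
// queryContext.addSubQueryTables(query, child.getTables());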
+ private List tables; + public static Scope create() { return builder().build(); } @@ -64,16 +73,30 @@ private Scope( boolean queryBoundary, RelationId relationId, RelationType relation, - Map namedQueries) { + Map namedQueries, + List tables) { this.parent = requireNonNull(parent, "parent is null"); this.relationId = requireNonNull(relationId, "relationId is null"); this.queryBoundary = queryBoundary; this.relation = requireNonNull(relation, "relation is null"); this.namedQueries = ImmutableMap.copyOf(requireNonNull(namedQueries, "namedQueries is null")); + this.tables = new ArrayList<>(requireNonNull(tables, "tables is null")); + } + + public void addTable(Table table) { + tables.add(new Identifier(table.getName().getSuffix())); + } + + public void setTables(List tables) { + this.tables = tables; + } + + public List getTables() { + return Collections.unmodifiableList(tables); } public Scope withRelationType(RelationType relationType) { - return new Scope(parent, queryBoundary, relationId, relationType, namedQueries); + return new Scope(parent, queryBoundary, relationId, relationType, namedQueries, tables); } public Scope getQueryBoundaryScope() { @@ -327,6 +350,7 @@ public static final class Builder { private RelationId relationId = RelationId.anonymous(); private RelationType relationType = new RelationType(); private final Map namedQueries = new HashMap<>(); + private final List tables = new ArrayList<>(); private Optional parent = Optional.empty(); private boolean queryBoundary; @@ -334,6 +358,7 @@ public Builder like(Scope other) { relationId = other.relationId; relationType = other.relation; namedQueries.putAll(other.namedQueries); + tables.addAll(other.tables); parent = other.parent; queryBoundary = other.queryBoundary; return this; @@ -364,12 +389,17 @@ public Builder withNamedQuery(String name, WithQuery withQuery) { return this; } + public Builder withTables(List tables) { + this.tables.addAll(tables); + return this; + } + public boolean containsNamedQuery(String name) { return namedQueries.containsKey(name); } public Scope build() { - return new Scope(parent, queryBoundary, relationId, relationType, namedQueries); + return new Scope(parent, queryBoundary, relationId, relationType, namedQueries, tables); } } diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/StatementAnalyzer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/StatementAnalyzer.java index e13c52ba8b16..9220ad55afa5 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/StatementAnalyzer.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/StatementAnalyzer.java @@ -26,6 +26,7 @@ import org.apache.iotdb.commons.udf.utils.UDFDataTypeTransformer; import org.apache.iotdb.db.exception.sql.SemanticException; import org.apache.iotdb.db.queryengine.common.MPPQueryContext; +import org.apache.iotdb.db.queryengine.common.MPPQueryContext.ExplainType; import org.apache.iotdb.db.queryengine.common.SessionInfo; import org.apache.iotdb.db.queryengine.execution.warnings.IoTDBWarning; import org.apache.iotdb.db.queryengine.execution.warnings.WarningCollector; @@ -196,6 +197,7 @@ import org.apache.iotdb.db.queryengine.plan.statement.crud.InsertBaseStatement; import org.apache.iotdb.db.schemaengine.table.DataNodeTableCache; import org.apache.iotdb.db.schemaengine.table.DataNodeTreeViewSchemaUtils; +import 
org.apache.iotdb.db.utils.cte.CteDataStore; import org.apache.iotdb.rpc.RpcUtils; import org.apache.iotdb.rpc.TSStatusCode; import org.apache.iotdb.udf.api.exception.UDFException; @@ -831,13 +833,15 @@ protected Scope visitLoadTsFile(final LoadTsFile node, final Optional sco @Override protected Scope visitExplain(Explain node, Optional context) { + queryContext.setExplainType(ExplainType.EXPLAIN); analysis.setFinishQueryAfterAnalyze(); return visitQuery((Query) node.getStatement(), context); } @Override protected Scope visitExplainAnalyze(ExplainAnalyze node, Optional context) { - queryContext.setExplainAnalyze(true); + queryContext.setExplainType(ExplainType.EXPLAIN_ANALYZE); + queryContext.setVerbose(node.isVerbose()); return visitQuery((Query) node.getStatement(), context); } @@ -891,6 +895,7 @@ protected Scope visitQuery(Query node, Optional context) { Scope.builder() .withParent(withScope) .withRelationType(RelationId.of(node), queryBodyScope.getRelationType()) + .withTables(queryBodyScope.getTables()) .build(); analysis.setScope(node, queryScope); @@ -916,6 +921,7 @@ private Scope analyzeWith(Query node, Optional scope) { // analyze WITH clause With with = node.getWith().get(); + analysis.setWith(with); Scope.Builder withScopeBuilder = scopeBuilder(scope); for (WithQuery withQuery : with.getQueries()) { @@ -932,7 +938,7 @@ private Scope analyzeWith(Query node, Optional scope) { if (!isRecursive) { Query query = withQuery.getQuery(); - analyze(query, withScopeBuilder.build()); + Scope queryScope = analyze(query, withScopeBuilder.build()); // check if all or none of the columns are explicitly alias if (withQuery.getColumnNames().isPresent()) { @@ -942,6 +948,7 @@ private Scope analyzeWith(Query node, Optional scope) { } withScopeBuilder.withNamedQuery(name, withQuery); + queryContext.addSubQueryTables(withQuery.getQuery(), queryScope.getTables()); } } Scope withScope = withScopeBuilder.build(); @@ -3061,6 +3068,7 @@ protected Scope visitSetOperation(SetOperation node, Optional scope) { @Override protected Scope visitTable(Table table, Optional scope) { if (!table.getName().getPrefix().isPresent()) { + scope.ifPresent(s -> s.addTable(table)); // is this a reference to a WITH query? Optional withQuery = createScope(scope).getNamedQuery(table.getName().getSuffix()); @@ -3092,7 +3100,36 @@ protected Scope visitTable(Table table, Optional scope) { analysis.setRelationName( table, QualifiedName.of(name.getDatabaseName(), name.getObjectName())); - Optional tableSchema = metadata.getTableSchema(sessionContext, name); + // check if table schema is found in CTE data stores + CteDataStore dataStore = queryContext.getCteDataStore(table); + Optional tableSchema = Optional.empty(); + if (dataStore != null) { + tableSchema = Optional.of(dataStore.getTableSchema()); + List columnIndex2TsBlockColumnIndexList = + dataStore.getColumnIndex2TsBlockColumnIndexList(); + if (columnIndex2TsBlockColumnIndexList != null + && !columnIndex2TsBlockColumnIndexList.isEmpty()) { + // Check if the list is completely sequential (0, 1, 2, ...) 
+ boolean isSequential = true; + for (int i = 0; i < columnIndex2TsBlockColumnIndexList.size(); i++) { + if (columnIndex2TsBlockColumnIndexList.get(i) != i) { + isSequential = false; + break; + } + } + + // Generate new TableSchema with reordered columns only if not sequential + if (!isSequential) { + tableSchema = + reorderTableSchemaColumns(tableSchema.get(), columnIndex2TsBlockColumnIndexList); + } + } + } + // If table schema is not found, check if it is in metadata + if (!tableSchema.isPresent()) { + tableSchema = metadata.getTableSchema(sessionContext, name); + } + // This can only be a table if (!tableSchema.isPresent()) { TableMetadataImpl.throwTableNotExistsException( @@ -3111,9 +3148,21 @@ protected Scope visitTable(Table table, Optional scope) { return createAndAssignScope(table, scope, relationType); } + private Optional reorderTableSchemaColumns( + TableSchema tableSchema, List columnIndex2TsBlockColumnIndexList) { + List columnSchemas = tableSchema.getColumns(); + final List columnSchemaList = + columnIndex2TsBlockColumnIndexList.stream() + .map(columnSchemas::get) + .collect(Collectors.toList()); + + return Optional.of(new TableSchema(tableSchema.getTableName(), columnSchemaList)); + } + private Scope createScopeForCommonTableExpression( Table table, Optional scope, WithQuery withQuery) { Query query = withQuery.getQuery(); + query.setMaterialized(withQuery.isMaterialized()); analysis.registerNamedQuery(table, query); // re-alias the fields with the name assigned to the query in the WITH declaration @@ -3581,7 +3630,12 @@ protected Scope visitJoin(Join node, Optional scope) { joinConditionCheck(criteria); + // remember current tables in the scope + List tables = new ArrayList<>(); + scope.ifPresent(s -> tables.addAll(s.getTables())); + Scope left = process(node.getLeft(), scope); + scope.ifPresent(s -> s.setTables(tables)); Scope right = process(node.getRight(), scope); if (criteria instanceof JoinUsing) { @@ -4433,8 +4487,10 @@ private Scope createAndAssignScope(Node node, Optional parentScope, List< private Scope createAndAssignScope( Node node, Optional parentScope, RelationType relationType) { - Scope scope = - scopeBuilder(parentScope).withRelationType(RelationId.of(node), relationType).build(); + Scope.Builder scopeBuilder = + scopeBuilder(parentScope).withRelationType(RelationId.of(node), relationType); + parentScope.ifPresent(scope -> scopeBuilder.withTables(scope.getTables())); + Scope scope = scopeBuilder.build(); analysis.setScope(node, scope); return scope; diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/StatementAnalyzerFactory.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/StatementAnalyzerFactory.java index 53c300f3ce35..c532731850c6 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/StatementAnalyzerFactory.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/StatementAnalyzerFactory.java @@ -77,4 +77,8 @@ public static StatementAnalyzerFactory createTestingStatementAnalyzerFactory( public AccessControl getAccessControl() { return accessControl; } + + public SqlParser getSqlParser() { + return sqlParser; + } } diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/CteMaterializer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/CteMaterializer.java new file 
mode 100644 index 000000000000..a6ac7a422464 --- /dev/null +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/CteMaterializer.java @@ -0,0 +1,355 @@ +/* + * + * * Licensed to the Apache Software Foundation (ASF) under one + * * or more contributor license agreements. See the NOTICE file + * * distributed with this work for additional information + * * regarding copyright ownership. The ASF licenses this file + * * to you under the Apache License, Version 2.0 (the + * * "License"); you may not use this file except in compliance + * * with the License. You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, + * * software distributed under the License is distributed on an + * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * * KIND, either express or implied. See the License for the + * * specific language governing permissions and limitations + * * under the License. + * + */ + +package org.apache.iotdb.db.queryengine.plan.relational.planner; + +import org.apache.iotdb.common.rpc.thrift.TEndPoint; +import org.apache.iotdb.commons.client.IClientManager; +import org.apache.iotdb.commons.client.sync.SyncDataNodeInternalServiceClient; +import org.apache.iotdb.commons.exception.IoTDBException; +import org.apache.iotdb.commons.exception.IoTDBRuntimeException; +import org.apache.iotdb.commons.schema.table.column.TsTableColumnCategory; +import org.apache.iotdb.db.exception.mpp.FragmentInstanceFetchException; +import org.apache.iotdb.db.protocol.session.SessionManager; +import org.apache.iotdb.db.queryengine.common.FragmentInstanceId; +import org.apache.iotdb.db.queryengine.common.MPPQueryContext; +import org.apache.iotdb.db.queryengine.common.header.DatasetHeader; +import org.apache.iotdb.db.queryengine.plan.Coordinator; +import org.apache.iotdb.db.queryengine.plan.execution.ExecutionResult; +import org.apache.iotdb.db.queryengine.plan.execution.QueryExecution; +import org.apache.iotdb.db.queryengine.plan.planner.LocalExecutionPlanner; +import org.apache.iotdb.db.queryengine.plan.planner.plan.DistributedQueryPlan; +import org.apache.iotdb.db.queryengine.plan.planner.plan.FragmentInstance; +import org.apache.iotdb.db.queryengine.plan.relational.analyzer.Analysis; +import org.apache.iotdb.db.queryengine.plan.relational.metadata.ColumnSchema; +import org.apache.iotdb.db.queryengine.plan.relational.metadata.TableSchema; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Identifier; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Query; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Table; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.With; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.WithQuery; +import org.apache.iotdb.db.queryengine.plan.relational.sql.parser.SqlParser; +import org.apache.iotdb.db.queryengine.statistics.FragmentInstanceStatisticsDrawer; +import org.apache.iotdb.db.queryengine.statistics.QueryStatisticsFetcher; +import org.apache.iotdb.db.queryengine.statistics.StatisticLine; +import org.apache.iotdb.db.utils.cte.CteDataStore; +import org.apache.iotdb.mpp.rpc.thrift.TFetchFragmentInstanceStatisticsResp; +import org.apache.iotdb.rpc.TSStatusCode; + +import com.google.common.collect.ImmutableList; +import org.apache.tsfile.enums.TSDataType; +import org.apache.tsfile.read.common.block.TsBlock; +import 
org.apache.tsfile.read.common.type.TypeFactory; +import org.apache.tsfile.utils.Pair; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +public class CteMaterializer { + private static final Logger LOGGER = LoggerFactory.getLogger(CteMaterializer.class); + + private static final Coordinator coordinator = Coordinator.getInstance(); + private static final SessionManager sessionManager = SessionManager.getInstance(); + + public void materializeCTE(Analysis analysis, MPPQueryContext context) { + analysis + .getNamedQueries() + .forEach( + (tableRef, query) -> { + Table table = tableRef.getNode(); + if (query.isMaterialized()) { + if (!query.isExecuted()) { + CteDataStore dataStore = + fetchCteQueryResult(context, table, query, analysis.getWith()); + query.setExecuted(true); + if (dataStore == null) { + // CTE query execution failed. Use inline instead of materialization + // in the outer query + query.setCteDataStore(null); + return; + } + query.setCteDataStore(dataStore); + } + context.addCteQuery(table, query); + } + }); + } + + public CteDataStore fetchCteQueryResult( + MPPQueryContext context, Table table, Query query, With with) { + final long queryId = sessionManager.requestQueryId(); + Throwable t = null; + CteDataStore cteDataStore = null; + long startTime = System.nanoTime(); + try { + Query q = query; + if (with != null) { + List tables = context.getTables(query); + List withQueries = + with.getQueries().stream() + .filter( + x -> + tables.contains(x.getName()) + && !x.getQuery().isMaterialized() + && !x.getQuery().isDone()) + .collect(Collectors.toList()); + + if (!withQueries.isEmpty()) { + With w = new With(with.getLocation().orElse(null), with.isRecursive(), withQueries); + q = + new Query( + Optional.of(w), + query.getQueryBody(), + query.getFill(), + query.getOrderBy(), + query.getOffset(), + query.getLimit()); + } + } + final ExecutionResult executionResult = + coordinator.executeForTableModel( + q, + new SqlParser(), + sessionManager.getCurrSession(), + queryId, + sessionManager.getSessionInfoOfTableModel(sessionManager.getCurrSession()), + String.format("Materialize query for CTE '%s'", table.getName()), + LocalExecutionPlanner.getInstance().metadata, + context.getCteQueries(), + context.getExplainType(), + context.getTimeOut(), + false); + if (executionResult.status.getCode() != TSStatusCode.SUCCESS_STATUS.getStatusCode()) { + return null; + } + // query execution + QueryExecution execution = (QueryExecution) coordinator.getQueryExecution(queryId); + + // get table schema + DatasetHeader datasetHeader = coordinator.getQueryExecution(queryId).getDatasetHeader(); + TableSchema tableSchema = getTableSchema(datasetHeader, table.getName().toString()); + + cteDataStore = + new CteDataStore(tableSchema, datasetHeader.getColumnIndex2TsBlockColumnIndexList()); + while (execution.hasNextResult()) { + final Optional tsBlock; + try { + tsBlock = execution.getBatchResult(); + } catch (final IoTDBException e) { + LOGGER.warn("Fail to materialize CTE because {}", e.getMessage()); + return null; + } + if (!tsBlock.isPresent() || tsBlock.get().isEmpty()) { + continue; + } + context.reserveMemoryForFrontEnd(tsBlock.get().getRetainedSizeInBytes()); + if (!cteDataStore.addTsBlock(tsBlock.get())) { + LOGGER.warn( + "Fail to materialize CTE because the data size exceeded memory or the row count 
threshold"); + if (context.isExplainAnalyze()) { + handleCteExplainAnalyzeResults( + context, + queryId, + table, + "!!! Failed to materialize CTE. The main query falls back to INLINE mode !!!"); + } + context.releaseMemoryReservedForFrontEnd(cteDataStore.ramBytesUsed()); + cteDataStore.clear(); + return null; + } + } + + if (context.isExplainAnalyze()) { + handleCteExplainAnalyzeResults(context, queryId, table, null); + } else if (context.isExplain()) { + handleCteExplainResults(context, queryId, table); + } + + return cteDataStore; + } catch (final Throwable throwable) { + if (cteDataStore != null) { + context.releaseMemoryReservedForFrontEnd(cteDataStore.ramBytesUsed()); + cteDataStore.clear(); + } + t = throwable; + } finally { + long cost = System.nanoTime() - startTime; + context.addCteMaterializationCost(table, cost); + coordinator.cleanupQueryExecution(queryId, null, t); + } + return null; + } + + private TableSchema getTableSchema(DatasetHeader datasetHeader, String cteName) { + final List columnNames = datasetHeader.getRespColumns(); + final List columnDataTypes = datasetHeader.getRespDataTypes(); + if (columnNames.size() != columnDataTypes.size()) { + throw new IoTDBRuntimeException( + "Size of column names and column data types do not match", + TSStatusCode.INTERNAL_SERVER_ERROR.getStatusCode()); + } + + List columnIndex2TsBlockColumnIndexList = + datasetHeader.getColumnIndex2TsBlockColumnIndexList(); + if (columnIndex2TsBlockColumnIndexList == null) { + columnIndex2TsBlockColumnIndexList = + IntStream.range(0, columnNames.size()).boxed().collect(Collectors.toList()); + } + + // Get original column indices in the TsBlock + List tsBlockColumnIndices = + adjustColumnIndexMapping(columnIndex2TsBlockColumnIndexList); + + // build column schema list of cte table based on sorted original indices + final List columnSchemaList = + tsBlockColumnIndices.stream() + .map( + index -> + new ColumnSchema( + columnNames.get(index), + TypeFactory.getType(columnDataTypes.get(index)), + false, + TsTableColumnCategory.FIELD)) + .collect(Collectors.toList()); + return new TableSchema(cteName, columnSchemaList); + } + + /** + * Adjust column index mapping by sorting and preserving original indices. For example, if input + * is {0, 3, 1, 2}, the output will be {0, 2, 3, 1}. This method doesn't modify the original list. 
+ * + * @param originalIndexList original column index list + * @return adjusted column index list with sorted values preserving original positions + */ + private List adjustColumnIndexMapping(List originalIndexList) { + if (originalIndexList == null || originalIndexList.isEmpty()) { + return originalIndexList; + } + + boolean isSequential = true; + for (int i = 0; i < originalIndexList.size(); i++) { + if (originalIndexList.get(i) != i) { + isSequential = false; + break; + } + } + if (isSequential) { + return originalIndexList; + } + + // Create LinkedHashMap to maintain value-position mapping + Map valueToPositionMap = new LinkedHashMap<>(); + IntStream.range(0, originalIndexList.size()) + .forEach(i -> valueToPositionMap.put(originalIndexList.get(i), i)); + + // Sort by key (value) and collect positions in sorted order + return valueToPositionMap.entrySet().stream() + .sorted(Map.Entry.comparingByKey()) + .map(Map.Entry::getValue) + .collect(Collectors.toList()); + } + + private List getCteExplainAnalyzeLines( + FragmentInstanceStatisticsDrawer fragmentInstanceStatisticsDrawer, + List instances, + boolean verbose) + throws FragmentInstanceFetchException { + if (instances == null || instances.isEmpty()) { + return ImmutableList.of(); + } + + IClientManager clientManager = + coordinator.getInternalServiceClientManager(); + Map allStatistics = + QueryStatisticsFetcher.fetchAllStatistics(instances, clientManager); + List statisticLines = + fragmentInstanceStatisticsDrawer.renderFragmentInstances(instances, allStatistics, verbose); + return statisticLines.stream().map(StatisticLine::getValue).collect(Collectors.toList()); + } + + private void handleCteExplainAnalyzeResults( + MPPQueryContext context, long queryId, Table table, String warnMessage) { + QueryExecution execution = (QueryExecution) coordinator.getQueryExecution(queryId); + DistributedQueryPlan distributedQueryPlan = execution.getDistributedPlan(); + if (distributedQueryPlan == null) { + context.addCteExplainResult(table, new Pair<>(0, ImmutableList.of())); + return; + } + + MPPQueryContext cteContext = execution.getContext(); + FragmentInstanceStatisticsDrawer fragmentInstanceStatisticsDrawer = + new FragmentInstanceStatisticsDrawer(); + fragmentInstanceStatisticsDrawer.renderPlanStatistics(cteContext); + fragmentInstanceStatisticsDrawer.renderDispatchCost(cteContext); + + try { + List lines = + getCteExplainAnalyzeLines( + fragmentInstanceStatisticsDrawer, + distributedQueryPlan.getInstances(), + context.isVerbose()); + int maxLineLength = fragmentInstanceStatisticsDrawer.getMaxLineLength(); + if (warnMessage != null) { + lines.add(warnMessage); + maxLineLength = Math.max(maxLineLength, warnMessage.length()); + } + context.addCteExplainResult(table, new Pair<>(maxLineLength, lines)); + } catch (FragmentInstanceFetchException e) { + throw new IoTDBRuntimeException( + "Failed to fetch fragment instance statistics", + TSStatusCode.INTERNAL_SERVER_ERROR.getStatusCode()); + } + } + + private void handleCteExplainResults(MPPQueryContext context, long queryId, Table table) { + QueryExecution execution = (QueryExecution) coordinator.getQueryExecution(queryId); + DistributedQueryPlan distributedQueryPlan = execution.getDistributedPlan(); + if (distributedQueryPlan == null) { + context.addCteExplainResult(table, new Pair<>(0, ImmutableList.of())); + return; + } + + List lines = distributedQueryPlan.getPlanText(); + context.addCteExplainResult(table, new Pair<>(-1, lines)); + } + + private static class CteMaterializerHolder { + private 
static CteMaterializer INSTANCE = new CteMaterializer(); + + private CteMaterializerHolder() { + // Empty constructor + } + } + + public static CteMaterializer getInstance() { + return CteMaterializerHolder.INSTANCE; + } + + public static void setInstance(CteMaterializer instance) { + CteMaterializerHolder.INSTANCE = instance; + } +} diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/QueryPlanner.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/QueryPlanner.java index 106b406257f6..071a6a05da67 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/QueryPlanner.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/QueryPlanner.java @@ -30,6 +30,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.analyzer.NodeRef; import org.apache.iotdb.db.queryengine.plan.relational.analyzer.RelationType; import org.apache.iotdb.db.queryengine.plan.relational.planner.ir.GapFillStartAndEndTimeExtractVisitor; +import org.apache.iotdb.db.queryengine.plan.relational.planner.ir.PredicateWithUncorrelatedScalarSubqueryReconstructor; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationNode.Aggregation; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.FilterNode; @@ -124,6 +125,9 @@ public class QueryPlanner { private final Optional outerContext; private final Map, RelationPlan> recursiveSubqueries; + private final PredicateWithUncorrelatedScalarSubqueryReconstructor + predicateWithUncorrelatedScalarSubqueryReconstructor; + // private final Map, Symbol> lambdaDeclarationToSymbolMap; // private final SubqueryPlanner subqueryPlanner; @@ -133,13 +137,18 @@ public QueryPlanner( MPPQueryContext queryContext, Optional outerContext, SessionInfo session, - Map, RelationPlan> recursiveSubqueries) { + Map, RelationPlan> recursiveSubqueries, + PredicateWithUncorrelatedScalarSubqueryReconstructor + predicateWithUncorrelatedScalarSubqueryReconstructor) { requireNonNull(analysis, "analysis is null"); requireNonNull(symbolAllocator, "symbolAllocator is null"); requireNonNull(queryContext, "queryContext is null"); requireNonNull(outerContext, "outerContext is null"); requireNonNull(session, "session is null"); requireNonNull(recursiveSubqueries, "recursiveSubqueries is null"); + requireNonNull( + predicateWithUncorrelatedScalarSubqueryReconstructor, + "predicateWithUncorrelatedScalarSubqueryReconstructor is null"); this.analysis = analysis; this.symbolAllocator = symbolAllocator; @@ -149,8 +158,16 @@ public QueryPlanner( this.outerContext = outerContext; this.subqueryPlanner = new SubqueryPlanner( - analysis, symbolAllocator, queryContext, outerContext, session, recursiveSubqueries); + analysis, + symbolAllocator, + queryContext, + outerContext, + session, + recursiveSubqueries, + predicateWithUncorrelatedScalarSubqueryReconstructor); this.recursiveSubqueries = recursiveSubqueries; + this.predicateWithUncorrelatedScalarSubqueryReconstructor = + predicateWithUncorrelatedScalarSubqueryReconstructor; } public RelationPlan plan(Query query) { @@ -296,6 +313,9 @@ public RelationPlan plan(QuerySpecification node) { builder = limit(builder, node.getLimit(), orderingScheme); builder = builder.appendProjections(outputs, symbolAllocator, queryContext); + for (Expression expr : expressions) { + 
predicateWithUncorrelatedScalarSubqueryReconstructor.clearShadowExpression(expr); + } return new RelationPlan( builder.getRoot(), analysis.getScope(node), computeOutputs(builder, outputs), outerContext); @@ -350,6 +370,9 @@ private PlanBuilder planWindowFunctions( subPlan = subqueryPlanner.handleSubqueries(subPlan, inputs, analysis.getSubqueries(node)); subPlan = subPlan.appendProjections(inputs, symbolAllocator, queryContext); + for (Expression input : inputs) { + predicateWithUncorrelatedScalarSubqueryReconstructor.clearShadowExpression(input); + } // Add projection to coerce inputs to their site-specific types. // This is important because the same lexical expression may need to be coerced @@ -734,7 +757,13 @@ private static List computeOutputs( private PlanBuilder planQueryBody(QueryBody queryBody) { RelationPlan relationPlan = new RelationPlanner( - analysis, symbolAllocator, queryContext, outerContext, session, recursiveSubqueries) + analysis, + symbolAllocator, + queryContext, + outerContext, + session, + recursiveSubqueries, + predicateWithUncorrelatedScalarSubqueryReconstructor) .process(queryBody, null); return newPlanBuilder(relationPlan, analysis); @@ -749,7 +778,8 @@ private PlanBuilder planFrom(QuerySpecification node) { queryContext, outerContext, session, - recursiveSubqueries) + recursiveSubqueries, + predicateWithUncorrelatedScalarSubqueryReconstructor) .process(node.getFrom().orElse(null), null); return newPlanBuilder(relationPlan, analysis); } else { @@ -763,9 +793,12 @@ private PlanBuilder filter(PlanBuilder subPlan, Expression predicate, Node node) } subPlan = subqueryPlanner.handleSubqueries(subPlan, predicate, analysis.getSubqueries(node)); - return subPlan.withNewRoot( - new FilterNode( - queryIdAllocator.genPlanNodeId(), subPlan.getRoot(), subPlan.rewrite(predicate))); + PlanBuilder planBuilder = + subPlan.withNewRoot( + new FilterNode( + queryIdAllocator.genPlanNodeId(), subPlan.getRoot(), subPlan.rewrite(predicate))); + predicateWithUncorrelatedScalarSubqueryReconstructor.clearShadowExpression(predicate); + return planBuilder; } private PlanBuilder aggregate(PlanBuilder subPlan, QuerySpecification node) { @@ -801,6 +834,9 @@ private PlanBuilder aggregate(PlanBuilder subPlan, QuerySpecification node) { List inputs = inputBuilder.build(); subPlan = subqueryPlanner.handleSubqueries(subPlan, inputs, analysis.getSubqueries(node)); subPlan = subPlan.appendProjections(inputs, symbolAllocator, queryContext); + for (Expression input : inputs) { + predicateWithUncorrelatedScalarSubqueryReconstructor.clearShadowExpression(input); + } Function rewrite = subPlan.getTranslations()::rewrite; diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/RelationPlanner.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/RelationPlanner.java index 9732f8221d63..d442d51a2141 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/RelationPlanner.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/RelationPlanner.java @@ -54,6 +54,8 @@ import org.apache.iotdb.db.queryengine.plan.relational.metadata.TableMetadataImpl; import org.apache.iotdb.db.queryengine.plan.relational.metadata.TreeDeviceViewSchema; import org.apache.iotdb.db.queryengine.plan.relational.planner.ir.IrUtils; +import org.apache.iotdb.db.queryengine.plan.relational.planner.ir.PredicateWithUncorrelatedScalarSubqueryReconstructor; +import 
org.apache.iotdb.db.queryengine.plan.relational.planner.node.CteScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.DeviceTableScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.ExceptNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.FilterNode; @@ -125,6 +127,7 @@ import org.apache.iotdb.db.queryengine.plan.statement.crud.InsertRowStatement; import org.apache.iotdb.db.queryengine.plan.statement.crud.InsertRowsStatement; import org.apache.iotdb.db.queryengine.plan.statement.crud.InsertTabletStatement; +import org.apache.iotdb.db.utils.cte.CteDataStore; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; @@ -187,19 +190,27 @@ public class RelationPlanner extends AstVisitor { private final SubqueryPlanner subqueryPlanner; private final Map, RelationPlan> recursiveSubqueries; + private final PredicateWithUncorrelatedScalarSubqueryReconstructor + predicateWithUncorrelatedScalarSubqueryReconstructor; + public RelationPlanner( final Analysis analysis, final SymbolAllocator symbolAllocator, final MPPQueryContext queryContext, final Optional outerContext, final SessionInfo sessionInfo, - final Map, RelationPlan> recursiveSubqueries) { + final Map, RelationPlan> recursiveSubqueries, + PredicateWithUncorrelatedScalarSubqueryReconstructor + predicateWithUncorrelatedScalarSubqueryReconstructor) { requireNonNull(analysis, "analysis is null"); requireNonNull(symbolAllocator, "symbolAllocator is null"); requireNonNull(queryContext, "queryContext is null"); requireNonNull(outerContext, "outerContext is null"); requireNonNull(sessionInfo, "session is null"); requireNonNull(recursiveSubqueries, "recursiveSubqueries is null"); + requireNonNull( + predicateWithUncorrelatedScalarSubqueryReconstructor, + "predicateWithUncorrelatedScalarSubqueryReconstructor is null"); this.analysis = analysis; this.symbolAllocator = symbolAllocator; @@ -207,6 +218,8 @@ public RelationPlanner( this.idAllocator = queryContext.getQueryId(); this.outerContext = outerContext; this.sessionInfo = sessionInfo; + this.predicateWithUncorrelatedScalarSubqueryReconstructor = + predicateWithUncorrelatedScalarSubqueryReconstructor; this.subqueryPlanner = new SubqueryPlanner( analysis, @@ -214,14 +227,21 @@ public RelationPlanner( queryContext, outerContext, sessionInfo, - recursiveSubqueries); + recursiveSubqueries, + predicateWithUncorrelatedScalarSubqueryReconstructor); this.recursiveSubqueries = recursiveSubqueries; } @Override protected RelationPlan visitQuery(final Query node, final Void context) { return new QueryPlanner( - analysis, symbolAllocator, queryContext, outerContext, sessionInfo, recursiveSubqueries) + analysis, + symbolAllocator, + queryContext, + outerContext, + sessionInfo, + recursiveSubqueries, + predicateWithUncorrelatedScalarSubqueryReconstructor) .plan(node); } @@ -238,33 +258,86 @@ protected RelationPlan visitTable(final Table table, final Void context) { } final Scope scope = analysis.getScope(table); + final Query namedQuery = analysis.getNamedQuery(table); // Common Table Expression - final Query namedQuery = analysis.getNamedQuery(table); if (namedQuery != null) { - RelationPlan subPlan; - if (analysis.isExpandableQuery(namedQuery)) { - // recursive cte - throw new SemanticException("unexpected recursive cte"); - } else { - subPlan = process(namedQuery, null); + return processNamedQuery(table, namedQuery, scope); + } + + return processPhysicalTable(table, scope); + } + + private RelationPlan 
processNamedQuery(Table table, Query namedQuery, Scope scope) { + if (analysis.isExpandableQuery(namedQuery)) { + throw new SemanticException("unexpected recursive cte"); + } + + if (namedQuery.isMaterialized() && namedQuery.isDone()) { + RelationPlan materializedCtePlan = processMaterializedCte(table, namedQuery, scope); + if (materializedCtePlan != null) { + return materializedCtePlan; } + } - // Add implicit coercions if view query produces types that don't match the declared output - // types of the view (e.g., if the underlying tables referenced by the view changed) - List types = - analysis.getOutputDescriptor(table).getAllFields().stream() - .map(Field::getType) - .collect(toImmutableList()); + return processRegularCte(table, namedQuery, scope); + } + + private RelationPlan processMaterializedCte(Table table, Query query, Scope scope) { + CteDataStore dataStore = query.getCteDataStore(); + if (dataStore == null) { + return null; + } + + List cteSymbols = + dataStore.getTableSchema().getColumns().stream() + .map(column -> symbolAllocator.newSymbol(column.getName(), column.getType())) + .collect(Collectors.toList()); + + CteScanNode cteScanNode = + new CteScanNode(idAllocator.genPlanNodeId(), table.getName(), cteSymbols, dataStore); - NodeAndMappings coerced = coerce(subPlan, types, symbolAllocator, idAllocator); - return new RelationPlan(coerced.getNode(), scope, coerced.getFields(), outerContext); + List columnIndex2TsBlockColumnIndexList = + dataStore.getColumnIndex2TsBlockColumnIndexList(); + if (columnIndex2TsBlockColumnIndexList == null) { + return new RelationPlan(cteScanNode, scope, cteSymbols, outerContext); } + List outputSymbols = new ArrayList<>(); + Assignments.Builder assignments = Assignments.builder(); + for (int index : columnIndex2TsBlockColumnIndexList) { + Symbol columnSymbol = cteSymbols.get(index); + outputSymbols.add(columnSymbol); + assignments.put(columnSymbol, columnSymbol.toSymbolReference()); + } + + // Project Node + ProjectNode projectNode = + new ProjectNode( + queryContext.getQueryId().genPlanNodeId(), cteScanNode, assignments.build()); + + return new RelationPlan(projectNode, scope, outputSymbols, outerContext); + } + + private RelationPlan processRegularCte(Table table, Query namedQuery, Scope scope) { + RelationPlan subPlan = process(namedQuery, null); + // Add implicit coercions if view query produces types that don't match the declared output + // types of the view (e.g., if the underlying tables referenced by the view changed) + List types = + analysis.getOutputDescriptor(table).getAllFields().stream() + .map(Field::getType) + .collect(toImmutableList()); + + NodeAndMappings coerced = coerce(subPlan, types, symbolAllocator, idAllocator); + return new RelationPlan(coerced.getNode(), scope, coerced.getFields(), outerContext); + } + + private RelationPlan processPhysicalTable(Table table, Scope scope) { final ImmutableList.Builder outputSymbolsBuilder = ImmutableList.builder(); final ImmutableMap.Builder symbolToColumnSchema = ImmutableMap.builder(); final Collection fields = scope.getRelationType().getAllFields(); final QualifiedName qualifiedName = analysis.getRelationName(table); + if (!qualifiedName.getPrefix().isPresent()) { throw new IllegalStateException("Table " + table.getName() + " has no prefix!"); } @@ -292,7 +365,6 @@ protected RelationPlan visitTable(final Table table, final Void context) { } final List outputSymbols = outputSymbolsBuilder.build(); - final Map tableColumnSchema = symbolToColumnSchema.build(); 
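For reviewers, a small worked example of the index mapping consumed by processMaterializedCte above. The column names and values are illustrative only, and the reading assumes the CteScanNode's symbols follow the buffered TsBlock's column order.

// Assumed buffered layout:    [time, s1, s2, tag1]   (TsBlock column indices 0..3)
// Assumed CTE output columns: (tag1, s1)
// Then the data store would carry columnIndex2TsBlockColumnIndexList = [3, 1]:
List<Integer> columnIndex2TsBlockColumnIndexList = Arrays.asList(3, 1);
// With that list, processMaterializedCte keeps one symbol per buffered column on the CteScanNode
// and adds a ProjectNode whose outputs are (tag1, s1), bound by identity assignments, so the
// RelationPlan's fields line up with the CTE's declared output descriptor.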
analysis.addTableSchema(qualifiedObjectName, tableColumnSchema); @@ -324,19 +396,19 @@ protected RelationPlan visitTable(final Table table, final Void context) { tableColumnSchema, tagAndAttributeIndexMap); return new RelationPlan(tableScanNode, scope, outputSymbols, outerContext); - - // Collection fields = analysis.getMaterializedViewStorageTableFields(node); - // Query namedQuery = analysis.getNamedQuery(node); - // Collection fields = analysis.getMaterializedViewStorageTableFields(node); - // plan = addRowFilters(node, plan); - // plan = addColumnMasks(node, plan); } @Override protected RelationPlan visitQuerySpecification( final QuerySpecification node, final Void context) { return new QueryPlanner( - analysis, symbolAllocator, queryContext, outerContext, sessionInfo, recursiveSubqueries) + analysis, + symbolAllocator, + queryContext, + outerContext, + sessionInfo, + recursiveSubqueries, + predicateWithUncorrelatedScalarSubqueryReconstructor) .plan(node); } @@ -863,11 +935,10 @@ protected RelationPlan visitPatternRecognitionRelation( .forEach(outputLayout::add); } + List expressions = + extractPatternRecognitionExpressions(node.getVariableDefinitions(), node.getMeasures()); planBuilder = - subqueryPlanner.handleSubqueries( - planBuilder, - extractPatternRecognitionExpressions(node.getVariableDefinitions(), node.getMeasures()), - analysis.getSubqueries(node)); + subqueryPlanner.handleSubqueries(planBuilder, expressions, analysis.getSubqueries(node)); PatternRecognitionComponents components = planPatternRecognitionComponents( @@ -880,6 +951,10 @@ protected RelationPlan visitPatternRecognitionRelation( outputLayout.addAll(components.getMeasureOutputs()); + for (Expression expr : expressions) { + predicateWithUncorrelatedScalarSubqueryReconstructor.clearShadowExpression(expr); + } + if (!oneRowOutput) { Set inputSymbolsOnOutput = ImmutableSet.copyOf(outputLayout.build()); subPlan.getFieldMappings().stream() diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/SubqueryPlanner.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/SubqueryPlanner.java index ebc32b6a247e..1484a47d5afe 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/SubqueryPlanner.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/SubqueryPlanner.java @@ -29,6 +29,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.analyzer.RelationType; import org.apache.iotdb.db.queryengine.plan.relational.analyzer.Scope; import org.apache.iotdb.db.queryengine.plan.relational.planner.QueryPlanner.PlanAndMappings; +import org.apache.iotdb.db.queryengine.plan.relational.planner.ir.PredicateWithUncorrelatedScalarSubqueryReconstructor; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.ApplyNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CorrelatedJoinNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.EnforceSingleRowNode; @@ -80,6 +81,8 @@ class SubqueryPlanner { private final MPPQueryContext plannerContext; private final SessionInfo session; private final Map, RelationPlan> recursiveSubqueries; + private final PredicateWithUncorrelatedScalarSubqueryReconstructor + predicateWithUncorrelatedScalarSubqueryReconstructor; SubqueryPlanner( Analysis analysis, @@ -87,13 +90,18 @@ class SubqueryPlanner { MPPQueryContext plannerContext, Optional outerContext, SessionInfo session, - 
Map, RelationPlan> recursiveSubqueries) { + Map, RelationPlan> recursiveSubqueries, + PredicateWithUncorrelatedScalarSubqueryReconstructor + predicateWithUncorrelatedScalarSubqueryReconstructor) { requireNonNull(analysis, "analysis is null"); requireNonNull(symbolAllocator, "symbolAllocator is null"); requireNonNull(plannerContext, "plannerContext is null"); requireNonNull(outerContext, "outerContext is null"); requireNonNull(session, "session is null"); requireNonNull(recursiveSubqueries, "recursiveSubqueries is null"); + requireNonNull( + predicateWithUncorrelatedScalarSubqueryReconstructor, + "predicateWithUncorrelatedScalarSubqueryReconstructor is null"); this.analysis = analysis; this.symbolAllocator = symbolAllocator; @@ -101,6 +109,8 @@ class SubqueryPlanner { this.plannerContext = plannerContext; this.session = session; this.recursiveSubqueries = recursiveSubqueries; + this.predicateWithUncorrelatedScalarSubqueryReconstructor = + predicateWithUncorrelatedScalarSubqueryReconstructor; } public PlanBuilder handleSubqueries( @@ -125,6 +135,9 @@ public PlanBuilder handleSubqueries( List scalarSubqueries = subqueries.getSubqueries(); if (!scalarSubqueries.isEmpty()) { + // try to execute un-correlated scalar subqueries in the predicate in advance to utilize + // predicate pushdown if possible + tryFoldUncorrelatedScalarSubqueryInPredicate(expression, plannerContext); for (Cluster cluster : cluster(builder.getScope(), selectSubqueries(builder, expression, scalarSubqueries))) { builder = planScalarSubquery(builder, cluster); @@ -151,6 +164,12 @@ public PlanBuilder handleSubqueries( return builder; } + private void tryFoldUncorrelatedScalarSubqueryInPredicate( + Expression expression, MPPQueryContext context) { + predicateWithUncorrelatedScalarSubqueryReconstructor + .reconstructPredicateWithUncorrelatedScalarSubquery(context, analysis, expression); + } + /** * Find subqueries from the candidate set that are children of the given parent and that have not * already been handled in the subplan @@ -209,6 +228,7 @@ private PlanBuilder planInPredicate( subPlan = planInPredicate( subPlan, value, subquery, output, predicate, analysis.getPredicateCoercions(predicate)); + predicateWithUncorrelatedScalarSubqueryReconstructor.clearShadowExpression(value); return new PlanBuilder( subPlan @@ -345,7 +365,8 @@ private RelationPlan planSubquery(Expression subquery, TranslationMap outerConte plannerContext, Optional.of(outerContext), session, - recursiveSubqueries) + recursiveSubqueries, + predicateWithUncorrelatedScalarSubqueryReconstructor) .process(subquery, null); } @@ -375,6 +396,7 @@ private PlanBuilder planQuantifiedComparison( subPlan = planQuantifiedComparison( subPlan, operator, quantifier, value, subquery, output, predicateCoercions); + predicateWithUncorrelatedScalarSubqueryReconstructor.clearShadowExpression(value); return new PlanBuilder( subPlan .getTranslations() @@ -389,6 +411,7 @@ private PlanBuilder planQuantifiedComparison( subPlan = planInPredicate( subPlan, value, subquery, output, quantifiedComparison, predicateCoercions); + predicateWithUncorrelatedScalarSubqueryReconstructor.clearShadowExpression(value); return new PlanBuilder( subPlan .getTranslations() diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/TableLogicalPlanner.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/TableLogicalPlanner.java index f69d7cd2cebb..2d7ed174d2ad 100644 --- 
a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/TableLogicalPlanner.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/TableLogicalPlanner.java @@ -50,6 +50,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.metadata.QualifiedObjectName; import org.apache.iotdb.db.queryengine.plan.relational.metadata.TableMetadataImpl; import org.apache.iotdb.db.queryengine.plan.relational.metadata.TableSchema; +import org.apache.iotdb.db.queryengine.plan.relational.planner.ir.PredicateWithUncorrelatedScalarSubqueryReconstructor; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.ExplainAnalyzeNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.FilterNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.IntoNode; @@ -115,6 +116,9 @@ public class TableLogicalPlanner { private final Metadata metadata; private final WarningCollector warningCollector; + private PredicateWithUncorrelatedScalarSubqueryReconstructor + predicateWithUncorrelatedScalarSubqueryReconstructor; + @TestOnly public TableLogicalPlanner( MPPQueryContext queryContext, @@ -145,6 +149,16 @@ public TableLogicalPlanner( this.symbolAllocator = requireNonNull(symbolAllocator, "symbolAllocator is null"); this.warningCollector = requireNonNull(warningCollector, "warningCollector is null"); this.planOptimizers = planOptimizers; + this.predicateWithUncorrelatedScalarSubqueryReconstructor = + new PredicateWithUncorrelatedScalarSubqueryReconstructor(); + } + + @TestOnly + public void setPredicateWithUncorrelatedScalarSubqueryReconstructor( + PredicateWithUncorrelatedScalarSubqueryReconstructor + predicateWithUncorrelatedScalarSubqueryReconstructor) { + this.predicateWithUncorrelatedScalarSubqueryReconstructor = + predicateWithUncorrelatedScalarSubqueryReconstructor; } public LogicalQueryPlan plan(final Analysis analysis) { @@ -316,7 +330,7 @@ private PlanNode createOutputPlan(RelationPlan plan, Analysis analysis) { int columnNumber = 0; // TODO perfect the logic of outputDescriptor - if (queryContext.isExplainAnalyze()) { + if (queryContext.isExplainAnalyze() && !queryContext.isInnerTriggeredQuery()) { outputs.add(new Symbol(ColumnHeaderConstant.EXPLAIN_ANALYZE)); names.add(ColumnHeaderConstant.EXPLAIN_ANALYZE); columnHeaders.add(new ColumnHeader(ColumnHeaderConstant.EXPLAIN_ANALYZE, TSDataType.TEXT)); @@ -372,6 +386,10 @@ private RelationPlan createRelationPlan(Analysis analysis, PipeEnriched pipeEnri } private RelationPlan createRelationPlan(Analysis analysis, Query query) { + // materialize cte if needed + if (!queryContext.isInnerTriggeredQuery()) { + CteMaterializer.getInstance().materializeCTE(analysis, queryContext); + } return getRelationPlanner(analysis).process(query, null); } @@ -385,7 +403,13 @@ private RelationPlan createRelationPlan(Analysis analysis, Delete statement) { private RelationPlanner getRelationPlanner(Analysis analysis) { return new RelationPlanner( - analysis, symbolAllocator, queryContext, Optional.empty(), sessionInfo, ImmutableMap.of()); + analysis, + symbolAllocator, + queryContext, + Optional.empty(), + sessionInfo, + ImmutableMap.of(), + predicateWithUncorrelatedScalarSubqueryReconstructor); } private PlanNode planCreateOrUpdateDevice( diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/distribute/AddExchangeNodes.java 
b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/distribute/AddExchangeNodes.java index 9cef48070285..a1c26e1b7017 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/distribute/AddExchangeNodes.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/distribute/AddExchangeNodes.java @@ -27,6 +27,7 @@ import org.apache.iotdb.db.queryengine.plan.planner.plan.node.WritePlanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.TableDeviceSourceNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CollectNode; +import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CteScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.ExchangeNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.ExplainAnalyzeNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.TableFunctionProcessorNode; @@ -100,6 +101,14 @@ public PlanNode visitTableScan( return node; } + @Override + public PlanNode visitCteScan( + CteScanNode node, TableDistributedPlanGenerator.PlanContext context) { + context.nodeDistributionMap.put( + node.getPlanNodeId(), new NodeDistribution(NO_CHILD, DataPartition.NOT_ASSIGNED)); + return node; + } + @Override public PlanNode visitTableDeviceFetch( final TableDeviceFetchNode node, final TableDistributedPlanGenerator.PlanContext context) { diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/distribute/TableDistributedPlanner.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/distribute/TableDistributedPlanner.java index cb474d39470f..ff7686fe8689 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/distribute/TableDistributedPlanner.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/distribute/TableDistributedPlanner.java @@ -29,6 +29,7 @@ import org.apache.iotdb.db.queryengine.plan.planner.plan.FragmentInstance; import org.apache.iotdb.db.queryengine.plan.planner.plan.LogicalQueryPlan; import org.apache.iotdb.db.queryengine.plan.planner.plan.SubPlan; +import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanGraphPrinter; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNodeId; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.WritePlanNode; @@ -101,6 +102,14 @@ public DistributedQueryPlan plan() { TableDistributedPlanGenerator.PlanContext planContext = new TableDistributedPlanGenerator.PlanContext(); PlanNode outputNodeWithExchange = generateDistributedPlanWithOptimize(planContext); + List planText = null; + if (mppQueryContext.isExplain() && mppQueryContext.isInnerTriggeredQuery()) { + planText = + outputNodeWithExchange.accept( + new PlanGraphPrinter(), + new PlanGraphPrinter.GraphContext( + mppQueryContext.getTypeProvider().getTemplatedInfo())); + } if (analysis.isQuery()) { analysis @@ -110,7 +119,13 @@ public DistributedQueryPlan plan() { adjustUpStream(outputNodeWithExchange, planContext); - return generateDistributedPlan(outputNodeWithExchange, planContext.nodeDistributionMap); + DistributedQueryPlan distributedPlan = + generateDistributedPlan(outputNodeWithExchange, planContext.nodeDistributionMap); + if (planText != null) { + 
distributedPlan.addPlanText(planText); + } + + return distributedPlan; } public PlanNode generateDistributedPlanWithOptimize( diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/ir/PredicateWithUncorrelatedScalarSubqueryReconstructor.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/ir/PredicateWithUncorrelatedScalarSubqueryReconstructor.java new file mode 100644 index 000000000000..bce197c2681a --- /dev/null +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/ir/PredicateWithUncorrelatedScalarSubqueryReconstructor.java @@ -0,0 +1,236 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.iotdb.db.queryengine.plan.relational.planner.ir; + +import org.apache.iotdb.commons.exception.IoTDBException; +import org.apache.iotdb.db.protocol.session.SessionManager; +import org.apache.iotdb.db.queryengine.common.MPPQueryContext; +import org.apache.iotdb.db.queryengine.common.MPPQueryContext.ExplainType; +import org.apache.iotdb.db.queryengine.common.header.DatasetHeader; +import org.apache.iotdb.db.queryengine.plan.Coordinator; +import org.apache.iotdb.db.queryengine.plan.execution.ExecutionResult; +import org.apache.iotdb.db.queryengine.plan.planner.LocalExecutionPlanner; +import org.apache.iotdb.db.queryengine.plan.relational.analyzer.Analysis; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.BinaryLiteral; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.BooleanLiteral; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.ComparisonExpression; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.DereferenceExpression; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.DoubleLiteral; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Expression; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.FunctionCall; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Identifier; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Literal; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.LogicalExpression; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.LongLiteral; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.NotExpression; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Query; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.StringLiteral; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.SubqueryExpression; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.With; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.WithQuery; +import 
org.apache.iotdb.db.queryengine.plan.relational.sql.parser.SqlParser; +import org.apache.iotdb.rpc.TSStatusCode; + +import org.apache.tsfile.block.column.Column; +import org.apache.tsfile.enums.TSDataType; +import org.apache.tsfile.read.common.block.TsBlock; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +import static com.google.common.base.Preconditions.checkArgument; + +public class PredicateWithUncorrelatedScalarSubqueryReconstructor { + + private static final Coordinator coordinator = Coordinator.getInstance(); + + public PredicateWithUncorrelatedScalarSubqueryReconstructor() {} + + public void reconstructPredicateWithUncorrelatedScalarSubquery( + MPPQueryContext context, Analysis analysis, Expression expression) { + if (expression instanceof LogicalExpression) { + LogicalExpression logicalExpression = (LogicalExpression) expression; + for (Expression term : logicalExpression.getTerms()) { + reconstructPredicateWithUncorrelatedScalarSubquery(context, analysis, term); + } + } else if (expression instanceof NotExpression) { + NotExpression notExpression = (NotExpression) expression; + reconstructPredicateWithUncorrelatedScalarSubquery( + context, analysis, notExpression.getValue()); + } else if (expression instanceof ComparisonExpression) { + ComparisonExpression comparisonExpression = (ComparisonExpression) expression; + Expression left = comparisonExpression.getLeft(); + Expression right = comparisonExpression.getRight(); + if ((left instanceof Identifier + || left instanceof FunctionCall + || left instanceof DereferenceExpression) + && right instanceof SubqueryExpression) { + Optional result = + fetchUncorrelatedSubqueryResultForPredicate( + context, analysis.getSqlParser(), (SubqueryExpression) right, analysis.getWith()); + // If the subquery result is not present, we cannot reconstruct the predicate. + result.ifPresent(comparisonExpression::setShadowRight); + } else if ((right instanceof Identifier + || right instanceof FunctionCall + || right instanceof DereferenceExpression) + && left instanceof SubqueryExpression) { + Optional result = + fetchUncorrelatedSubqueryResultForPredicate( + context, analysis.getSqlParser(), (SubqueryExpression) left, analysis.getWith()); + result.ifPresent(comparisonExpression::setShadowLeft); + } + } + } + + /** + * @return an Optional containing the result of the uncorrelated scalar subquery. Returns + * Optional.empty() if the subquery cannot be executed in advance or if it does not return a + * valid result. 
+ */ + public Optional fetchUncorrelatedSubqueryResultForPredicate( + MPPQueryContext context, + SqlParser relationSqlParser, + SubqueryExpression subqueryExpression, + With with) { + final long queryId = SessionManager.getInstance().requestQueryId(); + Throwable t = null; + + try { + Query query = subqueryExpression.getQuery(); + Query q = query; + if (with != null) { + List tables = context.getTables(query); + List withQueries = + with.getQueries().stream() + .filter( + x -> + tables.contains(x.getName()) + && !x.getQuery().isMaterialized() + && !x.getQuery().isDone()) + .collect(Collectors.toList()); + + if (!withQueries.isEmpty()) { + With w = new With(with.getLocation().orElse(null), with.isRecursive(), withQueries); + q = + new Query( + Optional.of(w), + query.getQueryBody(), + query.getFill(), + query.getOrderBy(), + query.getOffset(), + query.getLimit()); + } + } + final ExecutionResult executionResult = + coordinator.executeForTableModel( + q, + relationSqlParser, + SessionManager.getInstance().getCurrSession(), + queryId, + SessionManager.getInstance() + .getSessionInfoOfTableModel(SessionManager.getInstance().getCurrSession()), + "Try to Fetch Uncorrelated Scalar Subquery Result for Predicate", + LocalExecutionPlanner.getInstance().metadata, + context.getCteQueries(), + ExplainType.NONE, + context.getTimeOut(), + false); + + // This may occur when the subquery cannot be executed in advance (for example, with + // correlated scalar subqueries). + // Since we cannot determine the subquery's validity beforehand, we must submit the subquery. + // This approach may slow down filter involving correlated scalar subqueries. + if (executionResult.status.getCode() != TSStatusCode.SUCCESS_STATUS.getStatusCode()) { + return Optional.empty(); + } + + while (coordinator.getQueryExecution(queryId).hasNextResult()) { + final Optional tsBlock; + try { + tsBlock = coordinator.getQueryExecution(queryId).getBatchResult(); + } catch (final IoTDBException e) { + t = e; + throw new RuntimeException("Failed to Fetch Subquery Result.", e); + } + if (!tsBlock.isPresent() || tsBlock.get().isEmpty()) { + continue; + } + final Column[] columns = tsBlock.get().getValueColumns(); + checkArgument(columns.length == 1, "Scalar Subquery result should only have one column."); + checkArgument( + tsBlock.get().getPositionCount() == 1 && !tsBlock.get().getColumn(0).isNull(0), + "Scalar Subquery result should only have one row."); + + // column type + DatasetHeader datasetHeader = coordinator.getQueryExecution(queryId).getDatasetHeader(); + List dataTypes = datasetHeader.getRespDataTypes(); + checkArgument(dataTypes.size() == 1, "Scalar Subquery result should only have one column."); + + switch (dataTypes.get(0)) { + case INT32: + case DATE: + return Optional.of(new LongLiteral(Long.toString(columns[0].getInt(0)))); + case INT64: + case TIMESTAMP: + return Optional.of(new LongLiteral(Long.toString(columns[0].getLong(0)))); + case FLOAT: + return Optional.of(new DoubleLiteral(Double.toString(columns[0].getFloat(0)))); + case DOUBLE: + return Optional.of(new DoubleLiteral(Double.toString(columns[0].getDouble(0)))); + case BOOLEAN: + return Optional.of(new BooleanLiteral(Boolean.toString(columns[0].getBoolean(0)))); + case BLOB: + return Optional.of(new BinaryLiteral(columns[0].getBinary(0).toString())); + case TEXT: + case STRING: + return Optional.of(new StringLiteral(columns[0].getBinary(0).toString())); + default: + throw new IllegalArgumentException( + String.format( + "Unsupported data type for scalar subquery 
result: %s", + columns[0].getDataType())); + } + } + } catch (final Throwable throwable) { + t = throwable; + } finally { + coordinator.cleanupQueryExecution(queryId, null, t); + } + return Optional.empty(); + } + + public void clearShadowExpression(Expression expression) { + if (expression instanceof LogicalExpression) { + LogicalExpression logicalExpression = (LogicalExpression) expression; + for (Expression term : logicalExpression.getTerms()) { + clearShadowExpression(term); + } + } else if (expression instanceof NotExpression) { + NotExpression notExpression = (NotExpression) expression; + clearShadowExpression(notExpression.getValue()); + } else if (expression instanceof ComparisonExpression) { + ComparisonExpression comparisonExpression = (ComparisonExpression) expression; + comparisonExpression.clearShadow(); + clearShadowExpression(comparisonExpression.getLeft()); + clearShadowExpression(comparisonExpression.getRight()); + } + } +} diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/node/CteScanNode.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/node/CteScanNode.java new file mode 100644 index 000000000000..48eb58c980d0 --- /dev/null +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/node/CteScanNode.java @@ -0,0 +1,132 @@ +/* + * + * * Licensed to the Apache Software Foundation (ASF) under one + * * or more contributor license agreements. See the NOTICE file + * * distributed with this work for additional information + * * regarding copyright ownership. The ASF licenses this file + * * to you under the Apache License, Version 2.0 (the + * * "License"); you may not use this file except in compliance + * * with the License. You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, + * * software distributed under the License is distributed on an + * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * * KIND, either express or implied. See the License for the + * * specific language governing permissions and limitations + * * under the License. 
+ * + */ + +package org.apache.iotdb.db.queryengine.plan.relational.planner.node; + +import org.apache.iotdb.common.rpc.thrift.TRegionReplicaSet; +import org.apache.iotdb.db.queryengine.common.DataNodeEndPoints; +import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNode; +import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNodeId; +import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanVisitor; +import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.SourceNode; +import org.apache.iotdb.db.queryengine.plan.relational.planner.Symbol; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.QualifiedName; +import org.apache.iotdb.db.utils.cte.CteDataStore; + +import com.google.common.collect.ImmutableList; + +import java.io.DataOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.List; +import java.util.stream.Collectors; + +import static com.google.common.base.Preconditions.checkArgument; + +public class CteScanNode extends SourceNode { + private final QualifiedName qualifiedName; + // Indicate the column this node need to output + private final List outputSymbols; + private final CteDataStore dataStore; + + public CteScanNode( + PlanNodeId id, + QualifiedName qualifiedName, + List outputSymbols, + CteDataStore dataStore) { + super(id); + this.qualifiedName = qualifiedName; + this.outputSymbols = outputSymbols; + this.dataStore = dataStore; + } + + public QualifiedName getQualifiedName() { + return qualifiedName; + } + + public CteDataStore getDataStore() { + return dataStore; + } + + @Override + public R accept(PlanVisitor visitor, C context) { + return visitor.visitCteScan(this, context); + } + + @Override + public void open() throws Exception {} + + @Override + public void setRegionReplicaSet(TRegionReplicaSet regionReplicaSet) {} + + @Override + public void close() throws Exception {} + + @Override + public TRegionReplicaSet getRegionReplicaSet() { + return new TRegionReplicaSet( + null, ImmutableList.of(DataNodeEndPoints.getLocalDataNodeLocation())); + } + + @Override + public List getChildren() { + return ImmutableList.of(); + } + + @Override + public void addChild(PlanNode child) {} + + @Override + public PlanNode clone() { + return new CteScanNode(getPlanNodeId(), qualifiedName, outputSymbols, dataStore); + } + + @Override + public int allowedChildCount() { + return 0; + } + + @Override + public List getOutputColumnNames() { + return outputSymbols.stream().map(Symbol::getName).collect(Collectors.toList()); + } + + @Override + public List getOutputSymbols() { + return outputSymbols; + } + + @Override + public PlanNode replaceChildren(List newChildren) { + checkArgument(newChildren.isEmpty(), "newChildren is not empty"); + return this; + } + + @Override + protected void serializeAttributes(ByteBuffer byteBuffer) { + throw new UnsupportedOperationException(); + } + + @Override + protected void serializeAttributes(DataOutputStream stream) throws IOException { + throw new UnsupportedOperationException(); + } +} diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/optimizations/PushLimitOffsetIntoTableScan.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/optimizations/PushLimitOffsetIntoTableScan.java index 2bd9b778aec2..ce8a002a465d 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/optimizations/PushLimitOffsetIntoTableScan.java +++ 
b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/optimizations/PushLimitOffsetIntoTableScan.java @@ -27,6 +27,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.planner.OrderingScheme; import org.apache.iotdb.db.queryengine.plan.relational.planner.Symbol; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationNode; +import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CteScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.DeviceTableScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.FilterNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.GapFillNode; @@ -237,6 +238,12 @@ public PlanNode visitDeviceTableScan(DeviceTableScanNode node, Context context) return node; } + @Override + public PlanNode visitCteScan(CteScanNode node, Context context) { + context.enablePushDown = false; + return node; + } + @Override public PlanNode visitInformationSchemaTableScan( InformationSchemaTableScanNode node, Context context) { diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/optimizations/TransformSortToStreamSort.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/optimizations/TransformSortToStreamSort.java index 3eb338a650cf..7eb6dfb81c97 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/optimizations/TransformSortToStreamSort.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/optimizations/TransformSortToStreamSort.java @@ -29,6 +29,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.planner.Symbol; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationTableScanNode; +import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CteScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.DeviceTableScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.GroupNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.InformationSchemaTableScanNode; @@ -131,6 +132,12 @@ public PlanNode visitGroup(GroupNode node, Context context) { return visitSingleChildProcess(node, context); } + @Override + public PlanNode visitCteScan(CteScanNode node, Context context) { + context.setCanTransform(false); + return node; + } + @Override public PlanNode visitDeviceTableScan(DeviceTableScanNode node, Context context) { context.setTableScanNode(node); diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/optimizations/UnaliasSymbolReferences.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/optimizations/UnaliasSymbolReferences.java index 3fdbec6b0ccf..1b56b6d1ee41 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/optimizations/UnaliasSymbolReferences.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/planner/optimizations/UnaliasSymbolReferences.java @@ -34,6 +34,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.planner.node.ApplyNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AssignUniqueId; import 
org.apache.iotdb.db.queryengine.plan.relational.planner.node.CorrelatedJoinNode; +import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CteScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.DeviceTableScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.EnforceSingleRowNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.ExceptNode; @@ -266,6 +267,12 @@ public PlanAndMappings visitInformationSchemaTableScan( mapping); } + @Override + public PlanAndMappings visitCteScan(CteScanNode node, UnaliasContext context) { + Map mapping = new HashMap<>(context.getCorrelationMapping()); + return new PlanAndMappings(node, mapping); + } + @Override public PlanAndMappings visitGapFill(GapFillNode node, UnaliasContext context) { PlanAndMappings rewrittenSource = node.getChild().accept(this, context); diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/ComparisonExpression.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/ComparisonExpression.java index c968ac7c2854..793543540784 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/ComparisonExpression.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/ComparisonExpression.java @@ -101,7 +101,9 @@ public Operator negate() { private final Operator operator; private final Expression left; + private Expression shadowLeft; private final Expression right; + private Expression shadowRight; public ComparisonExpression(Operator operator, Expression left, Expression right) { super(null); @@ -131,11 +133,11 @@ public Operator getOperator() { } public Expression getLeft() { - return left; + return shadowLeft != null ? shadowLeft : left; } public Expression getRight() { - return right; + return shadowRight != null ? 
shadowRight : right; } @Override @@ -148,6 +150,22 @@ public List getChildren() { return ImmutableList.of(left, right); } + // set by unfold of subquery + public void setShadowLeft(Expression shadowLeft) { + this.shadowLeft = shadowLeft; + } + + // set by unfold of subquery + public void setShadowRight(Expression shadowRight) { + this.shadowRight = shadowRight; + } + + // called after the stage is finished + public void clearShadow() { + this.shadowLeft = null; + this.shadowRight = null; + } + @Override public boolean equals(Object o) { if (this == o) { diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/LongLiteral.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/LongLiteral.java index a6597e9269ce..930714f1181e 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/LongLiteral.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/LongLiteral.java @@ -103,7 +103,7 @@ public boolean shallowEquals(Node other) { return parsedValue == ((LongLiteral) other).parsedValue; } - private static long parse(String value) { + public static long parse(String value) { value = value.replace("_", ""); if (value.startsWith("0x") || value.startsWith("0X")) { diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/Query.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/Query.java index c5df5f846a01..5bed50b17d89 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/Query.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/Query.java @@ -19,6 +19,8 @@ package org.apache.iotdb.db.queryengine.plan.relational.sql.ast; +import org.apache.iotdb.db.utils.cte.CteDataStore; + import com.google.common.collect.ImmutableList; import org.apache.tsfile.utils.RamUsageEstimator; @@ -40,6 +42,12 @@ public class Query extends Statement { private final Optional orderBy; private final Optional offset; private final Optional limit; + // whether this query needs materialization + private boolean materialized = false; + // whether this query has ever been executed + private boolean isExecuted = false; + // materialization has been executed successfully if cteDataStore is not null + private CteDataStore cteDataStore = null; public Query( Optional with, @@ -102,6 +110,34 @@ public Optional getLimit() { return limit; } + public boolean isMaterialized() { + return materialized; + } + + public void setMaterialized(boolean materialized) { + this.materialized = materialized; + } + + public boolean isExecuted() { + return isExecuted; + } + + public void setExecuted(boolean executed) { + isExecuted = executed; + } + + public boolean isDone() { + return cteDataStore != null; + } + + public void setCteDataStore(CteDataStore cteDataStore) { + this.cteDataStore = cteDataStore; + } + + public CteDataStore getCteDataStore() { + return this.cteDataStore; + } + @Override public R accept(AstVisitor visitor, C context) { return visitor.visitQuery(this, context); diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/WithQuery.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/WithQuery.java index ee215cb1bd98..3d7fb15e4f79 100644 --- 
a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/WithQuery.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/ast/WithQuery.java @@ -38,36 +38,46 @@ public class WithQuery extends Node { private final Identifier name; private final Query query; + private final boolean materialized; @Nullable private final List columnNames; - public WithQuery(Identifier name, Query query) { + public WithQuery(Identifier name, Query query, boolean materialized) { super(null); this.name = name; this.query = requireNonNull(query, "query is null"); this.columnNames = null; + this.materialized = materialized; } - public WithQuery(Identifier name, Query query, List columnNames) { + public WithQuery( + Identifier name, Query query, List columnNames, boolean materialized) { super(null); this.name = name; this.query = requireNonNull(query, "query is null"); this.columnNames = requireNonNull(columnNames, "columnNames is null"); + this.materialized = materialized; } - public WithQuery(NodeLocation location, Identifier name, Query query) { + public WithQuery(NodeLocation location, Identifier name, Query query, boolean materialized) { super(requireNonNull(location, "location is null")); this.name = name; this.query = requireNonNull(query, "query is null"); this.columnNames = null; + this.materialized = materialized; } public WithQuery( - NodeLocation location, Identifier name, Query query, List columnNames) { + NodeLocation location, + Identifier name, + Query query, + List columnNames, + boolean materialized) { super(requireNonNull(location, "location is null")); this.name = name; this.query = requireNonNull(query, "query is null"); this.columnNames = requireNonNull(columnNames, "columnNames is null"); + this.materialized = materialized; } public Identifier getName() { @@ -82,6 +92,10 @@ public Optional> getColumnNames() { return Optional.ofNullable(columnNames); } + public boolean isMaterialized() { + return materialized; + } + @Override public R accept(AstVisitor visitor, C context) { return visitor.visitWithQuery(this, context); @@ -98,13 +112,14 @@ public String toString() { .add("name", name) .add("query", query) .add("columnNames", columnNames) + .add("materialized", materialized) .omitNullValues() .toString(); } @Override public int hashCode() { - return Objects.hash(name, query, columnNames); + return Objects.hash(name, query, columnNames, materialized); } @Override @@ -118,7 +133,8 @@ public boolean equals(Object obj) { WithQuery o = (WithQuery) obj; return Objects.equals(name, o.name) && Objects.equals(query, o.query) - && Objects.equals(columnNames, o.columnNames); + && Objects.equals(columnNames, o.columnNames) + && Objects.equals(materialized, o.materialized); } @Override diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/parser/AstBuilder.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/parser/AstBuilder.java index b041c56b2bc9..937c05b7ac9d 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/parser/AstBuilder.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/relational/sql/parser/AstBuilder.java @@ -2097,10 +2097,17 @@ public Node visitNamedQuery(RelationalSqlParser.NamedQueryContext ctx) { if (ctx.columnAliases() != null) { List columns = visit(ctx.columnAliases().identifier(), Identifier.class); return new WithQuery( - getLocation(ctx), 
(Identifier) visit(ctx.name), (Query) visit(ctx.query()), columns); + getLocation(ctx), + (Identifier) visit(ctx.name), + (Query) visit(ctx.query()), + columns, + ctx.MATERIALIZED() != null); } else { return new WithQuery( - getLocation(ctx), (Identifier) visit(ctx.name), (Query) visit(ctx.query())); + getLocation(ctx), + (Identifier) visit(ctx.name), + (Query) visit(ctx.query()), + ctx.MATERIALIZED() != null); } } diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/statistics/FragmentInstanceStatisticsDrawer.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/statistics/FragmentInstanceStatisticsDrawer.java index 7a8b7097eb9b..4077552c0499 100644 --- a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/statistics/FragmentInstanceStatisticsDrawer.java +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/statistics/FragmentInstanceStatisticsDrawer.java @@ -30,6 +30,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; public class FragmentInstanceStatisticsDrawer { private int maxLineLength = 0; @@ -83,14 +84,21 @@ public List renderFragmentInstances( Map allStatistics, boolean verbose) { List table = new ArrayList<>(planHeader); - addLine( - table, 0, String.format("Fragment Instances Count: %s", instancesToBeRendered.size() - 1)); - for (FragmentInstance instance : instancesToBeRendered) { + List validInstances = + instancesToBeRendered.stream() + .filter( + instance -> { + TFetchFragmentInstanceStatisticsResp statistics = + allStatistics.get(instance.getId()); + return statistics != null && statistics.getDataRegion() != null; + }) + .collect(Collectors.toList()); + + addLine(table, 0, String.format("Fragment Instances Count: %s", validInstances.size())); + for (FragmentInstance instance : validInstances) { List singleFragmentInstanceArea = new ArrayList<>(); TFetchFragmentInstanceStatisticsResp statistics = allStatistics.get(instance.getId()); - if (statistics == null || statistics.getDataRegion() == null) { - continue; - } + addBlankLine(singleFragmentInstanceArea); addLine( singleFragmentInstanceArea, diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/cte/CteDataReader.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/cte/CteDataReader.java new file mode 100644 index 000000000000..10a9cc93d659 --- /dev/null +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/cte/CteDataReader.java @@ -0,0 +1,52 @@ +/* + * + * * Licensed to the Apache Software Foundation (ASF) under one + * * or more contributor license agreements. See the NOTICE file + * * distributed with this work for additional information + * * regarding copyright ownership. The ASF licenses this file + * * to you under the Apache License, Version 2.0 (the + * * "License"); you may not use this file except in compliance + * * with the License. You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, + * * software distributed under the License is distributed on an + * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * * KIND, either express or implied. See the License for the + * * specific language governing permissions and limitations + * * under the License. 
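A small sketch (not part of the patch) of the AST produced by the AstBuilder change above; the Identifier and the inner Query value are assumed inputs. It shows where the new materialized flag originates and how downstream code can read it.

// SQL: WITH cte AS MATERIALIZED (SELECT ...) SELECT ... FROM cte
// visitNamedQuery now forwards ctx.MATERIALIZED() != null into the WithQuery constructor, so the
// analyzer and planner can ask isMaterialized() when deciding whether to buffer the CTE result.
WithQuery cte = new WithQuery(new Identifier("cte"), parsedInnerQuery, /* materialized */ true);
boolean shouldMaterialize = cte.isMaterialized(); // true for the SQL above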
+ * + */ + +package org.apache.iotdb.db.utils.cte; + +import org.apache.iotdb.commons.exception.IoTDBException; + +import org.apache.tsfile.read.common.block.TsBlock; +import org.apache.tsfile.utils.Accountable; + +public interface CteDataReader extends Accountable { + /** + * Check whether there is more data in the CteDataReader. A DiskSpillerReader may run out of + * cached TsBlocks, in which case it needs to read more data from its file and cache it. This + * method should be called before next() to ensure that there is data to read. + * + * @throws IoTDBException if an error occurs while reading data from the fileChannel + */ + boolean hasNext() throws IoTDBException; + + /** + * Output the next cached TsBlock of the CteDataReader; it must be called after hasNext() returns + * true. + * + * @return the next TsBlock + */ + TsBlock next() throws IoTDBException; + + /** + * Close the CteDataReader and release resources. + * + * @throws IoTDBException if an error occurs while closing the fileChannel + */ + void close() throws IoTDBException; +} diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/cte/CteDataStore.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/cte/CteDataStore.java new file mode 100644 index 000000000000..cb37759e2009 --- /dev/null +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/cte/CteDataStore.java @@ -0,0 +1,109 @@ +/* + * + * * Licensed to the Apache Software Foundation (ASF) under one + * * or more contributor license agreements. See the NOTICE file + * * distributed with this work for additional information + * * regarding copyright ownership. The ASF licenses this file + * * to you under the Apache License, Version 2.0 (the + * * "License"); you may not use this file except in compliance + * * with the License. You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, + * * software distributed under the License is distributed on an + * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * * KIND, either express or implied. See the License for the + * * specific language governing permissions and limitations + * * under the License. 
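A minimal sketch (not part of the patch) of how a caller might fill the buffer class defined just below, assuming blocks is the list of TsBlocks produced by executing the CTE query; what the real CteMaterializer does when the limits are hit is not visible in this diff, so the give-up path here is only illustrative.

void tryMaterialize(Query namedQuery, TableSchema tableSchema,
    List<Integer> columnIndex2TsBlockColumnIndexList, List<TsBlock> blocks) {
  CteDataStore store = new CteDataStore(tableSchema, columnIndex2TsBlockColumnIndexList);
  for (TsBlock block : blocks) {
    // addTsBlock() returns false once accepting the block would reach cte_buffer_size_in_bytes
    // or max_rows_in_cte_buffer; this sketch then simply abandons materialization.
    if (!store.addTsBlock(block)) {
      store.clear();
      return;
    }
  }
  namedQuery.setCteDataStore(store); // namedQuery.isDone() now reports true to the planner
}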
+ * + */ + +package org.apache.iotdb.db.utils.cte; + +import org.apache.iotdb.commons.utils.TestOnly; +import org.apache.iotdb.db.conf.IoTDBConfig; +import org.apache.iotdb.db.conf.IoTDBDescriptor; +import org.apache.iotdb.db.queryengine.plan.relational.metadata.TableSchema; + +import org.apache.tsfile.read.common.block.TsBlock; +import org.apache.tsfile.utils.Accountable; +import org.apache.tsfile.utils.RamUsageEstimator; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; + +public class CteDataStore implements Accountable { + private static final long INSTANCE_SIZE = + RamUsageEstimator.shallowSizeOfInstance(CteDataStore.class); + + private final TableSchema tableSchema; + private final List columnIndex2TsBlockColumnIndexList; + + private final List cachedData; + private long cachedBytes; + private int cachedRows; + + // reference count by CteScanReader + private final AtomicInteger count; + + public CteDataStore(TableSchema tableSchema, List columnIndex2TsBlockColumnIndexList) { + this.tableSchema = tableSchema; + this.columnIndex2TsBlockColumnIndexList = columnIndex2TsBlockColumnIndexList; + this.cachedData = new ArrayList<>(); + this.cachedBytes = 0L; + this.cachedRows = 0; + this.count = new AtomicInteger(0); + } + + public boolean addTsBlock(TsBlock tsBlock) { + IoTDBConfig iotConfig = IoTDBDescriptor.getInstance().getConfig(); + long bytesSize = tsBlock.getRetainedSizeInBytes(); + int rows = tsBlock.getPositionCount(); + if (bytesSize + cachedBytes >= iotConfig.getCteBufferSize() + || rows + cachedRows >= iotConfig.getMaxRowsInCteBuffer()) { + return false; + } + cachedData.add(tsBlock); + cachedBytes += bytesSize; + cachedRows += rows; + return true; + } + + public void clear() { + cachedData.clear(); + cachedBytes = 0L; + cachedRows = 0; + } + + public List getCachedData() { + return cachedData; + } + + public TableSchema getTableSchema() { + return tableSchema; + } + + public List getColumnIndex2TsBlockColumnIndexList() { + return columnIndex2TsBlockColumnIndexList; + } + + public int incrementAndGetCount() { + return count.incrementAndGet(); + } + + public int decrementAndGetCount() { + return count.decrementAndGet(); + } + + @Override + public long ramBytesUsed() { + return INSTANCE_SIZE + cachedBytes; + } + + @TestOnly + public int getCount() { + return count.get(); + } +} diff --git a/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/cte/MemoryReader.java b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/cte/MemoryReader.java new file mode 100644 index 000000000000..cd5103f396ee --- /dev/null +++ b/iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/cte/MemoryReader.java @@ -0,0 +1,75 @@ +/* + * + * * Licensed to the Apache Software Foundation (ASF) under one + * * or more contributor license agreements. See the NOTICE file + * * distributed with this work for additional information + * * regarding copyright ownership. The ASF licenses this file + * * to you under the Apache License, Version 2.0 (the + * * "License"); you may not use this file except in compliance + * * with the License. You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, + * * software distributed under the License is distributed on an + * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * * KIND, either express or implied. 
See the License for the + * * specific language governing permissions and limitations + * * under the License. + * + */ + +package org.apache.iotdb.db.utils.cte; + +import org.apache.iotdb.commons.exception.IoTDBException; +import org.apache.iotdb.db.queryengine.plan.planner.memory.MemoryReservationManager; + +import org.apache.tsfile.read.common.block.TsBlock; +import org.apache.tsfile.utils.RamUsageEstimator; + +public class MemoryReader implements CteDataReader { + private static final long INSTANCE_SIZE = + RamUsageEstimator.shallowSizeOfInstance(MemoryReader.class); + + // thread-safe memory manager + private final MemoryReservationManager memoryReservationManager; + // all the data in MemoryReader lies in memory + private final CteDataStore dataStore; + private int tsBlockIndex; + + public MemoryReader(CteDataStore dataStore, MemoryReservationManager memoryReservationManager) { + this.dataStore = dataStore; + this.tsBlockIndex = 0; + this.memoryReservationManager = memoryReservationManager; + if (dataStore.incrementAndGetCount() == 1) { + memoryReservationManager.reserveMemoryCumulatively(dataStore.ramBytesUsed()); + } + } + + @Override + public boolean hasNext() throws IoTDBException { + return dataStore.getCachedData() != null && tsBlockIndex < dataStore.getCachedData().size(); + } + + @Override + public TsBlock next() throws IoTDBException { + if (dataStore.getCachedData() == null || tsBlockIndex >= dataStore.getCachedData().size()) { + return null; + } + return dataStore.getCachedData().get(tsBlockIndex++); + } + + @Override + public void close() throws IoTDBException { + if (dataStore.decrementAndGetCount() == 0) { + memoryReservationManager.releaseMemoryCumulatively(dataStore.ramBytesUsed()); + } + } + + @Override + public long ramBytesUsed() { + // The calculation excludes the memory occupied by the CteDataStore. + // memory allocate/release for CteDataStore is handled during constructor and close + return INSTANCE_SIZE; + } +} diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/execution/operator/CteScanOperatorTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/execution/operator/CteScanOperatorTest.java new file mode 100644 index 000000000000..9a36c47f8950 --- /dev/null +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/execution/operator/CteScanOperatorTest.java @@ -0,0 +1,252 @@ +/* + * + * * Licensed to the Apache Software Foundation (ASF) under one + * * or more contributor license agreements. See the NOTICE file + * * distributed with this work for additional information + * * regarding copyright ownership. The ASF licenses this file + * * to you under the Apache License, Version 2.0 (the + * * "License"); you may not use this file except in compliance + * * with the License. You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, + * * software distributed under the License is distributed on an + * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * * KIND, either express or implied. See the License for the + * * specific language governing permissions and limitations + * * under the License. 
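A minimal consumption sketch (not from the patch) for the MemoryReader above, assuming the caller already holds the shared CteDataStore and a MemoryReservationManager. Memory for the store is reserved by the first reader and released when the last reader closes.

void drain(CteDataStore dataStore, MemoryReservationManager memoryReservationManager)
    throws IoTDBException {
  CteDataReader reader = new MemoryReader(dataStore, memoryReservationManager);
  try {
    while (reader.hasNext()) {
      TsBlock block = reader.next();
      // ... hand the block to the downstream operator / result set ...
    }
  } finally {
    reader.close(); // decrements the shared reference count; the last close frees the reservation
  }
}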
+ * + */ + +package org.apache.iotdb.db.queryengine.execution.operator; + +import org.apache.iotdb.commons.schema.table.column.TsTableColumnCategory; +import org.apache.iotdb.db.queryengine.common.QueryId; +import org.apache.iotdb.db.queryengine.execution.operator.source.relational.CteScanOperator; +import org.apache.iotdb.db.queryengine.plan.planner.memory.ThreadSafeMemoryReservationManager; +import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNodeId; +import org.apache.iotdb.db.queryengine.plan.relational.metadata.ColumnSchema; +import org.apache.iotdb.db.queryengine.plan.relational.metadata.TableSchema; +import org.apache.iotdb.db.utils.cte.CteDataStore; + +import com.google.common.collect.ImmutableList; +import org.apache.tsfile.common.conf.TSFileDescriptor; +import org.apache.tsfile.enums.TSDataType; +import org.apache.tsfile.read.common.block.TsBlock; +import org.apache.tsfile.read.common.block.TsBlockBuilder; +import org.apache.tsfile.read.common.block.column.BinaryColumnBuilder; +import org.apache.tsfile.read.common.block.column.DoubleColumnBuilder; +import org.apache.tsfile.read.common.block.column.TimeColumnBuilder; +import org.apache.tsfile.read.common.type.DoubleType; +import org.apache.tsfile.read.common.type.StringType; +import org.apache.tsfile.read.common.type.TimestampType; +import org.apache.tsfile.utils.Binary; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; + +public class CteScanOperatorTest { + private OperatorContext operatorContext; + private PlanNodeId planNodeId; + private CteDataStore cteDataStore; + private ThreadSafeMemoryReservationManager memoryReservationManager; + private CteScanOperator cteScanOperator; + + @Before + public void setUp() { + // Set up mock objects + operatorContext = mock(OperatorContext.class); + planNodeId = new PlanNodeId("test-plan-node"); + + // Create a simple table schema for testing + TableSchema tableSchema = createTestTableSchema(); + + // Create column index mapping + List columnIndex2TsBlockColumnIndexList = Arrays.asList(0, 1, 2); + + // Initialize CteDataStore + cteDataStore = new CteDataStore(tableSchema, columnIndex2TsBlockColumnIndexList); + + // Add test data to the data store + List testData = createTestTsBlocks(); + for (TsBlock tsBlock : testData) { + cteDataStore.addTsBlock(tsBlock); + } + + memoryReservationManager = new ThreadSafeMemoryReservationManager(new QueryId("1"), "test"); + } + + @After + public void tearDown() throws Exception { + if (cteScanOperator != null) { + cteScanOperator.close(); + } + } + + @Test + public void testConstructor() throws Exception { + cteScanOperator = + new CteScanOperator(operatorContext, planNodeId, cteDataStore, memoryReservationManager); + assertEquals(1, cteDataStore.getCount()); + cteScanOperator.close(); + } + + @Test + public void testEmptyDataStore() throws Exception { + // Create empty data store + TableSchema tableSchema = createTestTableSchema(); + CteDataStore emptyDataStore = new CteDataStore(tableSchema, Arrays.asList(0, 1, 2)); + + cteScanOperator = + new CteScanOperator(operatorContext, planNodeId, emptyDataStore, memoryReservationManager); 
+ // Should not have data + assertFalse(cteScanOperator.hasNext()); + + cteScanOperator.close(); + } + + @Test + public void testNextWithData() throws Exception { + cteScanOperator = + new CteScanOperator(operatorContext, planNodeId, cteDataStore, memoryReservationManager); + // Should have data + assertTrue(cteScanOperator.hasNext()); + TsBlock firstBlock = cteScanOperator.next(); + assertNotNull(firstBlock); + assertEquals(2, firstBlock.getValueColumnCount()); + assertEquals(3, firstBlock.getPositionCount()); + + // Should have data + assertTrue(cteScanOperator.hasNext()); + TsBlock secondBlock = cteScanOperator.next(); + assertNotNull(secondBlock); + assertEquals(2, secondBlock.getValueColumnCount()); + assertEquals(2, secondBlock.getPositionCount()); + + // should return null + TsBlock thirdBlock = cteScanOperator.next(); + assertNull(thirdBlock); + + cteScanOperator.close(); + } + + @Test + public void testIsFinished() throws Exception { + cteScanOperator = + new CteScanOperator(operatorContext, planNodeId, cteDataStore, memoryReservationManager); + + // Initially not finished + assertFalse(cteScanOperator.isFinished()); + // Consume all data + while (cteScanOperator.hasNext()) { + cteScanOperator.next(); + } + // Now should be finished + assertTrue(cteScanOperator.isFinished()); + + cteScanOperator.close(); + } + + @Test + public void testMemory() throws Exception { + cteScanOperator = + new CteScanOperator(operatorContext, planNodeId, cteDataStore, memoryReservationManager); + + long maxReturnSize = TSFileDescriptor.getInstance().getConfig().getMaxTsBlockSizeInBytes(); + assertEquals(maxReturnSize, cteScanOperator.calculateMaxPeekMemory()); + assertEquals(maxReturnSize, cteScanOperator.calculateMaxPeekMemory()); + assertEquals(0L, cteScanOperator.calculateRetainedSizeAfterCallingNext()); + + cteScanOperator.close(); + } + + @Test + public void testMultipleCteScanOperators() throws Exception { + // Test reference counting with multiple operators + CteScanOperator operator1 = + new CteScanOperator(operatorContext, planNodeId, cteDataStore, memoryReservationManager); + assertEquals(1, cteDataStore.getCount()); + CteScanOperator operator2 = + new CteScanOperator(operatorContext, planNodeId, cteDataStore, memoryReservationManager); + assertEquals(2, cteDataStore.getCount()); + + assertEquals(896, cteDataStore.ramBytesUsed()); + + // Both operators should be able to read data + assertTrue(operator1.hasNext()); + assertTrue(operator2.hasNext()); + + // Clean up + operator1.close(); + operator2.close(); + } + + private TableSchema createTestTableSchema() { + List columnSchemas = new ArrayList<>(); + columnSchemas.add( + new ColumnSchema("time", TimestampType.TIMESTAMP, false, TsTableColumnCategory.TIME)); + columnSchemas.add( + new ColumnSchema("name", StringType.STRING, false, TsTableColumnCategory.FIELD)); + columnSchemas.add( + new ColumnSchema("value", DoubleType.DOUBLE, false, TsTableColumnCategory.FIELD)); + + return new TableSchema("test_table", columnSchemas); + } + + private List createTestTsBlocks() { + List blocks = new ArrayList<>(); + + // Create first TsBlock + blocks.add( + createTsBlock( + new long[] {1000L, 2000L, 3000L}, + new String[] {"Alice", "Bob", "Charlie"}, + new double[] {10.5, 20.3, 30.7})); + + // Create second TsBlock + blocks.add( + createTsBlock( + new long[] {4000L, 5000L}, new String[] {"David", "Eve"}, new double[] {40.2, 50.8})); + + return blocks; + } + + private TsBlock createTsBlock(long[] times, String[] names, double[] values) { + TsBlockBuilder 
builder = + new TsBlockBuilder(ImmutableList.of(TSDataType.STRING, TSDataType.DOUBLE)); + + // Time column + TimeColumnBuilder timeColumn = builder.getTimeColumnBuilder(); + for (long time : times) { + timeColumn.writeLong(time); + } + + // Name column + BinaryColumnBuilder nameColumn = (BinaryColumnBuilder) builder.getColumnBuilder(0); + for (String name : names) { + nameColumn.writeBinary(new Binary(name, StandardCharsets.UTF_8)); + } + + // Value column + DoubleColumnBuilder valueColumn = (DoubleColumnBuilder) builder.getColumnBuilder(1); + for (double value : values) { + valueColumn.writeDouble(value); + } + + builder.declarePositions(times.length); + return builder.build(); + } +} diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/AnalyzerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/AnalyzerTest.java index b50c7cbd33b9..6666c180c5e0 100644 --- a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/AnalyzerTest.java +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/AnalyzerTest.java @@ -31,6 +31,7 @@ import org.apache.iotdb.commons.partition.executor.SeriesPartitionExecutor; import org.apache.iotdb.commons.schema.table.InsertNodeMeasurementInfo; import org.apache.iotdb.commons.schema.table.TsTable; +import org.apache.iotdb.db.conf.IoTDBDescriptor; import org.apache.iotdb.db.protocol.session.IClientSession; import org.apache.iotdb.db.protocol.session.InternalClientSession; import org.apache.iotdb.db.queryengine.common.MPPQueryContext; @@ -81,6 +82,7 @@ import com.google.common.collect.ImmutableSet; import org.apache.tsfile.file.metadata.IDeviceID.Factory; import org.apache.tsfile.utils.Binary; +import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.mockito.Mockito; @@ -145,6 +147,11 @@ public class AnalyzerTest { DistributedQueryPlan distributedQueryPlan; DeviceTableScanNode deviceTableScanNode; + @BeforeClass + public static void setUp() { + IoTDBDescriptor.getInstance().getConfig().setDataNodeId(1); + } + @Test public void testMockQuery() throws OperatorNotFoundException { final String sql = diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/InsertIntoQueryTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/InsertIntoQueryTest.java index 8c1a766f4c3f..879356acaee9 100644 --- a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/InsertIntoQueryTest.java +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/InsertIntoQueryTest.java @@ -19,6 +19,7 @@ package org.apache.iotdb.db.queryengine.plan.relational.analyzer; +import org.apache.iotdb.db.conf.IoTDBDescriptor; import org.apache.iotdb.db.queryengine.plan.planner.plan.DistributedQueryPlan; import org.apache.iotdb.db.queryengine.plan.planner.plan.LogicalQueryPlan; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNode; @@ -34,6 +35,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.planner.node.StreamSortNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.TopKNode; +import org.junit.BeforeClass; import org.junit.Test; import static org.apache.iotdb.db.queryengine.plan.relational.analyzer.AnalyzerTest.analyzeSQL; @@ -60,6 +62,11 @@ public class InsertIntoQueryTest { 
TableDistributedPlanner distributionPlanner; DistributedQueryPlan distributedQueryPlan; + @BeforeClass + public static void setUp() { + IoTDBDescriptor.getInstance().getConfig().setDataNodeId(1); + } + @Test public void simpleInsertIntoQuery() { sql = diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/LimitOffsetPushDownTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/LimitOffsetPushDownTest.java index af79ea9a6b9d..fbe814ff25bc 100644 --- a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/LimitOffsetPushDownTest.java +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/LimitOffsetPushDownTest.java @@ -20,6 +20,7 @@ package org.apache.iotdb.db.queryengine.plan.relational.analyzer; import org.apache.iotdb.commons.conf.IoTDBConstant; +import org.apache.iotdb.db.conf.IoTDBDescriptor; import org.apache.iotdb.db.protocol.session.IClientSession; import org.apache.iotdb.db.queryengine.common.QueryId; import org.apache.iotdb.db.queryengine.common.SessionInfo; @@ -39,6 +40,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.planner.node.StreamSortNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.TopKNode; +import org.junit.BeforeClass; import org.junit.Test; import java.time.ZoneId; @@ -79,6 +81,11 @@ public class LimitOffsetPushDownTest { DistributedQueryPlan distributedQueryPlan; DeviceTableScanNode deviceTableScanNode; + @BeforeClass + public static void setUp() { + IoTDBDescriptor.getInstance().getConfig().setDataNodeId(1); + } + // without sort operation, limit can be pushed into TableScan, pushLimitToEachDevice==false @Test public void noOrderByTest() { diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/PushAggregationThroughUnionTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/PushAggregationThroughUnionTest.java index e1702a6f2fe7..722c8ce992d4 100644 --- a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/PushAggregationThroughUnionTest.java +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/PushAggregationThroughUnionTest.java @@ -19,6 +19,7 @@ package org.apache.iotdb.db.queryengine.plan.relational.analyzer; +import org.apache.iotdb.db.conf.IoTDBDescriptor; import org.apache.iotdb.db.queryengine.plan.planner.plan.DistributedQueryPlan; import org.apache.iotdb.db.queryengine.plan.planner.plan.LogicalQueryPlan; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNode; @@ -31,6 +32,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import org.junit.Before; import org.junit.Test; import java.util.Optional; @@ -53,6 +55,10 @@ import static org.junit.Assert.assertEquals; public class PushAggregationThroughUnionTest { + @Before + public void setUp() { + IoTDBDescriptor.getInstance().getConfig().setDataNodeId(1); + } @Test public void UnionAggregationTest() { diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/SortTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/SortTest.java index 047063a1b650..2f6a028ea4a4 100644 --- 
a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/SortTest.java +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/SortTest.java @@ -19,6 +19,7 @@ package org.apache.iotdb.db.queryengine.plan.relational.analyzer; +import org.apache.iotdb.db.conf.IoTDBDescriptor; import org.apache.iotdb.db.queryengine.common.MPPQueryContext; import org.apache.iotdb.db.queryengine.execution.warnings.WarningCollector; import org.apache.iotdb.db.queryengine.plan.planner.plan.DistributedQueryPlan; @@ -41,6 +42,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.planner.node.TopKNode; import org.apache.iotdb.db.queryengine.plan.statement.component.Ordering; +import org.junit.BeforeClass; import org.junit.Test; import java.util.List; @@ -78,6 +80,11 @@ public class SortTest { DistributedQueryPlan distributedQueryPlan; DeviceTableScanNode deviceTableScanNode; + @BeforeClass + public static void setUp() { + IoTDBDescriptor.getInstance().getConfig().setDataNodeId(1); + } + // order by some_ids, time, others; has filter @Test public void someIDColumnTimeOthersSortTest() { diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/SubQueryTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/SubQueryTest.java index da7fc24d684d..156123604b5d 100644 --- a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/SubQueryTest.java +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/analyzer/SubQueryTest.java @@ -19,6 +19,7 @@ package org.apache.iotdb.db.queryengine.plan.relational.analyzer; +import org.apache.iotdb.db.conf.IoTDBDescriptor; import org.apache.iotdb.db.queryengine.plan.planner.plan.DistributedQueryPlan; import org.apache.iotdb.db.queryengine.plan.planner.plan.LogicalQueryPlan; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNode; @@ -37,6 +38,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.planner.node.StreamSortNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.TopKNode; +import org.junit.BeforeClass; import org.junit.Test; import java.util.Arrays; @@ -72,6 +74,11 @@ public class SubQueryTest { DistributedQueryPlan distributedQueryPlan; DeviceTableScanNode deviceTableScanNode; + @BeforeClass + public static void setUp() { + IoTDBDescriptor.getInstance().getConfig().setDataNodeId(1); + } + @Test public void subQueryTest1() { // outer query has limit and sort, diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/CteMaterializerTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/CteMaterializerTest.java new file mode 100644 index 000000000000..3394b1fbb58c --- /dev/null +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/CteMaterializerTest.java @@ -0,0 +1,344 @@ +/* + * + * * Licensed to the Apache Software Foundation (ASF) under one + * * or more contributor license agreements. See the NOTICE file + * * distributed with this work for additional information + * * regarding copyright ownership. The ASF licenses this file + * * to you under the Apache License, Version 2.0 (the + * * "License"); you may not use this file except in compliance + * * with the License. 
You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, + * * software distributed under the License is distributed on an + * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * * KIND, either express or implied. See the License for the + * * specific language governing permissions and limitations + * * under the License. + * + */ + +package org.apache.iotdb.db.queryengine.plan.relational.planner; + +import org.apache.iotdb.common.rpc.thrift.TSStatus; +import org.apache.iotdb.commons.exception.IoTDBException; +import org.apache.iotdb.commons.schema.column.ColumnHeader; +import org.apache.iotdb.db.protocol.session.SessionManager; +import org.apache.iotdb.db.queryengine.common.QueryId; +import org.apache.iotdb.db.queryengine.common.header.DatasetHeader; +import org.apache.iotdb.db.queryengine.plan.Coordinator; +import org.apache.iotdb.db.queryengine.plan.execution.ExecutionResult; +import org.apache.iotdb.db.queryengine.plan.execution.QueryExecution; +import org.apache.iotdb.db.queryengine.plan.planner.plan.LogicalQueryPlan; +import org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.ComparisonExpression; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Expression; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.LongLiteral; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.SymbolReference; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import org.apache.tsfile.enums.TSDataType; +import org.apache.tsfile.read.common.block.TsBlock; +import org.apache.tsfile.read.common.block.column.LongColumn; +import org.apache.tsfile.read.common.block.column.TimeColumn; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.core.classloader.annotations.PowerMockIgnore; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanAssert.assertPlan; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.aggregation; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.aggregationFunction; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.cteScan; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.explainAnalyze; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.filter; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.limit; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.offset; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.output; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.singleGroupingSet; +import static 
org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.sort; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.tableScan; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationNode.Step.SINGLE; +import static org.apache.iotdb.db.queryengine.plan.relational.sql.ast.ComparisonExpression.Operator.GREATER_THAN; +import static org.apache.iotdb.db.queryengine.plan.relational.sql.ast.SortItem.NullOrdering.LAST; +import static org.apache.iotdb.db.queryengine.plan.relational.sql.ast.SortItem.Ordering.ASCENDING; +import static org.powermock.api.mockito.PowerMockito.mockStatic; +import static org.powermock.api.mockito.PowerMockito.when; + +@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"}) +@RunWith(PowerMockRunner.class) +@PrepareForTest({Coordinator.class, SessionManager.class}) +public class CteMaterializerTest { + private static PlanTester planTester; + private static Coordinator mockCoordinator; + + @BeforeClass + public static void prepareEnv() { + planTester = new PlanTester(); + mockStatic(Coordinator.class); + mockStatic(SessionManager.class); + + // Create a mock Coordinator instance + mockCoordinator = Mockito.mock(Coordinator.class); + when(Coordinator.getInstance()).thenReturn(mockCoordinator); + + // Create mock SessionManager + SessionManager mockSessionManager = Mockito.mock(SessionManager.class); + when(SessionManager.getInstance()).thenReturn(mockSessionManager); + + // Mock TSStatus with success status + TSStatus mockStatus = Mockito.mock(TSStatus.class); + when(mockStatus.getCode()).thenReturn(200); // Success status code + + // Create a real ExecutionResult instance + ExecutionResult mockResult = new ExecutionResult(new QueryId("1"), mockStatus); + + // Mock the executeForTableModel method + when(mockCoordinator.executeForTableModel( + Mockito.any(), // Statement + Mockito.any(), // SqlParser + Mockito.any(), // IClientSession + Mockito.anyLong(), // queryId + Mockito.any(), // SessionInfo + Mockito.anyString(), // String + Mockito.any(), // Metadata + Mockito.anyMap(), // Map, CteDataStore> + Mockito.any(), // ExplainType + Mockito.anyLong(), // timeOut + Mockito.anyBoolean())) // userQuery + .thenReturn(mockResult); + } + + @Before + public void setUp() throws IoTDBException { + // Create QueryExecution mock + QueryExecution mockQueryExecution = Mockito.mock(QueryExecution.class); + when(mockQueryExecution.hasNextResult()) + .thenReturn(true) // First call returns true + .thenReturn(false); // Subsequent calls return false + + // Create a real DatasetHeader with time and s1 columns + List columnHeaders = + ImmutableList.of( + new ColumnHeader("time", TSDataType.TIMESTAMP), + new ColumnHeader("s1", TSDataType.INT64)); + DatasetHeader mockDatasetHeader = new DatasetHeader(columnHeaders, false); + when(mockQueryExecution.getDatasetHeader()).thenReturn(mockDatasetHeader); + + // Create a TSBlock with sample data for getBatchResult + long[] timestamps = {1000L, 2000L, 3000L}; + long[] values = {10L, 20L, 30L}; + TimeColumn timeColumn = new TimeColumn(3, timestamps); + LongColumn valueColumn = new LongColumn(3, Optional.empty(), values); + TsBlock sampleTsBlock = new TsBlock(timeColumn, valueColumn); + when(mockQueryExecution.getBatchResult()).thenReturn(Optional.of(sampleTsBlock)); + + // Mock coordinator methods + when(mockCoordinator.getQueryExecution(Mockito.anyLong())).thenReturn(mockQueryExecution); + } + + private void 
mockException() { + CteMaterializer cteMaterializer = Mockito.spy(new CteMaterializer()); + Mockito.doReturn(null) + .when(cteMaterializer) + .fetchCteQueryResult(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); + CteMaterializer.setInstance(cteMaterializer); + } + + @Test + public void testSimpleCte() { + String sql = "with cte1 as materialized (SELECT time, s1 FROM table1) select * from cte1"; + + LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + + PlanMatchPattern cteScan = cteScan("cte1", ImmutableList.of("time", "s1")); + + // Verify full LogicalPlan + /* + * └──OutputNode + * └──CteScanNode + */ + assertPlan(logicalQueryPlan, output(cteScan)); + } + + @Test + public void testFieldFilterCte() { + String sql = + "with cte1 as materialized (SELECT time, s1 FROM table1) select * from cte1 where s1 > 10"; + + LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + + Expression filterPredicate = + new ComparisonExpression(GREATER_THAN, new SymbolReference("s1"), new LongLiteral("10")); + PlanMatchPattern cteScan = cteScan("cte1", ImmutableList.of("time", "s1")); + + // Verify full LogicalPlan + /* + * └──OutputNode + * └──FilterNode + * └──CteScanNode + */ + assertPlan(logicalQueryPlan, output(filter(filterPredicate, cteScan))); + } + + @Test + public void testTimeFilterCte() { + String sql = + "with cte1 as materialized (SELECT time, s1 FROM table1) select * from cte1 where time > 1000"; + + LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + + Expression filterPredicate = + new ComparisonExpression( + GREATER_THAN, new SymbolReference("time"), new LongLiteral("1000")); + PlanMatchPattern cteScan = cteScan("cte1", ImmutableList.of("time", "s1")); + + // Verify full LogicalPlan + /* + * └──OutputNode + * └──FilterNode + * └──CteScanNode + */ + assertPlan(logicalQueryPlan, output(filter(filterPredicate, cteScan))); + } + + @Test + public void testSortCte() { + String sql = + "with cte1 as materialized (SELECT time, s1 FROM table1) select * from cte1 order by s1"; + + LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + + List orderBy = ImmutableList.of(sort("s1", ASCENDING, LAST)); + PlanMatchPattern cteScan = cteScan("cte1", ImmutableList.of("time", "s1")); + + // Verify full LogicalPlan + /* + * └──OutputNode + * └──SortNode + * └──CteScanNode + */ + assertPlan(logicalQueryPlan, output(sort(orderBy, cteScan))); + } + + @Test + public void testLimitOffsetCte() { + String sql = + "with cte1 as materialized (SELECT time, s1 FROM table1) select * from cte1 limit 1 offset 2"; + + LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + + PlanMatchPattern cteScan = cteScan("cte1", ImmutableList.of("time", "s1")); + + // Verify full LogicalPlan + /* + * └──OutputNode + * └──LimitNode + * └──OffsetNode + * └──CteScanNode + */ + assertPlan(logicalQueryPlan, output(offset(2, limit(3, cteScan)))); + } + + @Test + public void testAggCte() { + String sql = + "with cte1 as materialized (SELECT time, s1 FROM table1) select s1, max(time) from cte1 group by s1"; + + LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + + PlanMatchPattern cteScan = cteScan("cte1", ImmutableList.of("time", "s1")); + + // Verify full LogicalPlan + /* + * └──OutputNode + * └──AggregationNode + * └──CteScanNode + */ + assertPlan( + logicalQueryPlan, + output( + aggregation( + singleGroupingSet("s1"), + ImmutableMap.of( + Optional.of("max"), aggregationFunction("max", ImmutableList.of("time"))), + Collections.emptyList(), + Optional.empty(), + 
SINGLE, + cteScan))); + } + + @Test + public void testCteQueryException() { + CteMaterializer originalCteMaterializer = CteMaterializer.getInstance(); + mockException(); + + String sql = "with cte1 as materialized (SELECT time, s1 FROM table1) select * from cte1"; + + LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + + PlanMatchPattern tableScan = + tableScan("testdb.table1", ImmutableList.of("time", "s1"), ImmutableSet.of("time", "s1")); + + // Verify full LogicalPlan + /* + * └──OutputNode + * └──TableScanNode + */ + assertPlan(logicalQueryPlan, output(tableScan)); + + // reset original CteMaterializer + CteMaterializer.setInstance(originalCteMaterializer); + } + + @Test + public void testExplainAnalyze() { + String sql = + "explain analyze with cte1 as materialized (SELECT time, s1 FROM table1) select * from cte1"; + + LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + + PlanMatchPattern cteScan = cteScan("cte1", ImmutableList.of("time", "s1")); + + // Verify full LogicalPlan + /* + * └──OutputNode + * └──ExplainAnalyzeNode + * └──CteScanNode + */ + assertPlan(logicalQueryPlan, output(explainAnalyze(cteScan))); + } + + /** + * This test primarily ensures code coverage: materializeCTE.handleCteExplainResults & + * materializeCTE.fetchCteQueryResult + */ + @Test + public void testExplain() { + String sql = + "with cte1 as (select time, s1 from table1), " + + "cte2 as materialized (select * from cte1) select * from cte2"; + + LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql, true); + + PlanMatchPattern cteScan = cteScan("cte2", ImmutableList.of("time", "s1")); + + // Verify full LogicalPlan + /* + * └──OutputNode + * └──CteScanNode + */ + assertPlan(logicalQueryPlan, output(cteScan)); + } +} diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/CteSubqueryTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/CteSubqueryTest.java new file mode 100644 index 000000000000..59fbc1a1f8d9 --- /dev/null +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/CteSubqueryTest.java @@ -0,0 +1,168 @@ +/* + * + * * Licensed to the Apache Software Foundation (ASF) under one + * * or more contributor license agreements. See the NOTICE file + * * distributed with this work for additional information + * * regarding copyright ownership. The ASF licenses this file + * * to you under the Apache License, Version 2.0 (the + * * "License"); you may not use this file except in compliance + * * with the License. You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, + * * software distributed under the License is distributed on an + * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * * KIND, either express or implied. See the License for the + * * specific language governing permissions and limitations + * * under the License. 
+ * + */ + +package org.apache.iotdb.db.queryengine.plan.relational.planner; + +import org.apache.iotdb.common.rpc.thrift.TSStatus; +import org.apache.iotdb.commons.exception.IoTDBException; +import org.apache.iotdb.commons.schema.column.ColumnHeader; +import org.apache.iotdb.db.protocol.session.SessionManager; +import org.apache.iotdb.db.queryengine.common.QueryId; +import org.apache.iotdb.db.queryengine.common.header.DatasetHeader; +import org.apache.iotdb.db.queryengine.plan.Coordinator; +import org.apache.iotdb.db.queryengine.plan.execution.ExecutionResult; +import org.apache.iotdb.db.queryengine.plan.execution.QueryExecution; +import org.apache.iotdb.db.queryengine.plan.planner.plan.LogicalQueryPlan; +import org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.ComparisonExpression; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.LongLiteral; +import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.SymbolReference; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import org.apache.tsfile.enums.TSDataType; +import org.apache.tsfile.read.common.block.TsBlock; +import org.apache.tsfile.read.common.block.column.LongColumn; +import org.apache.tsfile.read.common.block.column.TimeColumn; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.core.classloader.annotations.PowerMockIgnore; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +import java.util.List; +import java.util.Optional; + +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanAssert.assertPlan; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.collect; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.exchange; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.output; +import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.tableScan; +import static org.apache.iotdb.db.queryengine.plan.relational.sql.ast.ComparisonExpression.Operator.EQUAL; +import static org.powermock.api.mockito.PowerMockito.mockStatic; +import static org.powermock.api.mockito.PowerMockito.when; + +@PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"}) +@RunWith(PowerMockRunner.class) +@PrepareForTest({Coordinator.class, SessionManager.class}) +public class CteSubqueryTest { + private PlanTester planTester; + + @Before + public void setUp() throws Exception { + planTester = new PlanTester(); + mockExecuteForTableModel(); + } + + /** + * This test primarily ensures code coverage: + * PredicateWithUncorrelatedScalarSubqueryReconstructor.fetchUncorrelatedSubqueryResultForPredicate + */ + @Test + public void testCteSubquery() throws IoTDBException { + mockExecuteForTableModel(); + + String sql = + "with cte1 as (select time, s2 from table1) select s1 from table1 " + + "where s1 = (select s2 from cte1)"; + + LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + + PlanMatchPattern tableScan = + tableScan( + "testdb.table1", + ImmutableList.of("s1"), + ImmutableSet.of("s1"), + new ComparisonExpression(EQUAL, new SymbolReference("s1"), new LongLiteral("1"))); + + // Verify 
full LogicalPlan + /* + * └──OutputNode + * └──DeviceTableScanNode + */ + assertPlan(logicalQueryPlan, output(tableScan)); + + // Verify DistributionPlan + assertPlan(planTester.getFragmentPlan(0), output(collect(exchange(), exchange(), exchange()))); + + assertPlan(planTester.getFragmentPlan(1), tableScan); + assertPlan(planTester.getFragmentPlan(2), tableScan); + assertPlan(planTester.getFragmentPlan(3), tableScan); + } + + private void mockExecuteForTableModel() throws IoTDBException { + mockStatic(Coordinator.class); + mockStatic(SessionManager.class); + + // Create a mock Coordinator instance + Coordinator mockCoordinator = Mockito.mock(Coordinator.class); + when(Coordinator.getInstance()).thenReturn(mockCoordinator); + + // Create mock SessionManager + SessionManager mockSessionManager = Mockito.mock(SessionManager.class); + when(SessionManager.getInstance()).thenReturn(mockSessionManager); + + // Mock TSStatus with success status + TSStatus mockStatus = Mockito.mock(TSStatus.class); + when(mockStatus.getCode()).thenReturn(200); // Success status code + + // Create a real ExecutionResult instance + ExecutionResult mockResult = new ExecutionResult(new QueryId("1"), mockStatus); + + // Mock the executeForTableModel method + when(mockCoordinator.executeForTableModel( + Mockito.any(), // Statement + Mockito.any(), // SqlParser + Mockito.any(), // IClientSession + Mockito.anyLong(), // queryId + Mockito.any(), // SessionInfo + Mockito.anyString(), // String + Mockito.any(), // Metadata + Mockito.anyMap(), // Map, CteDataStore> + Mockito.any(), // ExplainType + Mockito.anyLong(), // timeOut + Mockito.anyBoolean())) // userQuery + .thenReturn(mockResult); + + // Create QueryExecution mock + QueryExecution mockQueryExecution = Mockito.mock(QueryExecution.class); + when(mockQueryExecution.hasNextResult()) + .thenReturn(true) // First call returns true + .thenReturn(false); // Subsequent calls return false + + // Create a real DatasetHeader with time and s1 columns + List columnHeaders = ImmutableList.of(new ColumnHeader("s2", TSDataType.INT64)); + DatasetHeader mockDatasetHeader = new DatasetHeader(columnHeaders, false); + when(mockQueryExecution.getDatasetHeader()).thenReturn(mockDatasetHeader); + + // Create a TSBlock with sample data for getBatchResult + + TimeColumn timeColumn = new TimeColumn(1, new long[] {1000L}); + LongColumn valueColumn = new LongColumn(1, Optional.empty(), new long[] {1L}); + TsBlock sampleTsBlock = new TsBlock(timeColumn, valueColumn); + when(mockQueryExecution.getBatchResult()).thenReturn(Optional.of(sampleTsBlock)); + + // Mock coordinator methods + when(mockCoordinator.getQueryExecution(Mockito.anyLong())).thenReturn(mockQueryExecution); + } +} diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/PlanTester.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/PlanTester.java index 850755702016..d7d4169a3603 100644 --- a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/PlanTester.java +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/PlanTester.java @@ -21,8 +21,10 @@ import org.apache.iotdb.common.rpc.thrift.TDataNodeLocation; import org.apache.iotdb.commons.conf.IoTDBConstant; +import org.apache.iotdb.db.conf.IoTDBDescriptor; import org.apache.iotdb.db.protocol.session.IClientSession; import org.apache.iotdb.db.queryengine.common.MPPQueryContext; +import 
org.apache.iotdb.db.queryengine.common.MPPQueryContext.ExplainType; import org.apache.iotdb.db.queryengine.common.QueryId; import org.apache.iotdb.db.queryengine.common.SessionInfo; import org.apache.iotdb.db.queryengine.execution.warnings.WarningCollector; @@ -37,6 +39,7 @@ import org.apache.iotdb.db.queryengine.plan.relational.execution.querystats.PlanOptimizersStatsCollector; import org.apache.iotdb.db.queryengine.plan.relational.metadata.Metadata; import org.apache.iotdb.db.queryengine.plan.relational.planner.distribute.TableDistributedPlanner; +import org.apache.iotdb.db.queryengine.plan.relational.planner.ir.PredicateWithUncorrelatedScalarSubqueryReconstructor; import org.apache.iotdb.db.queryengine.plan.relational.planner.optimizations.DataNodeLocationSupplierFactory; import org.apache.iotdb.db.queryengine.plan.relational.planner.optimizations.PlanOptimizer; import org.apache.iotdb.db.queryengine.plan.relational.security.AllowAllAccessControl; @@ -99,23 +102,47 @@ public PlanTester() { public PlanTester(Metadata metadata) { this.metadata = metadata; + IoTDBDescriptor.getInstance().getConfig().setDataNodeId(1); } public LogicalQueryPlan createPlan(String sql) { - return createPlan(sessionInfo, sql, NOOP, createPlanOptimizersStatsCollector()); + return createPlan(sessionInfo, sql, NOOP, createPlanOptimizersStatsCollector(), false, null); + } + + public LogicalQueryPlan createPlan( + String sql, + PredicateWithUncorrelatedScalarSubqueryReconstructor + predicateWithUncorrelatedScalarSubqueryReconstructor) { + return createPlan( + sessionInfo, + sql, + NOOP, + createPlanOptimizersStatsCollector(), + false, + predicateWithUncorrelatedScalarSubqueryReconstructor); + } + + public LogicalQueryPlan createPlan(String sql, boolean explain) { + return createPlan(sessionInfo, sql, NOOP, createPlanOptimizersStatsCollector(), explain, null); } public LogicalQueryPlan createPlan(SessionInfo sessionInfo, String sql) { - return createPlan(sessionInfo, sql, NOOP, createPlanOptimizersStatsCollector()); + return createPlan(sessionInfo, sql, NOOP, createPlanOptimizersStatsCollector(), false, null); } public LogicalQueryPlan createPlan( SessionInfo sessionInfo, String sql, WarningCollector warningCollector, - PlanOptimizersStatsCollector planOptimizersStatsCollector) { + PlanOptimizersStatsCollector planOptimizersStatsCollector, + boolean explain, + PredicateWithUncorrelatedScalarSubqueryReconstructor + predicateWithUncorrelatedScalarSubqueryReconstructor) { distributedQueryPlan = null; MPPQueryContext context = new MPPQueryContext(sql, queryId, sessionInfo, null, null); + if (explain) { + context.setExplainType(ExplainType.EXPLAIN); + } Analysis analysis = analyze(sql, metadata, context); this.analysis = analysis; @@ -124,6 +151,10 @@ public LogicalQueryPlan createPlan( TableLogicalPlanner logicalPlanner = new TableLogicalPlanner( context, metadata, sessionInfo, symbolAllocator, WarningCollector.NOOP); + if (predicateWithUncorrelatedScalarSubqueryReconstructor != null) { + logicalPlanner.setPredicateWithUncorrelatedScalarSubqueryReconstructor( + predicateWithUncorrelatedScalarSubqueryReconstructor); + } plan = logicalPlanner.plan(analysis); diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/UncorrelatedSubqueryTest.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/UncorrelatedSubqueryTest.java index eebd4e158439..3a0a0acd2036 100644 --- 
a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/UncorrelatedSubqueryTest.java +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/UncorrelatedSubqueryTest.java @@ -21,6 +21,7 @@ import org.apache.iotdb.db.queryengine.plan.planner.plan.LogicalQueryPlan; import org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern; +import org.apache.iotdb.db.queryengine.plan.relational.planner.ir.PredicateWithUncorrelatedScalarSubqueryReconstructor; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.JoinNode; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.ComparisonExpression; import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Expression; @@ -31,7 +32,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import org.junit.Before; import org.junit.Test; +import org.mockito.Mockito; import java.util.Collections; import java.util.Optional; @@ -39,11 +42,8 @@ import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanAssert.assertPlan; import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.aggregation; import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.aggregationFunction; -import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.aggregationTableScan; -import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.any; import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.anyTree; import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.collect; -import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.enforceSingleRow; import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.exchange; import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.filter; import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.join; @@ -55,7 +55,6 @@ import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.sort; import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.PlanMatchPattern.tableScan; import static org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationNode.Step.FINAL; -import static org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationNode.Step.INTERMEDIATE; import static org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationNode.Step.PARTIAL; import static org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationNode.Step.SINGLE; import static org.apache.iotdb.db.queryengine.plan.relational.sql.ast.ComparisonExpression.Operator.EQUAL; @@ -63,182 +62,82 @@ import static org.apache.iotdb.db.queryengine.plan.relational.sql.ast.ComparisonExpression.Operator.LESS_THAN_OR_EQUAL; public class UncorrelatedSubqueryTest { + private PlanTester planTester; + private PredicateWithUncorrelatedScalarSubqueryReconstructor + predicateWithUncorrelatedScalarSubquery; + + @Before + public void setUp() throws Exception { + planTester = new PlanTester(); + mockPredicateWithUncorrelatedScalarSubquery(); + } + + private void 
mockPredicateWithUncorrelatedScalarSubquery() { + predicateWithUncorrelatedScalarSubquery = + Mockito.spy(new PredicateWithUncorrelatedScalarSubqueryReconstructor()); + Mockito.when( + predicateWithUncorrelatedScalarSubquery.fetchUncorrelatedSubqueryResultForPredicate( + Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any())) + .thenReturn(Optional.of(new LongLiteral("1"))); + } @Test public void testUncorrelatedScalarSubqueryInWhereClause() { - PlanTester planTester = new PlanTester(); - String sql = "SELECT s1 FROM table1 where s1 = (select max(s1) from table1)"; - LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); - - Expression filterPredicate = - new ComparisonExpression(EQUAL, new SymbolReference("s1"), new SymbolReference("max")); + LogicalQueryPlan logicalQueryPlan = + planTester.createPlan(sql, predicateWithUncorrelatedScalarSubquery); PlanMatchPattern tableScan = - tableScan("testdb.table1", ImmutableList.of("s1"), ImmutableSet.of("s1")); + tableScan( + "testdb.table1", + ImmutableList.of("s1"), + ImmutableSet.of("s1"), + new ComparisonExpression(EQUAL, new SymbolReference("s1"), new LongLiteral("1"))); // Verify full LogicalPlan /* - * └──OutputNode - * └──ProjectNode - * └──FilterNode - * └──JoinNode - * |──TableScanNode - * ├──AggregationNode - * │ └──AggregationTableScanNode - - */ - assertPlan( - logicalQueryPlan, - output( - project( - filter( - filterPredicate, - join( - JoinNode.JoinType.INNER, - builder -> - builder - .left(tableScan) - .right( - aggregation( - singleGroupingSet(), - ImmutableMap.of( - Optional.of("max"), - aggregationFunction("max", ImmutableList.of("max_9"))), - Collections.emptyList(), - Optional.empty(), - FINAL, - aggregationTableScan( - singleGroupingSet(), - Collections.emptyList(), - Optional.empty(), - PARTIAL, - "testdb.table1", - ImmutableList.of("max_9"), - ImmutableSet.of("s1_6"))))))))); + * └──OutputNode + * └──DeviceTableScanNode + */ + assertPlan(logicalQueryPlan, output(tableScan)); // Verify DistributionPlan - assertPlan( - planTester.getFragmentPlan(0), - output( - project( - filter( - filterPredicate, - join( - JoinNode.JoinType.INNER, - builder -> builder.left(exchange()).right(exchange())))))); + assertPlan(planTester.getFragmentPlan(0), output(collect(exchange(), exchange(), exchange()))); - assertPlan(planTester.getFragmentPlan(1), collect(exchange(), exchange(), exchange())); + assertPlan(planTester.getFragmentPlan(1), tableScan); assertPlan(planTester.getFragmentPlan(2), tableScan); assertPlan(planTester.getFragmentPlan(3), tableScan); - assertPlan(planTester.getFragmentPlan(4), tableScan); - - assertPlan( - planTester.getFragmentPlan(5), - aggregation( - singleGroupingSet(), - ImmutableMap.of( - Optional.of("max"), aggregationFunction("max", ImmutableList.of("max_10"))), - Collections.emptyList(), - Optional.empty(), - FINAL, - collect(exchange(), exchange(), exchange()))); - - assertPlan( - planTester.getFragmentPlan(6), - aggregation( - singleGroupingSet(), - ImmutableMap.of( - Optional.of("max_10"), aggregationFunction("max", ImmutableList.of("max_9"))), - Collections.emptyList(), - Optional.empty(), - INTERMEDIATE, - aggregationTableScan( - singleGroupingSet(), - Collections.emptyList(), - Optional.empty(), - PARTIAL, - "testdb.table1", - ImmutableList.of("max_9"), - ImmutableSet.of("s1_6")))); - - assertPlan( - planTester.getFragmentPlan(7), - aggregation( - singleGroupingSet(), - ImmutableMap.of( - Optional.of("max_10"), aggregationFunction("max", ImmutableList.of("max_9"))), - Collections.emptyList(), 
- Optional.empty(), - INTERMEDIATE, - aggregationTableScan( - singleGroupingSet(), - Collections.emptyList(), - Optional.empty(), - PARTIAL, - "testdb.table1", - ImmutableList.of("max_9"), - ImmutableSet.of("s1_6")))); - - assertPlan( - planTester.getFragmentPlan(8), - aggregation( - singleGroupingSet(), - ImmutableMap.of( - Optional.of("max_10"), aggregationFunction("max", ImmutableList.of("max_9"))), - Collections.emptyList(), - Optional.empty(), - INTERMEDIATE, - aggregationTableScan( - singleGroupingSet(), - Collections.emptyList(), - Optional.empty(), - PARTIAL, - "testdb.table1", - ImmutableList.of("max_9"), - ImmutableSet.of("s1_6")))); } @Test public void testUncorrelatedScalarSubqueryInWhereClauseWithEnforceSingleRowNode() { - PlanTester planTester = new PlanTester(); - String sql = "SELECT s1 FROM table1 where s1 = (select s2 from table1)"; - LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + LogicalQueryPlan logicalQueryPlan = + planTester.createPlan(sql, predicateWithUncorrelatedScalarSubquery); - PlanMatchPattern tableScan1 = - tableScan("testdb.table1", ImmutableList.of("s1"), ImmutableSet.of("s1")); + PlanMatchPattern tableScan = + tableScan( + "testdb.table1", + ImmutableList.of("s1"), + ImmutableSet.of("s1"), + new ComparisonExpression(EQUAL, new SymbolReference("s1"), new LongLiteral("1"))); // Verify LogicalPlan /* - * └──OutputNode - * └──ProjectNode - * └──FilterNode - * └──JoinNode - * |──TableScanNode - * ├──EnforceSingleRowNode - * │ └──TableScanNode - - */ - assertPlan( - logicalQueryPlan, - output( - project( - anyTree( - join( - JoinNode.JoinType.INNER, - builder -> builder.left(tableScan1).right(enforceSingleRow(any()))))))); + * └──OutputNode + * └──DeviceTableScanNode + */ + assertPlan(logicalQueryPlan, output(tableScan)); } @Test public void testUncorrelatedInPredicateSubquery() { - PlanTester planTester = new PlanTester(); - String sql = "SELECT s1 FROM table1 where s1 in (select s1 from table1)"; - LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + LogicalQueryPlan logicalQueryPlan = + planTester.createPlan(sql, predicateWithUncorrelatedScalarSubquery); Expression filterPredicate = new SymbolReference("expr"); @@ -287,11 +186,10 @@ public void testUncorrelatedInPredicateSubquery() { @Test public void testUncorrelatedNotInPredicateSubquery() { - PlanTester planTester = new PlanTester(); - String sql = "SELECT s1 FROM table1 where s1 not in (select s1 from table1)"; - LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + LogicalQueryPlan logicalQueryPlan = + planTester.createPlan(sql, predicateWithUncorrelatedScalarSubquery); Expression filterPredicate = new NotExpression(new SymbolReference("expr")); @@ -323,11 +221,10 @@ public void testUncorrelatedNotInPredicateSubquery() { @Test public void testUncorrelatedAnyComparisonSubquery() { - PlanTester planTester = new PlanTester(); - String sql = "SELECT s1 FROM table1 where s1 > any (select s1 from table1)"; - LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + LogicalQueryPlan logicalQueryPlan = + planTester.createPlan(sql, predicateWithUncorrelatedScalarSubquery); PlanMatchPattern tableScan1 = tableScan("testdb.table1", ImmutableList.of("s1"), ImmutableSet.of("s1")); @@ -452,11 +349,10 @@ public void testUncorrelatedAnyComparisonSubquery() { @Test public void testUncorrelatedEqualsSomeComparisonSubquery() { - PlanTester planTester = new PlanTester(); - String sql = "SELECT s1 FROM table1 where s1 = some (select s1 from table1)"; - LogicalQueryPlan 
logicalQueryPlan = planTester.createPlan(sql); + LogicalQueryPlan logicalQueryPlan = + planTester.createPlan(sql, predicateWithUncorrelatedScalarSubquery); Expression filterPredicate = new SymbolReference("expr"); @@ -488,11 +384,10 @@ public void testUncorrelatedEqualsSomeComparisonSubquery() { @Test public void testUncorrelatedAllComparisonSubquery() { - PlanTester planTester = new PlanTester(); - String sql = "SELECT s1 FROM table1 where s1 != all (select s1 from table1)"; - LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + LogicalQueryPlan logicalQueryPlan = + planTester.createPlan(sql, predicateWithUncorrelatedScalarSubquery); PlanMatchPattern tableScan1 = tableScan("testdb.table1", ImmutableList.of("s1"), ImmutableSet.of("s1")); @@ -519,11 +414,10 @@ public void testUncorrelatedAllComparisonSubquery() { @Test public void testUncorrelatedExistsSubquery() { - PlanTester planTester = new PlanTester(); - String sql = "SELECT s1 FROM table1 where exists(select s2 from table2)"; - LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + LogicalQueryPlan logicalQueryPlan = + planTester.createPlan(sql, predicateWithUncorrelatedScalarSubquery); PlanMatchPattern tableScan1 = tableScan("testdb.table1", ImmutableList.of("s1"), ImmutableSet.of("s1")); @@ -568,11 +462,10 @@ public void testUncorrelatedExistsSubquery() { @Test public void testUncorrelatedNotExistsSubquery() { - PlanTester planTester = new PlanTester(); - String sql = "SELECT s1 FROM table1 where not exists(select s2 from table2)"; - LogicalQueryPlan logicalQueryPlan = planTester.createPlan(sql); + LogicalQueryPlan logicalQueryPlan = + planTester.createPlan(sql, predicateWithUncorrelatedScalarSubquery); PlanMatchPattern tableScan1 = tableScan("testdb.table1", ImmutableList.of("s1"), ImmutableSet.of("s1")); @@ -616,4 +509,38 @@ public void testUncorrelatedNotExistsSubquery() { SINGLE, tableScan2))))))); } + + @Test + public void testUncorrelatedHavingSubquery() { + String sql = + "SELECT min(time) as min FROM table1 group by s1 having min(time) > (select max(time) from table2)"; + LogicalQueryPlan logicalQueryPlan = + planTester.createPlan(sql, predicateWithUncorrelatedScalarSubquery); + + PlanMatchPattern tableScan = + tableScan("testdb.table1", ImmutableList.of("time", "s1"), ImmutableSet.of("time", "s1")); + PlanMatchPattern agg = + aggregation( + singleGroupingSet("s1"), + ImmutableMap.of( + Optional.of("min"), aggregationFunction("min", ImmutableList.of("time"))), + ImmutableList.of(), + Optional.empty(), + SINGLE, + tableScan); + + Expression filterPredicate = + new ComparisonExpression(GREATER_THAN, new SymbolReference("min"), new LongLiteral("1")); + + // Verify full LogicalPlan + /* + * └──OutputNode + * └──FilterNode + * ├──ProjectNode + * └──Aggregation + * └──TableScanNode + */ + + assertPlan(logicalQueryPlan, output(filter(filterPredicate, project(agg)))); + } } diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/AliasMatcher.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/AliasMatcher.java index 272b1a613412..178a9e1b3e66 100644 --- a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/AliasMatcher.java +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/AliasMatcher.java @@ -22,7 +22,9 @@ import org.apache.iotdb.db.queryengine.common.SessionInfo; import 
org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNode; import org.apache.iotdb.db.queryengine.plan.relational.metadata.Metadata; +import org.apache.iotdb.db.queryengine.plan.relational.metadata.TableSchema; import org.apache.iotdb.db.queryengine.plan.relational.planner.Symbol; +import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CteScanNode; import java.util.Optional; @@ -56,9 +58,15 @@ public boolean shapeMatches(PlanNode node) { public MatchResult detailMatches( PlanNode node, SessionInfo sessionInfo, Metadata metadata, SymbolAliases symbolAliases) { Optional symbol = matcher.getAssignedSymbol(node, sessionInfo, metadata, symbolAliases); + if (!symbol.isPresent() && node instanceof CteScanNode) { + TableSchema tableSchema = ((CteScanNode) node).getDataStore().getTableSchema(); + symbol = matcher.getCteSymbol(tableSchema); + } + if (symbol.isPresent() && alias.isPresent()) { return match(alias.get(), symbol.get().toSymbolReference()); } + return new MatchResult(symbol.isPresent()); } diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/ColumnReference.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/ColumnReference.java index a93176c19a84..cff4126cde71 100644 --- a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/ColumnReference.java +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/ColumnReference.java @@ -23,6 +23,7 @@ import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNode; import org.apache.iotdb.db.queryengine.plan.relational.metadata.ColumnSchema; import org.apache.iotdb.db.queryengine.plan.relational.metadata.Metadata; +import org.apache.iotdb.db.queryengine.plan.relational.metadata.TableSchema; import org.apache.iotdb.db.queryengine.plan.relational.planner.Symbol; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationTableScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.TableScanNode; @@ -90,6 +91,18 @@ private Optional getAssignedSymbol( return result; } + @Override + public Optional getCteSymbol(TableSchema tableSchema) { + Optional result = Optional.empty(); + for (ColumnSchema columnSchema : tableSchema.getColumns()) { + if (columnSchema.getName().equals(columnName)) { + result = Optional.of(Symbol.of(columnName)); + break; + } + } + return result; + } + @Override public String toString() { return format("Column %s:%s", tableName, columnName); diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/CteScanMatcher.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/CteScanMatcher.java new file mode 100644 index 000000000000..8db77ad51ae8 --- /dev/null +++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/CteScanMatcher.java @@ -0,0 +1,86 @@ +/* + * + * * Licensed to the Apache Software Foundation (ASF) under one + * * or more contributor license agreements. See the NOTICE file + * * distributed with this work for additional information + * * regarding copyright ownership. The ASF licenses this file + * * to you under the Apache License, Version 2.0 (the + * * "License"); you may not use this file except in compliance + * * with the License. 
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/CteScanMatcher.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/CteScanMatcher.java
new file mode 100644
index 000000000000..8db77ad51ae8
--- /dev/null
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/CteScanMatcher.java
@@ -0,0 +1,86 @@
+/*
+ *
+ * * Licensed to the Apache Software Foundation (ASF) under one
+ * * or more contributor license agreements. See the NOTICE file
+ * * distributed with this work for additional information
+ * * regarding copyright ownership. The ASF licenses this file
+ * * to you under the Apache License, Version 2.0 (the
+ * * "License"); you may not use this file except in compliance
+ * * with the License. You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing,
+ * * software distributed under the License is distributed on an
+ * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * * KIND, either express or implied. See the License for the
+ * * specific language governing permissions and limitations
+ * * under the License.
+ *
+ */
+
+package org.apache.iotdb.db.queryengine.plan.relational.planner.assertions;
+
+import org.apache.iotdb.db.queryengine.common.SessionInfo;
+import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNode;
+import org.apache.iotdb.db.queryengine.plan.relational.metadata.Metadata;
+import org.apache.iotdb.db.queryengine.plan.relational.planner.Symbol;
+import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CteScanNode;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static com.google.common.base.MoreObjects.toStringHelper;
+import static com.google.common.base.Preconditions.checkState;
+import static org.apache.iotdb.db.queryengine.plan.relational.planner.assertions.MatchResult.NO_MATCH;
+
+public class CteScanMatcher implements Matcher {
+  protected final String expectedCteName;
+  // this field empty means no need to match
+  protected final List<String> outputSymbols;
+
+  public CteScanMatcher(String expectedCteName, List<String> outputSymbols) {
+    this.expectedCteName = expectedCteName;
+    this.outputSymbols = outputSymbols;
+  }
+
+  @Override
+  public boolean shapeMatches(PlanNode node) {
+    return node instanceof CteScanNode;
+  }
+
+  @Override
+  public MatchResult detailMatches(
+      PlanNode node, SessionInfo sessionInfo, Metadata metadata, SymbolAliases symbolAliases) {
+    checkState(
+        shapeMatches(node),
+        "Plan testing framework error: shapeMatches returned false in detailMatches in %s",
+        this.getClass().getName());
+
+    CteScanNode cteScanNode = (CteScanNode) node;
+    String actualCteName = cteScanNode.getQualifiedName().toString();
+
+    if (!expectedCteName.equalsIgnoreCase(actualCteName)) {
+      return NO_MATCH;
+    }
+
+    if (!outputSymbols.isEmpty()
+        && !outputSymbols.equals(
+            cteScanNode.getOutputSymbols().stream()
+                .map(Symbol::getName)
+                .collect(Collectors.toList()))) {
+      return NO_MATCH;
+    }
+
+    return new MatchResult(true);
+  }
+
+  @Override
+  public String toString() {
+    return toStringHelper(this)
+        .omitNullValues()
+        .add("expectedCteName", expectedCteName)
+        .add("outputSymbols", outputSymbols)
+        .toString();
+  }
+}
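A hedged usage sketch for the new matcher. node(...), with(...) and the Immutable* literals are the existing plan-matching utilities used elsewhere in these files; the CTE name and column names are examples only.

    // Hypothetical: match any CteScanNode named "cte1" that outputs exactly (time, voltage).
    PlanMatchPattern cteByNameAndColumns =
        node(CteScanNode.class)
            .with(new CteScanMatcher("cte1", ImmutableList.of("time", "voltage")));

    // An empty symbol list disables the output check, so this matches on the CTE name alone
    // (see the outputSymbols.isEmpty() guard in detailMatches above).
    PlanMatchPattern cteByNameOnly =
        node(CteScanNode.class).with(new CteScanMatcher("cte1", ImmutableList.of()));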
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/DeviceTableScanMatcher.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/DeviceTableScanMatcher.java
index 061286ae4a62..9998146446be 100644
--- a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/DeviceTableScanMatcher.java
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/DeviceTableScanMatcher.java
@@ -21,6 +21,7 @@
 import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNode;
 import org.apache.iotdb.db.queryengine.plan.relational.planner.node.DeviceTableScanNode;
+import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Expression;
 
 import java.util.List;
 import java.util.Optional;
@@ -38,6 +39,15 @@ public DeviceTableScanMatcher(
     super(expectedTableName, hasTableLayout, outputSymbols, assignmentsKeys);
   }
 
+  public DeviceTableScanMatcher(
+      String expectedTableName,
+      Optional<Boolean> hasTableLayout,
+      List<String> outputSymbols,
+      Set<String> assignmentsKeys,
+      Expression pushDownPredicate) {
+    super(expectedTableName, hasTableLayout, outputSymbols, assignmentsKeys, pushDownPredicate);
+  }
+
   @Override
   public boolean shapeMatches(PlanNode node) {
     return node instanceof DeviceTableScanNode;
@@ -51,6 +61,7 @@ public String toString() {
         .add("hasTableLayout", hasTableLayout.orElse(null))
         .add("outputSymbols", outputSymbols)
         .add("assignmentsKeys", assignmentsKeys)
+        .add("pushDownPredicate", pushDownPredicate)
         .toString();
   }
 }
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/PlanMatchPattern.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/PlanMatchPattern.java
index 3ec60536aec4..2b2a6e557e1e 100644
--- a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/PlanMatchPattern.java
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/PlanMatchPattern.java
@@ -31,6 +31,7 @@
 import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationTreeDeviceViewScanNode;
 import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AssignUniqueId;
 import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CollectNode;
+import org.apache.iotdb.db.queryengine.plan.relational.planner.node.CteScanNode;
 import org.apache.iotdb.db.queryengine.plan.relational.planner.node.DeviceTableScanNode;
 import org.apache.iotdb.db.queryengine.plan.relational.planner.node.EnforceSingleRowNode;
 import org.apache.iotdb.db.queryengine.plan.relational.planner.node.ExchangeNode;
@@ -221,12 +222,39 @@ public static PlanMatchPattern tableScan(
     return pattern;
   }
 
+  public static PlanMatchPattern tableScan(
+      String expectedTableName,
+      List<String> outputSymbols,
+      Set<String> assignmentsKeys,
+      Expression pushDownPredicate) {
+    PlanMatchPattern pattern =
+        node(DeviceTableScanNode.class)
+            .with(
+                new DeviceTableScanMatcher(
+                    expectedTableName,
+                    Optional.empty(),
+                    outputSymbols,
+                    assignmentsKeys,
+                    pushDownPredicate));
+    outputSymbols.forEach(
+        symbol -> pattern.withAlias(symbol, new ColumnReference(expectedTableName, symbol)));
+    return pattern;
+  }
+
   public static PlanMatchPattern tableScan(
       String expectedTableName, Map<String, String> columnReferences) {
     PlanMatchPattern result = tableScan(expectedTableName);
     return result.addColumnReferences(expectedTableName, columnReferences);
   }
 
+  public static PlanMatchPattern cteScan(String expectedCteName, List<String> outputSymbols) {
+    PlanMatchPattern pattern =
+        node(CteScanNode.class).with(new CteScanMatcher(expectedCteName, outputSymbols));
+    outputSymbols.forEach(
+        symbol -> pattern.withAlias(symbol, new ColumnReference(expectedCteName, symbol)));
+    return pattern;
+  }
+
   public static PlanMatchPattern tableFunctionProcessor(
       Consumer<TableFunctionProcessorMatcher.Builder> handler, PlanMatchPattern... source) {
     TableFunctionProcessorMatcher.Builder builder = new TableFunctionProcessorMatcher.Builder();
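As a usage sketch of the two new factory methods: the names and the expected predicate below are illustrative assumptions, and whether the planner actually produces this exact push-down predicate or a CteScanNode for a given query is not something this fragment asserts.

    // Hypothetical: expect a device table scan carrying a specific push-down predicate.
    Expression expectedPushDown =
        new ComparisonExpression(GREATER_THAN, new SymbolReference("s1"), new LongLiteral("10"));
    PlanMatchPattern scanWithPredicate =
        tableScan(
            "testdb.table1",
            ImmutableList.of("s1"),
            ImmutableSet.of("s1"),
            expectedPushDown);

    // Hypothetical: expect a materialized CTE to be read through a CteScanNode named "cte1".
    PlanMatchPattern materializedCte = cteScan("cte1", ImmutableList.of("s1"));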
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/RvalueMatcher.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/RvalueMatcher.java
index 3af1f25cbbdb..5dc691db37f2 100644
--- a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/RvalueMatcher.java
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/RvalueMatcher.java
@@ -22,6 +22,7 @@
 import org.apache.iotdb.db.queryengine.common.SessionInfo;
 import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNode;
 import org.apache.iotdb.db.queryengine.plan.relational.metadata.Metadata;
+import org.apache.iotdb.db.queryengine.plan.relational.metadata.TableSchema;
 import org.apache.iotdb.db.queryengine.plan.relational.planner.Symbol;
 
 import java.util.Optional;
@@ -39,4 +40,11 @@ public interface RvalueMatcher {
    */
  Optional<Symbol> getAssignedSymbol(
      PlanNode node, SessionInfo sessionInfo, Metadata metadata, SymbolAliases symbolAliases);
+
+  /**
+   * Get the column symbol of the CTE. It should be looked up in the CTE's table schema instead of the metadata.
+   */
+  default Optional<Symbol> getCteSymbol(TableSchema tableSchema) {
+    return Optional.empty();
+  }
 }
diff --git a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/TableScanMatcher.java b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/TableScanMatcher.java
index b860a18c16a3..9d1f3091b27f 100644
--- a/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/TableScanMatcher.java
+++ b/iotdb-core/datanode/src/test/java/org/apache/iotdb/db/queryengine/plan/relational/planner/assertions/TableScanMatcher.java
@@ -24,6 +24,7 @@
 import org.apache.iotdb.db.queryengine.plan.relational.metadata.Metadata;
 import org.apache.iotdb.db.queryengine.plan.relational.planner.Symbol;
 import org.apache.iotdb.db.queryengine.plan.relational.planner.node.TableScanNode;
+import org.apache.iotdb.db.queryengine.plan.relational.sql.ast.Expression;
 
 import java.util.List;
 import java.util.Optional;
@@ -41,6 +42,7 @@ public abstract class TableScanMatcher implements Matcher {
   protected final List<String> outputSymbols;
   // this field empty means no need to match
   protected Set<String> assignmentsKeys;
+  protected Expression pushDownPredicate;
 
   public TableScanMatcher(
       String expectedTableName,
@@ -53,6 +55,16 @@ public TableScanMatcher(
     this.assignmentsKeys = requireNonNull(assignmentsKeys, "assignmentsKeys is null");
   }
 
+  public TableScanMatcher(
+      String expectedTableName,
+      Optional<Boolean> hasTableLayout,
+      List<String> outputSymbols,
+      Set<String> assignmentsKeys,
+      Expression pushDownPredicate) {
+    this(expectedTableName, hasTableLayout, outputSymbols, assignmentsKeys);
+    this.pushDownPredicate = requireNonNull(pushDownPredicate, "pushDownPredicate is null");
+  }
+
   @Override
   public MatchResult detailMatches(
       PlanNode node, SessionInfo sessionInfo, Metadata metadata, SymbolAliases symbolAliases) {
@@ -84,6 +96,11 @@ public MatchResult detailMatches(
       return NO_MATCH;
     }
 
+    if (pushDownPredicate != null
+        && !pushDownPredicate.equals(tableScanNode.getPushDownPredicate())) {
+      return NO_MATCH;
+    }
+
     return new MatchResult(true);
   }
 }
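The new field behaves like assignmentsKeys: left null by the original four-argument constructors it is simply not checked, while a non-null value must equal the node's push-down predicate via Expression#equals. A sketch of the two behaviours, with illustrative arguments only:

    // Matches a testdb.table1 scan regardless of its push-down predicate.
    TableScanMatcher lenient =
        new DeviceTableScanMatcher(
            "testdb.table1", Optional.empty(), ImmutableList.of("s1"), ImmutableSet.of("s1"));

    // Matches only if the scan carries exactly this predicate.
    TableScanMatcher strict =
        new DeviceTableScanMatcher(
            "testdb.table1",
            Optional.empty(),
            ImmutableList.of("s1"),
            ImmutableSet.of("s1"),
            new ComparisonExpression(
                GREATER_THAN, new SymbolReference("s1"), new LongLiteral("10")));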
diff --git a/iotdb-core/node-commons/src/assembly/resources/conf/iotdb-system.properties.template b/iotdb-core/node-commons/src/assembly/resources/conf/iotdb-system.properties.template
index 4330a11b1f60..9d63529355a5 100644
--- a/iotdb-core/node-commons/src/assembly/resources/conf/iotdb-system.properties.template
+++ b/iotdb-core/node-commons/src/assembly/resources/conf/iotdb-system.properties.template
@@ -1123,6 +1123,21 @@ batch_size=100000
 # Datatype: long
 sort_buffer_size_in_bytes=0
 
+# The buffer size for CTE materialization. If cte_buffer_size_in_bytes <= 0, a default value of 128 KB is used; otherwise the specified value
+# will be used.
+# effectiveMode: hot_reload
+# Datatype: long
+# Privilege: SYSTEM
+# Unit: bytes
+cte_buffer_size_in_bytes=131072
+
+
+# Max rows for CTE materialization
+# effectiveMode: hot_reload
+# Datatype: int
+# Privilege: SYSTEM
+max_rows_in_cte_buffer=1000
+
 # The maximum mod entries size that each FragmentInstance can cache.
 # if mods_cache_size_limit_per_fi_in_bytes <= 0, default value will be used, default value = min(32MB, memory for query operators / query_thread_count / 2)
 # if mods_cache_size_limit_per_fi_in_bytes > 0, the specified value will be used.
diff --git a/iotdb-core/node-commons/src/main/java/org/apache/iotdb/commons/conf/IoTDBConstant.java b/iotdb-core/node-commons/src/main/java/org/apache/iotdb/commons/conf/IoTDBConstant.java
index adf72842797e..c7575577ef15 100644
--- a/iotdb-core/node-commons/src/main/java/org/apache/iotdb/commons/conf/IoTDBConstant.java
+++ b/iotdb-core/node-commons/src/main/java/org/apache/iotdb/commons/conf/IoTDBConstant.java
@@ -216,6 +216,9 @@ private IoTDBConstant() {}
   public static final String MULTI_LEVEL_PATH_WILDCARD = "**";
   public static final String TIME = "time";
+  public static final String CTE_QUERY = "CTE Query";
+  public static final String MAIN_QUERY = "Main Query";
+
 
   // sdt parameters
   public static final String LOSS = "loss";
   public static final String SDT = "sdt";
@@ -268,6 +271,7 @@ private IoTDBConstant() {}
   public static final String UDF_FOLDER_NAME = "udf";
   public static final String TRIGGER_FOLDER_NAME = "trigger";
   public static final String PIPE_FOLDER_NAME = "pipe";
+  public static final String CTE_FOLDER_NAME = "cte";
   public static final String TMP_FOLDER_NAME = "tmp";
   public static final String DELETION_FOLDER_NAME = "deletion";
 
@@ -300,6 +304,7 @@ private IoTDBConstant() {}
   public static final String SETTLE_SUFFIX = ".settle";
   public static final String MODS_SETTLE_FILE_SUFFIX = ".mods.settle";
   public static final String BLANK = "";
+  public static final String SPACE = " ";
 
   // write ahead log
   public static final String WAL_FILE_PREFIX = "_";
diff --git a/iotdb-core/relational-grammar/src/main/antlr4/org/apache/iotdb/db/relational/grammar/sql/RelationalSql.g4 b/iotdb-core/relational-grammar/src/main/antlr4/org/apache/iotdb/db/relational/grammar/sql/RelationalSql.g4
index 1690ea4c855b..be6844302bb9 100644
--- a/iotdb-core/relational-grammar/src/main/antlr4/org/apache/iotdb/db/relational/grammar/sql/RelationalSql.g4
+++ b/iotdb-core/relational-grammar/src/main/antlr4/org/apache/iotdb/db/relational/grammar/sql/RelationalSql.g4
@@ -1019,7 +1019,7 @@ groupingSet
     ;
 
 namedQuery
-    : name=identifier (columnAliases)? AS '(' query ')'
+    : name=identifier (columnAliases)? AS MATERIALIZED? '(' query ')'
     ;
 
 setQuantifier
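To tie the grammar change to the new properties, here is an end-to-end sketch. The connection URL, credentials, database, table and column names are examples rather than anything taken from this patch; the SQL shape follows the extended namedQuery rule, and, per the property descriptions above, the materialized CTE result is buffered subject to cte_buffer_size_in_bytes and max_rows_in_cte_buffer.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class MaterializedCteExample {
  public static void main(String[] args) throws Exception {
    // Hypothetical connection settings; adjust to your deployment.
    try (Connection connection =
            DriverManager.getConnection(
                "jdbc:iotdb://127.0.0.1:6667?sql_dialect=table", "root", "root");
        Statement statement = connection.createStatement()) {
      statement.execute("USE example_db");
      // AS MATERIALIZED is now accepted by the namedQuery rule above.
      try (ResultSet rs =
          statement.executeQuery(
              "WITH recent AS MATERIALIZED (SELECT device_id, temperature FROM sensor_data) "
                  + "SELECT device_id, temperature FROM recent WHERE temperature > 30")) {
        while (rs.next()) {
          System.out.println(rs.getString("device_id") + "," + rs.getFloat("temperature"));
        }
      }
    }
  }
}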