From 1acbde3a8750280781efe9b61ef0cb16824ef375 Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Tue, 3 Feb 2026 12:24:39 +0530 Subject: [PATCH 01/12] WIP --- .../documentstore/postgres/FlatPostgresCollection.java | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java index 430dd36b..66d370b1 100644 --- a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java +++ b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java @@ -174,16 +174,6 @@ public boolean delete(Filter filter) { throw new UnsupportedOperationException(WRITE_NOT_SUPPORTED); } - @Override - public BulkDeleteResult delete(Set keys) { - throw new UnsupportedOperationException(WRITE_NOT_SUPPORTED); - } - - @Override - public boolean deleteAll() { - throw new UnsupportedOperationException(WRITE_NOT_SUPPORTED); - } - @Override public boolean deleteSubDoc(Key key, String subDocPath) { throw new UnsupportedOperationException(WRITE_NOT_SUPPORTED); From fc33186ae1a808e7529ddc3555821622ff62a7bd Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Tue, 3 Feb 2026 14:01:13 +0530 Subject: [PATCH 02/12] Implement deletes for flat collections --- .../FlatCollectionWriteTest.java | 257 ++++++++++++++++-- .../core/documentstore/Collection.java | 5 + .../postgres/FlatPostgresCollection.java | 105 ++++++- 3 files changed, 349 insertions(+), 18 deletions(-) diff --git a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java index 5e8a26d0..73fb0364 100644 --- a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java +++ b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java @@ -834,31 +834,256 @@ void testBulkUpsertAndReturnOlderDocuments() { class DeleteTests { @Test - @DisplayName("Should throw UnsupportedOperationException for delete by key") - void testDeleteByKey() { - Key keyToDelete = new SingleValueKey("default", "1"); - assertThrows(UnsupportedOperationException.class, () -> flatCollection.delete(keyToDelete)); + @DisplayName("Should delete document by single key") + void testDeleteByKey() throws Exception { + ObjectNode node = OBJECT_MAPPER.createObjectNode(); + node.put("id", "delete-key-test"); + node.put("item", "ToDeleteByKey"); + node.put("price", 50); + Key key = new SingleValueKey(DEFAULT_TENANT, "delete-key-test"); + flatCollection.create(key, new JSONDocument(node)); + + assertTrue(flatCollection.delete(key)); + queryAndAssert(key, rs -> assertFalse(rs.next())); } @Test - @DisplayName("Should throw UnsupportedOperationException for delete by filter") - void testDeleteByFilter() { - Filter filter = Filter.eq("item", "Soap"); - assertThrows(UnsupportedOperationException.class, () -> flatCollection.delete(filter)); + @DisplayName("Should delete documents by multiple keys") + void testDeleteByKeys() throws Exception { + Key key1 = new SingleValueKey(DEFAULT_TENANT, "delete-keys-1"); + Key key2 = new SingleValueKey(DEFAULT_TENANT, "delete-keys-2"); + Key key3 = new SingleValueKey(DEFAULT_TENANT, "delete-keys-3"); + + for (int i = 1; i <= 3; i++) { + ObjectNode node = 
OBJECT_MAPPER.createObjectNode(); + node.put("id", "delete-keys-" + i); + node.put("item", "Item" + i); + node.put("price", i * 10); + flatCollection.create( + new SingleValueKey(DEFAULT_TENANT, "delete-keys-" + i), new JSONDocument(node)); + } + + // Delete keys 1 and 2, keep 3 + BulkDeleteResult result = flatCollection.delete(Set.of(key1, key2)); + assertEquals(2, result.getDeletedCount()); + + queryAndAssert(key1, rs -> assertFalse(rs.next())); + queryAndAssert(key2, rs -> assertFalse(rs.next())); + queryAndAssert(key3, rs -> assertTrue(rs.next())); } @Test - @DisplayName("Should throw UnsupportedOperationException for delete by keys") - void testDeleteByKeys() { - Set keys = - Set.of(new SingleValueKey("default", "1"), new SingleValueKey("default", "2")); - assertThrows(UnsupportedOperationException.class, () -> flatCollection.delete(keys)); + @DisplayName("Should delete all documents") + void testDeleteAll() throws Exception { + for (int i = 1; i <= 2; i++) { + ObjectNode node = OBJECT_MAPPER.createObjectNode(); + node.put("id", "delete-all-" + i); + node.put("item", "AllItem" + i); + flatCollection.create( + new SingleValueKey(DEFAULT_TENANT, "delete-all-" + i), new JSONDocument(node)); + } + + assertTrue(flatCollection.deleteAll()); + + PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore; + try (Connection conn = pgDatastore.getPostgresClient(); + PreparedStatement ps = + conn.prepareStatement( + String.format("SELECT COUNT(*) FROM \"%s\"", FLAT_COLLECTION_NAME)); + ResultSet rs = ps.executeQuery()) { + assertTrue(rs.next()); + assertEquals(0, rs.getInt(1)); + } } @Test - @DisplayName("Should throw UnsupportedOperationException for deleteAll") - void testDeleteAll() { - assertThrows(UnsupportedOperationException.class, () -> flatCollection.deleteAll()); + @DisplayName("Should delete with various filter types: EQ, GT, IN, legacy Filter") + void testDeleteWithFilters() throws Exception { + // Setup: Create documents for different filter scenarios + // Doc 1: For EQ filter test + ObjectNode node1 = OBJECT_MAPPER.createObjectNode(); + node1.put("id", "filter-eq"); + node1.put("item", "ToBeDeleted"); + node1.put("price", 100); + flatCollection.create( + new SingleValueKey(DEFAULT_TENANT, "filter-eq"), new JSONDocument(node1)); + + // Doc 2 & 3: For GT filter test + ObjectNode node2 = OBJECT_MAPPER.createObjectNode(); + node2.put("id", "filter-gt-expensive"); + node2.put("item", "Expensive"); + node2.put("price", 1000); + flatCollection.create( + new SingleValueKey(DEFAULT_TENANT, "filter-gt-expensive"), new JSONDocument(node2)); + + ObjectNode node3 = OBJECT_MAPPER.createObjectNode(); + node3.put("id", "filter-gt-cheap"); + node3.put("item", "Cheap"); + node3.put("price", 10); + flatCollection.create( + new SingleValueKey(DEFAULT_TENANT, "filter-gt-cheap"), new JSONDocument(node3)); + + // Doc 4, 5, 6: For IN filter test + for (String fruit : List.of("Apple", "Banana", "Cherry")) { + ObjectNode node = OBJECT_MAPPER.createObjectNode(); + node.put("id", "filter-in-" + fruit.toLowerCase()); + node.put("item", fruit); + node.put("price", 50); + flatCollection.create( + new SingleValueKey(DEFAULT_TENANT, "filter-in-" + fruit.toLowerCase()), + new JSONDocument(node)); + } + + // Doc 7: For legacy Filter test + ObjectNode node7 = OBJECT_MAPPER.createObjectNode(); + node7.put("id", "filter-legacy"); + node7.put("item", "LegacyItem"); + node7.put("price", 75); + flatCollection.create( + new SingleValueKey(DEFAULT_TENANT, "filter-legacy"), new JSONDocument(node7)); + + // Test 1: 
EQ filter + org.hypertrace.core.documentstore.query.Filter eqFilter = + org.hypertrace.core.documentstore.query.Filter.builder() + .expression( + RelationalExpression.of( + IdentifierExpression.of("item"), + RelationalOperator.EQ, + ConstantExpression.of("ToBeDeleted"))) + .build(); + assertTrue(flatCollection.delete(eqFilter)); + queryAndAssert(new SingleValueKey(DEFAULT_TENANT, "filter-eq"), rs -> assertFalse(rs.next())); + + // Test 2: GT filter (price > 500) + org.hypertrace.core.documentstore.query.Filter gtFilter = + org.hypertrace.core.documentstore.query.Filter.builder() + .expression( + RelationalExpression.of( + IdentifierExpression.of("price"), + RelationalOperator.GT, + ConstantExpression.of(500))) + .build(); + assertTrue(flatCollection.delete(gtFilter)); + queryAndAssert( + new SingleValueKey(DEFAULT_TENANT, "filter-gt-expensive"), rs -> assertFalse(rs.next())); + queryAndAssert( + new SingleValueKey(DEFAULT_TENANT, "filter-gt-cheap"), rs -> assertTrue(rs.next())); + + // Test 3: IN filter + org.hypertrace.core.documentstore.query.Filter inFilter = + org.hypertrace.core.documentstore.query.Filter.builder() + .expression( + RelationalExpression.of( + IdentifierExpression.of("item"), + RelationalOperator.IN, + ConstantExpression.ofStrings(List.of("Apple", "Banana")))) + .build(); + assertTrue(flatCollection.delete(inFilter)); + queryAndAssert( + new SingleValueKey(DEFAULT_TENANT, "filter-in-apple"), rs -> assertFalse(rs.next())); + queryAndAssert( + new SingleValueKey(DEFAULT_TENANT, "filter-in-banana"), rs -> assertFalse(rs.next())); + queryAndAssert( + new SingleValueKey(DEFAULT_TENANT, "filter-in-cherry"), rs -> assertTrue(rs.next())); + + // Test 4: Legacy Filter API + Filter legacyFilter = Filter.eq("item", "LegacyItem"); + assertTrue(flatCollection.delete(legacyFilter)); + queryAndAssert( + new SingleValueKey(DEFAULT_TENANT, "filter-legacy"), rs -> assertFalse(rs.next())); + } + + @Test + @DisplayName("Should delete with composite AND filter and nested JSONB filter") + void testDeleteWithCompositeAndNestedFilters() throws Exception { + // Setup for AND filter + ObjectNode node1 = OBJECT_MAPPER.createObjectNode(); + node1.put("id", "and-match"); + node1.put("item", "TargetItem"); + node1.put("price", 100); + flatCollection.create( + new SingleValueKey(DEFAULT_TENANT, "and-match"), new JSONDocument(node1)); + + ObjectNode node2 = OBJECT_MAPPER.createObjectNode(); + node2.put("id", "and-nomatch"); + node2.put("item", "TargetItem"); + node2.put("price", 200); + flatCollection.create( + new SingleValueKey(DEFAULT_TENANT, "and-nomatch"), new JSONDocument(node2)); + + // Setup for JSONB nested filter + ObjectNode node3 = OBJECT_MAPPER.createObjectNode(); + node3.put("id", "jsonb-nike"); + node3.put("item", "Product1"); + ObjectNode props1 = OBJECT_MAPPER.createObjectNode(); + props1.put("brand", "Nike"); + node3.set("props", props1); + flatCollection.create( + new SingleValueKey(DEFAULT_TENANT, "jsonb-nike"), new JSONDocument(node3)); + + ObjectNode node4 = OBJECT_MAPPER.createObjectNode(); + node4.put("id", "jsonb-adidas"); + node4.put("item", "Product2"); + ObjectNode props2 = OBJECT_MAPPER.createObjectNode(); + props2.put("brand", "Adidas"); + node4.set("props", props2); + flatCollection.create( + new SingleValueKey(DEFAULT_TENANT, "jsonb-adidas"), new JSONDocument(node4)); + + // Test 1: AND filter (item = 'TargetItem' AND price = 100) + org.hypertrace.core.documentstore.query.Filter andFilter = + org.hypertrace.core.documentstore.query.Filter.builder() + .expression( + 
org.hypertrace.core.documentstore.expression.impl.LogicalExpression.and( + RelationalExpression.of( + IdentifierExpression.of("item"), + RelationalOperator.EQ, + ConstantExpression.of("TargetItem")), + RelationalExpression.of( + IdentifierExpression.of("price"), + RelationalOperator.EQ, + ConstantExpression.of(100)))) + .build(); + assertTrue(flatCollection.delete(andFilter)); + queryAndAssert(new SingleValueKey(DEFAULT_TENANT, "and-match"), rs -> assertFalse(rs.next())); + queryAndAssert( + new SingleValueKey(DEFAULT_TENANT, "and-nomatch"), rs -> assertTrue(rs.next())); + + // Test 2: Nested JSONB filter (props.brand = 'Nike') + org.hypertrace.core.documentstore.query.Filter jsonbFilter = + org.hypertrace.core.documentstore.query.Filter.builder() + .expression( + RelationalExpression.of( + org.hypertrace.core.documentstore.expression.impl.JsonIdentifierExpression.of( + "props", "brand"), + RelationalOperator.EQ, + ConstantExpression.of("Nike"))) + .build(); + assertTrue(flatCollection.delete(jsonbFilter)); + queryAndAssert( + new SingleValueKey(DEFAULT_TENANT, "jsonb-nike"), rs -> assertFalse(rs.next())); + queryAndAssert( + new SingleValueKey(DEFAULT_TENANT, "jsonb-adidas"), rs -> assertTrue(rs.next())); + } + + @Test + @DisplayName("Should handle edge cases: no match returns false, null filter throws exception") + void testDeleteEdgeCases() { + // Test 1: No match returns false + org.hypertrace.core.documentstore.query.Filter noMatchFilter = + org.hypertrace.core.documentstore.query.Filter.builder() + .expression( + RelationalExpression.of( + IdentifierExpression.of("item"), + RelationalOperator.EQ, + ConstantExpression.of("NonExistentItem12345"))) + .build(); + assertFalse(flatCollection.delete(noMatchFilter)); + + // Test 2: Null filter throws exception + assertThrows( + IllegalArgumentException.class, + () -> flatCollection.delete((org.hypertrace.core.documentstore.query.Filter) null)); } } diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/Collection.java b/document-store/src/main/java/org/hypertrace/core/documentstore/Collection.java index 4d0430fb..699fdc18 100644 --- a/document-store/src/main/java/org/hypertrace/core/documentstore/Collection.java +++ b/document-store/src/main/java/org/hypertrace/core/documentstore/Collection.java @@ -159,8 +159,13 @@ CloseableIterator query( * @param filter The filter to determine documents to be deleted. Only the filter clause. * @return True if the documents are deleted, false otherwise. 
*/ + @Deprecated boolean delete(Filter filter); + default boolean delete(org.hypertrace.core.documentstore.query.Filter filter) { + throw new IllegalArgumentException("Not implemented!"); + } + /** * Delete the documents for the given keys * diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java index 66d370b1..71af0c5f 100644 --- a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java +++ b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java @@ -166,12 +166,113 @@ public BulkUpdateResult bulkOperationOnArrayValue(BulkArrayValueUpdateRequest re @Override public boolean delete(Key key) { - throw new UnsupportedOperationException(WRITE_NOT_SUPPORTED); + String pkForTable = getPKForTable(tableIdentifier.getTableName()); + String deleteSQL = + String.format( + "DELETE FROM %s WHERE %s = ?", + tableIdentifier, PostgresUtils.wrapFieldNamesWithDoubleQuotes(pkForTable)); + try (PreparedStatement preparedStatement = client.getConnection().prepareStatement(deleteSQL)) { + preparedStatement.setString(1, key.toString()); + preparedStatement.executeUpdate(); + return true; + } catch (SQLException e) { + LOGGER.error("SQLException deleting document. key: {}", key, e); + } + return false; + } + + @Override + public boolean delete(org.hypertrace.core.documentstore.query.Filter filter) { + + Preconditions.checkArgument(filter != null, "Filter cannot be null"); + + Query query = Query.builder().setFilter(filter).build(); + + // Create parser with flat field transformer + PostgresQueryParser queryParser = + new PostgresQueryParser(tableIdentifier, query, new FlatPostgresFieldTransformer()); + + String filterClause = queryParser.buildFilterClause(); + + if (filterClause.isEmpty()) { + throw new IllegalArgumentException("Parsed filter is invalid"); + } + + String sql = "DELETE FROM " + tableIdentifier + " " + filterClause; + LOGGER.debug("Delete SQL: {}", sql); + + try (Connection conn = client.getPooledConnection(); + PreparedStatement ps = + queryExecutor.buildPreparedStatement( + sql, queryParser.getParamsBuilder().build(), conn)) { + int deletedCount = ps.executeUpdate(); + LOGGER.debug("Deleted {} rows", deletedCount); + return deletedCount > 0; + } catch (SQLException e) { + LOGGER.error("SQLException deleting documents. 
filter: {}", filter, e); + } + return false; } @Override + @Deprecated public boolean delete(Filter filter) { - throw new UnsupportedOperationException(WRITE_NOT_SUPPORTED); + if (filter == null) { + throw new IllegalArgumentException("Filter must be provided"); + } + + // Convert legacy Filter to query.Filter using FilterToQueryConverter + org.hypertrace.core.documentstore.expression.type.FilterTypeExpression expression = + org.hypertrace.core.documentstore.postgres.query.v1.FilterToQueryConverter.convert(filter); + + org.hypertrace.core.documentstore.query.Filter queryFilter = + org.hypertrace.core.documentstore.query.Filter.builder().expression(expression).build(); + + return delete(queryFilter); + } + + @Override + public BulkDeleteResult delete(Set keys) { + if (keys == null || keys.isEmpty()) { + return new BulkDeleteResult(0); + } + + String pkColumn = getPKForTable(tableIdentifier.getTableName()); + String quotedPkColumn = PostgresUtils.wrapFieldNamesWithDoubleQuotes(pkColumn); + + String ids = + keys.stream().map(key -> "'" + key.toString() + "'").collect(Collectors.joining(", ")); + + String deleteSQL = + String.format("DELETE FROM %s WHERE %s IN (%s)", tableIdentifier, quotedPkColumn, ids); + + LOGGER.debug("Bulk delete SQL: {}", deleteSQL); + + try (Connection conn = client.getPooledConnection(); + PreparedStatement ps = conn.prepareStatement(deleteSQL)) { + int deletedCount = ps.executeUpdate(); + LOGGER.debug("Bulk deleted {} rows", deletedCount); + return new BulkDeleteResult(deletedCount); + } catch (SQLException e) { + LOGGER.error("SQLException bulk deleting documents. keys: {}", keys, e); + } + return new BulkDeleteResult(0); + } + + @Override + public boolean deleteAll() { + String deleteSQL = String.format("DELETE FROM %s", tableIdentifier); + LOGGER.debug("Delete all SQL: {}", deleteSQL); + + try (Connection conn = client.getPooledConnection(); + PreparedStatement ps = conn.prepareStatement(deleteSQL)) { + int deletedCount = ps.executeUpdate(); + LOGGER.debug("Deleted all {} rows", deletedCount); + return true; + } catch (SQLException e) { + LOGGER.error("SQLException deleting all documents.", e); + } + return false; } @Override From 911e266df1fece4105f99a80e9bf9e74bef89e3d Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Tue, 3 Feb 2026 14:08:08 +0530 Subject: [PATCH 03/12] Fix compilation issue --- .../postgres/FlatPostgresCollection.java | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java index 71af0c5f..2550f59e 100644 --- a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java +++ b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java @@ -217,18 +217,8 @@ public boolean delete(org.hypertrace.core.documentstore.query.Filter filter) { @Override @Deprecated public boolean delete(Filter filter) { - if (filter == null) { - throw new IllegalArgumentException("Filter must be provided"); - } - - // Convert legacy Filter to query.Filter using FilterToQueryConverter - org.hypertrace.core.documentstore.expression.type.FilterTypeExpression expression = - org.hypertrace.core.documentstore.postgres.query.v1.FilterToQueryConverter.convert(filter); - - org.hypertrace.core.documentstore.query.Filter queryFilter = - 
org.hypertrace.core.documentstore.query.Filter.builder().expression(expression).build(); - - return delete(queryFilter); + throw new UnsupportedOperationException( + "DELETE not supported for legacy Filter, use delete(org.hypertrace.core.documentstore.query.Filter filter) rather"); } @Override From f18546588d243533c61c24a98ae9db70931b67a4 Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Tue, 3 Feb 2026 17:20:37 +0530 Subject: [PATCH 04/12] Fix failing test case --- .../documentstore/FlatCollectionWriteTest.java | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java index 73fb0364..aede748a 100644 --- a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java +++ b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java @@ -934,14 +934,6 @@ void testDeleteWithFilters() throws Exception { new JSONDocument(node)); } - // Doc 7: For legacy Filter test - ObjectNode node7 = OBJECT_MAPPER.createObjectNode(); - node7.put("id", "filter-legacy"); - node7.put("item", "LegacyItem"); - node7.put("price", 75); - flatCollection.create( - new SingleValueKey(DEFAULT_TENANT, "filter-legacy"), new JSONDocument(node7)); - // Test 1: EQ filter org.hypertrace.core.documentstore.query.Filter eqFilter = org.hypertrace.core.documentstore.query.Filter.builder() @@ -986,11 +978,9 @@ void testDeleteWithFilters() throws Exception { queryAndAssert( new SingleValueKey(DEFAULT_TENANT, "filter-in-cherry"), rs -> assertTrue(rs.next())); - // Test 4: Legacy Filter API - Filter legacyFilter = Filter.eq("item", "LegacyItem"); - assertTrue(flatCollection.delete(legacyFilter)); - queryAndAssert( - new SingleValueKey(DEFAULT_TENANT, "filter-legacy"), rs -> assertFalse(rs.next())); + // Test 4: Legacy Filter API throws UnsupportedOperationException + Filter legacyFilter = Filter.eq("item", "Cherry"); + assertThrows(UnsupportedOperationException.class, () -> flatCollection.delete(legacyFilter)); } @Test From 3dad7d2aff5c79672b80f8be673cb86486404649 Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Tue, 3 Feb 2026 17:53:32 +0530 Subject: [PATCH 05/12] Add tests --- .../FlatCollectionWriteTest.java | 114 ++++++++++++++++++ 1 file changed, 114 insertions(+) diff --git a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java index aede748a..80a72d65 100644 --- a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java +++ b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java @@ -1075,6 +1075,120 @@ void testDeleteEdgeCases() { IllegalArgumentException.class, () -> flatCollection.delete((org.hypertrace.core.documentstore.query.Filter) null)); } + + @Test + @DisplayName("delete(Filter) should return false when SQLException occurs (dropped table)") + void testDeleteByFilterReturnsFalseOnSQLException() throws Exception { + // Create a temporary table, get collection, then drop the table to trigger SQLException + String tempTable = "temp_delete_filter_test"; + PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore; + + // Create temp table + try 
(Connection conn = pgDatastore.getPostgresClient(); + PreparedStatement ps = + conn.prepareStatement( + String.format( + "CREATE TABLE \"%s\" (\"id\" TEXT PRIMARY KEY, \"item\" TEXT)", tempTable))) { + ps.execute(); + } + + // Get collection for the temp table + Collection tempCollection = + postgresDatastore.getCollectionForType(tempTable, DocumentType.FLAT); + + // Drop the table to cause SQLException on delete + try (Connection conn = pgDatastore.getPostgresClient(); + PreparedStatement ps = + conn.prepareStatement(String.format("DROP TABLE \"%s\"", tempTable))) { + ps.execute(); + } + + org.hypertrace.core.documentstore.query.Filter filter = + org.hypertrace.core.documentstore.query.Filter.builder() + .expression( + RelationalExpression.of( + IdentifierExpression.of("item"), + RelationalOperator.EQ, + ConstantExpression.of("SomeValue"))) + .build(); + + // SQLException should be caught and method should return false + assertFalse(tempCollection.delete(filter)); + } + + @Test + @DisplayName("delete(Set) should return BulkDeleteResult(0) when SQLException occurs") + void testDeleteByKeysReturnsZeroOnSQLException() throws Exception { + // Create a temporary table, get collection, then drop the table to trigger SQLException + String tempTable = "temp_delete_keys_test"; + PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore; + + // Create temp table + try (Connection conn = pgDatastore.getPostgresClient(); + PreparedStatement ps = + conn.prepareStatement( + String.format( + "CREATE TABLE \"%s\" (\"id\" TEXT PRIMARY KEY, \"item\" TEXT)", tempTable))) { + ps.execute(); + } + + // Get collection for the temp table + Collection tempCollection = + postgresDatastore.getCollectionForType(tempTable, DocumentType.FLAT); + + // Insert a document to force schema caching (getPKForTable is called during create) + ObjectNode node = OBJECT_MAPPER.createObjectNode(); + node.put("id", "temp-key"); + node.put("item", "temp-item"); + tempCollection.create(new SingleValueKey(DEFAULT_TENANT, "temp-key"), new JSONDocument(node)); + + // Drop the table to cause SQLException on delete + try (Connection conn = pgDatastore.getPostgresClient(); + PreparedStatement ps = + conn.prepareStatement(String.format("DROP TABLE \"%s\"", tempTable))) { + ps.execute(); + } + + Set keys = + Set.of( + new SingleValueKey(DEFAULT_TENANT, "key1"), + new SingleValueKey(DEFAULT_TENANT, "key2")); + + // SQLException should be caught and method should return BulkDeleteResult with 0 count + BulkDeleteResult result = tempCollection.delete(keys); + assertEquals(0, result.getDeletedCount()); + } + + @Test + @DisplayName("deleteAll() should return false when SQLException occurs (dropped table)") + void testDeleteAllReturnsFalseOnSQLException() throws Exception { + // Create a temporary table, get collection, then drop the table to trigger SQLException + String tempTable = "temp_delete_all_test"; + PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore; + + // Create temp table + try (Connection conn = pgDatastore.getPostgresClient(); + PreparedStatement ps = + conn.prepareStatement( + String.format( + "CREATE TABLE \"%s\" (\"id\" TEXT PRIMARY KEY, \"item\" TEXT)", tempTable))) { + ps.execute(); + } + + // Get collection for the temp table + Collection tempCollection = + postgresDatastore.getCollectionForType(tempTable, DocumentType.FLAT); + + // Drop the table to cause SQLException on delete + try (Connection conn = pgDatastore.getPostgresClient(); + PreparedStatement ps = + 
conn.prepareStatement(String.format("DROP TABLE \"%s\"", tempTable))) { + ps.execute(); + } + + // SQLException should be caught and method should return false + assertFalse(tempCollection.deleteAll()); + } } @Nested From 5577d29c8457d69c72f5164f25c86d135e09b065 Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Wed, 4 Feb 2026 15:11:58 +0530 Subject: [PATCH 06/12] Adds LegacyFilterToQueryFilterTransformer --- .../FlatCollectionFilterTransformerTest.java | 885 ++++++++++++++++++ .../LegacyFilterToQueryFilterTransformer.java | 318 +++++++ 2 files changed, 1203 insertions(+) create mode 100644 document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java create mode 100644 document-store/src/main/java/org/hypertrace/core/documentstore/postgres/query/v1/transformer/LegacyFilterToQueryFilterTransformer.java diff --git a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java new file mode 100644 index 00000000..eee87ebb --- /dev/null +++ b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java @@ -0,0 +1,885 @@ +package org.hypertrace.core.documentstore; + +import static org.hypertrace.core.documentstore.utils.Utils.readFileFromResource; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.typesafe.config.ConfigFactory; +import java.io.IOException; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.hypertrace.core.documentstore.expression.impl.ConstantExpression; +import org.hypertrace.core.documentstore.expression.impl.IdentifierExpression; +import org.hypertrace.core.documentstore.expression.impl.LogicalExpression; +import org.hypertrace.core.documentstore.expression.impl.RelationalExpression; +import org.hypertrace.core.documentstore.expression.operators.RelationalOperator; +import org.hypertrace.core.documentstore.postgres.PostgresDatastore; +import org.hypertrace.core.documentstore.postgres.query.v1.transformer.LegacyFilterToQueryFilterTransformer; +import org.hypertrace.core.documentstore.query.Query; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +/** + * Integration tests that validate the {@link LegacyFilterToQueryFilterTransformer} by comparing + * query results obtained using: + * + *
+ * <ol>
+ *   <li>Legacy Filter (org.hypertrace.core.documentstore.Filter)
+ *   <li>New Query Filter (org.hypertrace.core.documentstore.query.Filter) - transformed from legacy
+ * </ol>
+ *
Both approaches should yield identical results for the same filter conditions. + */ +@Testcontainers +public class FlatCollectionFilterTransformerTest { + + private static final Logger LOGGER = + LoggerFactory.getLogger(FlatCollectionFilterTransformerTest.class); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + private static final String FLAT_COLLECTION_NAME = "filterTestFlat"; + private static final String INSERT_STATEMENTS_FILE = "query/pg_flat_collection_insert.json"; + + private static Datastore postgresDatastore; + private static Collection flatCollection; + private static GenericContainer postgres; + private static LegacyFilterToQueryFilterTransformer transformer; + + @BeforeAll + public static void init() throws IOException { + postgres = + new GenericContainer<>(DockerImageName.parse("postgres:13.1")) + .withEnv("POSTGRES_PASSWORD", "postgres") + .withEnv("POSTGRES_USER", "postgres") + .withExposedPorts(5432) + .waitingFor(Wait.forListeningPort()); + postgres.start(); + + String postgresConnectionUrl = + String.format("jdbc:postgresql://localhost:%s/", postgres.getMappedPort(5432)); + + Map postgresConfig = new HashMap<>(); + postgresConfig.put("url", postgresConnectionUrl); + postgresConfig.put("user", "postgres"); + postgresConfig.put("password", "postgres"); + + postgresDatastore = + DatastoreProvider.getDatastore("Postgres", ConfigFactory.parseMap(postgresConfig)); + + createFlatCollectionSchema(); + flatCollection = + postgresDatastore.getCollectionForType(FLAT_COLLECTION_NAME, DocumentType.FLAT); + + PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore; + transformer = + new LegacyFilterToQueryFilterTransformer( + pgDatastore.getSchemaRegistry(), FLAT_COLLECTION_NAME); + } + + private static void createFlatCollectionSchema() { + String createTableSQL = + String.format( + "CREATE TABLE \"%s\" (" + + "\"id\" TEXT PRIMARY KEY," + + "\"item\" TEXT," + + "\"price\" INTEGER," + + "\"quantity\" INTEGER," + + "\"date\" TIMESTAMPTZ," + + "\"in_stock\" BOOLEAN," + + "\"tags\" TEXT[]," + + "\"categoryTags\" TEXT[]," + + "\"props\" JSONB," + + "\"sales\" JSONB," + + "\"numbers\" INTEGER[]," + + "\"scores\" DOUBLE PRECISION[]," + + "\"flags\" BOOLEAN[]," + + "\"big_number\" BIGINT," + + "\"rating\" REAL," + + "\"created_date\" DATE," + + "\"weight\" DOUBLE PRECISION" + + ");", + FLAT_COLLECTION_NAME); + + PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore; + + try (Connection connection = pgDatastore.getPostgresClient(); + PreparedStatement statement = connection.prepareStatement(createTableSQL)) { + statement.execute(); + LOGGER.info("Created flat collection table: {}", FLAT_COLLECTION_NAME); + } catch (Exception e) { + LOGGER.error("Failed to create flat collection schema: {}", e.getMessage(), e); + } + } + + private static void executeInsertStatements() { + PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore; + try { + String jsonContent = readFileFromResource(INSERT_STATEMENTS_FILE).orElseThrow(); + JsonNode rootNode = OBJECT_MAPPER.readTree(jsonContent); + JsonNode statementsNode = rootNode.get("statements"); + + if (statementsNode == null || !statementsNode.isArray()) { + throw new RuntimeException("Invalid JSON format: 'statements' array not found"); + } + + try (Connection connection = pgDatastore.getPostgresClient()) { + for (JsonNode statementNode : statementsNode) { + String statement = statementNode.asText().trim(); + if (!statement.isEmpty()) { + statement = statement.replace("myTestFlat", 
FLAT_COLLECTION_NAME); + try (PreparedStatement preparedStatement = connection.prepareStatement(statement)) { + preparedStatement.executeUpdate(); + } catch (Exception e) { + LOGGER.error("Failed to execute INSERT statement: {}", e.getMessage(), e); + throw e; + } + } + } + } + LOGGER.info("Inserted initial data into: {}", FLAT_COLLECTION_NAME); + } catch (Exception e) { + LOGGER.error("Failed to execute INSERT statements: {}", e.getMessage(), e); + } + } + + @BeforeEach + public void setupData() { + clearTable(); + executeInsertStatements(); + } + + private static void clearTable() { + PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore; + String deleteSQL = String.format("DELETE FROM \"%s\"", FLAT_COLLECTION_NAME); + try (Connection connection = pgDatastore.getPostgresClient(); + PreparedStatement statement = connection.prepareStatement(deleteSQL)) { + statement.executeUpdate(); + } catch (Exception e) { + LOGGER.error("Failed to clear table: {}", e.getMessage(), e); + } + } + + @AfterAll + public static void shutdown() { + if (postgres != null) { + postgres.stop(); + } + } + + @Nested + @DisplayName("Equality Operators (EQ, NEQ)") + class EqualityOperatorTests { + + @Test + @DisplayName("EQ: Should return same results for legacy and transformed filter") + void testEqOperator() throws Exception { + Filter legacyFilter = new Filter(Filter.Op.EQ, "item", "Soap"); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List newResults = collectResults(flatCollection.find(query)); + + assertNotNull(newResults); + assertFalse(newResults.isEmpty(), "Should find at least one document with item='Soap'"); + + for (Document doc : newResults) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + assertEquals("Soap", node.get("item").asText()); + } + } + + @Test + @DisplayName("NEQ: Should return same results for legacy and transformed filter") + void testNeqOperator() throws Exception { + Filter legacyFilter = new Filter(Filter.Op.NEQ, "item", "Soap"); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + assertTrue( + !node.has("item") || !node.get("item").asText().equals("Soap"), + "Should not contain item='Soap'"); + } + } + } + + @Nested + @DisplayName("Comparison Operators (GT, GTE, LT, LTE)") + class ComparisonOperatorTests { + + @Test + @DisplayName("GT: Should return documents with price > 10") + void testGtOperator() throws Exception { + Filter legacyFilter = new Filter(Filter.Op.GT, "price", 10); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents with price > 10"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + if (node.has("price") && !node.get("price").isNull()) { + assertTrue(node.get("price").asInt() > 10, "Price should be > 10"); + } + } + } + + @Test + @DisplayName("GTE: Should return documents with price >= 10") + void testGteOperator() throws Exception { + Filter 
legacyFilter = new Filter(Filter.Op.GTE, "price", 10); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents with price >= 10"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + if (node.has("price") && !node.get("price").isNull()) { + assertTrue(node.get("price").asInt() >= 10, "Price should be >= 10"); + } + } + } + + @Test + @DisplayName("LT: Should return documents with price < 10") + void testLtOperator() throws Exception { + Filter legacyFilter = new Filter(Filter.Op.LT, "price", 10); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents with price < 10"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + if (node.has("price") && !node.get("price").isNull()) { + assertTrue(node.get("price").asInt() < 10, "Price should be < 10"); + } + } + } + + @Test + @DisplayName("LTE: Should return documents with price <= 10") + void testLteOperator() throws Exception { + Filter legacyFilter = new Filter(Filter.Op.LTE, "price", 10); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents with price <= 10"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + if (node.has("price") && !node.get("price").isNull()) { + assertTrue(node.get("price").asInt() <= 10, "Price should be <= 10"); + } + } + } + } + + @Nested + @DisplayName("Collection Operators (IN, NOT_IN)") + class CollectionOperatorTests { + + @Test + @DisplayName("IN: Should return documents with item in list") + void testInOperator() throws Exception { + List items = List.of("Soap", "Mirror"); + Filter legacyFilter = new Filter(Filter.Op.IN, "item", items); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents with item in list"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + String item = node.get("item").asText(); + assertTrue(items.contains(item), "Item should be in the list: " + item); + } + } + + @Test + @DisplayName("NOT_IN: Should return documents with item not in list") + void testNotInOperator() throws Exception { + List items = List.of("Soap", "Mirror"); + Filter legacyFilter = new Filter(Filter.Op.NOT_IN, "item", items); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents with item not in list"); + for (Document 
doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + if (node.has("item") && !node.get("item").isNull()) { + String item = node.get("item").asText(); + assertFalse(items.contains(item), "Item should NOT be in the list: " + item); + } + } + } + + @Test + @DisplayName("IN with numbers: Should return documents with price in list") + void testInOperatorWithNumbers() throws Exception { + List prices = List.of(5, 10, 20); + Filter legacyFilter = new Filter(Filter.Op.IN, "price", prices); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents with price in list"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + if (node.has("price") && !node.get("price").isNull()) { + int price = node.get("price").asInt(); + assertTrue(prices.contains(price), "Price should be in the list: " + price); + } + } + } + } + + @Nested + @DisplayName("Logical Operators (AND, OR)") + class LogicalOperatorTests { + + @Test + @DisplayName("AND: Should return documents matching all conditions") + void testAndOperator() throws Exception { + Filter itemFilter = new Filter(Filter.Op.EQ, "item", "Soap"); + Filter priceFilter = new Filter(Filter.Op.GT, "price", 10); + Filter legacyAndFilter = itemFilter.and(priceFilter); + + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyAndFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents matching AND condition"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + assertEquals("Soap", node.get("item").asText()); + assertTrue(node.get("price").asInt() > 10, "Price should be > 10"); + } + } + + @Test + @DisplayName("OR: Should return documents matching any condition") + void testOrOperator() throws Exception { + Filter soapFilter = new Filter(Filter.Op.EQ, "item", "Soap"); + Filter mirrorFilter = new Filter(Filter.Op.EQ, "item", "Mirror"); + Filter legacyOrFilter = soapFilter.or(mirrorFilter); + + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyOrFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents matching OR condition"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + String item = node.get("item").asText(); + assertTrue( + item.equals("Soap") || item.equals("Mirror"), + "Item should be 'Soap' or 'Mirror', got: " + item); + } + } + + @Test + @DisplayName("Nested AND/OR: Should handle complex conditions") + void testNestedAndOr() throws Exception { + Filter soapFilter = new Filter(Filter.Op.EQ, "item", "Soap"); + Filter priceFilter = new Filter(Filter.Op.GT, "price", 10); + Filter soapAndPrice = soapFilter.and(priceFilter); + + Filter combFilter = new Filter(Filter.Op.EQ, "item", "Comb"); + Filter complexFilter = soapAndPrice.or(combFilter); + + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(complexFilter); + + Query query = 
Query.builder().setFilter(newFilter).build(); + List<Document> results = collectResults(flatCollection.find(query)); + + 
assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents with props.brand='Dettol'"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + JsonNode props = node.get("props"); + assertNotNull(props, "props should exist"); + assertEquals("Dettol", props.get("brand").asText()); + } + } + + @Test + @DisplayName("NEQ on nested STRING field: props.brand") + void testNestedStringNeq() throws Exception { + Filter legacyFilter = new Filter(Filter.Op.NEQ, "props.brand", "Dettol"); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + JsonNode props = node.get("props"); + if (props != null && props.has("brand") && !props.get("brand").isNull()) { + assertNotEquals("Dettol", props.get("brand").asText()); + } + } + } + + @Test + @DisplayName("IN on nested STRING field: props.brand") + void testNestedStringIn() throws Exception { + List brands = List.of("Dettol", "Lifebuoy"); + Filter legacyFilter = new Filter(Filter.Op.IN, "props.brand", brands); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents with props.brand in list"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + JsonNode props = node.get("props"); + assertNotNull(props, "props should exist"); + String brand = props.get("brand").asText(); + assertTrue(brands.contains(brand), "Brand should be in list: " + brand); + } + } + + @Test + @DisplayName("EQ on deeply nested STRING field: props.seller.name") + void testDeeplyNestedStringEq() throws Exception { + Filter legacyFilter = + new Filter(Filter.Op.EQ, "props.seller.name", "Metro Chemicals Pvt. Ltd."); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse( + results.isEmpty(), + "Should find documents with props.seller.name='Metro Chemicals Pvt. Ltd.'"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + JsonNode seller = node.path("props").path("seller"); + assertNotNull(seller, "seller should exist"); + assertEquals("Metro Chemicals Pvt. 
Ltd.", seller.get("name").asText()); + } + } + + @Test + @DisplayName("EQ on triple-nested STRING field: props.seller.address.city") + void testTripleNestedStringEq() throws Exception { + Filter legacyFilter = new Filter(Filter.Op.EQ, "props.seller.address.city", "Kolkata"); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse( + results.isEmpty(), "Should find documents with props.seller.address.city='Kolkata'"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + JsonNode address = node.path("props").path("seller").path("address"); + assertNotNull(address, "address should exist"); + assertEquals("Kolkata", address.get("city").asText()); + } + } + + @Test + @DisplayName("CONTAINS on nested STRING_ARRAY field: props.colors") + void testNestedStringArrayContains() throws Exception { + Filter legacyFilter = new Filter(Filter.Op.CONTAINS, "props.colors", "Blue"); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents with props.colors containing 'Blue'"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + JsonNode colors = node.path("props").path("colors"); + assertTrue(colors.isArray(), "colors should be an array"); + boolean containsBlue = false; + for (JsonNode color : colors) { + if ("Blue".equals(color.asText())) { + containsBlue = true; + break; + } + } + assertTrue(containsBlue, "colors should contain 'Blue'"); + } + } + + @Test + @DisplayName("CONTAINS on nested STRING_ARRAY field: props.source-loc") + void testNestedStringArraySourceLocContains() throws Exception { + Filter legacyFilter = new Filter(Filter.Op.CONTAINS, "props.source-loc", "warehouse-A"); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse( + results.isEmpty(), + "Should find documents with props.source-loc containing 'warehouse-A'"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + JsonNode sourceLoc = node.path("props").path("source-loc"); + assertTrue(sourceLoc.isArray(), "source-loc should be an array"); + boolean containsWarehouseA = false; + for (JsonNode loc : sourceLoc) { + if ("warehouse-A".equals(loc.asText())) { + containsWarehouseA = true; + break; + } + } + assertTrue(containsWarehouseA, "source-loc should contain 'warehouse-A'"); + } + } + + @Test + @DisplayName("AND on nested JSONB fields: props.brand AND props.seller.address.city") + void testNestedJsonbAnd() throws Exception { + Filter brandFilter = new Filter(Filter.Op.EQ, "props.brand", "Dettol"); + Filter cityFilter = new Filter(Filter.Op.EQ, "props.seller.address.city", "Mumbai"); + Filter legacyAndFilter = brandFilter.and(cityFilter); + + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyAndFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = 
collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents matching nested AND condition"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + JsonNode props = node.get("props"); + assertEquals("Dettol", props.get("brand").asText()); + assertEquals("Mumbai", props.path("seller").path("address").get("city").asText()); + } + } + + @Test + @DisplayName("OR on nested JSONB fields: props.brand='Dettol' OR props.brand='Sunsilk'") + void testNestedJsonbOr() throws Exception { + Filter dettolFilter = new Filter(Filter.Op.EQ, "props.brand", "Dettol"); + Filter sunsilkFilter = new Filter(Filter.Op.EQ, "props.brand", "Sunsilk"); + Filter legacyOrFilter = dettolFilter.or(sunsilkFilter); + + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyOrFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse(results.isEmpty(), "Should find documents matching nested OR condition"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + String brand = node.path("props").get("brand").asText(); + assertTrue( + brand.equals("Dettol") || brand.equals("Sunsilk"), + "Brand should be 'Dettol' or 'Sunsilk', got: " + brand); + } + } + + @Test + @DisplayName("LIKE on nested STRING field: props.product-code") + void testNestedStringLike() throws Exception { + Filter legacyFilter = new Filter(Filter.Op.LIKE, "props.product-code", "SOAP-.*"); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + assertFalse( + results.isEmpty(), "Should find documents with props.product-code like 'SOAP-.*'"); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + String productCode = node.path("props").get("product-code").asText(); + assertTrue( + productCode.startsWith("SOAP-"), + "product-code should start with 'SOAP-': " + productCode); + } + } + + @Test + @DisplayName("EXISTS on nested STRING field: props.brand") + void testNestedStringExists() throws Exception { + Filter legacyFilter = new Filter(Filter.Op.EXISTS, "props.brand", null); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + JsonNode brand = node.path("props").path("brand"); + assertTrue( + !brand.isMissingNode() && !brand.isNull(), "props.brand should exist and not be null"); + } + } + + @Test + @DisplayName("NOT_EXISTS on nested STRING field: props.brand") + void testNestedStringNotExists() throws Exception { + Filter legacyFilter = new Filter(Filter.Op.NOT_EXISTS, "props.brand", null); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + Query query = Query.builder().setFilter(newFilter).build(); + List results = collectResults(flatCollection.find(query)); + + assertNotNull(results); + for (Document doc : results) { + JsonNode node = OBJECT_MAPPER.readTree(doc.toJson()); + JsonNode props = node.get("props"); 
+ if (props != null && !props.isNull()) { + JsonNode brand = props.get("brand"); + assertTrue( + brand == null || brand.isNull() || brand.isMissingNode(), + "props.brand should not exist or be null"); + } + } + } + } + + @Nested + @DisplayName("Transformer Unit Tests") + class TransformerUnitTests { + + @Test + @DisplayName("Should return null for null input") + void testNullInput() { + org.hypertrace.core.documentstore.query.Filter result = transformer.transform(null); + assertNull(result); + } + + @Test + @DisplayName("Should correctly transform simple EQ filter") + void testSimpleEqTransformation() { + Filter legacyFilter = new Filter(Filter.Op.EQ, "name", "test"); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + assertNotNull(newFilter); + assertNotNull(newFilter.getExpression()); + assertInstanceOf(RelationalExpression.class, newFilter.getExpression()); + + RelationalExpression expr = (RelationalExpression) newFilter.getExpression(); + assertEquals(RelationalOperator.EQ, expr.getOperator()); + assertInstanceOf(IdentifierExpression.class, expr.getLhs()); + assertInstanceOf(ConstantExpression.class, expr.getRhs()); + } + + @Test + @DisplayName("Should correctly transform AND filter") + void testAndTransformation() { + Filter f1 = new Filter(Filter.Op.EQ, "a", "1"); + Filter f2 = new Filter(Filter.Op.EQ, "b", "2"); + Filter andFilter = f1.and(f2); + + org.hypertrace.core.documentstore.query.Filter newFilter = transformer.transform(andFilter); + + assertNotNull(newFilter); + assertInstanceOf(LogicalExpression.class, newFilter.getExpression()); + + LogicalExpression logicalExpr = (LogicalExpression) newFilter.getExpression(); + assertEquals(2, logicalExpr.getOperands().size()); + } + + @Test + @DisplayName("Should correctly transform OR filter") + void testOrTransformation() { + Filter f1 = new Filter(Filter.Op.EQ, "a", "1"); + Filter f2 = new Filter(Filter.Op.EQ, "b", "2"); + Filter orFilter = f1.or(f2); + + org.hypertrace.core.documentstore.query.Filter newFilter = transformer.transform(orFilter); + + assertNotNull(newFilter); + assertInstanceOf(LogicalExpression.class, newFilter.getExpression()); + } + } + + /** Helper method to collect all documents from a CloseableIterator. 
*/ + private List collectResults(CloseableIterator iterator) { + List results = new ArrayList<>(); + try (iterator) { + while (iterator.hasNext()) { + results.add(iterator.next()); + } + } catch (IOException e) { + LOGGER.warn("Error closing iterator", e); + } + return results; + } +} diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/query/v1/transformer/LegacyFilterToQueryFilterTransformer.java b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/query/v1/transformer/LegacyFilterToQueryFilterTransformer.java new file mode 100644 index 00000000..050cdf93 --- /dev/null +++ b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/query/v1/transformer/LegacyFilterToQueryFilterTransformer.java @@ -0,0 +1,318 @@ +package org.hypertrace.core.documentstore.postgres.query.v1.transformer; + +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import org.hypertrace.core.documentstore.commons.ColumnMetadata; +import org.hypertrace.core.documentstore.commons.SchemaRegistry; +import org.hypertrace.core.documentstore.expression.impl.ConstantExpression; +import org.hypertrace.core.documentstore.expression.impl.DataType; +import org.hypertrace.core.documentstore.expression.impl.IdentifierExpression; +import org.hypertrace.core.documentstore.expression.impl.JsonFieldType; +import org.hypertrace.core.documentstore.expression.impl.JsonIdentifierExpression; +import org.hypertrace.core.documentstore.expression.impl.LogicalExpression; +import org.hypertrace.core.documentstore.expression.impl.RelationalExpression; +import org.hypertrace.core.documentstore.expression.operators.RelationalOperator; +import org.hypertrace.core.documentstore.expression.type.FilterTypeExpression; +import org.hypertrace.core.documentstore.expression.type.SelectTypeExpression; +import org.hypertrace.core.documentstore.query.Filter; + +/** + * Transforms the legacy {@link org.hypertrace.core.documentstore.Filter} to the newer {@link + * org.hypertrace.core.documentstore.query.Filter} format. Since the legacy filter does not carry + * any type information, this class interfaces with {@link SchemaRegistry} to find the type info. + */ +public class LegacyFilterToQueryFilterTransformer { + + private final SchemaRegistry schemaRegistry; + private final String tableName; + + public LegacyFilterToQueryFilterTransformer( + SchemaRegistry schemaRegistry, String tableName) { + this.schemaRegistry = schemaRegistry; + this.tableName = tableName; + } + + /** + * Transforms a legacy Filter to the new query Filter. + * + * @param legacyFilter the legacy filter to transform + * @return the transformed query Filter, or null if legacyFilter is null + */ + public Filter transform(org.hypertrace.core.documentstore.Filter legacyFilter) { + if (legacyFilter == null) { + return null; + } + + FilterTypeExpression expression = transformToExpression(legacyFilter); + return Filter.builder().expression(expression).build(); + } + + /** + * Transforms a legacy Filter to a FilterTypeExpression. 
+ * + * @param legacyFilter the legacy filter to transform + * @return the FilterTypeExpression representation + */ + private FilterTypeExpression transformToExpression( + org.hypertrace.core.documentstore.Filter legacyFilter) { + if (legacyFilter.isComposite()) { + return transformCompositeFilter(legacyFilter); + } else { + return transformLeafFilter(legacyFilter); + } + } + + /** Transforms a composite filter (AND/OR) to a LogicalExpression. */ + private FilterTypeExpression transformCompositeFilter( + org.hypertrace.core.documentstore.Filter legacyFilter) { + org.hypertrace.core.documentstore.Filter.Op op = legacyFilter.getOp(); + org.hypertrace.core.documentstore.Filter[] childFilters = legacyFilter.getChildFilters(); + + if (childFilters == null || childFilters.length == 0) { + throw new IllegalArgumentException("Composite filter must have child filters"); + } + + List operands = + Arrays.stream(childFilters).map(this::transformToExpression).collect(Collectors.toList()); + + switch (op) { + case AND: + return LogicalExpression.and(operands); + case OR: + return LogicalExpression.or(operands); + default: + throw new UnsupportedOperationException("Unsupported composite operator: " + op); + } + } + + /** Transforms a leaf filter (comparison operators) to a RelationalExpression. */ + private FilterTypeExpression transformLeafFilter( + org.hypertrace.core.documentstore.Filter legacyFilter) { + String fieldName = legacyFilter.getFieldName(); + Object value = legacyFilter.getValue(); + org.hypertrace.core.documentstore.Filter.Op op = legacyFilter.getOp(); + + SelectTypeExpression lhs = createIdentifierExpression(fieldName, op, value); + RelationalOperator relationalOp = mapOperator(op); + ConstantExpression rhs = createConstantExpression(value, op); + + return RelationalExpression.of(lhs, relationalOp, rhs); + } + + /** + * Creates the appropriate identifier expression based on the field name and schema. + * + *
Uses the schema registry to determine if a field is:
+   *
+   *   • A direct column → IdentifierExpression
+   *   • A JSONB nested path → JsonIdentifierExpression with inferred field type
+ */ + private SelectTypeExpression createIdentifierExpression( + String fieldName, org.hypertrace.core.documentstore.Filter.Op op, Object value) { + if (fieldName == null || fieldName.isEmpty()) { + throw new IllegalArgumentException("Field name cannot be null or empty"); + } + + // Check if the full path is a direct column + if (schemaRegistry.getColumnOrRefresh(tableName, fieldName).isPresent()) { + return IdentifierExpression.of(fieldName); + } + + // Try to find a JSONB column prefix + Optional jsonbColumn = findJsonbColumnPrefix(fieldName); + if (jsonbColumn.isPresent()) { + String columnName = jsonbColumn.get(); + String[] jsonPath = getNestedPath(fieldName, columnName); + JsonFieldType fieldType = inferJsonFieldType(value); + return JsonIdentifierExpression.of(columnName, fieldType, jsonPath); + } + + // Fallback: treat as direct column (will fail at query time if column doesn't exist) + return IdentifierExpression.of(fieldName); + } + + /** + * Finds the JSONB column that serves as the prefix for the given path. + * + *
Resolution strategy: progressively try shorter prefixes to find a JSONB column. + */ + private Optional findJsonbColumnPrefix(String path) { + if (!path.contains(".")) { + return Optional.empty(); + } + + String[] parts = path.split("\\."); + StringBuilder columnBuilder = new StringBuilder(parts[0]); + + for (int i = 0; i < parts.length - 1; i++) { + if (i > 0) { + columnBuilder.append(".").append(parts[i]); + } + String candidateColumn = columnBuilder.toString(); + Optional colMeta = + schemaRegistry.getColumnOrRefresh(tableName, candidateColumn); + + if (colMeta.isPresent() && colMeta.get().getCanonicalType() == DataType.JSON) { + return Optional.of(candidateColumn); + } + } + + return Optional.empty(); + } + + /** Extracts the nested JSONB path from a full path given the resolved column name. */ + private String[] getNestedPath(String fullPath, String columnName) { + if (fullPath.equals(columnName)) { + return new String[0]; + } + String nested = fullPath.substring(columnName.length() + 1); + return nested.split("\\."); + } + + /** Infers the JsonFieldType from the filter value. */ + private JsonFieldType inferJsonFieldType(Object value) { + if (value == null) { + return JsonFieldType.STRING; // Default to STRING + } + + // For collections, check the first element + if (value instanceof Collection) { + Collection collection = (Collection) value; + if (collection.isEmpty()) { + return JsonFieldType.STRING; + } + Object firstElement = collection.iterator().next(); + return inferTypeFromElement(firstElement); + } + + if (value instanceof Object[]) { + Object[] array = (Object[]) value; + if (array.length == 0) { + return JsonFieldType.STRING; + } + return inferTypeFromElement(array[0]); + } + + return inferTypeFromElement(value); + } + + /** Infers JsonFieldType from a single element. */ + private JsonFieldType inferTypeFromElement(Object element) { + if (element instanceof String) { + return JsonFieldType.STRING; + } + if (element instanceof Number) { + return JsonFieldType.NUMBER; + } + if (element instanceof Boolean) { + return JsonFieldType.BOOLEAN; + } + throw new IllegalArgumentException( + "Unsupported value type for JsonFieldType inference: " + element.getClass().getName()); + } + + /** Maps legacy Filter.Op to RelationalOperator. */ + private RelationalOperator mapOperator(org.hypertrace.core.documentstore.Filter.Op op) { + switch (op) { + case EQ: + return RelationalOperator.EQ; + case NEQ: + return RelationalOperator.NEQ; + case GT: + return RelationalOperator.GT; + case LT: + return RelationalOperator.LT; + case GTE: + return RelationalOperator.GTE; + case LTE: + return RelationalOperator.LTE; + case IN: + return RelationalOperator.IN; + case NOT_IN: + return RelationalOperator.NOT_IN; + case CONTAINS: + return RelationalOperator.CONTAINS; + case NOT_CONTAINS: + return RelationalOperator.NOT_CONTAINS; + case EXISTS: + return RelationalOperator.EXISTS; + case NOT_EXISTS: + return RelationalOperator.NOT_EXISTS; + case LIKE: + return RelationalOperator.LIKE; + default: + throw new UnsupportedOperationException("Unsupported operator: " + op); + } + } + + /** + * Creates a ConstantExpression from the filter value. + * + *
Handles different value types including collections for IN/NOT_IN operators. + */ + @SuppressWarnings("unchecked") + private ConstantExpression createConstantExpression( + Object value, org.hypertrace.core.documentstore.Filter.Op op) { + if (value == null) { + // For EXISTS/NOT_EXISTS, value can be null + return ConstantExpression.of((String) null); + } + + // Handle collection types for IN/NOT_IN operators + if (value instanceof Collection) { + return createConstantExpressionFromCollection((Collection) value); + } + + if (value instanceof Object[]) { + return createConstantExpressionFromCollection(Arrays.asList((Object[]) value)); + } + + if (value instanceof String) { + return ConstantExpression.of((String) value); + } + + if (value instanceof Number) { + return ConstantExpression.of((Number) value); + } + + if (value instanceof Boolean) { + return ConstantExpression.of((Boolean) value); + } + + // Fallback: convert to string + return ConstantExpression.of(value.toString()); + } + + /** Creates a ConstantExpression from a collection of values. */ + @SuppressWarnings("unchecked") + private ConstantExpression createConstantExpressionFromCollection(Collection collection) { + if (collection.isEmpty()) { + throw new IllegalArgumentException("Collection cannot be empty for IN/NOT_IN operators"); + } + + Object firstElement = collection.iterator().next(); + + if (firstElement instanceof String) { + return ConstantExpression.ofStrings( + collection.stream().map(Object::toString).collect(Collectors.toList())); + } + + if (firstElement instanceof Number) { + return ConstantExpression.ofNumbers( + collection.stream().map(v -> (Number) v).collect(Collectors.toList())); + } + + if (firstElement instanceof Boolean) { + return ConstantExpression.ofBooleans( + collection.stream().map(v -> (Boolean) v).collect(Collectors.toList())); + } + + throw new IllegalArgumentException( + "Unsupported collection element type: " + firstElement.getClass().getName()); + } +} From a56075bf58846ba97d2d54765247ca475e4775f3 Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Wed, 4 Feb 2026 15:26:20 +0530 Subject: [PATCH 07/12] Use filter transformer --- .../FlatCollectionWriteTest.java | 95 ++++--------------- .../core/documentstore/Collection.java | 5 - .../postgres/FlatPostgresCollection.java | 18 ++-- 3 files changed, 29 insertions(+), 89 deletions(-) diff --git a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java index 80a72d65..33678ce2 100644 --- a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java +++ b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java @@ -935,26 +935,12 @@ void testDeleteWithFilters() throws Exception { } // Test 1: EQ filter - org.hypertrace.core.documentstore.query.Filter eqFilter = - org.hypertrace.core.documentstore.query.Filter.builder() - .expression( - RelationalExpression.of( - IdentifierExpression.of("item"), - RelationalOperator.EQ, - ConstantExpression.of("ToBeDeleted"))) - .build(); + Filter eqFilter = Filter.eq("item", "ToBeDeleted"); assertTrue(flatCollection.delete(eqFilter)); queryAndAssert(new SingleValueKey(DEFAULT_TENANT, "filter-eq"), rs -> assertFalse(rs.next())); // Test 2: GT filter (price > 500) - org.hypertrace.core.documentstore.query.Filter gtFilter = - org.hypertrace.core.documentstore.query.Filter.builder() - 
.expression( - RelationalExpression.of( - IdentifierExpression.of("price"), - RelationalOperator.GT, - ConstantExpression.of(500))) - .build(); + Filter gtFilter = new Filter(Filter.Op.GT, "price", 500); assertTrue(flatCollection.delete(gtFilter)); queryAndAssert( new SingleValueKey(DEFAULT_TENANT, "filter-gt-expensive"), rs -> assertFalse(rs.next())); @@ -962,14 +948,7 @@ void testDeleteWithFilters() throws Exception { new SingleValueKey(DEFAULT_TENANT, "filter-gt-cheap"), rs -> assertTrue(rs.next())); // Test 3: IN filter - org.hypertrace.core.documentstore.query.Filter inFilter = - org.hypertrace.core.documentstore.query.Filter.builder() - .expression( - RelationalExpression.of( - IdentifierExpression.of("item"), - RelationalOperator.IN, - ConstantExpression.ofStrings(List.of("Apple", "Banana")))) - .build(); + Filter inFilter = new Filter(Filter.Op.IN, "item", List.of("Apple", "Banana")); assertTrue(flatCollection.delete(inFilter)); queryAndAssert( new SingleValueKey(DEFAULT_TENANT, "filter-in-apple"), rs -> assertFalse(rs.next())); @@ -978,9 +957,11 @@ void testDeleteWithFilters() throws Exception { queryAndAssert( new SingleValueKey(DEFAULT_TENANT, "filter-in-cherry"), rs -> assertTrue(rs.next())); - // Test 4: Legacy Filter API throws UnsupportedOperationException - Filter legacyFilter = Filter.eq("item", "Cherry"); - assertThrows(UnsupportedOperationException.class, () -> flatCollection.delete(legacyFilter)); + // Test 4: Delete the remaining Cherry item + Filter cherryFilter = Filter.eq("item", "Cherry"); + assertTrue(flatCollection.delete(cherryFilter)); + queryAndAssert( + new SingleValueKey(DEFAULT_TENANT, "filter-in-cherry"), rs -> assertFalse(rs.next())); } @Test @@ -1021,34 +1002,14 @@ void testDeleteWithCompositeAndNestedFilters() throws Exception { new SingleValueKey(DEFAULT_TENANT, "jsonb-adidas"), new JSONDocument(node4)); // Test 1: AND filter (item = 'TargetItem' AND price = 100) - org.hypertrace.core.documentstore.query.Filter andFilter = - org.hypertrace.core.documentstore.query.Filter.builder() - .expression( - org.hypertrace.core.documentstore.expression.impl.LogicalExpression.and( - RelationalExpression.of( - IdentifierExpression.of("item"), - RelationalOperator.EQ, - ConstantExpression.of("TargetItem")), - RelationalExpression.of( - IdentifierExpression.of("price"), - RelationalOperator.EQ, - ConstantExpression.of(100)))) - .build(); + Filter andFilter = Filter.eq("item", "TargetItem").and(Filter.eq("price", 100)); assertTrue(flatCollection.delete(andFilter)); queryAndAssert(new SingleValueKey(DEFAULT_TENANT, "and-match"), rs -> assertFalse(rs.next())); queryAndAssert( new SingleValueKey(DEFAULT_TENANT, "and-nomatch"), rs -> assertTrue(rs.next())); // Test 2: Nested JSONB filter (props.brand = 'Nike') - org.hypertrace.core.documentstore.query.Filter jsonbFilter = - org.hypertrace.core.documentstore.query.Filter.builder() - .expression( - RelationalExpression.of( - org.hypertrace.core.documentstore.expression.impl.JsonIdentifierExpression.of( - "props", "brand"), - RelationalOperator.EQ, - ConstantExpression.of("Nike"))) - .build(); + Filter jsonbFilter = Filter.eq("props.brand", "Nike"); assertTrue(flatCollection.delete(jsonbFilter)); queryAndAssert( new SingleValueKey(DEFAULT_TENANT, "jsonb-nike"), rs -> assertFalse(rs.next())); @@ -1060,26 +1021,18 @@ void testDeleteWithCompositeAndNestedFilters() throws Exception { @DisplayName("Should handle edge cases: no match returns false, null filter throws exception") void testDeleteEdgeCases() { // Test 1: No match 
returns false - org.hypertrace.core.documentstore.query.Filter noMatchFilter = - org.hypertrace.core.documentstore.query.Filter.builder() - .expression( - RelationalExpression.of( - IdentifierExpression.of("item"), - RelationalOperator.EQ, - ConstantExpression.of("NonExistentItem12345"))) - .build(); + Filter noMatchFilter = Filter.eq("item", "NonExistentItem12345"); assertFalse(flatCollection.delete(noMatchFilter)); // Test 2: Null filter throws exception - assertThrows( - IllegalArgumentException.class, - () -> flatCollection.delete((org.hypertrace.core.documentstore.query.Filter) null)); + assertThrows(IllegalArgumentException.class, () -> flatCollection.delete((Filter) null)); } @Test - @DisplayName("delete(Filter) should return false when SQLException occurs (dropped table)") - void testDeleteByFilterReturnsFalseOnSQLException() throws Exception { - // Create a temporary table, get collection, then drop the table to trigger SQLException + @DisplayName( + "delete(Filter) should throw exception when table is dropped (schema lookup fails)") + void testDeleteByFilterThrowsExceptionOnDroppedTable() throws Exception { + // Create a temporary table, get collection, then drop the table to trigger exception String tempTable = "temp_delete_filter_test"; PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore; @@ -1096,24 +1049,16 @@ void testDeleteByFilterReturnsFalseOnSQLException() throws Exception { Collection tempCollection = postgresDatastore.getCollectionForType(tempTable, DocumentType.FLAT); - // Drop the table to cause SQLException on delete + // Drop the table - this will cause schema lookup to fail when delete is called try (Connection conn = pgDatastore.getPostgresClient(); PreparedStatement ps = conn.prepareStatement(String.format("DROP TABLE \"%s\"", tempTable))) { ps.execute(); } - org.hypertrace.core.documentstore.query.Filter filter = - org.hypertrace.core.documentstore.query.Filter.builder() - .expression( - RelationalExpression.of( - IdentifierExpression.of("item"), - RelationalOperator.EQ, - ConstantExpression.of("SomeValue"))) - .build(); - - // SQLException should be caught and method should return false - assertFalse(tempCollection.delete(filter)); + // With legacy filter transformer, schema lookup happens first and throws exception + Filter filter = Filter.eq("item", "SomeValue"); + assertThrows(Exception.class, () -> tempCollection.delete(filter)); } @Test diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/Collection.java b/document-store/src/main/java/org/hypertrace/core/documentstore/Collection.java index 699fdc18..4d0430fb 100644 --- a/document-store/src/main/java/org/hypertrace/core/documentstore/Collection.java +++ b/document-store/src/main/java/org/hypertrace/core/documentstore/Collection.java @@ -159,13 +159,8 @@ CloseableIterator query( * @param filter The filter to determine documents to be deleted. Only the filter clause. * @return True if the documents are deleted, false otherwise. 
*/ - @Deprecated boolean delete(Filter filter); - default boolean delete(org.hypertrace.core.documentstore.query.Filter filter) { - throw new IllegalArgumentException("Not implemented!"); - } - /** * Delete the documents for the given keys * diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java index 2550f59e..94ce5aed 100644 --- a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java +++ b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java @@ -44,6 +44,7 @@ import org.hypertrace.core.documentstore.postgres.query.v1.PostgresQueryParser; import org.hypertrace.core.documentstore.postgres.query.v1.parser.filter.nonjson.field.PostgresDataType; import org.hypertrace.core.documentstore.postgres.query.v1.transformer.FlatPostgresFieldTransformer; +import org.hypertrace.core.documentstore.postgres.query.v1.transformer.LegacyFilterToQueryFilterTransformer; import org.hypertrace.core.documentstore.postgres.update.FlatUpdateContext; import org.hypertrace.core.documentstore.postgres.update.parser.FlatCollectionSubDocSetOperatorParser; import org.hypertrace.core.documentstore.postgres.update.parser.FlatCollectionSubDocUpdateOperatorParser; @@ -182,11 +183,17 @@ public boolean delete(Key key) { } @Override - public boolean delete(org.hypertrace.core.documentstore.query.Filter filter) { + public boolean delete(Filter filter) { Preconditions.checkArgument(filter != null, "Filter cannot be null"); - Query query = Query.builder().setFilter(filter).build(); + LegacyFilterToQueryFilterTransformer filterTransformer = + new LegacyFilterToQueryFilterTransformer(schemaRegistry, tableIdentifier.getTableName()); + + org.hypertrace.core.documentstore.query.Filter transformedFilter = + filterTransformer.transform(filter); + + Query query = Query.builder().setFilter(transformedFilter).build(); // Create parser with flat field transformer PostgresQueryParser queryParser = @@ -214,13 +221,6 @@ public boolean delete(org.hypertrace.core.documentstore.query.Filter filter) { return false; } - @Override - @Deprecated - public boolean delete(Filter filter) { - throw new UnsupportedOperationException( - "DELETE not supported for legacy Filter, use delete(org.hypertrace.core.documentstore.query.Filter filter) rather"); - } - @Override public BulkDeleteResult delete(Set keys) { if (keys == null || keys.isEmpty()) { From f65050169427d4f8a4497e67a73d9dfa85c49d29 Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Wed, 4 Feb 2026 15:31:10 +0530 Subject: [PATCH 08/12] Added more test cases --- .../FlatCollectionFilterTransformerTest.java | 66 +++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java index eee87ebb..0865e8b6 100644 --- a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java +++ b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java @@ -7,6 +7,7 @@ import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static 
org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; @@ -868,6 +869,71 @@ void testOrTransformation() { assertNotNull(newFilter); assertInstanceOf(LogicalExpression.class, newFilter.getExpression()); } + + @Test + @DisplayName("Should infer NUMBER type for nested field with numeric value") + void testNestedFieldWithNumberValue() { + Filter legacyFilter = new Filter(Filter.Op.EQ, "props.count", 42); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + assertNotNull(newFilter); + RelationalExpression expr = (RelationalExpression) newFilter.getExpression(); + ConstantExpression rhs = (ConstantExpression) expr.getRhs(); + assertEquals(42, rhs.getValue()); + } + + @Test + @DisplayName("Should infer BOOLEAN type for nested field with boolean value") + void testNestedFieldWithBooleanValue() { + Filter legacyFilter = new Filter(Filter.Op.EQ, "props.active", true); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + assertNotNull(newFilter); + RelationalExpression expr = (RelationalExpression) newFilter.getExpression(); + ConstantExpression rhs = (ConstantExpression) expr.getRhs(); + assertEquals(true, rhs.getValue()); + } + + @Test + @DisplayName("Should handle IN filter with Object[] array of strings") + void testInFilterWithObjectArray() { + Object[] values = new Object[] {"value1", "value2"}; + Filter legacyFilter = new Filter(Filter.Op.IN, "props.tags", values); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + assertNotNull(newFilter); + RelationalExpression expr = (RelationalExpression) newFilter.getExpression(); + assertEquals(RelationalOperator.IN, expr.getOperator()); + } + + @Test + @DisplayName("Should handle IN filter with boolean collection") + void testInFilterWithBooleanCollection() { + List values = List.of(true, false); + Filter legacyFilter = new Filter(Filter.Op.IN, "props.flags", values); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + assertNotNull(newFilter); + RelationalExpression expr = (RelationalExpression) newFilter.getExpression(); + assertEquals(RelationalOperator.IN, expr.getOperator()); + ConstantExpression rhs = (ConstantExpression) expr.getRhs(); + assertNotNull(rhs.getValue()); + } + + @Test + @DisplayName("Should throw exception for unsupported collection element type") + void testUnsupportedCollectionElementType() { + List values = List.of(new java.util.Date()); + Filter legacyFilter = new Filter(Filter.Op.IN, "field", values); + + Exception exception = + assertThrows(IllegalArgumentException.class, () -> transformer.transform(legacyFilter)); + assertTrue(exception.getMessage().contains("Unsupported collection element type")); + } } /** Helper method to collect all documents from a CloseableIterator. 
*/ From 58c965aaa0c820183f18fad42b3ec6ce8d21dad3 Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Wed, 4 Feb 2026 15:58:29 +0530 Subject: [PATCH 09/12] Added more test cases --- .../FlatCollectionFilterTransformerTest.java | 73 +++++++++++++++++++ 1 file changed, 73 insertions(+) diff --git a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java index 0865e8b6..4687c5f3 100644 --- a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java +++ b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java @@ -934,6 +934,79 @@ void testUnsupportedCollectionElementType() { assertThrows(IllegalArgumentException.class, () -> transformer.transform(legacyFilter)); assertTrue(exception.getMessage().contains("Unsupported collection element type")); } + + @Test + @DisplayName("Should throw exception for null field name") + void testNullFieldName() { + Filter legacyFilter = new Filter(Filter.Op.EQ, null, "value"); + + Exception exception = + assertThrows(IllegalArgumentException.class, () -> transformer.transform(legacyFilter)); + assertTrue(exception.getMessage().contains("Field name cannot be null or empty")); + } + + @Test + @DisplayName("Should throw exception for empty field name") + void testEmptyFieldName() { + Filter legacyFilter = new Filter(Filter.Op.EQ, "", "value"); + + Exception exception = + assertThrows(IllegalArgumentException.class, () -> transformer.transform(legacyFilter)); + assertTrue(exception.getMessage().contains("Field name cannot be null or empty")); + } + + @Test + @DisplayName("Should throw exception for empty collection in IN operator") + void testEmptyCollectionInOperator() { + List emptyList = List.of(); + Filter legacyFilter = new Filter(Filter.Op.IN, "field", emptyList); + + Exception exception = + assertThrows(IllegalArgumentException.class, () -> transformer.transform(legacyFilter)); + assertTrue(exception.getMessage().contains("Collection cannot be empty")); + } + + @Test + @DisplayName("Should use toString fallback for unknown value type") + void testToStringFallbackForUnknownType() { + // Use a custom object that will fall through to toString + Object customValue = + new Object() { + @Override + public String toString() { + return "custom-value"; + } + }; + Filter legacyFilter = new Filter(Filter.Op.EQ, "field", customValue); + org.hypertrace.core.documentstore.query.Filter newFilter = + transformer.transform(legacyFilter); + + assertNotNull(newFilter); + RelationalExpression expr = (RelationalExpression) newFilter.getExpression(); + ConstantExpression rhs = (ConstantExpression) expr.getRhs(); + assertEquals("custom-value", rhs.getValue()); + } + + @Test + @DisplayName("Should throw exception for unsupported value type in nested field") + void testUnsupportedValueTypeInNestedField() { + Filter legacyFilter = new Filter(Filter.Op.EQ, "props.custom", new java.util.Date()); + + Exception exception = + assertThrows(IllegalArgumentException.class, () -> transformer.transform(legacyFilter)); + assertTrue(exception.getMessage().contains("Unsupported value type for JsonFieldType")); + } + + @Test + @DisplayName("Should handle IN with empty Object[] array") + void testInFilterWithEmptyObjectArray() { + Object[] values = new Object[] {}; + Filter legacyFilter = new 
Filter(Filter.Op.IN, "field", values); + + Exception exception = + assertThrows(IllegalArgumentException.class, () -> transformer.transform(legacyFilter)); + assertTrue(exception.getMessage().contains("Collection cannot be empty")); + } } /** Helper method to collect all documents from a CloseableIterator. */ From fea22dcd5170a2e6a1641bf248c4615c14afffd7 Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Thu, 5 Feb 2026 11:35:22 +0530 Subject: [PATCH 10/12] Rename test class --- ...erTest.java => LegacyToQueryFilterTransformationTest.java} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename document-store/src/integrationTest/java/org/hypertrace/core/documentstore/{FlatCollectionFilterTransformerTest.java => LegacyToQueryFilterTransformationTest.java} (99%) diff --git a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/LegacyToQueryFilterTransformationTest.java similarity index 99% rename from document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java rename to document-store/src/integrationTest/java/org/hypertrace/core/documentstore/LegacyToQueryFilterTransformationTest.java index 4687c5f3..f4a5c087 100644 --- a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionFilterTransformerTest.java +++ b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/LegacyToQueryFilterTransformationTest.java @@ -53,10 +53,10 @@ *
Both approaches should yield identical results for the same filter conditions. */ @Testcontainers -public class FlatCollectionFilterTransformerTest { +public class LegacyToQueryFilterTransformationTest { private static final Logger LOGGER = - LoggerFactory.getLogger(FlatCollectionFilterTransformerTest.class); + LoggerFactory.getLogger(LegacyToQueryFilterTransformationTest.class); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final String FLAT_COLLECTION_NAME = "filterTestFlat"; private static final String INSERT_STATEMENTS_FILE = "query/pg_flat_collection_insert.json"; From 22d7b2d20b53c899a7d8f320c2ea0946e5b97244 Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Thu, 5 Feb 2026 11:43:57 +0530 Subject: [PATCH 11/12] Add datatype to superclass for JsonIdentifierExpression --- .../documentstore/expression/impl/JsonIdentifierExpression.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/expression/impl/JsonIdentifierExpression.java b/document-store/src/main/java/org/hypertrace/core/documentstore/expression/impl/JsonIdentifierExpression.java index 7bfb232f..ed766040 100644 --- a/document-store/src/main/java/org/hypertrace/core/documentstore/expression/impl/JsonIdentifierExpression.java +++ b/document-store/src/main/java/org/hypertrace/core/documentstore/expression/impl/JsonIdentifierExpression.java @@ -67,7 +67,7 @@ public static JsonIdentifierExpression of( protected JsonIdentifierExpression( String name, String columnName, List jsonPath, JsonFieldType fieldType) { - super(name); + super(name, DataType.JSON); this.columnName = columnName; this.jsonPath = jsonPath; this.fieldType = fieldType; From ba8a0b15d2e20e1d4c7421410ca082ae213fa986 Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Fri, 6 Feb 2026 11:52:01 +0530 Subject: [PATCH 12/12] Added docs --- .../LegacyFilterToQueryFilterTransformer.java | 22 ++++++++++++++----- 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/query/v1/transformer/LegacyFilterToQueryFilterTransformer.java b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/query/v1/transformer/LegacyFilterToQueryFilterTransformer.java index 050cdf93..267f900b 100644 --- a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/query/v1/transformer/LegacyFilterToQueryFilterTransformer.java +++ b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/query/v1/transformer/LegacyFilterToQueryFilterTransformer.java @@ -137,9 +137,15 @@ private SelectTypeExpression createIdentifierExpression( } /** - * Finds the JSONB column that serves as the prefix for the given path. + * So say the identifier in the filter is props.inheritiedAttributes.color = 'red'. Now, three + * possibilities arise: * - *
Resolution strategy: progressively try shorter prefixes to find a JSONB column.
+   * props is the json col and inheritedAttributes.color is the path; or props.inheritedAttributes
+   * is the jsonb col and color is the path; or props.inheritedAttributes.color is itself the jsonb
+   * col. This method walks the prefixes from shortest to longest, checking the type of each
+   * candidate column, to determine which case applies.
+   *
+   * @param path the dotted field path from the legacy filter
+   * @return the name of the JSONB column that prefixes the path, if any
    */
   private Optional<String> findJsonbColumnPrefix(String path) {
     if (!path.contains(".")) {
       return Optional.empty();
@@ -165,12 +171,16 @@ private Optional<String> findJsonbColumnPrefix(String path) {
     return Optional.empty();
   }
 
-  /** Extracts the nested JSONB path from a full path given the resolved column name. */
-  private String[] getNestedPath(String fullPath, String columnName) {
-    if (fullPath.equals(columnName)) {
+  /**
+   * Extracts the JSONB path portion after removing the column name prefix. For example, if the
+   * path is "props.inheritedAttributes.color" and the column name is "props", the extracted path
+   * is "inheritedAttributes.color".
+   */
+  private String[] getNestedPath(String fullPath, String jsonbColName) {
+    if (fullPath.equals(jsonbColName)) {
       return new String[0];
     }
-    String nested = fullPath.substring(columnName.length() + 1);
+    String nested = fullPath.substring(jsonbColName.length() + 1);
     return nested.split("\\.");
   }
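
The prefix resolution documented above can be captured in a minimal, self-contained sketch. It assumes the set of JSONB column names is already known up front (the real transformer resolves column types through SchemaRegistry at query time), and the class name JsonbPrefixResolutionSketch is purely illustrative:

import java.util.Optional;
import java.util.Set;

// Minimal sketch of the JSONB prefix resolution described above.
// Assumes the JSONB column names are known in advance instead of being
// looked up through SchemaRegistry, which is what the real transformer does.
public class JsonbPrefixResolutionSketch {

  public static void main(String[] args) {
    Set<String> jsonbColumns = Set.of("props", "props.inheritedAttributes");
    String path = "props.inheritedAttributes.color";

    Optional<String> column = findJsonbColumnPrefix(path, jsonbColumns);
    column.ifPresent(
        col ->
            System.out.println(
                "column=" + col + ", nestedPath=" + String.join(".", getNestedPath(path, col))));
    // Prints: column=props, nestedPath=inheritedAttributes.color
  }

  // Walks prefixes from shortest to longest and returns the first one that is a JSONB column.
  static Optional<String> findJsonbColumnPrefix(String path, Set<String> jsonbColumns) {
    String[] parts = path.split("\\.");
    StringBuilder candidate = new StringBuilder(parts[0]);
    for (int i = 0; i < parts.length - 1; i++) {
      if (i > 0) {
        candidate.append(".").append(parts[i]);
      }
      if (jsonbColumns.contains(candidate.toString())) {
        return Optional.of(candidate.toString());
      }
    }
    return Optional.empty();
  }

  // Strips the column name (plus the following dot) and splits the remainder into path segments.
  static String[] getNestedPath(String fullPath, String columnName) {
    if (fullPath.equals(columnName)) {
      return new String[0];
    }
    return fullPath.substring(columnName.length() + 1).split("\\.");
  }
}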