Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -324,6 +324,13 @@ public void invalidateTable(Identifier identifier) {
// Drop any cached partition listing for this table first.
if (partitionCache != null) {
partitionCache.invalidate(identifier);
}
// clear all branches of this table
// Scan every cached identifier and evict those referring to the same
// database/table pair, so branch-scoped cache entries (e.g. `t$branch_x`)
// do not survive an invalidation of the main table.
// NOTE(review): this assumes Identifier.getTableName() compares equal across
// branch variants of the same table (branch suffix carried elsewhere) — confirm
// against the Identifier class. Invalidating while iterating asMap().keySet()
// is presumably safe given the cache's weakly consistent map view — verify for
// the cache implementation in use.
for (Identifier i : tableCache.asMap().keySet()) {
if (identifier.getTableName().equals(i.getTableName())
&& identifier.getDatabaseName().equals(i.getDatabaseName())) {
tableCache.invalidate(i);
}
}
}

// ================================== Cache Public API
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -754,4 +754,39 @@ public void testDropSnapshotPartition(@TempDir java.nio.file.Path tempDir) throw

spark.close();
}

// Regression test: after writing into a branch of a chain table, a read of the
// main table must observe the new data — i.e. the catalog cache entry for the
// main table (and its sibling branches) is invalidated rather than served stale.
@Test
public void testChainTableCacheInvalidation(@TempDir java.nio.file.Path tempDir)
throws IOException {
// Use a fresh per-test warehouse directory so runs do not interfere.
Path warehousePath = new Path("file:" + tempDir.toString());
SparkSession.Builder builder = createSparkSessionBuilder(warehousePath);
SparkSession spark = builder.getOrCreate();
spark.sql("CREATE DATABASE IF NOT EXISTS my_db1");
spark.sql("USE spark_catalog.my_db1");
// Create a chain table; 'chain-table.enabled' presumably routes reads through
// the branch chain set up below — TODO confirm against chain-table docs.
spark.sql(
"CREATE TABLE chain_test_t ("
+ " `t1` string ,"
+ " `t2` string ,"
+ " `t3` string"
+ ") PARTITIONED BY (`date` string)"
+ "TBLPROPERTIES ("
+ " 'chain-table.enabled' = 'true'"
+ " ,'primary-key' = 'date,t1'"
+ " ,'sequence.field' = 't2'"
+ " ,'bucket-key' = 't1'"
+ " ,'bucket' = '1'"
+ " ,'partition.timestamp-pattern' = '$date'"
+ " ,'partition.timestamp-formatter' = 'yyyyMMdd'"
+ ")");
// Shared helper creates the branch layout (including the `delta` branch written below).
setupChainTableBranches(spark, "chain_test_t");
// Write through the branch; this is the operation whose cache invalidation is under test.
spark.sql(
"insert overwrite `chain_test_t$branch_delta` partition (date = '20260224') values ('1', '1', '1');");
// Reading the MAIN table must reflect the branch write — fails if a stale
// cached table snapshot is returned.
assertThat(
spark.sql("SELECT * FROM `chain_test_t`").collectAsList().stream()
.map(Row::toString)
.collect(Collectors.toList()))
.containsExactlyInAnyOrder("[1,1,1,20260224]");
// Clean up so the shared database does not leak state into other tests.
spark.sql("DROP TABLE IF EXISTS `my_db1`.`chain_test_t`;");
spark.close();
}
}