
Commit 332905c
[FLINK-33023][table-planner][JUnit5 Migration] Module: flink-table-planner/planner/plan (TableTestBase)
Jiabao-Sun committed Sep 27, 2023
1 parent 4d8fa05 commit 332905c
Showing 285 changed files with 4,892 additions and 4,237 deletions.
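The diffs shown below all apply the same mechanical JUnit 4 → JUnit 5 changes: org.junit.* annotations (@Before, @After, @Test) become their org.junit.jupiter.api.* counterparts (@BeforeEach, @AfterEach, @Test), and test classes, lifecycle methods, and test methods drop the public modifier, which Jupiter no longer requires. As a rough sketch of the target shape (the class, field, and assertion below are illustrative, not taken from the commit):

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

// Illustrative only: the JUnit 5 shape that each migrated test ends up with.
class MigrationShapeTest {

    private StringBuilder state;

    @BeforeEach // was @Before (org.junit.Before)
    void before() {
        state = new StringBuilder("ready");
    }

    @AfterEach // was @After (org.junit.After)
    void after() {
        state = null;
    }

    @Test // now org.junit.jupiter.api.Test instead of org.junit.Test
    void testSetupRan() {
        assertEquals("ready", state.toString());
    }
}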
File: CalcMergeTest.java
@@ -23,7 +23,7 @@
import org.apache.flink.table.planner.utils.TableTestUtil;

/** Plan test for calc merge. */
public class CalcMergeTest extends CalcMergeTestBase {
class CalcMergeTest extends CalcMergeTestBase {
@Override
protected boolean isBatchMode() {
return true;
File: DynamicFilteringTest.java
@@ -26,23 +26,24 @@
import org.apache.flink.table.planner.utils.BatchTableTestUtil;
import org.apache.flink.table.planner.utils.JavaScalaConversionUtil;
import org.apache.flink.table.planner.utils.TableTestBase;
import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension;
import org.apache.flink.testutils.junit.extensions.parameterized.Parameters;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;

/** Plan test for dynamic filtering. */
@RunWith(Parameterized.class)
public class DynamicFilteringTest extends TableTestBase {
@ExtendWith(ParameterizedTestExtension.class)
class DynamicFilteringTest extends TableTestBase {

// Notes that the here name is used to load the correct plan.
@Parameterized.Parameters(name = "mode = {0}")
public static Collection<Object[]> data() {
@Parameters(name = "mode = {0}")
private static Collection<Object[]> data() {
return Arrays.asList(
new Object[][] {
{BatchShuffleMode.ALL_EXCHANGES_BLOCKING},
@@ -52,14 +53,14 @@ public static Collection<Object[]> data() {

private final BatchShuffleMode batchShuffleMode;

public DynamicFilteringTest(BatchShuffleMode batchShuffleMode) {
DynamicFilteringTest(BatchShuffleMode batchShuffleMode) {
this.batchShuffleMode = batchShuffleMode;
}

private BatchTableTestUtil util;

@Before
public void before() {
@BeforeEach
void before() {
util = batchTestUtil(TableConfig.getDefault());
util.tableEnv()
.getConfig()
@@ -117,8 +118,8 @@ public void before() {
+ ")");
}

@Test
public void testLegacySource() {
@TestTemplate
void testLegacySource() {
util.tableEnv()
.executeSql(
"CREATE TABLE legacy_source (\n"
@@ -142,35 +143,35 @@ public void testLegacySource() {
Collections.singletonList(ExplainDetail.JSON_EXECUTION_PLAN)));
}

@Test
public void testSimpleDynamicFiltering() {
@TestTemplate
void testSimpleDynamicFiltering() {
// the execution plan contains 'Placeholder-Filter' operator
util.verifyExplain(
"SELECT * FROM fact1, dim WHERE p1 = p AND x > 10",
JavaScalaConversionUtil.toScala(
Collections.singletonList(ExplainDetail.JSON_EXECUTION_PLAN)));
}

@Test
public void testDynamicFilteringWithMultipleInput() {
@TestTemplate
void testDynamicFilteringWithMultipleInput() {
// the execution plan does not contain 'Placeholder-Filter' operator
util.verifyExplain(
"SELECT * FROM fact1, dim, fact2 WHERE p1 = p and p1 = p2 AND x > 10",
JavaScalaConversionUtil.toScala(
Collections.singletonList(ExplainDetail.JSON_EXECUTION_PLAN)));
}

@Test
public void testDuplicateFactTables() {
@TestTemplate
void testDuplicateFactTables() {
// the fact tables can not be reused
util.verifyExplain(
"SELECT * FROM (SELECT * FROM fact1, dim WHERE p1 = p AND x > 10) t1 JOIN fact1 t2 ON t1.y = t2.b1",
JavaScalaConversionUtil.toScala(
Collections.singletonList(ExplainDetail.JSON_EXECUTION_PLAN)));
}

@Test
public void testReuseDimSide() {
@TestTemplate
void testReuseDimSide() {
// dynamic filtering collector will be reused for both fact tables
util.verifyExplain(
"SELECT * FROM fact1, dim WHERE p1 = p AND x > 10 "
@@ -180,8 +181,8 @@ public void testReuseDimSide() {
Collections.singletonList(ExplainDetail.JSON_EXECUTION_PLAN)));
}

@Test
public void testDynamicFilteringWithStaticPartitionPruning() {
@TestTemplate
void testDynamicFilteringWithStaticPartitionPruning() {
util.verifyExplain(
"SELECT * FROM fact1, dim WHERE p1 = p AND x > 10 and p1 > 1",
JavaScalaConversionUtil.toScala(
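The DynamicFilteringTest diff above also shows the parameterized variant of the migration: @RunWith(Parameterized.class) and @Parameterized.Parameters are replaced by Flink's ParameterizedTestExtension and its @Parameters annotation, parameterized test methods switch from @Test to @TestTemplate, and the parameter is still injected through the constructor. A condensed, hypothetical sketch of that shape, assuming the flink-test-utils-junit test dependency is on the classpath (the class name and parameter values are illustrative):

import static org.junit.jupiter.api.Assertions.assertNotNull;

import org.apache.flink.testutils.junit.extensions.parameterized.ParameterizedTestExtension;
import org.apache.flink.testutils.junit.extensions.parameterized.Parameters;

import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;

import java.util.Arrays;
import java.util.Collection;

// Replaces @RunWith(Parameterized.class): the extension re-runs every
// @TestTemplate method once per parameter set returned by the @Parameters method.
@ExtendWith(ParameterizedTestExtension.class)
class ShuffleModeShapeTest {

    // Replaces @Parameterized.Parameters; the name pattern still drives the
    // display name (and, in the planner tests above, which plan is loaded).
    @Parameters(name = "mode = {0}")
    private static Collection<Object[]> data() {
        return Arrays.asList(new Object[][] {{"blocking"}, {"hybrid"}});
    }

    private final String mode;

    // The current parameter is still injected through the constructor.
    ShuffleModeShapeTest(String mode) {
        this.mode = mode;
    }

    // @TestTemplate replaces @Test on parameterized test methods.
    @TestTemplate
    void testModeIsInjected() {
        assertNotNull(mode);
    }
}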
File: ForwardHashExchangeTest.java
@@ -23,16 +23,16 @@
import org.apache.flink.table.planner.utils.BatchTableTestUtil;
import org.apache.flink.table.planner.utils.TableTestBase;

import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

/** Tests for ForwardHashExchangeProcessor. */
public class ForwardHashExchangeTest extends TableTestBase {
class ForwardHashExchangeTest extends TableTestBase {

private BatchTableTestUtil util;

@Before
public void before() {
@BeforeEach
void before() {
util = batchTestUtil(TableConfig.getDefault());

util.tableEnv()
@@ -77,21 +77,21 @@ public void before() {
}

@Test
public void testRankWithHashShuffle() {
void testRankWithHashShuffle() {
util.verifyExecPlan(
"SELECT * FROM (SELECT a, b, RANK() OVER(PARTITION BY a ORDER BY b) rk FROM T) WHERE rk <= 10");
}

@Test
public void testSortAggregateWithHashShuffle() {
void testSortAggregateWithHashShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg");
util.verifyExecPlan(" SELECT a, SUM(b) AS b FROM T GROUP BY a");
}

@Test
public void testOverAggOnHashAggWithHashShuffle() {
void testOverAggOnHashAggWithHashShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "SortAgg");
@@ -106,15 +106,15 @@ public void testOverAggOnHashAggWithHashShuffle() {
}

@Test
public void testOverAggOnHashAggWithGlobalShuffle() {
void testOverAggOnHashAggWithGlobalShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "SortAgg");
util.verifyExecPlan("SELECT b, RANK() OVER (ORDER BY b) FROM (SELECT SUM(b) AS b FROM T)");
}

@Test
public void testOverAggOnSortAggWithHashShuffle() {
void testOverAggOnSortAggWithHashShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg");
@@ -129,15 +129,15 @@ public void testOverAggOnSortAggWithHashShuffle() {
}

@Test
public void testOverAggOnSortAggWithGlobalShuffle() {
void testOverAggOnSortAggWithGlobalShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg");
util.verifyExecPlan("SELECT b, RANK() OVER (ORDER BY b) FROM (SELECT SUM(b) AS b FROM T)");
}

@Test
public void testHashAggOnHashJoinWithHashShuffle() {
void testHashAggOnHashJoinWithHashShuffle() {
util.tableEnv()
.getConfig()
.set(
@@ -149,7 +149,7 @@ public void testHashAggOnHashJoinWithHashShuffle() {
}

@Test
public void testOnePhaseSortAggOnSortMergeJoinWithHashShuffle() {
void testOnePhaseSortAggOnSortMergeJoinWithHashShuffle() {
util.tableEnv()
.getConfig()
.set(
@@ -164,7 +164,7 @@ public void testOnePhaseSortAggOnSortMergeJoinWithHashShuffle() {
}

@Test
public void testTwoPhaseSortAggOnSortMergeJoinWithHashShuffle() {
void testTwoPhaseSortAggOnSortMergeJoinWithHashShuffle() {
util.tableEnv()
.getConfig()
.set(
@@ -179,7 +179,7 @@ public void testTwoPhaseSortAggOnSortMergeJoinWithHashShuffle() {
}

@Test
public void testAutoPhaseSortAggOnSortMergeJoinWithHashShuffle() {
void testAutoPhaseSortAggOnSortMergeJoinWithHashShuffle() {
util.tableEnv()
.getConfig()
.set(
@@ -194,7 +194,7 @@ public void testAutoPhaseSortAggOnSortMergeJoinWithHashShuffle() {
}

@Test
public void testHashAggOnNestedLoopJoinWithGlobalShuffle() {
void testHashAggOnNestedLoopJoinWithGlobalShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "SortAgg");
@@ -208,7 +208,7 @@ public void testHashAggOnNestedLoopJoinWithGlobalShuffle() {
}

@Test
public void testSortAggOnNestedLoopJoinWithGlobalShuffle() {
void testSortAggOnNestedLoopJoinWithGlobalShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg");
@@ -222,7 +222,7 @@ public void testSortAggOnNestedLoopJoinWithGlobalShuffle() {
}

@Test
public void testRankOnHashAggWithHashShuffle() {
void testRankOnHashAggWithHashShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "SortAgg");
@@ -235,7 +235,7 @@ public void testRankOnHashAggWithHashShuffle() {
}

@Test
public void testRankOnHashAggWithGlobalShuffle() {
void testRankOnHashAggWithGlobalShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "SortAgg");
@@ -248,7 +248,7 @@ public void testRankOnHashAggWithGlobalShuffle() {
}

@Test
public void testRankOnOnePhaseSortAggWithHashShuffle() {
void testRankOnOnePhaseSortAggWithHashShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg");
@@ -264,7 +264,7 @@ public void testRankOnOnePhaseSortAggWithHashShuffle() {
}

@Test
public void testRankOnTwoPhaseSortAggWithHashShuffle() {
void testRankOnTwoPhaseSortAggWithHashShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg");
@@ -280,7 +280,7 @@ public void testRankOnTwoPhaseSortAggWithHashShuffle() {
}

@Test
public void testRankOnOnePhaseSortAggWithGlobalShuffle() {
void testRankOnOnePhaseSortAggWithGlobalShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg");
@@ -296,7 +296,7 @@ public void testRankOnOnePhaseSortAggWithGlobalShuffle() {
}

@Test
public void testRankOnTwoPhaseSortAggWithGlobalShuffle() {
void testRankOnTwoPhaseSortAggWithGlobalShuffle() {
util.tableEnv()
.getConfig()
.set(ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg");
@@ -312,7 +312,7 @@ public void testRankOnTwoPhaseSortAggWithGlobalShuffle() {
}

@Test
public void testHashJoinWithMultipleInputDisabled() {
void testHashJoinWithMultipleInputDisabled() {
util.tableEnv()
.getConfig()
.set(
@@ -328,7 +328,7 @@ public void testHashJoinWithMultipleInputDisabled() {
}

@Test
public void testSortJoinWithMultipleInputDisabled() {
void testSortJoinWithMultipleInputDisabled() {
util.tableEnv()
.getConfig()
.set(
@@ -344,7 +344,7 @@ public void testSortJoinWithMultipleInputDisabled() {
}

@Test
public void testMultipleInputs() {
void testMultipleInputs() {
util.getTableEnv()
.getConfig()
.set(OptimizerConfigOptions.TABLE_OPTIMIZER_JOIN_REORDER_ENABLED, false)
File: MatchRecognizeTest.java
@@ -22,17 +22,17 @@
import org.apache.flink.table.planner.utils.TableTestBase;

import org.apache.calcite.sql.SqlMatchRecognize;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

/** Tests for {@link SqlMatchRecognize}. */
public class MatchRecognizeTest extends TableTestBase {
class MatchRecognizeTest extends TableTestBase {

private BatchTableTestUtil util;

@Before
public void before() {
@BeforeEach
void before() {
util = batchTestUtil(TableConfig.getDefault());
util.getTableEnv()
.executeSql(
@@ -47,13 +47,13 @@ public void before() {
+ ")");
}

@After
public void after() {
@AfterEach
void after() {
util.getTableEnv().executeSql("DROP TABLE Ticker");
}

@Test
public void testCascadeMatch() {
void testCascadeMatch() {
String sqlQuery =
"SELECT *\n"
+ "FROM (\n"
(Diffs for the remaining changed files are not shown here.)
