[FLINK-26232][avro] Migrate tests to JUnit5
RyanSkraba committed Mar 9, 2022
1 parent d0d97cb commit a3dff68
Showing 28 changed files with 552 additions and 577 deletions.
5 changes: 5 additions & 0 deletions flink-formats/flink-avro/pom.xml
@@ -262,6 +262,11 @@ under the License.
</goals>
</execution>
</executions>
<configuration>
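<!-- Excludes the JUnit 5 extension auto-registration file (a ServiceLoader entry for
     org.junit.jupiter.api.extension.Extension) from the artifact this plugin builds;
     presumably so the packaged jar does not carry the test-scope extension. The commit
     itself does not state the reason. -->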
<excludes>
<exclude>META-INF/services/org.junit.jupiter.api.extension.Extension</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
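
The test-class changes below all follow one migration pattern: JUnit 4 imports are swapped for their JUnit Jupiter equivalents, test classes and test methods drop the public modifier (Jupiter does not require it), and org.junit.Assert calls are rewritten as AssertJ assertThat assertions. A minimal sketch of that pattern on a hypothetical test class (ExampleTest and compute() are illustrative only, not part of this commit):

// Before (JUnit 4):
//
//     import org.junit.Test;
//     import static org.junit.Assert.assertEquals;
//
//     public class ExampleTest {
//         @Test
//         public void testCompute() {
//             assertEquals(42, compute());
//         }
//     }
//
// After (JUnit 5 + AssertJ):

import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;

class ExampleTest {

    @Test
    void testCompute() {
        // AssertJ puts the actual value first and chains the expectation.
        assertThat(compute()).isEqualTo(42);
    }

    private static int compute() {
        return 42;
    }
}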
@@ -62,7 +62,7 @@
import static org.apache.flink.formats.avro.AvroBulkFormatTestUtils.ROW_TYPE;

/** IT cases for {@link AbstractAvroBulkFormat}. */
public class AvroBulkFormatITCase extends SourceTestSuiteBase<RowData> {
class AvroBulkFormatITCase extends SourceTestSuiteBase<RowData> {

private static final RowDataSerializer SERIALIZER = new RowDataSerializer(ROW_TYPE);

@@ -55,7 +55,7 @@
import static org.assertj.core.api.Assertions.assertThat;

/** Tests for {@link AbstractAvroBulkFormat}. */
public class AvroBulkFormatTest {
class AvroBulkFormatTest {

private static final List<RowData> TEST_DATA =
Arrays.asList(
@@ -116,7 +116,7 @@ public void after() throws IOException {
}

@Test
public void testReadWholeFileWithOneSplit() throws IOException {
void testReadWholeFileWithOneSplit() throws IOException {
AvroBulkFormatTestUtils.TestingAvroBulkFormat bulkFormat =
new AvroBulkFormatTestUtils.TestingAvroBulkFormat();
assertSplit(
@@ -132,7 +132,7 @@ public void testReadWholeFileWithOneSplit() throws IOException {
}

@Test
public void testReadWholeFileWithMultipleSplits() throws IOException {
void testReadWholeFileWithMultipleSplits() throws IOException {
AvroBulkFormatTestUtils.TestingAvroBulkFormat bulkFormat =
new AvroBulkFormatTestUtils.TestingAvroBulkFormat();
long splitLength = tmpFile.length() / 3;
@@ -149,7 +149,7 @@ public void testReadWholeFileWithMultipleSplits() throws IOException {
}

@Test
public void testSplitsAtCriticalLocations() throws IOException {
void testSplitsAtCriticalLocations() throws IOException {
AvroBulkFormatTestUtils.TestingAvroBulkFormat bulkFormat =
new AvroBulkFormatTestUtils.TestingAvroBulkFormat();
assertSplit(
@@ -168,7 +168,7 @@ public void testSplitsAtCriticalLocations() throws IOException {
}

@Test
public void testRestoreReader() throws IOException {
void testRestoreReader() throws IOException {
AvroBulkFormatTestUtils.TestingAvroBulkFormat bulkFormat =
new AvroBulkFormatTestUtils.TestingAvroBulkFormat();
long splitLength = tmpFile.length() / 3;
@@ -24,60 +24,59 @@
import org.apache.flink.formats.avro.utils.TestDataGenerator;

import org.apache.avro.generic.GenericRecord;
import org.junit.Test;
import org.junit.jupiter.api.Test;

import java.time.Instant;
import java.util.Random;

import static org.apache.flink.formats.avro.utils.AvroTestUtils.writeRecord;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.assertj.core.api.Assertions.assertThat;

/** Tests for {@link AvroDeserializationSchema}. */
public class AvroDeserializationSchemaTest {
class AvroDeserializationSchemaTest {

private static final Address address = TestDataGenerator.generateRandomAddress(new Random());

@Test
public void testNullRecord() throws Exception {
void testNullRecord() throws Exception {
DeserializationSchema<Address> deserializer =
AvroDeserializationSchema.forSpecific(Address.class);

Address deserializedAddress = deserializer.deserialize(null);
assertNull(deserializedAddress);
assertThat(deserializedAddress).isNull();
}

@Test
public void testGenericRecord() throws Exception {
void testGenericRecord() throws Exception {
DeserializationSchema<GenericRecord> deserializationSchema =
AvroDeserializationSchema.forGeneric(address.getSchema());

byte[] encodedAddress = writeRecord(address, Address.getClassSchema());
GenericRecord genericRecord = deserializationSchema.deserialize(encodedAddress);
assertEquals(address.getCity(), genericRecord.get("city").toString());
assertEquals(address.getNum(), genericRecord.get("num"));
assertEquals(address.getState(), genericRecord.get("state").toString());
assertThat(genericRecord.get("city").toString()).isEqualTo(address.getCity());
assertThat(genericRecord.get("num")).isEqualTo(address.getNum());
assertThat(genericRecord.get("state").toString()).isEqualTo(address.getState());
}

@Test
public void testSpecificRecord() throws Exception {
void testSpecificRecord() throws Exception {
DeserializationSchema<Address> deserializer =
AvroDeserializationSchema.forSpecific(Address.class);

byte[] encodedAddress = writeRecord(address);
Address deserializedAddress = deserializer.deserialize(encodedAddress);
assertEquals(address, deserializedAddress);
assertThat(deserializedAddress).isEqualTo(address);
}

@Test
public void testSpecificRecordWithUnionLogicalType() throws Exception {
void testSpecificRecordWithUnionLogicalType() throws Exception {
Random rnd = new Random();
UnionLogicalType data = new UnionLogicalType(Instant.ofEpochMilli(rnd.nextLong()));
DeserializationSchema<UnionLogicalType> deserializer =
AvroDeserializationSchema.forSpecific(UnionLogicalType.class);

byte[] encodedData = writeRecord(data);
UnionLogicalType deserializedData = deserializer.deserialize(encodedData);
assertEquals(data, deserializedData);
assertThat(deserializedData).isEqualTo(data);
}
}
@@ -25,18 +25,17 @@
import org.apache.flink.runtime.minicluster.MiniClusterConfiguration;
import org.apache.flink.test.util.TestEnvironment;
import org.apache.flink.util.JarUtils;
import org.apache.flink.util.TestLogger;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.io.IOException;
import java.util.Collections;

/** IT case for the {@link AvroExternalJarProgram}. */
public class AvroExternalJarProgramITCase extends TestLogger {
class AvroExternalJarProgramITCase {

private static final String JAR_FILE = "maven-test-jar.jar";

@@ -51,19 +50,19 @@ public class AvroExternalJarProgramITCase extends TestLogger {
.setNumSlotsPerTaskManager(PARALLELISM)
.build());

@BeforeClass
@BeforeAll
public static void setUp() throws Exception {
MINI_CLUSTER.start();
}

@AfterClass
@AfterAll
public static void tearDown() {
TestEnvironment.unsetAsContext();
MINI_CLUSTER.closeAsync();
}

@Test
public void testExternalProgram() throws Exception {
void testExternalProgram() throws Exception {

String jarFile = JAR_FILE;
try {
@@ -31,17 +31,16 @@
import org.apache.flink.table.runtime.connector.source.ScanRuntimeProviderContext;
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.util.TestLogger;

import org.junit.Test;
import org.junit.jupiter.api.Test;

import java.util.HashMap;
import java.util.Map;

import static org.junit.Assert.assertEquals;
import static org.assertj.core.api.Assertions.assertThat;

/** Tests for the {@link AvroFormatFactory}. */
public class AvroFormatFactoryTest extends TestLogger {
class AvroFormatFactoryTest {

private static final ResolvedSchema SCHEMA =
ResolvedSchema.of(
@@ -53,7 +52,7 @@ public class AvroFormatFactoryTest extends TestLogger {
(RowType) SCHEMA.toPhysicalRowDataType().getLogicalType();

@Test
public void testSeDeSchema() {
void testSeDeSchema() {
final AvroRowDataDeserializationSchema expectedDeser =
new AvroRowDataDeserializationSchema(ROW_TYPE, InternalTypeInfo.of(ROW_TYPE));

@@ -68,7 +67,7 @@ public void testSeDeSchema() {
scanSourceMock.valueFormat.createRuntimeDecoder(
ScanRuntimeProviderContext.INSTANCE, SCHEMA.toPhysicalRowDataType());

assertEquals(expectedDeser, actualDeser);
assertThat(actualDeser).isEqualTo(expectedDeser);

final AvroRowDataSerializationSchema expectedSer =
new AvroRowDataSerializationSchema(ROW_TYPE);
@@ -81,7 +80,7 @@ public void testSeDeSchema() {
SerializationSchema<RowData> actualSer =
sinkMock.valueFormat.createRuntimeEncoder(null, SCHEMA.toPhysicalRowDataType());

assertEquals(expectedSer, actualSer);
assertThat(actualSer).isEqualTo(expectedSer);
}

// ------------------------------------------------------------------------
@@ -26,14 +26,16 @@
import org.apache.flink.api.java.typeutils.TypeExtractor;
import org.apache.flink.core.fs.Path;

import org.junit.Assert;
import org.junit.Test;
import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;

/** Tests for the type extraction of the {@link AvroInputFormat}. */
public class AvroInputFormatTypeExtractionTest {
class AvroInputFormatTypeExtractionTest {

@Test
public void testTypeExtraction() {
void testTypeExtraction() {
try {
InputFormat<MyAvroType, ?> format =
new AvroInputFormat<MyAvroType>(
@@ -45,14 +47,14 @@ public void testTypeExtraction() {
DataSet<MyAvroType> input = env.createInput(format);
TypeInformation<?> typeInfoDataSet = input.getType();

Assert.assertTrue(typeInfoDirect instanceof PojoTypeInfo);
Assert.assertTrue(typeInfoDataSet instanceof PojoTypeInfo);
assertThat(typeInfoDirect).isInstanceOf(PojoTypeInfo.class);
assertThat(typeInfoDataSet).isInstanceOf(PojoTypeInfo.class);

Assert.assertEquals(MyAvroType.class, typeInfoDirect.getTypeClass());
Assert.assertEquals(MyAvroType.class, typeInfoDataSet.getTypeClass());
assertThat(typeInfoDirect.getTypeClass()).isEqualTo(MyAvroType.class);
assertThat(typeInfoDataSet.getTypeClass()).isEqualTo(MyAvroType.class);
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
fail(e.getMessage());
}
}

@@ -24,15 +24,14 @@
import org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders;

import com.esotericsoftware.kryo.Kryo;
import org.junit.Test;
import org.junit.jupiter.api.Test;

import java.lang.reflect.Method;
import java.net.URL;
import java.util.LinkedHashMap;

import static org.apache.flink.util.FlinkUserCodeClassLoader.NOOP_EXCEPTION_HANDLER;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.assertj.core.api.Assertions.assertThat;

/**
* This test makes sure that reversed classloading works for the Avro/Kryo integration when Kryo is
@@ -59,10 +58,10 @@
* 0x0000020: 57b1
* </pre>
*/
public class AvroKryoClassloadingTest {
class AvroKryoClassloadingTest {

@Test
public void testKryoInChildClasspath() throws Exception {
void testKryoInChildClasspath() throws Exception {
final Class<?> avroClass = AvroKryoSerializerUtils.class;

final URL avroLocation = avroClass.getProtectionDomain().getCodeSource().getLocation();
@@ -84,7 +83,7 @@ public void testKryoInChildClasspath() throws Exception {

final Class<?> userLoadedAvroClass =
Class.forName(avroClass.getName(), false, userAppClassLoader);
assertNotEquals(avroClass, userLoadedAvroClass);
assertThat(userLoadedAvroClass).isNotEqualTo(avroClass);

// call the 'addAvroGenericDataArrayRegistration(...)' method
final Method m =
@@ -94,6 +93,6 @@ public void testKryoInChildClasspath() throws Exception {
final LinkedHashMap<String, ?> map = new LinkedHashMap<>();
m.invoke(userLoadedAvroClass.newInstance(), map);

assertEquals(1, map.size());
assertThat(map).hasSize(1);
}
}
@@ -23,7 +23,7 @@

import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Registration;
import org.junit.Test;
import org.junit.jupiter.api.Test;

import java.io.BufferedReader;
import java.io.BufferedWriter;
@@ -32,8 +32,8 @@
import java.io.IOException;
import java.io.InputStreamReader;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;

/**
* Tests that the set of Kryo registrations is the same across compatible Flink versions.
@@ -42,7 +42,7 @@
* verifies that we correctly register Avro types at the {@link KryoSerializer} when Avro is
* present.
*/
public class AvroKryoSerializerRegistrationsTest {
class AvroKryoSerializerRegistrationsTest {

/**
* Tests that the registered classes in Kryo did not change.
@@ -51,7 +51,7 @@ public class AvroKryoSerializerRegistrationsTest {
* change in the serializers can break savepoint backwards compatibility between Flink versions.
*/
@Test
public void testDefaultKryoRegisteredClassesDidNotChange() throws Exception {
void testDefaultKryoRegisteredClassesDidNotChange() throws Exception {
final Kryo kryo = new KryoSerializer<>(Integer.class, new ExecutionConfig()).getKryo();

try (BufferedReader reader =
@@ -100,7 +100,7 @@ public void testDefaultKryoRegisteredClassesDidNotChange() throws Exception {
private void writeDefaultKryoRegistrations(String filePath) throws IOException {
final File file = new File(filePath);
if (file.exists()) {
assertTrue(file.delete());
assertThat(file.delete()).isTrue();
}

final Kryo kryo = new KryoSerializer<>(Integer.class, new ExecutionConfig()).getKryo();
