Compatibility tests extended for many versions [DEX-54] (#1301)
GitOrigin-RevId: 61d5cd72dc571be22ae7c8fd034859ec8d38b82f
TomaszGaweda authored and actions-user committed Apr 11, 2024
1 parent d294fc3 commit c440b59
Showing 7 changed files with 96 additions and 75 deletions.
@@ -208,6 +208,37 @@ public void readData(ObjectDataInput in) throws IOException {
init();
}

public static List<Schema> readSchemas(DataInput in) throws IOException {
int schemaCount = in.readInt();
List<Schema> schemas = new ArrayList<>(schemaCount);
for (int i = 0; i < schemaCount; i++) {
String typeName = in.readUTF();
int fieldCount = in.readInt();
List<FieldDescriptor> fields = new ArrayList<>(fieldCount);
for (int j = 0; j < fieldCount; j++) {
String name = in.readUTF();
FieldKind kind = FieldKind.get(in.readInt());
FieldDescriptor descriptor = new FieldDescriptor(name, kind);
fields.add(descriptor);
}
var schema = new Schema(typeName, fields);
schemas.add(schema);
}
return schemas;
}

public static void writeSchemas(DataOutput out, Collection<Schema> schemas) throws IOException {
out.writeInt(schemas.size());
for (Schema schema : schemas) {
out.writeUTF(schema.getTypeName());
out.writeInt(schema.getFieldCount());
for (FieldDescriptor descriptor : schema.getFields()) {
out.writeUTF(descriptor.getFieldName());
out.writeInt(descriptor.getKind().getId());
}
}
}

@Override
public int getFactoryId() {
return SchemaDataSerializerHook.F_ID;
@@ -240,34 +271,4 @@ public int hashCode() {
return (int) schemaId;
}

public static List<Schema> readSchemas(DataInput in) throws IOException {
int schemaCount = in.readInt();
ArrayList<Schema> schemas = new ArrayList<>(schemaCount);
for (int i = 0; i < schemaCount; i++) {
String typeName = in.readUTF();
int fieldCount = in.readInt();
ArrayList<FieldDescriptor> fields = new ArrayList<>(fieldCount);
for (int j = 0; j < fieldCount; j++) {
String name = in.readUTF();
FieldKind kind = FieldKind.get(in.readInt());
FieldDescriptor descriptor = new FieldDescriptor(name, kind);
fields.add(descriptor);
}
Schema schema = new Schema(typeName, fields);
schemas.add(schema);
}
return schemas;
}

public static void writeSchemas(DataOutput out, Collection<Schema> schemas) throws IOException {
out.writeInt(schemas.size());
for (Schema schema : schemas) {
out.writeUTF(schema.getTypeName());
out.writeInt(schema.getFieldCount());
for (FieldDescriptor descriptor : schema.getFields()) {
out.writeUTF(descriptor.getFieldName());
out.writeInt(descriptor.getKind().getId());
}
}
}
}
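
The relocated helpers form a symmetric pair, so a schema list written with writeSchemas can be read back with readSchemas. A minimal round-trip sketch, not part of the commit, assuming only the constructors visible above; the type name and field kinds are illustrative and imports are omitted (Schema, FieldDescriptor, and FieldKind are the same types used in the diff):

    // Illustrative round trip: build one schema, write it, read it back.
    static void schemaRoundTripExample() throws IOException {
        List<Schema> original = List.of(new Schema("com.example.Person", List.of(
                new FieldDescriptor("name", FieldKind.STRING),
                new FieldDescriptor("age", FieldKind.INT32))));

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            Schema.writeSchemas(out, original);
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
            List<Schema> copy = Schema.readSchemas(in);
            // copy contains one schema whose field names and kinds match the original
        }
    }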
@@ -39,6 +39,7 @@
import com.hazelcast.internal.networking.OutboundHandler;
import com.hazelcast.internal.serialization.InternalSerializationService;
import com.hazelcast.internal.serialization.SerializationService;
import com.hazelcast.internal.serialization.impl.AbstractSerializationService;
import com.hazelcast.internal.serialization.impl.compact.schema.MemberSchemaService;
import com.hazelcast.internal.server.ServerConnection;
import com.hazelcast.internal.server.ServerContext;
@@ -74,13 +75,13 @@ public SamplingNodeExtension(NodeExtension nodeExtension) {
@Override
public InternalSerializationService createSerializationService() {
InternalSerializationService serializationService = nodeExtension.createSerializationService();
return new SamplingSerializationService(serializationService);
return new SamplingSerializationService((AbstractSerializationService) serializationService);
}

@Override
public InternalSerializationService createCompatibilitySerializationService() {
InternalSerializationService serializationService = nodeExtension.createCompatibilitySerializationService();
return new SamplingSerializationService(serializationService);
return new SamplingSerializationService((AbstractSerializationService) serializationService);
}

@Override
@@ -22,9 +22,12 @@
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.internal.serialization.DataType;
import com.hazelcast.internal.serialization.InternalSerializationService;
import com.hazelcast.internal.serialization.impl.AbstractSerializationService;
import com.hazelcast.internal.serialization.impl.HeapData;
import com.hazelcast.internal.serialization.impl.InternalGenericRecord;
import com.hazelcast.internal.serialization.impl.SerializerAdapter;
import com.hazelcast.internal.serialization.impl.compact.CompactGenericRecord;
import com.hazelcast.internal.serialization.impl.compact.CompactStreamSerializer;
import com.hazelcast.internal.serialization.impl.compact.Schema;
import com.hazelcast.internal.serialization.impl.portable.PortableContext;
import com.hazelcast.jet.config.JobConfig;
@@ -60,7 +63,8 @@ public class SamplingSerializationService implements InternalSerializationServic
static final ConcurrentMap<String, List<byte[]>> SERIALIZED_SAMPLES_PER_CLASS_NAME =
new ConcurrentHashMap<>(1000);
// cache classes for which samples have already been captured
static final Set<String> SAMPLED_CLASSES = newSetFromMap(new ConcurrentHashMap<String, Boolean>(1000));
static final Set<String> SAMPLED_CLASSES = newSetFromMap(new ConcurrentHashMap<>(1000));
static final ConcurrentMap<String, Schema> SAMPLED_CLASSES_SCHEMAS = new ConcurrentHashMap<>(1000);

private static final int MAX_SERIALIZED_SAMPLES_PER_CLASS = 5;
// utility strings to locate test classes commonly used as user objects
@@ -78,10 +82,14 @@ public class SamplingSerializationService implements InternalSerializationServic
.map(Class::getName)
.collect(Collectors.toSet());

protected final InternalSerializationService delegate;
protected final AbstractSerializationService delegate;
protected final CompactStreamSerializer compactStreamSerializer;

public SamplingSerializationService(InternalSerializationService delegate) {
public SamplingSerializationService(AbstractSerializationService delegate) {
this.delegate = delegate;

SerializerAdapter adapter = delegate.getCompactSerializer(true);
compactStreamSerializer = (CompactStreamSerializer) adapter.getImpl();
}

@Override
@@ -250,7 +258,7 @@ public void dispose() {
}

// record the given object, then return it
protected static <T> T sampleObject(T obj, byte[] serializedObject) {
protected <T> T sampleObject(T obj, byte[] serializedObject) {
if (obj == null) {
return null;
}
@@ -270,7 +278,7 @@ private static void addSerializedSample(Object obj, byte[] bytes) {
}
}

private static boolean shouldAddSerializedSample(Object obj) {
private boolean shouldAddSerializedSample(Object obj) {
Class klass = obj.getClass();
if (klass.isPrimitive()) {
return false;
@@ -282,6 +290,12 @@ private static boolean shouldAddSerializedSample(Object obj) {
return false;
}

if (delegate.isCompactSerializable(obj)) {
SAMPLED_CLASSES_SCHEMAS.computeIfAbsent(className, cn ->
compactStreamSerializer.extractSchema(obj)
);
}

if (SAMPLED_CLASSES.contains(className)) {
return false;
}
@@ -16,67 +16,76 @@

package com.hazelcast.test.compatibility;

import com.hazelcast.internal.serialization.impl.compact.Schema;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;
import com.hazelcast.test.TestEnvironment;
import org.junit.runner.Result;
import org.junit.runner.notification.RunListener;
import org.junit.platform.launcher.TestExecutionListener;
import org.junit.platform.launcher.TestPlan;

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.nio.channels.FileChannel;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import static com.hazelcast.internal.nio.IOUtil.closeResource;
import static com.hazelcast.test.HazelcastTestSupport.randomString;
import static com.hazelcast.test.compatibility.SamplingSerializationService.SERIALIZED_SAMPLES_PER_CLASS_NAME;
import static com.hazelcast.internal.util.StringUtil.LINE_SEPARATOR;
import static com.hazelcast.internal.util.StringUtil.isNullOrEmpty;
import static com.hazelcast.test.compatibility.SamplingConf.FILE_NAME;
import static com.hazelcast.test.compatibility.SamplingConf.INDEX_FILE_SUFFIX;
import static com.hazelcast.test.compatibility.SamplingConf.SAMPLES_FILE_SUFFIX;
import static com.hazelcast.test.compatibility.SamplingConf.SCHEMA_FILE_SUFFIX;
import static com.hazelcast.test.compatibility.SamplingSerializationService.SERIALIZED_SAMPLES_PER_CLASS_NAME;

/**
* When the test run finishes, dump serialized object samples to the output file
*/
public class SamplingRunListener extends RunListener {
public class SamplingTestExecutionListener implements TestExecutionListener {

public static final String FILE_NAME = TestEnvironment.getSerializedClassNamesPath() + randomString();
public static final String SAMPLES_FILE_SUFFIX = ".samples";
public static final String INDEX_FILE_SUFFIX = ".index";

private static final ILogger LOGGER = Logger.getLogger(SamplingRunListener.class);
private static final ILogger LOGGER = Logger.getLogger(SamplingTestExecutionListener.class);

@Override
public void testRunFinished(Result result)
throws Exception {
FileOutputStream serializedSamplesOutput = null;
FileWriter indexOutput = null;
try {
serializedSamplesOutput = new FileOutputStream(FILE_NAME + SAMPLES_FILE_SUFFIX);
public void testPlanExecutionFinished(TestPlan testPlan) {
if (isNullOrEmpty(TestEnvironment.getSerializedClassNamesPath())) {
return;
}
LOGGER.info("Sampling is done, serialized classes count: " + SERIALIZED_SAMPLES_PER_CLASS_NAME.keySet().size());
try (
var serializedSamplesOutput = new FileOutputStream(FILE_NAME + SAMPLES_FILE_SUFFIX);
var indexOutput = new FileWriter(FILE_NAME + INDEX_FILE_SUFFIX);
var serializedSchemaOutput = new FileOutputStream(FILE_NAME + SCHEMA_FILE_SUFFIX)
) {
FileChannel samplesOutputChannel = serializedSamplesOutput.getChannel();
// index file line format: className,startOfSample1,lengthOfSample1,startOfSample2,lengthOfSample2,...
indexOutput = new FileWriter(FILE_NAME + INDEX_FILE_SUFFIX);

for (Map.Entry<String, List<byte[]>> entry : SERIALIZED_SAMPLES_PER_CLASS_NAME.entrySet()) {
if (entry.getValue().isEmpty()) {
continue;
}
List<byte[]> samples = entry.getValue();
indexOutput.write(entry.getKey());
for (int i = 0; i < samples.size(); i++) {
byte[] sample = samples.get(i);
for (byte[] sample : samples) {
indexOutput.write("," + samplesOutputChannel.position() + "," + sample.length);
serializedSamplesOutput.write(sample);
}
indexOutput.write(LINE_SEPARATOR);
}
} catch (FileNotFoundException e) {
LOGGER.severe(e);

writeSchemas(serializedSchemaOutput);
} catch (IOException e) {
LOGGER.severe(e);
} finally {
closeResource(indexOutput);
closeResource(serializedSamplesOutput);
throw new RuntimeException(e);
}
}

private void writeSchemas(FileOutputStream fileOut) {
Collection<Schema> schemas = SamplingSerializationService.SAMPLED_CLASSES_SCHEMAS.values();
try (var out = new ObjectOutputStream(fileOut)) {
Schema.writeSchemas(out, schemas);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
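
The schema dump above pairs naturally with the readSchemas helper added to Schema. A hedged sketch of the consuming side, which is not part of this commit (samplesBasePath is a placeholder for wherever the sample files were written, and imports are omitted):

    // Hypothetical reader for the schema sample file produced above.
    static List<Schema> readSampledSchemas(String samplesBasePath) {
        try (var in = new ObjectInputStream(new FileInputStream(samplesBasePath + SCHEMA_FILE_SUFFIX))) {
            return Schema.readSchemas(in);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }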
@@ -16,13 +16,12 @@

package com.hazelcast.test.mocknetwork;

import com.hazelcast.cluster.Address;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.LifecycleService;
import com.hazelcast.instance.impl.Node;
import com.hazelcast.instance.impl.NodeContext;
import com.hazelcast.instance.impl.NodeState;
import com.hazelcast.cluster.Address;
import com.hazelcast.test.AssertTask;
import com.hazelcast.internal.util.AddressUtil;

import java.net.UnknownHostException;
@@ -39,7 +38,7 @@
import java.util.concurrent.ConcurrentMap;

import static com.hazelcast.test.HazelcastTestSupport.assertTrueEventually;
import static org.junit.Assert.assertEquals;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertFalse;

public final class TestNodeRegistry {
Expand All @@ -61,12 +60,8 @@ public NodeContext createNodeContext(final Address address, Set<Address> initial
final Node node = nodes.get(address);
if (node != null) {
assertFalse(address + " is already registered", node.isRunning());
assertTrueEventually(new AssertTask() {
@Override
public void run() {
assertEquals(address + " should be SHUT_DOWN", NodeState.SHUT_DOWN, node.getState());
}
});
assertTrueEventually(address + " should be SHUT_DOWN",
() -> assertThat(node.getState()).isEqualTo(NodeState.SHUT_DOWN));
nodes.remove(address, node);
}
return new MockNodeContext(this, address, initiallyBlockedAddresses, nodeExtensionPriorityList);
@@ -0,0 +1 @@
com.hazelcast.test.compatibility.SamplingTestExecutionListener
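
This new one-line resource file registers the listener with the JUnit Platform via ServiceLoader; the file path is not shown in this view, but it would presumably be META-INF/services/org.junit.platform.launcher.TestExecutionListener. An illustrative way to confirm the listener is discoverable, not part of the commit:

    // Illustrative check only: list all TestExecutionListeners visible to ServiceLoader.
    ServiceLoader.load(org.junit.platform.launcher.TestExecutionListener.class)
            .forEach(listener -> System.out.println(listener.getClass().getName()));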
4 changes: 2 additions & 2 deletions pom.xml
@@ -1389,7 +1389,7 @@
<!-- for compatibility testing -->
<id>generate-compatibility-samples</id>
<properties>
<vmHeapSettings>-Xms128m -Xmx1G</vmHeapSettings>
<vmHeapSettings>-Xms4G -Xmx8G</vmHeapSettings>
</properties>
<build>
<plugins>
@@ -1401,7 +1401,7 @@
<properties>
<property>
<name>listener</name>
<value>com.hazelcast.test.compatibility.SamplingRunListener</value>
<value>com.hazelcast.test.compatibility.SamplingTestExecutionListener</value>
</property>
</properties>
<trimStackTrace>false</trimStackTrace>
