
MINOR: Remove unneeded explicit type arguments (#15736)
Reviewers: Chia-Ping Tsai <chia7712@gmail.com>
mimaison committed Apr 17, 2024
1 parent a3dcbd4 commit aee9724
Showing 14 changed files with 35 additions and 36 deletions.
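
All of the changes below are mechanical: they drop type arguments that the Java compiler can infer on its own, via the diamond operator (Java 7+) for constructors and target typing (Java 8+) for generic methods such as Collections.emptyMap(). A minimal standalone sketch of both patterns (illustrative only, not code from this patch):

    import java.util.Collections;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class InferenceDemo {
        public static void main(String[] args) {
            // Before: explicit type arguments the compiler can already infer
            Map<String, List<Integer>> before = new LinkedHashMap<String, List<Integer>>();
            List<String> beforeList = Collections.<String>emptyList();

            // After: diamond operator and generic-method target typing
            Map<String, List<Integer>> after = new LinkedHashMap<>();
            List<String> afterList = Collections.emptyList();
        }
    }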
@@ -181,7 +181,7 @@ public Selector(int maxReceiveSize,
this.memoryPool = memoryPool;
this.lowMemThreshold = (long) (0.1 * this.memoryPool.size());
this.failedAuthenticationDelayMs = failedAuthenticationDelayMs;
- this.delayedClosingChannels = (failedAuthenticationDelayMs > NO_FAILED_AUTHENTICATION_DELAY) ? new LinkedHashMap<String, DelayedAuthenticationFailureClose>() : null;
+ this.delayedClosingChannels = (failedAuthenticationDelayMs > NO_FAILED_AUTHENTICATION_DELAY) ? new LinkedHashMap<>() : null;
}

public Selector(int maxReceiveSize,
@@ -229,7 +229,7 @@ public Selector(long connectionMaxIdleMS, Metrics metrics, Time time, String met
}

public Selector(long connectionMaxIdleMS, int failedAuthenticationDelayMs, Metrics metrics, Time time, String metricGrpPrefix, ChannelBuilder channelBuilder, LogContext logContext) {
- this(NetworkReceive.UNLIMITED, connectionMaxIdleMS, failedAuthenticationDelayMs, metrics, time, metricGrpPrefix, Collections.<String, String>emptyMap(), true, channelBuilder, logContext);
+ this(NetworkReceive.UNLIMITED, connectionMaxIdleMS, failedAuthenticationDelayMs, metrics, time, metricGrpPrefix, Collections.emptyMap(), true, channelBuilder, logContext);
}

/**
@@ -1281,7 +1281,7 @@ private Meter createMeter(Metrics metrics, String groupName, Map<String, String

/**
* This method generates `time-total` metrics but has a couple of deficiencies: no `-ns` suffix and no dash between basename
- * and `time-toal` suffix.
+ * and `time-total` suffix.
* @deprecated use {{@link #createIOThreadRatioMeter(Metrics, String, Map, String, String)}} for new metrics instead
*/
@Deprecated
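
The ternary site above deserves a note: since Java 8 made conditional expressions poly expressions, the diamond on the new LinkedHashMap<>() branch takes its type arguments from the assignment target, so the witness is safe to drop even there. A minimal sketch under that assumption (hypothetical names):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class TernaryDiamondDemo {
        // The diamond infers <String, Long> from the declared return type (Java 8+)
        static Map<String, Long> delayedChannels(boolean useDelay) {
            return useDelay ? new LinkedHashMap<>() : null;
        }
    }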
@@ -19,7 +19,6 @@

import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.Node;
- import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.errors.AuthenticationException;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.MockTime;
@@ -104,8 +103,8 @@ private static Cluster mockCluster() {
nodes.put(1, new Node(1, "localhost", 8122));
nodes.put(2, new Node(2, "localhost", 8123));
return new Cluster("mockClusterId", nodes.values(),
- Collections.<PartitionInfo>emptySet(), Collections.<String>emptySet(),
- Collections.<String>emptySet(), nodes.get(0));
+ Collections.emptySet(), Collections.emptySet(),
+ Collections.emptySet(), nodes.get(0));
}

}
@@ -109,7 +109,7 @@ public void testTwoConsumersOneTopicOnePartition() {

Map<String, List<TopicPartition>> assignment = assignor.assign(partitionsPerTopic, consumers);
assertEquals(partitions(tp(topic, 0)), assignment.get(consumer1));
- assertEquals(Collections.<TopicPartition>emptyList(), assignment.get(consumer2));
+ assertEquals(Collections.emptyList(), assignment.get(consumer2));
}

@Test
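
For assertion sites like this one, dropping the witness cannot change behavior: assertEquals compares via equals(), type arguments are erased at runtime, and an empty collection's equality does not depend on its element type. A small illustrative check (JUnit-style, hypothetical):

    import static org.junit.jupiter.api.Assertions.assertEquals;

    import java.util.Collections;

    class WitnessErasureDemo {
        void bothFormsAreEquivalent() {
            // Both expressions evaluate to the same immutable empty list;
            // the explicit witness only ever mattered at compile time.
            assertEquals(Collections.<String>emptyList(), Collections.emptyList());
        }
    }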
@@ -388,13 +388,13 @@ public void testClientConfigProvider() {

@Test
public void testEmptyPluginUrls() throws Exception {
- assertEquals(Collections.<Path>emptyList(), PluginUtils.pluginUrls(pluginPath));
+ assertEquals(Collections.emptyList(), PluginUtils.pluginUrls(pluginPath));
}

@Test
public void testEmptyStructurePluginUrls() throws Exception {
createBasicDirectoryLayout();
- assertEquals(Collections.<Path>emptyList(), PluginUtils.pluginUrls(pluginPath));
+ assertEquals(Collections.emptyList(), PluginUtils.pluginUrls(pluginPath));
}

@Test
@@ -51,21 +51,21 @@ public void teardown() {

@Test
public void topLevelStructRequired() {
- xformValue.configure(Collections.<String, String>emptyMap());
+ xformValue.configure(Collections.emptyMap());
assertThrows(DataException.class, () -> xformValue.apply(new SourceRecord(null, null,
"topic", 0, Schema.INT32_SCHEMA, 42)));
}

@Test
public void topLevelMapRequired() {
- xformValue.configure(Collections.<String, String>emptyMap());
+ xformValue.configure(Collections.emptyMap());
assertThrows(DataException.class, () -> xformValue.apply(new SourceRecord(null, null,
"topic", 0, null, 42)));
}

@Test
public void testNestedStruct() {
- xformValue.configure(Collections.<String, String>emptyMap());
+ xformValue.configure(Collections.emptyMap());

SchemaBuilder builder = SchemaBuilder.struct();
builder.field("int8", Schema.INT8_SCHEMA);
@@ -162,7 +162,7 @@ public void testNestedMapWithDelimiter() {

@Test
public void testOptionalFieldStruct() {
- xformValue.configure(Collections.<String, String>emptyMap());
+ xformValue.configure(Collections.emptyMap());

SchemaBuilder builder = SchemaBuilder.struct();
builder.field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA);
@@ -189,7 +189,7 @@ public void testOptionalFieldStruct() {

@Test
public void testOptionalStruct() {
- xformValue.configure(Collections.<String, String>emptyMap());
+ xformValue.configure(Collections.emptyMap());

SchemaBuilder builder = SchemaBuilder.struct().optional();
builder.field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA);
@@ -205,7 +205,7 @@ public void testOptionalStruct() {

@Test
public void testOptionalNestedStruct() {
- xformValue.configure(Collections.<String, String>emptyMap());
+ xformValue.configure(Collections.emptyMap());

SchemaBuilder builder = SchemaBuilder.struct().optional();
builder.field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA);
@@ -229,7 +229,7 @@ public void testOptionalNestedStruct() {

@Test
public void testOptionalFieldMap() {
- xformValue.configure(Collections.<String, String>emptyMap());
+ xformValue.configure(Collections.emptyMap());

Map<String, Object> supportedTypes = new HashMap<>();
supportedTypes.put("opt_int32", null);
@@ -250,7 +250,7 @@ public void testOptionalFieldMap() {

@Test
public void testKey() {
- xformKey.configure(Collections.<String, String>emptyMap());
+ xformKey.configure(Collections.emptyMap());

Map<String, Map<String, Integer>> key = Collections.singletonMap("A", Collections.singletonMap("B", 12));
SourceRecord src = new SourceRecord(null, null, "topic", null, key, null, null);
@@ -295,7 +295,7 @@ public void testOptionalAndDefaultValuesNested() {
// children should also be optional. Similarly, if the parent Struct has a default value, the default value for
// the flattened field

- xformValue.configure(Collections.<String, String>emptyMap());
+ xformValue.configure(Collections.emptyMap());

SchemaBuilder builder = SchemaBuilder.struct().optional();
builder.field("req_field", Schema.STRING_SCHEMA);
@@ -324,7 +324,7 @@ public void testOptionalAndDefaultValuesNested() {

@Test
public void tombstoneEventWithoutSchemaShouldPassThrough() {
- xformValue.configure(Collections.<String, String>emptyMap());
+ xformValue.configure(Collections.emptyMap());

final SourceRecord record = new SourceRecord(null, null, "test", 0,
null, null);
@@ -336,7 +336,7 @@ public void tombstoneEventWithoutSchemaShouldPassThrough() {

@Test
public void tombstoneEventWithSchemaShouldPassThrough() {
- xformValue.configure(Collections.<String, String>emptyMap());
+ xformValue.configure(Collections.emptyMap());

final Schema simpleStructSchema = SchemaBuilder.struct().name("name").version(1).doc("doc").field("magic", Schema.OPTIONAL_INT64_SCHEMA).build();
final SourceRecord record = new SourceRecord(null, null, "test", 0,
@@ -97,7 +97,7 @@ public void teardown() {

@Test
public void testConfigNoTargetType() {
- assertThrows(ConfigException.class, () -> xformValue.configure(Collections.<String, String>emptyMap()));
+ assertThrows(ConfigException.class, () -> xformValue.configure(Collections.emptyMap()));
}

@Test
@@ -120,7 +120,7 @@ void testDropNullKeyRecordsForRepartitionNodesWithNoRelaxedJoinDownstream() {
.to(OUT);
initTopology();
left.pipeInput(null, "leftValue", 1);
- assertEquals(Collections.<KeyValue<String, String>>emptyList(), out.readKeyValuesToList());
+ assertEquals(Collections.emptyList(), out.readKeyValuesToList());
}

private void initTopology() {
@@ -49,7 +49,7 @@ public class WindowedStreamPartitionerTest {
);

private Cluster cluster = new Cluster("cluster", Collections.singletonList(Node.noNode()), infos,
- Collections.<String>emptySet(), Collections.<String>emptySet());
+ Collections.emptySet(), Collections.emptySet());

@Test
public void testCopartitioning() {
@@ -53,8 +53,8 @@ public class PartitionGrouperTest {
"cluster",
Collections.singletonList(Node.noNode()),
infos,
- Collections.<String>emptySet(),
- Collections.<String>emptySet());
+ Collections.emptySet(),
+ Collections.emptySet());

@Test
public void shouldComputeGroupingForTwoGroups() {
@@ -721,7 +721,7 @@ public void shouldUpdateOffsetsUponCompletion() {
collector.send(topic, "999", "0", null, 1, null, stringSerializer, stringSerializer, null, context);
collector.send(topic, "999", "0", null, 2, null, stringSerializer, stringSerializer, null, context);

- assertEquals(Collections.<TopicPartition, Long>emptyMap(), offsets);
+ assertEquals(Collections.emptyMap(), offsets);

collector.flush();

@@ -80,7 +80,7 @@ public void shouldNotWriteCheckpointWhenNoOffsets() throws IOException {

assertFalse(f.exists());

- assertEquals(Collections.<TopicPartition, Long>emptyMap(), checkpoint.read());
+ assertEquals(Collections.emptyMap(), checkpoint.read());

// deleting a non-exist checkpoint file should be fine
checkpoint.delete();
@@ -101,7 +101,7 @@ public void shouldDeleteExistingCheckpointWhenNoOffsets() throws IOException {
checkpoint.write(Collections.emptyMap());

assertThat(file.exists(), is(false));
- assertThat(Collections.<TopicPartition, Long>emptyMap(), is(checkpoint.read()));
+ assertThat(Collections.emptyMap(), is(checkpoint.read()));
}

@Test
@@ -77,7 +77,7 @@ public void shouldHaveChangeLoggingStoreByDefault() {
public void shouldNotHaveChangeLoggingStoreWhenDisabled() {
final SessionStore<String, String> store = builder.withLoggingDisabled().build();
final StateStore next = ((WrappedStateStore) store).wrapped();
- assertThat(next, CoreMatchers.<StateStore>equalTo(inner));
+ assertThat(next, CoreMatchers.equalTo(inner));
}

@Test
@@ -91,26 +91,26 @@ public void shouldHaveCachingStoreWhenEnabled() {
@Test
public void shouldHaveChangeLoggingStoreWhenLoggingEnabled() {
final SessionStore<String, String> store = builder
- .withLoggingEnabled(Collections.<String, String>emptyMap())
+ .withLoggingEnabled(Collections.emptyMap())
.build();
final StateStore wrapped = ((WrappedStateStore) store).wrapped();
assertThat(store, instanceOf(MeteredSessionStore.class));
assertThat(wrapped, instanceOf(ChangeLoggingSessionBytesStore.class));
- assertThat(((WrappedStateStore) wrapped).wrapped(), CoreMatchers.<StateStore>equalTo(inner));
+ assertThat(((WrappedStateStore) wrapped).wrapped(), CoreMatchers.equalTo(inner));
}

@Test
public void shouldHaveCachingAndChangeLoggingWhenBothEnabled() {
final SessionStore<String, String> store = builder
- .withLoggingEnabled(Collections.<String, String>emptyMap())
+ .withLoggingEnabled(Collections.emptyMap())
.withCachingEnabled()
.build();
final WrappedStateStore caching = (WrappedStateStore) ((WrappedStateStore) store).wrapped();
final WrappedStateStore changeLogging = (WrappedStateStore) caching.wrapped();
assertThat(store, instanceOf(MeteredSessionStore.class));
assertThat(caching, instanceOf(CachingSessionStore.class));
assertThat(changeLogging, instanceOf(ChangeLoggingSessionBytesStore.class));
- assertThat(changeLogging.wrapped(), CoreMatchers.<StateStore>equalTo(inner));
+ assertThat(changeLogging.wrapped(), CoreMatchers.equalTo(inner));
}

@Test
@@ -47,7 +47,7 @@ public MockProcessorNode() {
}

private MockProcessorNode(final MockProcessor<KIn, VIn, KOut, VOut> mockProcessor) {
- super(NAME + INDEX.getAndIncrement(), mockProcessor, Collections.<String>emptySet());
+ super(NAME + INDEX.getAndIncrement(), mockProcessor, Collections.emptySet());

this.mockProcessor = mockProcessor;
}
@@ -74,7 +74,7 @@ public KeyValueIterator<Windowed<K>, V> fetch(final K key) {
throw new InvalidStateStoreException("not open");
}
if (!sessions.containsKey(key)) {
- return new KeyValueIteratorStub<>(Collections.<KeyValue<Windowed<K>, V>>emptyIterator());
+ return new KeyValueIteratorStub<>(Collections.emptyIterator());
}
return new KeyValueIteratorStub<>(sessions.get(key).iterator());
}
@@ -99,7 +99,7 @@ public KeyValueIterator<Windowed<K>, V> fetch(final K keyFrom, final K keyTo) {
NavigableMap<K, List<KeyValue<Windowed<K>, V>>> subSessionsMap = getSubSessionsMap(keyFrom, keyTo);

if (subSessionsMap.isEmpty()) {
- return new KeyValueIteratorStub<>(Collections.<KeyValue<Windowed<K>, V>>emptyIterator());
+ return new KeyValueIteratorStub<>(Collections.emptyIterator());
}
final Iterator<List<KeyValue<Windowed<K>, V>>> keysIterator = subSessionsMap.values().iterator();
return new KeyValueIteratorStub<>(
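
The two fetch() sites above are the most demanding ones inference-wise: Collections.emptyIterator() sits inside a diamond constructor call, so the compiler must propagate the target type through two nested inferences, which Java 8's invocation-context typing handles. A self-contained sketch of the same shape (hypothetical names):

    import java.util.Collections;
    import java.util.Iterator;

    class IteratorStub<T> {
        private final Iterator<T> delegate;
        IteratorStub(Iterator<T> delegate) { this.delegate = delegate; }
    }

    class NestedInferenceDemo {
        // Java 8+ infers IteratorStub<String> and Iterator<String> together
        static IteratorStub<String> empty() {
            return new IteratorStub<>(Collections.emptyIterator());
        }
    }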
