Fix code inspection for kernel module (#31209)
terrymanu committed May 11, 2024
1 parent 78454cc commit a8174ae
Showing 94 changed files with 151 additions and 181 deletions.
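The hunks below are mechanical inspection fixes of a few recurring kinds: integer literals widened to explicit long literals (0 → 0L, 60 → 60L), BigDecimal equality switched from equals() to compareTo(), maps and sets pre-sized with a load factor of 1F where the entry count is known up front, redundant qualifiers and branches removed, and no-arg exception constructors given explicit (empty) messages.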
@@ -57,7 +57,7 @@ public List<Record> fetch(final int batchSize, final long timeoutMillis) {
         long startMillis = System.currentTimeMillis();
         int recordsCount = 0;
         do {
-            List<Record> records = queue.poll(Math.max(0, timeoutMillis - (System.currentTimeMillis() - startMillis)), TimeUnit.MILLISECONDS);
+            List<Record> records = queue.poll(Math.max(0L, timeoutMillis - (System.currentTimeMillis() - startMillis)), TimeUnit.MILLISECONDS);
             if (null == records || records.isEmpty()) {
                 continue;
             }
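(Beyond the widened 0L, note the pattern here: the poll timeout is recomputed each iteration as the overall timeout minus elapsed time, and Math.max(0L, ...) clamps the remainder so the final poll is never asked to wait a negative duration.)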
@@ -48,7 +48,7 @@ public final class ConsistencyCheckJobItemProgressContext implements PipelineJob

     private volatile long recordsCount;

-    private final AtomicLong checkedRecordsCount = new AtomicLong(0);
+    private final AtomicLong checkedRecordsCount = new AtomicLong(0L);

     private final long checkBeginTimeMillis = System.currentTimeMillis();
@@ -126,6 +126,6 @@ public static boolean isBigDecimalEquals(final BigDecimal one, final BigDecimal
                 decimalTwo = another;
             }
         }
-        return decimalOne.equals(decimalTwo);
+        return 0 == decimalOne.compareTo(decimalTwo);
     }
 }
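This fix matters because BigDecimal.equals() compares scale as well as numeric value, whereas compareTo() compares numeric value only. A minimal standalone sketch of the difference (not part of the commit):

import java.math.BigDecimal;

public final class BigDecimalEqualsDemo {

    public static void main(final String[] args) {
        BigDecimal one = BigDecimal.valueOf(3322L, 1);      // 332.2, scale 1
        BigDecimal another = BigDecimal.valueOf(33220L, 2); // 332.20, scale 2
        System.out.println(one.equals(another));            // false: same value, different scale
        System.out.println(0 == one.compareTo(another));    // true: compareTo ignores scale
    }
}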
@@ -56,9 +56,6 @@ public boolean isIgnored() {
      * @return matched or not
      */
     public boolean isMatched() {
-        if (null != ignoredType) {
-            return false;
-        }
-        return matched;
+        return null == ignoredType && matched;
     }
 }
@@ -62,7 +62,7 @@ public abstract class MatchingTableInventoryChecker implements TableInventoryChe
     @Override
     public TableDataConsistencyCheckResult checkSingleTableInventoryData() {
         ThreadFactory threadFactory = ExecutorThreadFactoryBuilder.build("job-" + getJobIdDigest(param.getJobId()) + "-matching-check-%d");
-        ThreadPoolExecutor executor = new ThreadPoolExecutor(2, 2, 60, TimeUnit.SECONDS, new ArrayBlockingQueue<>(2), threadFactory);
+        ThreadPoolExecutor executor = new ThreadPoolExecutor(2, 2, 60L, TimeUnit.SECONDS, new ArrayBlockingQueue<>(2), threadFactory);
         try {
             return checkSingleTableInventoryData(param, executor);
         } finally {
@@ -101,7 +101,7 @@ private void batchWrite(final Collection<DataRecord> records) {
                 if (i == importerConfig.getRetryTimes()) {
                     throw new PipelineImporterJobWriteException(ex);
                 }
-                Thread.sleep(Math.min(5 * 60 * 1000L, 1000L << i));
+                Thread.sleep(Math.min(5L * 60L * 1000L, 1000L << i));
             }
         }
     }
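The sleep above is a capped exponential backoff: 1000L << i doubles the wait on each retry, bounded at five minutes; spelling the cap as 5L * 60L * 1000L keeps every operand a long without changing the value. A standalone sketch of the schedule it produces (the ten attempts are an arbitrary bound for the demo):

public final class BackoffScheduleDemo {

    public static void main(final String[] args) {
        long capMillis = 5L * 60L * 1000L;
        for (int i = 0; i < 10; i++) {
            // 1s, 2s, 4s, ... 256s, then capped at 300s from i = 9 on.
            long sleepMillis = Math.min(capMillis, 1000L << i);
            System.out.printf("attempt %d -> sleep %d ms%n", i, sleepMillis);
        }
    }
}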
@@ -36,7 +36,7 @@ public Void getEndValue() {

     @Override
     public Void convert(final String value) {
-        throw new UnsupportedOperationException();
+        throw new UnsupportedOperationException("");
     }

     @Override
@@ -101,7 +101,7 @@ public static PipelineContextKey parseContextKey(final String jobId) {
      * @return ElasticJob configuration POJO
      */
     public static JobConfigurationPOJO getElasticJobConfigurationPOJO(final String jobId) {
-        JobConfigurationPOJO result = PipelineAPIFactory.getJobConfigurationAPI(PipelineJobIdUtils.parseContextKey(jobId)).getJobConfiguration(jobId);
+        JobConfigurationPOJO result = PipelineAPIFactory.getJobConfigurationAPI(parseContextKey(jobId)).getJobConfiguration(jobId);
         ShardingSpherePreconditions.checkNotNull(result, () -> new PipelineJobNotFoundException(jobId));
         return result;
     }
@@ -70,7 +70,7 @@ private JobItemIncrementalTasksProgress getIncrementalTasksProgress(final Collec
     }

     private JobItemInventoryTasksProgress getInventoryTasksProgress(final Collection<PipelineTask> inventoryTasks) {
-        Map<String, InventoryTaskProgress> inventoryTaskProgressMap = new HashMap<>();
+        Map<String, InventoryTaskProgress> inventoryTaskProgressMap = new HashMap<>(inventoryTasks.size(), 1F);
         for (PipelineTask each : inventoryTasks) {
             inventoryTaskProgressMap.put(each.getTaskId(), (InventoryTaskProgress) each.getTaskProgress());
         }
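This hunk and several below replace unsized maps with new HashMap<>(size, 1F) (or LinkedHashMap/ConcurrentHashMap equivalents). HashMap rounds the requested capacity up to a power of two and only resizes once size exceeds capacity × load factor, so sizing the map to the known entry count with a load factor of 1F guarantees it is never rehashed while being filled. A minimal sketch with illustrative names:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class PresizedMapDemo {

    public static void main(final String[] args) {
        List<String> taskIds = Arrays.asList("task-0", "task-1", "task-2");
        // Capacity rounds up to a power of two; with load factor 1F the resize
        // threshold equals the capacity, so inserting taskIds.size() entries
        // never triggers a rehash.
        Map<String, Integer> progressByTaskId = new HashMap<>(taskIds.size(), 1F);
        for (String each : taskIds) {
            progressByTaskId.put(each, 0);
        }
        System.out.println(progressByTaskId);
    }
}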
@@ -34,7 +34,7 @@ public final class PipelineJobProgressPersistContext {

     private final int shardingItem;

-    private final AtomicLong unhandledEventCount = new AtomicLong(0);
+    private final AtomicLong unhandledEventCount = new AtomicLong(0L);

     private final AtomicBoolean firstExceptionLogged = new AtomicBoolean(false);
 }
@@ -53,7 +53,7 @@ public final class PipelineJobProgressPersistService {
     private static final long DELAY_SECONDS = 1L;

     static {
-        JOB_PERSIST_EXECUTOR.scheduleWithFixedDelay(new PersistJobContextRunnable(), 0, DELAY_SECONDS, TimeUnit.SECONDS);
+        JOB_PERSIST_EXECUTOR.scheduleWithFixedDelay(new PersistJobContextRunnable(), 0L, DELAY_SECONDS, TimeUnit.SECONDS);
     }

     /**
@@ -126,8 +126,8 @@ private static synchronized void persist(final String jobId, final int shardingI

     private static void persist0(final String jobId, final int shardingItem, final PipelineJobProgressPersistContext persistContext) {
         long currentUnhandledEventCount = persistContext.getUnhandledEventCount().get();
-        ShardingSpherePreconditions.checkState(currentUnhandledEventCount >= 0, () -> new IllegalStateException("Current unhandled event count must be greater than or equal to 0"));
-        if (0 == currentUnhandledEventCount) {
+        ShardingSpherePreconditions.checkState(currentUnhandledEventCount >= 0L, () -> new IllegalStateException("Current unhandled event count must be greater than or equal to 0"));
+        if (0L == currentUnhandledEventCount) {
             return;
         }
         Optional<PipelineJobItemContext> jobItemContext = PipelineJobRegistry.getItemContext(jobId, shardingItem);
@@ -75,7 +75,7 @@ public JobItemInventoryTasksProgress swapToObject(final YamlJobItemInventoryTask
         if (null == yamlProgress) {
             return new JobItemInventoryTasksProgress(Collections.emptyMap());
         }
-        Map<String, InventoryTaskProgress> taskProgressMap = new LinkedHashMap<>();
+        Map<String, InventoryTaskProgress> taskProgressMap = new LinkedHashMap<>(yamlProgress.getFinished().length + yamlProgress.getUnfinished().size(), 1F);
         taskProgressMap.putAll(Arrays.stream(yamlProgress.getFinished()).collect(Collectors.toMap(key -> key, value -> new InventoryTaskProgress(new IngestFinishedPosition()))));
         taskProgressMap.putAll(yamlProgress.getUnfinished().entrySet().stream().collect(Collectors.toMap(Entry::getKey, getInventoryTaskProgressFunction())));
         return new JobItemInventoryTasksProgress(taskProgressMap);
@@ -81,8 +81,8 @@ public static int getInventoryFinishedPercentage(final TransmissionJobItemProgre
         if (JobStatus.EXECUTE_INCREMENTAL_TASK == jobItemProgress.getStatus() || JobStatus.FINISHED == jobItemProgress.getStatus()) {
             return 100;
         }
-        if (0 != jobItemProgress.getProcessedRecordsCount() && 0 != jobItemProgress.getInventoryRecordsCount()) {
-            return (int) Math.min(100, jobItemProgress.getProcessedRecordsCount() * 100 / jobItemProgress.getInventoryRecordsCount());
+        if (0L != jobItemProgress.getProcessedRecordsCount() && 0L != jobItemProgress.getInventoryRecordsCount()) {
+            return (int) Math.min(100L, jobItemProgress.getProcessedRecordsCount() * 100L / jobItemProgress.getInventoryRecordsCount());
         }
         return 0;
     }
@@ -89,7 +89,7 @@ private Map<CaseInsensitiveIdentifier, PipelineTableMetaData> loadTableMetaData0
                 tableNames.add(tableName);
             }
         }
-        Map<CaseInsensitiveIdentifier, PipelineTableMetaData> result = new LinkedHashMap<>();
+        Map<CaseInsensitiveIdentifier, PipelineTableMetaData> result = new LinkedHashMap<>(tableNames.size(), 1F);
         for (String each : tableNames) {
             Set<String> primaryKeys = loadPrimaryKeys(connection, schemaName, each);
             Map<String, Collection<String>> uniqueKeys = loadUniqueIndexesOfTable(connection, schemaName, each);
@@ -69,7 +69,7 @@ public void prepareTargetSchemas(final PrepareTargetSchemasParameter param) thro
         }
         String defaultSchema = dialectDatabaseMetaData.getDefaultSchema().orElse(null);
         PipelinePrepareSQLBuilder pipelineSQLBuilder = new PipelinePrepareSQLBuilder(targetDatabaseType);
-        Collection<String> createdSchemaNames = new HashSet<>();
+        Collection<String> createdSchemaNames = new HashSet<>(param.getCreateTableConfigurations().size(), 1F);
         for (CreateTableConfiguration each : param.getCreateTableConfigurations()) {
             String targetSchemaName = each.getTargetName().getSchemaName().toString();
             if (null == targetSchemaName || targetSchemaName.equalsIgnoreCase(defaultSchema) || createdSchemaNames.contains(targetSchemaName)) {
@@ -60,7 +60,7 @@ public static long getTableRecordsCount(final InventoryDumperContext dumperConte
         if (sql.isPresent()) {
             DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, dataSource.getDatabaseType().getType());
             long result = getEstimatedCount(databaseType, dataSource, sql.get());
-            return result > 0 ? result : getCount(dataSource, pipelineSQLBuilder.buildCountSQL(schemaName, actualTableName));
+            return result > 0L ? result : getCount(dataSource, pipelineSQLBuilder.buildCountSQL(schemaName, actualTableName));
         }
         return getCount(dataSource, pipelineSQLBuilder.buildCountSQL(schemaName, actualTableName));
     } catch (final SQLException ex) {
@@ -183,13 +183,13 @@ private Collection<IngestPosition> getInventoryPositions(final InventoryDumperCo

     private Collection<IngestPosition> getPositionByIntegerUniqueKeyRange(final InventoryDumperContext dumperContext, final long tableRecordsCount,
                                                                           final TransmissionJobItemContext jobItemContext, final PipelineDataSourceWrapper dataSource) {
-        if (0 == tableRecordsCount) {
-            return Collections.singletonList(new IntegerPrimaryKeyIngestPosition(0, 0));
+        if (0L == tableRecordsCount) {
+            return Collections.singletonList(new IntegerPrimaryKeyIngestPosition(0L, 0L));
         }
         Collection<IngestPosition> result = new LinkedList<>();
         Range<Long> uniqueKeyValuesRange = getUniqueKeyValuesRange(jobItemContext, dataSource, dumperContext);
         int shardingSize = jobItemContext.getJobProcessContext().getProcessConfiguration().getRead().getShardingSize();
-        long splitCount = tableRecordsCount / shardingSize + (tableRecordsCount % shardingSize > 0 ? 1 : 0);
+        long splitCount = tableRecordsCount / shardingSize + (tableRecordsCount % shardingSize > 0L ? 1 : 0);
         long interval = (uniqueKeyValuesRange.getMaximum() - uniqueKeyValuesRange.getMinimum()) / splitCount;
         IntervalToRangeIterator rangeIterator = new IntervalToRangeIterator(uniqueKeyValuesRange.getMinimum(), uniqueKeyValuesRange.getMaximum(), interval);
         while (rangeIterator.hasNext()) {
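For orientation: splitCount is a ceiling division of the record count by the sharding size, and interval then spreads the unique-key value range evenly across those splits. A worked sketch with assumed numbers (25,000 records, sharding size 10,000, key range 1 to 60,000):

public final class SplitCountDemo {

    public static void main(final String[] args) {
        long tableRecordsCount = 25000L;
        int shardingSize = 10000;
        // Ceiling division: 25000 / 10000 = 2, plus 1 for the 5000-record remainder.
        long splitCount = tableRecordsCount / shardingSize + (tableRecordsCount % shardingSize > 0L ? 1 : 0);
        long minimum = 1L;
        long maximum = 60000L;
        long interval = (maximum - minimum) / splitCount;
        System.out.println("splitCount=" + splitCount + ", interval=" + interval); // splitCount=3, interval=19999
    }
}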
@@ -125,7 +125,7 @@ public void persistCheckJobResult(final String parentJobId, final String checkJo
         if (null == checkResultMap) {
             return;
         }
-        Map<String, String> yamlCheckResultMap = new LinkedHashMap<>();
+        Map<String, String> yamlCheckResultMap = new LinkedHashMap<>(checkResultMap.size(), 1F);
         for (Entry<String, TableDataConsistencyCheckResult> entry : checkResultMap.entrySet()) {
             YamlTableDataConsistencyCheckResult yamlCheckResult = new YamlTableDataConsistencyCheckResultSwapper().swapToYamlConfiguration(entry.getValue());
             yamlCheckResultMap.put(entry.getKey(), YamlEngine.marshal(yamlCheckResult));
@@ -51,7 +51,7 @@ public final class PipelineImportSQLBuilder {
     public PipelineImportSQLBuilder(final DatabaseType databaseType) {
         dialectSQLBuilder = DatabaseTypedSPILoader.getService(DialectPipelineSQLBuilder.class, databaseType);
         sqlSegmentBuilder = new PipelineSQLSegmentBuilder(databaseType);
-        sqlCache = Caffeine.newBuilder().initialCapacity(16).maximumSize(1024).build();
+        sqlCache = Caffeine.newBuilder().initialCapacity(16).maximumSize(1024L).build();
     }

     /**
@@ -40,12 +40,12 @@ public IntervalToRangeIterator(final long minimum, final long maximum, final lon
         if (minimum > maximum) {
             throw new IllegalArgumentException("minimum greater than maximum");
         }
-        if (interval < 0) {
+        if (interval < 0L) {
             throw new IllegalArgumentException("interval is less than zero");
         }
         this.maximum = maximum;
         this.interval = interval;
-        this.current = minimum;
+        current = minimum;
     }

     @Override
@@ -56,11 +56,11 @@ public boolean hasNext() {
     @Override
     public Range<Long> next() {
         if (!hasNext()) {
-            throw new NoSuchElementException();
+            throw new NoSuchElementException("");
         }
         long upperLimit = Math.min(maximum, current + interval);
         Range<Long> result = Range.between(current, upperLimit);
-        current = upperLimit + 1;
+        current = upperLimit + 1L;
         return result;
     }
 }
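As reconstructed above, the iterator emits contiguous inclusive ranges: with minimum 1, maximum 10, and interval 3, successive next() calls yield [1..4], [5..8], and [9..10], after which current has passed the maximum and iteration ends.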
@@ -55,7 +55,7 @@ public Map<CaseInsensitiveIdentifier, Set<String>> getShardingColumnsMap(final C
         }
         Set<String> defaultDatabaseShardingColumns = extractShardingColumns(shardingRuleConfig.get().getDefaultDatabaseShardingStrategy());
         Set<String> defaultTableShardingColumns = extractShardingColumns(shardingRuleConfig.get().getDefaultTableShardingStrategy());
-        Map<CaseInsensitiveIdentifier, Set<String>> result = new ConcurrentHashMap<>();
+        Map<CaseInsensitiveIdentifier, Set<String>> result = new ConcurrentHashMap<>(shardingRuleConfig.get().getTables().size(), 1F);
         for (ShardingTableRuleConfiguration each : shardingRuleConfig.get().getTables()) {
             CaseInsensitiveIdentifier logicTableName = new CaseInsensitiveIdentifier(each.getLogicTable());
             if (!logicTableNames.contains(logicTableName)) {
@@ -84,7 +84,7 @@ private void execute(final PipelineChannelAckCallback ackCallback, final int rec
         MultiplexPipelineChannel channel = new MultiplexPipelineChannel(CHANNEL_NUMBER, TypedSPILoader.getService(PipelineChannelCreator.class, "MEMORY"), 10000, ackCallback);
         fetchWithMultiThreads(channel, countDownLatch);
         channel.push(Arrays.asList(records));
-        boolean awaitResult = countDownLatch.await(10, TimeUnit.SECONDS);
+        boolean awaitResult = countDownLatch.await(10L, TimeUnit.SECONDS);
         assertTrue(awaitResult, "await failed");
     }
@@ -33,7 +33,7 @@ public final class ConsistencyCheckDataBuilder {
      * @return built record
      */
     public static Map<String, Object> buildFixedFullTypeRecord(final int id) {
-        Map<String, Object> result = new LinkedHashMap<>();
+        Map<String, Object> result = new LinkedHashMap<>(15, 1F);
         result.put("id", id);
         result.put("c_bool", true);
         result.put("c_int1", Byte.MAX_VALUE);
@@ -97,13 +97,13 @@ private static Object getModifiedValue(final Object value) {
             return value + "-";
         }
         if (value instanceof Time) {
-            return new Time(((Time) value).getTime() - 1);
+            return new Time(((Time) value).getTime() - 1L);
         }
         if (value instanceof Date) {
-            return new Date(((Date) value).getTime() - 1);
+            return new Date(((Date) value).getTime() - 1L);
         }
         if (value instanceof Timestamp) {
-            return new Timestamp(((Timestamp) value).getTime() - 1);
+            return new Timestamp(((Timestamp) value).getTime() - 1L);
         }
         if (value instanceof int[]) {
             int[] result = ((int[]) value).clone();
@@ -38,8 +38,8 @@ void assertIsIntegerEquals() {

     @Test
     void assertIsBigDecimalEquals() {
-        BigDecimal one = BigDecimal.valueOf(3322, 1);
-        BigDecimal another = BigDecimal.valueOf(33220, 2);
+        BigDecimal one = BigDecimal.valueOf(3322L, 1);
+        BigDecimal another = BigDecimal.valueOf(33220L, 2);
         assertTrue(DataConsistencyCheckUtils.isBigDecimalEquals(one, another));
     }
 }
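These test values tie directly back to the compareTo() fix earlier in this commit: BigDecimal.valueOf(3322L, 1) is 332.2 and BigDecimal.valueOf(33220L, 2) is 332.20, numerically equal but with different scales, which equals() would have rejected.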
@@ -93,7 +93,7 @@ private PreparedStatement mockPreparedStatement(final long expectedCRC32Result,

     @Test
     void assertCalculateFailed() throws SQLException {
-        when(connection.prepareStatement(anyString())).thenThrow(new SQLException());
+        when(connection.prepareStatement(anyString())).thenThrow(new SQLException(""));
         assertThrows(PipelineTableDataConsistencyCheckLoadingFailedException.class, () -> new CRC32SingleTableInventoryCalculator().calculate(parameter));
     }
 }
@@ -35,7 +35,7 @@ class JobDataNodeLineConvertUtilsTest {

     @Test
     void assertConvertDataNodesToLines() {
-        Map<String, List<DataNode>> mockDataNodes = new LinkedHashMap<>();
+        Map<String, List<DataNode>> mockDataNodes = new LinkedHashMap<>(2, 1F);
         List<DataNode> dataNodes = Arrays.asList(new DataNode("ds_0", "t_order_0"), new DataNode("ds_0", "t_order_1"));
         List<DataNode> itemDataNodes = Collections.singletonList(new DataNode("ds_0", "t_order_item_0"));
         mockDataNodes.put("t_order", dataNodes);
@@ -51,10 +51,8 @@ void assertConvertDataNodesToLines() {

     @Test
     void assertConvertDataNodesToLinesWithMultipleDataSource() {
-        Map<String, List<DataNode>> mockDataNodes = new LinkedHashMap<>();
         List<DataNode> dataNodes = Arrays.asList(new DataNode("ds_0", "t_order_0"), new DataNode("ds_0", "t_order_2"), new DataNode("ds_1", "t_order_1"), new DataNode("ds_1", "t_order_3"));
-        mockDataNodes.put("t_order", dataNodes);
-        List<JobDataNodeLine> jobDataNodeLines = JobDataNodeLineConvertUtils.convertDataNodesToLines(mockDataNodes);
+        List<JobDataNodeLine> jobDataNodeLines = JobDataNodeLineConvertUtils.convertDataNodesToLines(Collections.singletonMap("t_order", dataNodes));
         assertThat(jobDataNodeLines.size(), is(2));
         JobDataNodeEntry jobDataNodeEntry = jobDataNodeLines.get(0).getEntries().iterator().next();
         assertThat(jobDataNodeEntry.getDataNodes().stream().map(DataNode::getTableName).collect(Collectors.toList()), is(Arrays.asList("t_order_0", "t_order_2")));
@@ -34,6 +34,6 @@ void assertInit() {

     @Test
     void assertToString() {
-        assertThat(new IntegerPrimaryKeyIngestPosition(1, 100).toString(), is("i,1,100"));
+        assertThat(new IntegerPrimaryKeyIngestPosition(1L, 100L).toString(), is("i,1,100"));
     }
 }