Skip to content

Commit

Permalink
Linter issues
Browse files Browse the repository at this point in the history
  • Loading branch information
Zabuzard committed Aug 11, 2020
1 parent 49167a1 commit 3b11561
Show file tree
Hide file tree
Showing 11 changed files with 75 additions and 60 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
*
* @author Daniel Tischner {@literal <zabuza.dev@gmail.com>}
*/
@SuppressWarnings({"UseOfSystemOutOrSystemErr", "ClassIndependentOfModule", "ClassOnlyUsedInOneModule"})
@SuppressWarnings({ "UseOfSystemOutOrSystemErr", "ClassIndependentOfModule", "ClassOnlyUsedInOneModule" })
enum CompareFiles {
;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
*
* @author Daniel Tischner {@literal <zabuza.dev@gmail.com>}
*/
@SuppressWarnings({"UseOfSystemOutOrSystemErr", "ClassIndependentOfModule", "ClassOnlyUsedInOneModule"})
@SuppressWarnings({ "UseOfSystemOutOrSystemErr", "ClassIndependentOfModule", "ClassOnlyUsedInOneModule" })
enum LocalChunkCache {
;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -83,66 +83,67 @@ public static void main(final String[] args) {
currentBuild));
}

private static String bytesToReadable(long bytes) {
private static String bytesToReadable(final long bytes) {
if (bytes < 1_000) {
return bytes + " B";
}

double kiloBytes = bytes / 1_000.0;
final double kiloBytes = bytes / 1_000.0;
if (kiloBytes < 1_000) {
return String.format("%.2f", kiloBytes) + " KB";
}

double megaBytes = kiloBytes / 1_000.0;
final double megaBytes = kiloBytes / 1_000.0;
if (megaBytes < 1_000) {
return String.format("%.2f", megaBytes) + " MB";
}

double gigaBytes = megaBytes / 1_000.0;
final double gigaBytes = megaBytes / 1_000.0;
if (gigaBytes < 1_000) {
return String.format("%.2f", gigaBytes) + " GB";
}
return "";
}

private static void chunkPath(final Chunker chunker, final Path path, final Consumer<Chunk> chunkAction) {
private static void chunkPath(final Chunker chunker, final Path path, final Consumer<? super Chunk> chunkAction) {
try {
List<Path> files = Files.walk(path)
final List<Path> files = Files.walk(path)
.filter(Files::isRegularFile)
.collect(Collectors.toList());

long totalBytes = files.stream()
final long totalBytes = files.stream()
.mapToLong(file -> {
try {
return Files.size(file);
} catch (IOException e) {
} catch (final IOException e) {
throw new UncheckedIOException(e);
}
})
.sum();
AtomicLong processedBytesTotal = new AtomicLong(0);
AtomicLong processedBytesSincePrint = new AtomicLong(0);
AtomicLong timeStart = new AtomicLong(System.nanoTime());
ScheduledExecutorService service = Executors.newSingleThreadScheduledExecutor();
final AtomicLong processedBytesTotal = new AtomicLong(0);
final AtomicLong processedBytesSincePrint = new AtomicLong(0);
final AtomicLong timeStart = new AtomicLong(System.nanoTime());
final ScheduledExecutorService service = Executors.newSingleThreadScheduledExecutor();
final long nanosPerSecond = 1_000_000_000L;
Runnable statPrinter = () -> {
AtomicLong timeEnd = new AtomicLong(System.nanoTime());
long timeDiff = timeEnd.get() - timeStart.get();
final Runnable statPrinter = () -> {
final AtomicLong timeEnd = new AtomicLong(System.nanoTime());
final long timeDiff = timeEnd.get() - timeStart.get();
if (timeDiff < nanosPerSecond) {
return;
}
timeStart.set(timeEnd.get());
long bytesPerSecond = processedBytesSincePrint.get() / (timeDiff / nanosPerSecond);
long bytesLeft = totalBytes - processedBytesTotal.get();
long secondsLeft = bytesLeft / (bytesPerSecond == 0 ? 1 : bytesPerSecond);
final long bytesPerSecond = processedBytesSincePrint.get() / (timeDiff / nanosPerSecond);
final long bytesLeft = totalBytes - processedBytesTotal.get();
final long secondsLeft = bytesLeft / (bytesPerSecond == 0 ? 1 : bytesPerSecond);

System.out.printf("\t%12s/s, %12s ETC, %12s processed, %12s total\r", bytesToReadable(bytesPerSecond),
secondsToReadable(secondsLeft), bytesToReadable(processedBytesTotal.get()),
bytesToReadable(totalBytes));
System.out.printf("\t%12s/s, %12s ETC, %12s processed, %12s total\r",
PatchSummary.bytesToReadable(bytesPerSecond), PatchSummary.secondsToReadable(secondsLeft),
PatchSummary.bytesToReadable(processedBytesTotal.get()),
PatchSummary.bytesToReadable(totalBytes));

processedBytesSincePrint.set(0);
};
var statPrintTask = service.scheduleAtFixedRate(statPrinter, 0, 1, TimeUnit.SECONDS);
final var statPrintTask = service.scheduleAtFixedRate(statPrinter, 0, 1, TimeUnit.SECONDS);

files.parallelStream()
.filter(Files::isRegularFile)
Expand All @@ -155,38 +156,40 @@ private static void chunkPath(final Chunker chunker, final Path path, final Cons
}));
statPrintTask.cancel(false);
service.shutdown();
} catch (IOException e) {
} catch (final IOException e) {
throw new UncheckedIOException(e);
}
}

private static void executePatchSummary(final String description, final Chunker chunker, final Path previousBuild,
final Path currentBuild) {
final List<ChunkMetadata> previousChunks = Collections.synchronizedList(new ArrayList<>());
chunkPath(chunker, previousBuild, chunk -> previousChunks.add(chunk.toChunkMetadata()));
PatchSummary.chunkPath(chunker, previousBuild, chunk -> previousChunks.add(chunk.toChunkMetadata()));
final BuildSummary previousBuildSummary = new BuildSummary(previousChunks);

final List<ChunkMetadata> currentChunks = Collections.synchronizedList(new ArrayList<>());
chunkPath(chunker, currentBuild, chunk -> currentChunks.add(chunk.toChunkMetadata()));
PatchSummary.chunkPath(chunker, currentBuild, chunk -> currentChunks.add(chunk.toChunkMetadata()));
final BuildSummary currentBuildSummary = new BuildSummary(currentChunks);

final PatchSummary summary = new PatchSummary(previousBuildSummary, currentBuildSummary);
System.out.println("==== " + description);
System.out.printf("%-25s %12s total size, %12d total chunks, %12s unique size, %12d unique chunks%n",
"Build summary previous:", bytesToReadable(previousBuildSummary.getTotalSize()),
previousBuildSummary.getTotalChunksCount(), bytesToReadable(previousBuildSummary.getTotalUniqueSize()),
"Build summary previous:", PatchSummary.bytesToReadable(previousBuildSummary.getTotalSize()),
previousBuildSummary.getTotalChunksCount(),
PatchSummary.bytesToReadable(previousBuildSummary.getTotalUniqueSize()),
previousBuildSummary.getUniqueChunksCount());
System.out.printf("%-25s %12s total size, %12d total chunks, %12s unique size, %12d unique chunks%n",
"Build summary current:", bytesToReadable(currentBuildSummary.getTotalSize()),
currentBuildSummary.getTotalChunksCount(), bytesToReadable(currentBuildSummary.getTotalUniqueSize()),
"Build summary current:", PatchSummary.bytesToReadable(currentBuildSummary.getTotalSize()),
currentBuildSummary.getTotalChunksCount(),
PatchSummary.bytesToReadable(currentBuildSummary.getTotalUniqueSize()),
currentBuildSummary.getUniqueChunksCount());
System.out.printf("%-25s %12s average chunk size, %12.2f%% deduplication ratio%n", "Build metrics previous:",
bytesToReadable(previousBuildSummary.getAverageChunkSize()),
PatchSummary.bytesToReadable(previousBuildSummary.getAverageChunkSize()),
previousBuildSummary.getDeduplicationRatio());
System.out.printf("%-25s %12s average chunk size, %12.2f%% deduplication ratio%n", "Build metrics current:",
bytesToReadable(currentBuildSummary.getAverageChunkSize()),
PatchSummary.bytesToReadable(currentBuildSummary.getAverageChunkSize()),
currentBuildSummary.getDeduplicationRatio());
System.out.printf("%-25s %12s%n", "Patch size:", bytesToReadable(summary.getPatchSize()));
System.out.printf("%-25s %12s%n", "Patch size:", PatchSummary.bytesToReadable(summary.getPatchSize()));
System.out.printf("%-25s %12d%n", "Chunks to add:", summary.getChunksToAdd()
.size());
System.out.printf("%-25s %12d%n", "Chunks to remove:", summary.getChunksToRemove()
Expand All @@ -198,26 +201,26 @@ private static void executePatchSummary(final String description, final Chunker
System.out.println();
}

private static String secondsToReadable(long seconds) {
StringBuilder sb = new StringBuilder();
private static String secondsToReadable(final long seconds) {
final StringBuilder sb = new StringBuilder();
boolean entered = false;
Duration time = Duration.ofSeconds(seconds);
final Duration time = Duration.ofSeconds(seconds);

long days = time.toDays();
final long days = time.toDays();
if (days != 0) {
sb.append(days)
.append("d ");
entered = true;
}

int hours = time.toHoursPart();
final int hours = time.toHoursPart();
if (hours != 0 || entered) {
sb.append(hours)
.append("h ");
entered = true;
}

int minutes = time.toMinutesPart();
final int minutes = time.toMinutesPart();
if (minutes != 0 || entered) {
sb.append(minutes)
.append("m ");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ public interface Chunk {
String getHexHash();

/**
* Converts this chunk to its corresponding metadata
* Converts this chunk to its corresponding metadata.
*
* @return The metadata
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@
*
* @author Daniel Tischner {@literal <zabuza.dev@gmail.com>}
*/
@SuppressWarnings("ClassWithTooManyFields")
public final class ChunkerBuilder {
/**
* The default expected size of chunks, in bytes, used by all chunkers.
Expand Down Expand Up @@ -159,11 +160,13 @@ public Chunker build() {

final MaskGenerator maskGenerator =
new MaskGenerator(maskOption, normalizationLevel, expectedChunkSize, maskGenerationSeed);
long maskSmallToUse = maskSmall != null ? maskSmall : maskGenerator.generateSmallMask();
long maskLargeToUse = maskLarge != null ? maskLarge : maskGenerator.generateLargeMask();
final long maskSmallToUse = maskSmall != null ? maskSmall : maskGenerator.generateSmallMask();
final long maskLargeToUse = maskLarge != null ? maskLarge : maskGenerator.generateLargeMask();

int minimalChunkSize = (int) (expectedChunkSize * minimalChunkSizeFactor);
int maximalChunkSize = (int) (expectedChunkSize * maximalChunkSizeFactor);
//noinspection NumericCastThatLosesPrecision
final int minimalChunkSize = (int) (expectedChunkSize * minimalChunkSizeFactor);
//noinspection NumericCastThatLosesPrecision
final int maximalChunkSize = (int) (expectedChunkSize * maximalChunkSizeFactor);

final IterativeStreamChunkerCore coreToUse = chunkerCore != null ? chunkerCore : switch (chunkerOption) {
case FAST_CDC -> new FastCdcChunkerCore(expectedChunkSize, minimalChunkSize, maximalChunkSize,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ public final class FastCdcChunkerCore implements IterativeStreamChunkerCore {
* @param maskLarge Mask for the fingerprint that is used for bigger windows, to increase the likelihood of a
* split
*/
@SuppressWarnings("ConstructorWithTooManyParameters")
public FastCdcChunkerCore(final int expectedSize, final int minSize, final int maxSize, final long[] gear,
final long maskSmall, final long maskLarge) {
this.expectedSize = expectedSize;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ public enum HashTables {
*/
@SuppressWarnings("OverlyLargePrimitiveArrayInitializer")
private static final long[] RTPAL =
{0x5a16b18f2aac863eL, 0x05fad735784f09eaL, 0x355c6a3868fe64afL, 0x57df89c95716c702L, 0x46ea7572135544a6L,
{ 0x5a16b18f2aac863eL, 0x05fad735784f09eaL, 0x355c6a3868fe64afL, 0x57df89c95716c702L, 0x46ea7572135544a6L,
0x6291d5376cd79d73L, 0x2a6e072b609b0bbfL, 0x110f7f895ec438b7L, 0x2fc580f60659f690L,
0x15ce33c924a8880bL, 0x1f3fabc44c091f5fL, 0x76e7512d0f53c142L, 0x30ff6d65448b44b3L,
0x16db576e7ecfe3c9L, 0x7009bea841de2e20L, 0x0ad460d80f3fe181L, 0x0a1e6fed6ece42dbL,
Expand Down Expand Up @@ -76,13 +76,13 @@ public enum HashTables {
0x45e90bc2494ad436L, 0x5291bcf62f0b6bdbL, 0x72ea193619f06853L, 0x5a5a2bd77114b311L,
0x5445faa82e02e158L, 0x0065712926726beaL, 0x1bed3b9a62fbf757L, 0x1767b815257b83d4L,
0x000eab4e77327b81L, 0x0fd333301966ff16L, 0x6780eb8339b83286L, 0x7652a5e647799673L,
0x43c0db665e364315L, 0x6fe4fe01606d405dL, 0x6833dbd876b03920L};
0x43c0db665e364315L, 0x6fe4fe01606d405dL, 0x6833dbd876b03920L };
/**
* Table used by Nlfiedler-Rust.
*/
@SuppressWarnings("OverlyLargePrimitiveArrayInitializer")
private static final long[] NLFIEDLER_RUST =
{0x5c95c078L, 0x22408989L, 0x2d48a214L, 0x12842087L, 0x530f8afbL, 0x474536b9L, 0x2963b4f1L, 0x44cb738bL,
{ 0x5c95c078L, 0x22408989L, 0x2d48a214L, 0x12842087L, 0x530f8afbL, 0x474536b9L, 0x2963b4f1L, 0x44cb738bL,
0x4ea7403dL, 0x4d606b6eL, 0x074ec5d3L, 0x3af39d18L, 0x726003caL, 0x37a62a74L, 0x51a2f58eL,
0x7506358eL, 0x5d4ab128L, 0x4d4ae17bL, 0x41e85924L, 0x470c36f7L, 0x4741cbe1L, 0x01bb7f30L,
0x617c1de3L, 0x2b0c3a1fL, 0x50c48f73L, 0x21a82d37L, 0x6095ace0L, 0x419167a0L, 0x3caf49b0L,
Expand Down Expand Up @@ -118,7 +118,7 @@ public enum HashTables {
0x0e93e12bL, 0x64b2791dL, 0x440d2476L, 0x588ea8ddL, 0x4665a658L, 0x7446c418L, 0x1877a774L,
0x5626407eL, 0x7f63bd46L, 0x32d2dbd8L, 0x3c790f4aL, 0x772b7239L, 0x6f8b2826L, 0x677ff609L,
0x0dc82c11L, 0x23ffe354L, 0x2eac53a6L, 0x16139e09L, 0x0afd0dbcL, 0x2a4d4237L, 0x56a368c7L,
0x234325e4L, 0x2dce9187L, 0x32e8ea7eL};
0x234325e4L, 0x2dce9187L, 0x32e8ea7eL };

/**
* Gets the table used by RTPal.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ public final class MaskGenerator {
private static long generateMaskFastCdc(final int effectiveBits, final long seed) {
// Shuffle a mask with 'effectiveBits' 1s and fill up the rest with '0'
// The most significant bit has to be 1 always, hence we only shuffle the rest
List<Integer> maskBits = new ArrayList<>();
final List<Integer> maskBits = new ArrayList<>();
int i = 0;
while (i < effectiveBits - 1) {
maskBits.add(1);
Expand All @@ -44,7 +44,7 @@ private static long generateMaskFastCdc(final int effectiveBits, final long seed
}
Collections.shuffle(maskBits, new Random(seed));

String mask = Stream.concat(Stream.of(1), maskBits.stream())
final String mask = Stream.concat(Stream.of(1), maskBits.stream())
.map(Object::toString)
.collect(Collectors.joining());

Expand All @@ -58,7 +58,7 @@ private static long generateMaskFastCdc(final int effectiveBits, final long seed
*
* @return The generated mask
*/
private static long generateMaskNlfiedlerRust(int bits) {
private static long generateMaskNlfiedlerRust(final int bits) {
return Long.parseLong("1".repeat(bits), 2);
}

Expand All @@ -69,7 +69,7 @@ private static long generateMaskNlfiedlerRust(int bits) {
*
* @return The amount of effective bits to use
*/
private static int getEffectiveBits(int expectedChunkSize) {
private static int getEffectiveBits(final int expectedChunkSize) {
return Util.log2(expectedChunkSize);
}

Expand Down Expand Up @@ -131,11 +131,11 @@ public long generateSmallMask() {
*
* @return The generated mask
*/
private long generateMask(int effectiveBitOffset) {
int effectiveBits = getEffectiveBits(expectedChunkSize) + effectiveBitOffset;
private long generateMask(final int effectiveBitOffset) {
final int effectiveBits = MaskGenerator.getEffectiveBits(expectedChunkSize) + effectiveBitOffset;
return switch (maskOption) {
case FAST_CDC -> generateMaskFastCdc(effectiveBits, seed);
case NLFIEDLER_RUST -> generateMaskNlfiedlerRust(effectiveBits);
case FAST_CDC -> MaskGenerator.generateMaskFastCdc(effectiveBits, seed);
case NLFIEDLER_RUST -> MaskGenerator.generateMaskNlfiedlerRust(effectiveBits);
};
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ public final class NlfiedlerRustChunkerCore implements IterativeStreamChunkerCor
* @param maskLarge Mask for the fingerprint that is used for bigger windows, to increase the likelihood of a
* split
*/
@SuppressWarnings("ConstructorWithTooManyParameters")
public NlfiedlerRustChunkerCore(final int expectedSize, final int minSize, final int maxSize, final long[] gear,
final long maskSmall, final long maskLarge) {
this.expectedSize = expectedSize;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
* @param <X> The type contained in the outer iterator, either already an iterator or used to produce iterators using a
* given function
* @param <Y> The type contained in the inner iterators, also the final type contained in this iterator
*
* @author Daniel Tischner {@literal <zabuza.dev@gmail.com>}
*/
public final class FlatIterator<X, Y> implements Iterator<Y> {
/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,11 @@ public enum Util {
* All characters available in the hexadecimal-system, as UTF-8 encoded array.
*/
private static final byte[] HEX_ARRAY = "0123456789ABCDEF".getBytes(StandardCharsets.UTF_8);
/**
* Small delta to counter floating point issues where rounding down would accidentally round down a full integer too
much.
*/
private static final double FLOATING_DELTA = 1.0e-12;

/**
* Creates a hexadecimal representation of the given binary data.
Expand Down Expand Up @@ -63,14 +68,14 @@ public static byte[] hash(final String method, final byte[] data) {
*
* @return The log2 of the given value
*/
public static int log2(int x) {
public static int log2(final int x) {
if (x >= 0) {
// Safe binary-only conversion without floating points
return Integer.bitCount(Integer.highestOneBit(x) - 1);
}
// Adding epsilon to mitigate floating point errors, see https://stackoverflow.com/a/3305400/2411243
return (int) (Math.log(x) / Math.log(2) + 1e-12);

//noinspection NumericCastThatLosesPrecision
return (int) (Math.log(x) / Math.log(2) + Util.FLOATING_DELTA);

}
}

0 comments on commit 3b11561

Please sign in to comment.