diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
index 4611df765f990..84df75f7cce32 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
@@ -723,7 +723,7 @@ private void addReplicaToReplicasMap(Block block, ReplicaMap volumeMap,
     }
   }
 
-
+  private final AtomicLong scanBlockCnt = new AtomicLong(0L);
   /**
    * Add replicas under the given directory to the volume map
    * @param volumeMap the replicas map
@@ -749,6 +749,11 @@ void addToReplicasMap(ReplicaMap volumeMap, File dir,
             lazyWriteReplicaMap, isFinalized, exceptions, subTaskQueue);
         subTask.fork();
         subTaskQueue.add(subTask);
+      } else {
+        long num = scanBlockCnt.get();
+        if (num > 0 && num % 10000 == 0) {
+          LOG.info("Scan progress: {} blocks have been scanned.", num);
+        }
       }
 
       if (isFinalized && FsDatasetUtil.isUnlinkTmpFile(file)) {
@@ -768,6 +773,7 @@ void addToReplicasMap(ReplicaMap volumeMap, File dir,
       Block block = new Block(blockId, file.length(), genStamp);
       addReplicaToReplicasMap(block, volumeMap, lazyWriteReplicaMap,
           isFinalized);
+      scanBlockCnt.incrementAndGet();
     }
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java
index 5468473d9de0b..e68845a6fcd32 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java
@@ -2102,4 +2102,28 @@ public void delayGetMetaDataInputStream() {
       DataNodeFaultInjector.set(oldDnInjector);
     }
   }
+
+  /**
+   * The patch only adds a scan-progress log, so there is no new state to
+   * assert on directly; this test simply verifies that addVolume (which
+   * drives the replica scan) still works after the change.
+   */
+  @Test
+  public void testLog4AddToReplicasMap() throws IOException {
+    List<NamespaceInfo> nsInfos = Lists.newArrayList();
+    for (String bpid : BLOCK_POOL_IDS) {
+      nsInfos.add(new NamespaceInfo(0, CLUSTER_ID, bpid, 1));
+    }
+    String path = BASE_DIR + "/newData0";
+    String pathUri = new Path(path).toUri().toString();
+    StorageLocation loc = StorageLocation.parse(pathUri);
+    Storage.StorageDirectory sd = createStorageDirectory(
+        new File(path), conf);
+    DataStorage.VolumeBuilder builder =
+        new DataStorage.VolumeBuilder(storage, sd);
+    when(storage.prepareVolume(eq(datanode), eq(loc),
+        anyList()))
+        .thenReturn(builder);
+    dataset.addVolume(loc, nsInfos);
+  }
 }
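Note: if an assertion on the new log line is wanted, one option (a sketch only, not part of the patch) is to capture the scan log with Hadoop's GenericTestUtils.LogCapturer and check its output after addVolume returns. The snippet assumes BlockPoolSlice logs through an SLF4J logger obtained via LoggerFactory.getLogger(BlockPoolSlice.class) and that the volume already holds a multiple of 10000 block files, since the patch only logs at that interval; otherwise the assertion would be vacuous.

    // Sketch only: capture the BlockPoolSlice log and assert on the progress line.
    // Assumes an SLF4J logger keyed on BlockPoolSlice.class and a pre-populated
    // volume, because the patch logs only every 10000 scanned blocks.
    GenericTestUtils.LogCapturer logs = GenericTestUtils.LogCapturer.captureLogs(
        LoggerFactory.getLogger(BlockPoolSlice.class));
    try {
      dataset.addVolume(loc, nsInfos);
      assertTrue("expected scan progress log",
          logs.getOutput().contains("blocks have been scanned"));
    } finally {
      logs.stopCapturing();
    }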