From 4f6f65cc09fadbca84e2f56b99555255c7bf13ea Mon Sep 17 00:00:00 2001 From: Min Zhao Date: Thu, 3 Jun 2021 19:27:22 +0800 Subject: [PATCH 1/9] HADOOP-17742. fix distcp fail when copying to ftp filesystem --- .../apache/hadoop/fs/ftp/FTPFileSystem.java | 8 --- .../contract/ftp/TestFTPContractRename.java | 66 ------------------- .../apache/hadoop/tools/DistCpConstants.java | 4 ++ .../hadoop/tools/mapred/CopyCommitter.java | 3 +- .../mapred/RetriableFileCopyCommand.java | 15 +++-- .../apache/hadoop/tools/util/DistCpUtils.java | 16 +++++ 6 files changed, 30 insertions(+), 82 deletions(-) delete mode 100644 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java index 28db2c9a1a227..305c5e1690a1c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java @@ -72,8 +72,6 @@ public class FTPFileSystem extends FileSystem { public static final String FS_FTP_DATA_CONNECTION_MODE = "fs.ftp.data.connection.mode"; public static final String FS_FTP_TRANSFER_MODE = "fs.ftp.transfer.mode"; - public static final String E_SAME_DIRECTORY_ONLY = - "only same directory renames are supported"; public static final String FS_FTP_TIMEOUT = "fs.ftp.timeout"; private URI uri; @@ -686,17 +684,11 @@ private boolean rename(FTPClient client, Path src, Path dst) + " already exists"); } String parentSrc = absoluteSrc.getParent().toUri().toString(); - String parentDst = absoluteDst.getParent().toUri().toString(); if (isParentOf(absoluteSrc, absoluteDst)) { throw new IOException("Cannot rename " + absoluteSrc + " under itself" + " : "+ absoluteDst); } - if (!parentSrc.equals(parentDst)) { - throw new IOException("Cannot rename source: " + absoluteSrc - + " to " + absoluteDst - + " -"+ E_SAME_DIRECTORY_ONLY); - } String from = absoluteSrc.getName(); String to = absoluteDst.getName(); client.changeWorkingDirectory(parentSrc); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java deleted file mode 100644 index fb8718bf24648..0000000000000 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.fs.contract.ftp; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.contract.AbstractContractRenameTest; -import org.apache.hadoop.fs.contract.AbstractFSContract; -import org.apache.hadoop.fs.ftp.FTPFileSystem; - -import java.io.IOException; - -public class TestFTPContractRename extends AbstractContractRenameTest { - - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new FTPContract(conf); - } - - /** - * Check the exception was about cross-directory renames - * -if not, rethrow it. - * @param e exception raised - * @throws IOException - */ - private void verifyUnsupportedDirRenameException(IOException e) throws IOException { - if (!e.toString().contains(FTPFileSystem.E_SAME_DIRECTORY_ONLY)) { - throw e; - } - } - - @Override - public void testRenameDirIntoExistingDir() throws Throwable { - try { - super.testRenameDirIntoExistingDir(); - fail("Expected a failure"); - } catch (IOException e) { - verifyUnsupportedDirRenameException(e); - } - } - - @Override - public void testRenameFileNonexistentDir() throws Throwable { - try { - super.testRenameFileNonexistentDir(); - fail("Expected a failure"); - } catch (IOException e) { - verifyUnsupportedDirRenameException(e); - } - } -} diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java index 25815687c2973..a02c408aeb377 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java @@ -191,4 +191,8 @@ private DistCpConstants() { public static final String CLASS_INSTANTIATION_ERROR_MSG = "Unable to instantiate "; + + public static final String TARGET_TEMP_FILE_PREFIX_COMMA = ".distcp.tmp."; + + public static final String TARGET_TEMP_FILE_PREFIX = "distcp.tmp."; } diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java index 139bd08fd7abc..aa3c9d996a933 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java @@ -170,8 +170,9 @@ private void deleteAttemptTempFiles(Path targetWorkPath, return; } + String tempFilePrefix = DistCpUtils.getTargetTempFilePrefix(targetFS); FileStatus[] tempFiles = targetFS.globStatus( - new Path(targetWorkPath, ".distcp.tmp." 
+ jobId.replaceAll("job","attempt") + "*"));
+        new Path(targetWorkPath, tempFilePrefix + jobId.replaceAll("job","attempt") + "*"));
 
     if (tempFiles != null && tempFiles.length > 0) {
       for (FileStatus file : tempFiles) {
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
index cde160c965485..818896bd6ac8a 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
@@ -129,16 +129,15 @@ private long doCopy(CopyListingFileStatus source, Path target,
       throws IOException {
     LOG.info("Copying {} to {}", source.getPath(), target);
 
+    final Configuration configuration = context.getConfiguration();
+    FileSystem targetFS = target.getFileSystem(configuration);
     final boolean toAppend = action == FileAction.APPEND;
     final boolean useTempTarget = !toAppend && !directWrite;
-    Path targetPath = useTempTarget ? getTempFile(target, context) : target;
+    Path targetPath = useTempTarget ? getTempFile(target, context, targetFS) : target;
 
     LOG.info("Writing to {} target file path {}", useTempTarget ? "temporary"
         : "direct", targetPath);
 
-    final Configuration configuration = context.getConfiguration();
-    FileSystem targetFS = target.getFileSystem(configuration);
-
     try {
       final Path sourcePath = source.getPath();
       final FileSystem sourceFS = sourcePath.getFileSystem(configuration);
@@ -161,6 +160,7 @@ private long doCopy(CopyListingFileStatus source, Path target,
       if (useTempTarget) {
         LOG.info("Renaming temporary target file path {} to {}", targetPath,
             target);
+        target = new Path(target.toUri().getPath());
         promoteTmpToTarget(targetPath, target, targetFS);
       }
       LOG.info("Completed writing {} ({} bytes)", target, bytesRead);
@@ -257,17 +257,18 @@ private void promoteTmpToTarget(Path tmpTarget, Path target, FileSystem fs)
     }
   }
 
-  private Path getTempFile(Path target, Mapper.Context context) {
+  private Path getTempFile(Path target, Mapper.Context context, FileSystem fileSystem) {
     Path targetWorkPath = new Path(context.getConfiguration().
         get(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH));
 
     Path root = target.equals(targetWorkPath) ? targetWorkPath.getParent()
         : targetWorkPath;
-    Path tempFile = new Path(root, ".distcp.tmp." +
+    String tempFilePrefix = DistCpUtils.getTargetTempFilePrefix(fileSystem);
+    Path tempFile = new Path(root, tempFilePrefix +
        context.getTaskAttemptID().toString() + "." +
        String.valueOf(System.currentTimeMillis()));
     LOG.info("Creating temp file: {}", tempFile);
-    return tempFile;
+    return new Path(tempFile.toUri().getPath());
   }
 
   @VisibleForTesting
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
index 9c1666b658c70..345a485ee1e0f 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
@@ -662,4 +662,20 @@ public static Path getSplitChunkPath(Path targetFile,
         + ".____distcpSplit____" + srcFileStatus.getChunkOffset()
         + "." + srcFileStatus.getChunkLength());
   }
+
+  /**
+   * Return the target temp file prefix.
+   *
+   * The FTPFileSystem can't work well when the file name starts with a dot.
+   *
+   * @param fileSystem target filesystem
+   * @return temp file path prefix
+   */
+  public static String getTargetTempFilePrefix(FileSystem fileSystem) {
+    if (StringUtils.equalsIgnoreCase(fileSystem.getScheme(), "ftp")) {
+      return DistCpConstants.TARGET_TEMP_FILE_PREFIX;
+    } else {
+      return DistCpConstants.TARGET_TEMP_FILE_PREFIX_COMMA;
+    }
+  }
 }

From 74550a42bfe47d06898f05eec11907715134e103 Mon Sep 17 00:00:00 2001
From: July <51110188+yikf@users.noreply.github.com>
Date: Sat, 5 Jun 2021 04:36:09 +0800
Subject: [PATCH 2/9] HDFS-16033 Fix issue of the StatisticsDataReferenceCleaner cleanUp (#3042)

Contributed by kaifeiYi (yikf).

Signed-off-by: Mingliang Liu
Reviewed-by: Steve Loughran
---
 .../src/main/java/org/apache/hadoop/fs/FileSystem.java | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index c6cf941cee7ab..057382bed9cde 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -4004,12 +4004,19 @@ public void cleanUp() {
    * Background action to act on references being removed.
    */
   private static class StatisticsDataReferenceCleaner implements Runnable {
+    /**
+     * Timeout (in milliseconds) for removing a reference object from the
+     * STATS_DATA_REF_QUEUE when the queue is empty.
+     */
+    private static final int REF_QUEUE_POLL_TIMEOUT = 10000;
+
     @Override
     public void run() {
       while (!Thread.interrupted()) {
         try {
           StatisticsDataReference ref =
-              (StatisticsDataReference)STATS_DATA_REF_QUEUE.remove();
+              (StatisticsDataReference)STATS_DATA_REF_QUEUE.
+                  remove(REF_QUEUE_POLL_TIMEOUT);
           ref.cleanUp();
         } catch (InterruptedException ie) {
           LOGGER.warn("Cleaner thread interrupted, will stop", ie);

From eda478bb9ce3476de94607e0b54de8d88091318c Mon Sep 17 00:00:00 2001
From: Hideyuki Furue
Date: Wed, 2 Jun 2021 19:56:38 +0900
Subject: [PATCH 3/9] Fix container-executor

Signed-off-by: Akira Ajisaka
---
 .../impl/container-executor.c                 | 130 +++++++++++++-----
 .../native/container-executor/impl/main.c     |   4 +-
 .../container-executor/impl/runc/runc_reap.c  |   6 +-
 .../native/container-executor/impl/util.c     |  10 +-
 .../impl/utils/docker-util.c                  |   4 +-
 .../impl/utils/string-utils.c                 |   1 +
 .../test/test-container-executor.c            |  41 ++++++
 7 files changed, 158 insertions(+), 38 deletions(-)

diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
index d69acf33abe10..ab03d3991dc7f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
@@ -49,6 +49,10 @@
 #include
 #include
 #include
+#ifdef __linux
+#include <sys/statfs.h>
+#include <linux/magic.h>
+#endif
 
 #ifndef HAVE_FCHMODAT
 #include "compat/fchmodat.h"
@@ -231,6 +235,19 @@ static int write_pid_to_cgroup_as_root(const char* cgroup_file, pid_t pid) {
     goto cleanup;
   }
 
+  // statfs
+  struct statfs buf;
+  if (statfs(cgroup_file, &buf) == -1) {
+    fprintf(LOGFILE, "Can't statfs file %s as node manager - %s\n", cgroup_file,
+      strerror(errno));
+    rc = -1;
+    goto cleanup;
+  } else if (buf.f_type != CGROUP_SUPER_MAGIC) {
+    fprintf(LOGFILE, "Pid file %s is not located on cgroup filesystem\n", cgroup_file);
+    rc = -1;
+    goto cleanup;
+  }
+  // open
   int cgroup_fd = open(cgroup_file, O_WRONLY | O_APPEND, 0);
   if (cgroup_fd == -1) {
@@ -524,27 +541,16 @@ int is_runc_support_enabled() {
 
 /**
  * Utility function to concatenate argB to argA using the concat_pattern.
+ * For historical reasons, the redundant numArgs argument is retained.
 */
char *concatenate(char *concat_pattern, char *return_path_name, int numArgs, ...)
{ va_list ap; va_start(ap, numArgs); - int strlen_args = 0; - char *arg = NULL; - int j; - for (j = 0; j < numArgs; j++) { - arg = va_arg(ap, char*); - if (arg == NULL) { - fprintf(LOGFILE, "One of the arguments passed for %s is null.\n", - return_path_name); - return NULL; - } - strlen_args += strlen(arg); - } + int str_len = vsnprintf(NULL, 0, concat_pattern, ap) + 1; va_end(ap); char *return_path = NULL; - int str_len = strlen(concat_pattern) + strlen_args + 1; return_path = (char *) malloc(str_len); if (return_path == NULL) { @@ -785,6 +791,10 @@ static int create_container_directories(const char* user, const char *app_id, if (container_dir == NULL) { return OUT_OF_MEMORY; } + if (strstr(container_dir, "..") != 0) { + fprintf(LOGFILE, "Unsupported container directory path detected.\n"); + return COULD_NOT_CREATE_WORK_DIRECTORIES; + } if (mkdirs(container_dir, perms) == 0) { result = 0; } @@ -812,19 +822,26 @@ static int create_container_directories(const char* user, const char *app_id, char *container_log_dir = get_app_log_directory(*log_dir_ptr, combined_name); int check = check_nm_local_dir(nm_uid, *log_dir_ptr); if (check != 0) { - container_log_dir = NULL; - } - if (strstr(container_log_dir, "..") != 0) { - fprintf(LOGFILE, "Unsupported container log directory path detected.\n"); - container_log_dir = NULL; + free(container_log_dir); + free(combined_name); + return COULD_NOT_CREATE_APP_LOG_DIRECTORIES; } if (container_log_dir == NULL) { free(combined_name); return OUT_OF_MEMORY; + } + if (strstr(container_log_dir, "..") != 0) { + fprintf(LOGFILE, "Unsupported container log directory path detected.\n"); + free(container_log_dir); + free(combined_name); + return COULD_NOT_CREATE_APP_LOG_DIRECTORIES; } else if (mkdirs(container_log_dir, logdir_perms) != 0) { free(container_log_dir); } else { result = 0; + if (chosen_container_log_dir != NULL) { + free(chosen_container_log_dir); + } chosen_container_log_dir = strdup(container_log_dir); free(container_log_dir); } @@ -845,6 +862,12 @@ static int create_container_directories(const char* user, const char *app_id, return OUT_OF_MEMORY; } + if (strstr(tmp_dir, "..") != 0 || strstr(private_tmp_dir, "..") != 0 || strstr(private_var_tmp_dir, "..") != 0) { + fprintf(ERRORFILE, "Unsupported tmp directory path detected.\n"); + result = COULD_NOT_CREATE_TMP_DIRECTORIES; + goto cleanup; + } + if (mkdirs(tmp_dir, perms) != 0) { fprintf(ERRORFILE, "Could not create tmp_dir: %s\n", tmp_dir); result = COULD_NOT_CREATE_TMP_DIRECTORIES; @@ -857,7 +880,7 @@ static int create_container_directories(const char* user, const char *app_id, goto cleanup; } - // clear group sticky bit on private_tmp_dir + // clear setgid bit on private_tmp_dir if (chmod(private_tmp_dir, perms) != 0) { fprintf(ERRORFILE, "Could not chmod private_tmp_dir: %s\n", private_tmp_dir); result = COULD_NOT_CREATE_TMP_DIRECTORIES; @@ -870,7 +893,7 @@ static int create_container_directories(const char* user, const char *app_id, goto cleanup; } - // clear group sticky bit on private_tmp_dir + // clear setgid bit on private_tmp_dir if (chmod(private_var_tmp_dir, perms) != 0) { fprintf(ERRORFILE, "Could not chmod private_var_tmp_dir: %s\n", private_var_tmp_dir); result = COULD_NOT_CREATE_TMP_DIRECTORIES; @@ -1053,7 +1076,7 @@ static int change_owner(const char* path, uid_t user, gid_t group) { * return non-0 on failure */ int create_directory_for_user(const char* path) { - // set 2750 permissions and group sticky bit + // set 750 permissions and setgid bit mode_t permissions = S_IRWXU | S_IRGRP 
| S_IXGRP | S_ISGID; uid_t user = geteuid(); gid_t group = getegid(); @@ -1066,13 +1089,13 @@ int create_directory_for_user(const char* path) { if (ret == 0) { if (0 == mkdir(path, permissions) || EEXIST == errno) { - // need to reassert the group sticky bit + // need to reassert the setgid bit if (change_owner(path, user, nm_gid) != 0) { fprintf(LOGFILE, "Failed to chown %s to %d:%d: %s\n", path, user, nm_gid, strerror(errno)); ret = -1; } else if (chmod(path, permissions) != 0) { - fprintf(LOGFILE, "Can't chmod %s to add the sticky bit - %s\n", + fprintf(LOGFILE, "Can't chmod %s to add the setgid bit - %s\n", path, strerror(errno)); ret = -1; } @@ -1212,6 +1235,11 @@ int initialize_user(const char *user, char* const* local_dirs) { fprintf(LOGFILE, "Couldn't get userdir directory for %s.\n", user); failed = 1; break; + // Avoid possible wrong validation. Username can contain double dots. + } else if (strstr(user_dir, "/../") != 0) { + fprintf(LOGFILE, "Unsupported userdir directory path detected.\n"); + failed = 1; + break; } if (create_directory_for_user(user_dir) != 0) { failed = 1; @@ -1233,6 +1261,9 @@ int create_log_dirs(const char *app_id, char * const * log_dirs) { } if (app_log_dir == NULL) { // try the next one + } else if (strstr(app_log_dir, "..") != 0) { + fprintf(LOGFILE, "Unsupported app-log directory path detected.\n"); + free(app_log_dir); } else if (create_directory_for_user(app_log_dir) != 0) { free(app_log_dir); return -1; @@ -1301,7 +1332,11 @@ int create_container_log_dirs(const char *container_id, const char *app_id, } int result = check_nm_local_dir(nm_uid, *log_root); - if (result != 0 && container_log_dir != NULL) { + if (result != 0) { + free(container_log_dir); + container_log_dir = NULL; + continue; + } else if (strstr(container_log_dir, "..") != 0) { fprintf(LOGFILE, "Unsupported container log directory path (%s) detected.\n", container_log_dir); free(container_log_dir); @@ -1346,6 +1381,9 @@ static char *create_app_dirs(const char *user, char *app_dir = get_app_directory(*nm_root, user, app_id); if (app_dir == NULL) { // try the next one + } else if (strstr(app_dir, "..") != 0) { + fprintf(LOGFILE, "Unsupported app directory path detected.\n"); + free(app_dir); } else if (mkdirs(app_dir, permissions) != 0) { free(app_dir); } else if (primary_app_dir == NULL) { @@ -1412,7 +1450,7 @@ int initialize_app(const char *user, const char *app_id, char *nmPrivate_credentials_file_copy = strdup(nmPrivate_credentials_file); // TODO: FIXME. 
The user's copy of creds should go to a path selected by - // localDirAllocatoir + // localDirAllocator char *cred_file_name = concatenate("%s/%s", "cred file", 2, primary_app_dir, basename(nmPrivate_credentials_file_copy)); if (cred_file_name == NULL) { @@ -1742,14 +1780,14 @@ int create_script_paths(const char *work_dir, int exit_code = -1; *script_file_dest = get_container_launcher_file(work_dir); - if (script_file_dest == NULL) { + if (*script_file_dest == NULL) { exit_code = OUT_OF_MEMORY; fprintf(ERRORFILE, "Could not create script_file_dest\n"); return exit_code; } *cred_file_dest = get_container_credentials_file(work_dir); - if (NULL == cred_file_dest) { + if (NULL == *cred_file_dest) { exit_code = OUT_OF_MEMORY; fprintf(ERRORFILE, "Could not create cred_file_dest\n"); return exit_code; @@ -1757,13 +1795,13 @@ int create_script_paths(const char *work_dir, if (https == 1) { *keystore_file_dest = get_container_keystore_file(work_dir); - if (NULL == keystore_file_dest) { + if (NULL == *keystore_file_dest) { exit_code = OUT_OF_MEMORY; fprintf(ERRORFILE, "Could not create keystore_file_dest\n"); return exit_code; } *truststore_file_dest = get_container_truststore_file(work_dir); - if (NULL == truststore_file_dest) { + if (NULL == *truststore_file_dest) { exit_code = OUT_OF_MEMORY; fprintf(ERRORFILE, "Could not create truststore_file_dest\n"); return exit_code; @@ -1917,6 +1955,12 @@ int create_user_filecache_dirs(const char * user, char* const* local_dirs) { rc = INITIALIZE_USER_FAILED; break; } + if (strstr(filecache_dir, "..") != 0) { + fprintf(LOGFILE, "Unsupported filecache directory path detected.\n"); + free(filecache_dir); + rc = INITIALIZE_USER_FAILED; + break; + } if (0 != mkdir(filecache_dir, permissions) && EEXIST != errno) { fprintf(LOGFILE, "Failed to create directory %s - %s\n", filecache_dir, strerror(errno)); @@ -1941,6 +1985,12 @@ int create_yarn_sysfs(const char* user, const char *app_id, return OUT_OF_MEMORY; } char *yarn_sysfs_dir = make_string("%s/%s", container_dir, "sysfs"); + if (strstr(yarn_sysfs_dir, "..") != 0) { + fprintf(LOGFILE, "Unsupported yarn sysfs directory path detected.\n"); + free(yarn_sysfs_dir); + free(container_dir); + return OUT_OF_MEMORY; + } if (mkdir(yarn_sysfs_dir, perms) == 0) { result = 0; } @@ -2062,7 +2112,6 @@ int launch_docker_container_as_user(const char * user, const char *app_id, if (exit_code != 0) { fprintf(ERRORFILE, "Could not create user yarn sysfs directory\n"); exit(-1); - goto cleanup; } docker_command = construct_docker_command(command_file); @@ -2096,6 +2145,12 @@ int launch_docker_container_as_user(const char * user, const char *app_id, docker_command_with_binary = flatten(docker_command); + if (docker_command_with_binary == NULL) { + fprintf (ERRORFILE, "Could not flatten docker command.\n"); + exit_code = UNABLE_TO_EXECUTE_CONTAINER_SCRIPT; + goto cleanup; + } + // Launch container pid_t child_pid = fork(); if (child_pid == -1) { @@ -2800,6 +2855,7 @@ int list_as_user(const char *target_dir) { strerror(errno)); ret = -1; } + closedir(dir); } else { fprintf(LOGFILE, "Could not open directory %s - %s\n", target_dir, strerror(errno)); @@ -2857,8 +2913,10 @@ int is_empty(char *target_dir) { continue; } fprintf(LOGFILE, "Directory is not empty %s\n", target_dir); + closedir(dir); return 0; } + closedir(dir); return 1; } @@ -2886,7 +2944,7 @@ int mount_cgroup(const char *pair, const char *hierarchy) { goto cleanup; } if (hierarchy == NULL || strstr(hierarchy, "..") != NULL) { - fprintf(LOGFILE, "Unsupported cgroup hierarhy 
path detected.\n"); + fprintf(LOGFILE, "Unsupported cgroup hierarchy path detected.\n"); result = INVALID_COMMAND_PROVIDED; goto cleanup; } @@ -2907,8 +2965,13 @@ int mount_cgroup(const char *pair, const char *hierarchy) { result = INVALID_COMMAND_PROVIDED; goto cleanup; } + if (strlen(mount_path) + strlen(hierarchy) + 2 > EXECUTOR_PATH_MAX) { + fprintf(LOGFILE, "cgroup hierarchy path is too long.\n"); + result = INVALID_COMMAND_PROVIDED; + goto cleanup; + } if (mount("none", mount_path, "cgroup", 0, controller) == 0) { - char *buf = stpncpy(hier_path, mount_path, strlen(mount_path)); + char *buf = stpncpy(hier_path, mount_path, EXECUTOR_PATH_MAX); *buf++ = '/'; snprintf(buf, EXECUTOR_PATH_MAX - (buf - hier_path), "%s", hierarchy); @@ -3083,6 +3146,9 @@ char* flatten(char **args) { total = total + strlen(args[i]) + 1; } char *buffer = (char *) malloc(total * sizeof(char)); + if (buffer == NULL) { + return NULL; + } char *to = NULL; to = buffer; for (int i = 0; args[i] != NULL; i++) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c index ff59b96d23362..1b91e8a3d6cd0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c @@ -68,8 +68,8 @@ static void display_usage(FILE *stream) { fprintf(stream, " container-executor \n" " where command and command-args: \n" \ - " initialize container: %2d appid tokens nm-local-dirs " - "nm-log-dirs cmd app...\n" + " initialize container: %2d appid containerid tokens nm-local-dirs " + "nm-log-dirs cmd...\n" " launch container: %2d appid containerid workdir " "container-script tokens http-option pidfile nm-local-dirs nm-log-dirs resources ", INITIALIZE_CONTAINER, LAUNCH_CONTAINER); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/runc/runc_reap.c b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/runc/runc_reap.c index b67c60492fe16..ff5329b179670 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/runc/runc_reap.c +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/runc/runc_reap.c @@ -446,6 +446,7 @@ static struct mntent* get_layer_mounts(size_t* num_entries_out, size_t num_entries = 0; size_t entries_capacity = num_entries_per_alloc; struct mntent* entries = malloc(sizeof(*entries) * entries_capacity); + struct mntent* new_entries; if (entries == NULL) { fputs("Unable to allocate memory\n", ERRORFILE); goto fail; @@ -484,11 +485,12 @@ static struct mntent* get_layer_mounts(size_t* num_entries_out, if (num_entries == entries_capacity) { entries_capacity += num_entries_per_alloc; - entries = realloc(entries, sizeof(*entries) * entries_capacity); - if (entries == NULL) { + new_entries = realloc(entries, sizeof(*entries) * entries_capacity); + if (new_entries == NULL) { fputs("Unable to allocate memory\n", ERRORFILE); goto fail; } + entries = new_entries; } if (!copy_mntent(entries + num_entries, me)) { diff 
--git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/util.c b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/util.c index 9567ccc001485..c8ee7b461e67b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/util.c +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/util.c @@ -25,6 +25,7 @@ char** split_delimiter(char *value, const char *delim) { char **return_values = NULL; + char **new_return_values; char *temp_tok = NULL; char *tempstr = NULL; int size = 0; @@ -60,8 +61,15 @@ char** split_delimiter(char *value, const char *delim) { // Make sure returned values has enough space for the trailing NULL. if (size >= return_values_size - 1) { return_values_size += per_alloc_size; - return_values = (char **) realloc(return_values,(sizeof(char *) * + new_return_values = (char **) realloc(return_values,(sizeof(char *) * return_values_size)); + if (!new_return_values) { + fprintf(ERRORFILE, "Reallocation error for return_values in %s.\n", + __func__); + failed = 1; + goto cleanup; + } + return_values = new_return_values; // Make sure new added memory are filled with NULL for (int i = size; i < return_values_size; i++) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/utils/docker-util.c b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/utils/docker-util.c index 8bc66b30f6446..b81468aa259d0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/utils/docker-util.c +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/utils/docker-util.c @@ -1385,7 +1385,9 @@ static char* get_docker_mount_options_string(mount_options *options) { return NULL; } - idx += sprintf(options_string, "%s", options->opts[0]); + if (options->num_opts > 0) { + idx += sprintf(options_string, "%s", options->opts[0]); + } for (i = 1; i < options->num_opts; i++) { idx += sprintf(options_string + idx, ",%s", options->opts[i]); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/utils/string-utils.c b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/utils/string-utils.c index 62d54a9ea62e4..e9e733d4ebadd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/utils/string-utils.c +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/utils/string-utils.c @@ -181,6 +181,7 @@ char *make_string(const char *fmt, ...) 
{ int ret = vsnprintf(buf, buflen, fmt, vargs); va_end(vargs); if (ret < 0) { + free(buf); buf = NULL; } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c index f75a5eafd69e0..f209ea53a0096 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c @@ -1322,6 +1322,46 @@ void test_trim_function() { free(trimmed); } +/** + * This test is used to verify that concatenate() works correctly + */ +void test_concatenate() { + char *concatenate(char *concat_pattern, char *return_path_name, int numArgs, ...); + printf("\nTesting concatenate function\n"); + + // numArgs: 0 + char *expected1 = "fixed1"; + char *actual1 = concatenate("fixed1", "test1", 0); + if (actual1 == NULL || strcmp(actual1, expected1) != 0) { + printf("FAIL: concatenate: test1: expected %s got %s\n", expected1, actual1); + exit(1); + } + + // numArgs: 1 + char *expected2 = "fixed1/var1"; + char *actual2 = concatenate("fixed1/%s", "test2", 1, "var1"); + if (actual2 == NULL || strcmp(actual2, expected2) != 0) { + printf("FAIL: concatenate: test2: expected %s got %s\n", expected2, actual2); + exit(1); + } + + // numArgs: 2 + char *expected3 = "fixed1/var1/fixed2/var2"; + char *actual3 = concatenate("fixed1/%s/fixed2/%s", "test3", 2, "var1", "var2"); + if (actual3 == NULL || strcmp(actual3, expected3) != 0) { + printf("FAIL: concatenate: test3: expected %s got %s\n", expected3, actual3); + exit(1); + } + + // concat_pattern with field width + char *expected4 = "[x ]"; + char *actual4 = concatenate("[%-10s]", "test4", 1, "x"); + if (actual4 == NULL || strcmp(actual4, expected4) != 0) { + printf("FAIL: concatenate: test4: expected %s got %s\n", expected4, actual4); + exit(1); + } +} + int is_empty(char *name); void test_is_empty() { @@ -1762,6 +1802,7 @@ int main(int argc, char **argv) { #endif test_trim_function(); + test_concatenate(); printf("\nFinished tests\n"); printf("\nAttempting to clean up from the run\n"); From f3eab2dbb65093d08c4711ce872bf5979a77bd12 Mon Sep 17 00:00:00 2001 From: Min Zhao Date: Thu, 3 Jun 2021 19:27:22 +0800 Subject: [PATCH 4/9] HADOOP-17742. 
fix distcp fail when copying to ftp filesystem --- .../apache/hadoop/fs/ftp/FTPFileSystem.java | 8 --- .../contract/ftp/TestFTPContractRename.java | 66 ------------------- .../apache/hadoop/tools/DistCpConstants.java | 4 ++ .../hadoop/tools/mapred/CopyCommitter.java | 3 +- .../mapred/RetriableFileCopyCommand.java | 15 +++-- .../apache/hadoop/tools/util/DistCpUtils.java | 16 +++++ 6 files changed, 30 insertions(+), 82 deletions(-) delete mode 100644 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java index 6899bb8d87426..dd070189e311f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java @@ -72,8 +72,6 @@ public class FTPFileSystem extends FileSystem { public static final String FS_FTP_DATA_CONNECTION_MODE = "fs.ftp.data.connection.mode"; public static final String FS_FTP_TRANSFER_MODE = "fs.ftp.transfer.mode"; - public static final String E_SAME_DIRECTORY_ONLY = - "only same directory renames are supported"; public static final String FS_FTP_TIMEOUT = "fs.ftp.timeout"; private URI uri; @@ -686,17 +684,11 @@ private boolean rename(FTPClient client, Path src, Path dst) + " already exists"); } String parentSrc = absoluteSrc.getParent().toUri().toString(); - String parentDst = absoluteDst.getParent().toUri().toString(); if (isParentOf(absoluteSrc, absoluteDst)) { throw new IOException("Cannot rename " + absoluteSrc + " under itself" + " : "+ absoluteDst); } - if (!parentSrc.equals(parentDst)) { - throw new IOException("Cannot rename source: " + absoluteSrc - + " to " + absoluteDst - + " -"+ E_SAME_DIRECTORY_ONLY); - } String from = absoluteSrc.getName(); String to = absoluteDst.getName(); client.changeWorkingDirectory(parentSrc); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java deleted file mode 100644 index fb8718bf24648..0000000000000 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.fs.contract.ftp; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.contract.AbstractContractRenameTest; -import org.apache.hadoop.fs.contract.AbstractFSContract; -import org.apache.hadoop.fs.ftp.FTPFileSystem; - -import java.io.IOException; - -public class TestFTPContractRename extends AbstractContractRenameTest { - - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new FTPContract(conf); - } - - /** - * Check the exception was about cross-directory renames - * -if not, rethrow it. - * @param e exception raised - * @throws IOException - */ - private void verifyUnsupportedDirRenameException(IOException e) throws IOException { - if (!e.toString().contains(FTPFileSystem.E_SAME_DIRECTORY_ONLY)) { - throw e; - } - } - - @Override - public void testRenameDirIntoExistingDir() throws Throwable { - try { - super.testRenameDirIntoExistingDir(); - fail("Expected a failure"); - } catch (IOException e) { - verifyUnsupportedDirRenameException(e); - } - } - - @Override - public void testRenameFileNonexistentDir() throws Throwable { - try { - super.testRenameFileNonexistentDir(); - fail("Expected a failure"); - } catch (IOException e) { - verifyUnsupportedDirRenameException(e); - } - } -} diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java index c75c0e85dd791..21920d57bbe71 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java @@ -192,4 +192,8 @@ private DistCpConstants() { public static final String CLASS_INSTANTIATION_ERROR_MSG = "Unable to instantiate "; + + public static final String TARGET_TEMP_FILE_PREFIX_COMMA = ".distcp.tmp."; + + public static final String TARGET_TEMP_FILE_PREFIX = "distcp.tmp."; } diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java index 2272781f72476..cd15c468a8ac4 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java @@ -170,8 +170,9 @@ private void deleteAttemptTempFiles(Path targetWorkPath, return; } + String tempFilePrefix = DistCpUtils.getTargetTempFilePrefix(targetFS); FileStatus[] tempFiles = targetFS.globStatus( - new Path(targetWorkPath, ".distcp.tmp." 
+ jobId.replaceAll("job","attempt") + "*"));
+        new Path(targetWorkPath, tempFilePrefix + jobId.replaceAll("job","attempt") + "*"));
 
     if (tempFiles != null && tempFiles.length > 0) {
       for (FileStatus file : tempFiles) {
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
index 9a72c9d7dbfde..c2b15ddc6c4af 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
@@ -129,16 +129,15 @@ private long doCopy(CopyListingFileStatus source, Path target,
       throws IOException {
     LOG.info("Copying {} to {}", source.getPath(), target);
 
+    final Configuration configuration = context.getConfiguration();
+    FileSystem targetFS = target.getFileSystem(configuration);
     final boolean toAppend = action == FileAction.APPEND;
     final boolean useTempTarget = !toAppend && !directWrite;
-    Path targetPath = useTempTarget ? getTempFile(target, context) : target;
+    Path targetPath = useTempTarget ? getTempFile(target, context, targetFS) : target;
 
     LOG.info("Writing to {} target file path {}", useTempTarget ? "temporary"
         : "direct", targetPath);
 
-    final Configuration configuration = context.getConfiguration();
-    FileSystem targetFS = target.getFileSystem(configuration);
-
     try {
       final Path sourcePath = source.getPath();
       final FileSystem sourceFS = sourcePath.getFileSystem(configuration);
@@ -161,6 +160,7 @@ private long doCopy(CopyListingFileStatus source, Path target,
       if (useTempTarget) {
         LOG.info("Renaming temporary target file path {} to {}", targetPath,
             target);
+        target = new Path(target.toUri().getPath());
         promoteTmpToTarget(targetPath, target, targetFS);
       }
       LOG.info("Completed writing {} ({} bytes)", target, bytesRead);
@@ -257,17 +257,18 @@ private void promoteTmpToTarget(Path tmpTarget, Path target, FileSystem fs)
     }
   }
 
-  private Path getTempFile(Path target, Mapper.Context context) {
+  private Path getTempFile(Path target, Mapper.Context context, FileSystem fileSystem) {
     Path targetWorkPath = new Path(context.getConfiguration().
         get(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH));
 
     Path root = target.equals(targetWorkPath) ? targetWorkPath.getParent()
         : targetWorkPath;
-    Path tempFile = new Path(root, ".distcp.tmp." +
+    String tempFilePrefix = DistCpUtils.getTargetTempFilePrefix(fileSystem);
+    Path tempFile = new Path(root, tempFilePrefix +
        context.getTaskAttemptID().toString() + "." +
        String.valueOf(System.currentTimeMillis()));
     LOG.info("Creating temp file: {}", tempFile);
-    return tempFile;
+    return new Path(tempFile.toUri().getPath());
   }
 
   @VisibleForTesting
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
index 1af434e19f823..e074576193057 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
@@ -662,4 +662,20 @@ public static Path getSplitChunkPath(Path targetFile,
         + ".____distcpSplit____" + srcFileStatus.getChunkOffset()
         + "." + srcFileStatus.getChunkLength());
   }
+
+  /**
+   * Return the target temp file prefix.
+   *
+   * The FTPFileSystem can't work well when the file name starts with a dot.
+   *
+   * @param fileSystem target filesystem
+   * @return temp file path prefix
+   */
+  public static String getTargetTempFilePrefix(FileSystem fileSystem) {
+    if (StringUtils.equalsIgnoreCase(fileSystem.getScheme(), "ftp")) {
+      return DistCpConstants.TARGET_TEMP_FILE_PREFIX;
+    } else {
+      return DistCpConstants.TARGET_TEMP_FILE_PREFIX_COMMA;
+    }
+  }
 }

From 38bd8a5e0717f1f22a6e91bc85c62c52a27d9c01 Mon Sep 17 00:00:00 2001
From: Min Zhao
Date: Sun, 6 Jun 2021 23:48:40 +0800
Subject: [PATCH 5/9] HADOOP-17742. fix distcp fail when copying to ftp filesystem

---
 .../java/org/apache/hadoop/tools/DistCpConstants.java  | 11 +++++++++--
 .../org/apache/hadoop/tools/mapred/CopyCommitter.java  |  5 +++--
 .../hadoop/tools/mapred/RetriableFileCopyCommand.java  | 11 ++++++-----
 .../org/apache/hadoop/tools/util/DistCpUtils.java      |  9 +++++----
 4 files changed, 23 insertions(+), 13 deletions(-)

diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
index 21920d57bbe71..9ee74814d4f37 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
@@ -193,7 +193,14 @@ private DistCpConstants() {
   public static final String CLASS_INSTANTIATION_ERROR_MSG =
       "Unable to instantiate ";
 
-  public static final String TARGET_TEMP_FILE_PREFIX_COMMA = ".distcp.tmp.";
+  /**
+   * The prefix of target temp file path.
+   */
+  public static final String TARGET_TEMP_FILE_PREFIX_DOT = ".distcp.tmp.";
 
-  public static final String TARGET_TEMP_FILE_PREFIX = "distcp.tmp.";
+  /**
+   * FTP filesystem can't work well when the path has a dot prefix, so the
+   * target tmp file path uses the prefix without a dot.
+ */ + public static final String TARGET_TEMP_FILE_PREFIX_FTP = "distcp.tmp."; } diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java index cd15c468a8ac4..d4f35a3b4973f 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java @@ -170,9 +170,10 @@ private void deleteAttemptTempFiles(Path targetWorkPath, return; } - String tempFilePrefix = DistCpUtils.getTargetTempFilePrefix(targetFS); + String tempFilePrefix = DistCpUtils.getTargetTempFilePrefix(targetWorkPath); FileStatus[] tempFiles = targetFS.globStatus( - new Path(targetWorkPath, tempFilePrefix + jobId.replaceAll("job","attempt") + "*")); + new Path(targetWorkPath, tempFilePrefix + + jobId.replaceAll("job", "attempt") + "*")); if (tempFiles != null && tempFiles.length > 0) { for (FileStatus file : tempFiles) { diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java index c2b15ddc6c4af..3b82b90c830a5 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java @@ -129,15 +129,16 @@ private long doCopy(CopyListingFileStatus source, Path target, throws IOException { LOG.info("Copying {} to {}", source.getPath(), target); - final Configuration configuration = context.getConfiguration(); - FileSystem targetFS = target.getFileSystem(configuration); final boolean toAppend = action == FileAction.APPEND; final boolean useTempTarget = !toAppend && !directWrite; - Path targetPath = useTempTarget ? getTempFile(target, context, targetFS) : target; + Path targetPath = useTempTarget ? getTempFile(target, context) : target; LOG.info("Writing to {} target file path {}", useTempTarget ? "temporary" : "direct", targetPath); + final Configuration configuration = context.getConfiguration(); + FileSystem targetFS = target.getFileSystem(configuration); + try { final Path sourcePath = source.getPath(); final FileSystem sourceFS = sourcePath.getFileSystem(configuration); @@ -257,13 +258,13 @@ private void promoteTmpToTarget(Path tmpTarget, Path target, FileSystem fs) } } - private Path getTempFile(Path target, Mapper.Context context, FileSystem fileSystem) { + private Path getTempFile(Path target, Mapper.Context context) { Path targetWorkPath = new Path(context.getConfiguration(). get(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH)); Path root = target.equals(targetWorkPath) ? targetWorkPath.getParent() : targetWorkPath; - String tempFilePrefix = DistCpUtils.getTargetTempFilePrefix(fileSystem); + String tempFilePrefix = DistCpUtils.getTargetTempFilePrefix(target); Path tempFile = new Path(root, tempFilePrefix + context.getTaskAttemptID().toString() + "." 
+ String.valueOf(System.currentTimeMillis())); diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java index e074576193057..a525688da7d8b 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java @@ -671,11 +671,12 @@ public static Path getSplitChunkPath(Path targetFile, * @param fileSystem target filesystem * @return temp file path prefix */ - public static String getTargetTempFilePrefix(FileSystem fileSystem) { - if (StringUtils.equalsIgnoreCase(fileSystem.getScheme(), "ftp")) { - return DistCpConstants.TARGET_TEMP_FILE_PREFIX; + public static String getTargetTempFilePrefix(Path targetPath) { + String schema = targetPath.toUri().getScheme(); + if (StringUtils.equalsIgnoreCase("ftp", schema)) { + return DistCpConstants.TARGET_TEMP_FILE_PREFIX_FTP; } else { - return DistCpConstants.TARGET_TEMP_FILE_PREFIX_COMMA; + return DistCpConstants.TARGET_TEMP_FILE_PREFIX_DOT; } } } From b1ccfb5958be3f34205b67aaebd8be00615b2f7d Mon Sep 17 00:00:00 2001 From: Viraj Jasani Date: Mon, 7 Jun 2021 08:21:29 +0530 Subject: [PATCH 6/9] MAPREDUCE-7350. Replace Guava Lists usage by Hadoop's own Lists in hadoop-mapreduce-project (#3074) --- .../hadoop/mapreduce/v2/app/MockJobs.java | 2 +- .../counters/CounterGroupFactory.java | 2 +- .../mapreduce/lib/input/FileInputFormat.java | 5 ++- .../mapreduce/util/CountersStrings.java | 3 +- .../hadoop/mapred/TestFileInputFormat.java | 3 +- .../lib/input/TestFileInputFormat.java | 3 +- .../mapred/nativetask/kvtest/KVTest.java | 2 +- .../uploader/TestFrameworkUploader.java | 2 +- .../hadoop-mapreduce-client/pom.xml | 32 +++++++++++++++++++ .../hadoop-mapreduce-examples/pom.xml | 32 +++++++++++++++++++ 10 files changed, 73 insertions(+), 13 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java index 586d86aee1d93..38ceeada62d5c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java @@ -61,6 +61,7 @@ import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.yarn.MockApps; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -70,7 +71,6 @@ import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.thirdparty.com.google.common.collect.Iterators; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.thirdparty.com.google.common.collect.Maps; public class MockJobs extends MockApps { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/CounterGroupFactory.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/CounterGroupFactory.java index 278d0a73a0cbd..8e0c864586384 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/CounterGroupFactory.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/CounterGroupFactory.java @@ -21,7 +21,6 @@ import java.util.List; import java.util.Map; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.thirdparty.com.google.common.collect.Maps; import org.apache.hadoop.classification.InterfaceAudience; @@ -30,6 +29,7 @@ import org.apache.hadoop.mapreduce.JobCounter; import org.apache.hadoop.mapreduce.TaskCounter; import org.apache.hadoop.mapreduce.util.ResourceBundles; +import org.apache.hadoop.util.Lists; /** * An abstract class to provide common implementation of the diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java index a71aaade013c0..5b0f88f868c70 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java @@ -41,15 +41,14 @@ import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.security.TokenCache; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StopWatch; import org.apache.hadoop.util.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; - -/** +/** * A base class for file-based {@link InputFormat}s. * *

FileInputFormat is the base class for all file-based diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/CountersStrings.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/CountersStrings.java index 9e32e70ede6a4..429425ad2d558 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/CountersStrings.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/CountersStrings.java @@ -21,13 +21,12 @@ import java.text.ParseException; import java.util.List; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.mapreduce.counters.AbstractCounters; import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.counters.CounterGroupBase; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.StringInterner; import org.apache.hadoop.util.StringUtils; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java index 0bf29a500661f..3f3cb24a1a5aa 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java @@ -33,6 +33,7 @@ import org.apache.hadoop.fs.RawLocalFileSystem; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; +import org.apache.hadoop.util.Lists; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -43,8 +44,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; - @RunWith(value = Parameterized.class) public class TestFileInputFormat { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java index 52b987aebb1c7..8103ce8234177 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java @@ -43,6 +43,7 @@ import org.apache.hadoop.mapred.SplitLocationInfo; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.Sets; import org.junit.After; import org.junit.Assert; @@ -54,8 +55,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; - @RunWith(value = Parameterized.class) public 
class TestFileInputFormat { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVTest.java index cee7675a4047f..0771c669148ee 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVTest.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVTest.java @@ -30,6 +30,7 @@ import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier; import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration; import org.apache.hadoop.mapred.nativetask.testutil.TestConstants; +import org.apache.hadoop.util.Lists; import org.junit.AfterClass; import org.apache.hadoop.util.NativeCodeLoader; import org.junit.Assume; @@ -42,7 +43,6 @@ import org.slf4j.LoggerFactory; import org.apache.hadoop.thirdparty.com.google.common.base.Splitter; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; @RunWith(Parameterized.class) public class KVTest { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/test/java/org/apache/hadoop/mapred/uploader/TestFrameworkUploader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/test/java/org/apache/hadoop/mapred/uploader/TestFrameworkUploader.java index ed4d9ce68c3c3..7749db1be846d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/test/java/org/apache/hadoop/mapred/uploader/TestFrameworkUploader.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/test/java/org/apache/hadoop/mapred/uploader/TestFrameworkUploader.java @@ -18,7 +18,6 @@ package org.apache.hadoop.mapred.uploader; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.io.FileUtils; @@ -32,6 +31,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.HdfsConfiguration; +import org.apache.hadoop.util.Lists; import org.junit.Assert; import org.junit.Assume; import org.junit.Before; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml index 55940ae4ff744..112a2c73e8ba8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml @@ -177,6 +177,38 @@ + + org.apache.maven.plugins + maven-enforcer-plugin + + + de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml index 48cf27efe437f..c4f89acb41f4f 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml +++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml @@ -155,6 +155,38 @@ + + org.apache.maven.plugins + maven-enforcer-plugin + + + de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + From 90581f72ca61a2fd4b3b2896661981210b297cc7 Mon Sep 17 00:00:00 2001 From: Viraj Jasani Date: Mon, 7 Jun 2021 09:54:09 +0530 Subject: [PATCH 7/9] HADOOP-17743. Replace Guava Lists usage by Hadoop's own Lists in hadoop-common, hadoop-tools and cloud-storage projects (#3072) --- .../hadoop-cloud-storage/pom.xml | 37 +++++++++++++++++++ .../hadoop-cos/pom.xml | 32 ++++++++++++++++ .../hadoop-huaweicloud/pom.xml | 32 ++++++++++++++++ .../apache/hadoop/fs/obs/OBSCommonUtils.java | 2 +- hadoop-common-project/hadoop-common/pom.xml | 32 ++++++++++++++++ .../org/apache/hadoop/crypto/CryptoCodec.java | 2 +- .../hadoop/fs/permission/AclStatus.java | 2 +- .../apache/hadoop/fs/permission/AclUtil.java | 2 +- .../apache/hadoop/fs/shell/AclCommands.java | 3 +- .../java/org/apache/hadoop/ha/NodeFencer.java | 2 +- .../org/apache/hadoop/http/HttpServer2.java | 2 +- .../metrics2/impl/MBeanInfoBuilder.java | 3 +- .../metrics2/impl/MetricsCollectorImpl.java | 3 +- .../impl/MetricsRecordBuilderImpl.java | 3 +- .../metrics2/impl/MetricsSystemImpl.java | 2 +- .../apache/hadoop/metrics2/util/Servers.java | 3 +- .../apache/hadoop/util/ChunkedArrayList.java | 1 - .../apache/hadoop/util/JvmPauseMonitor.java | 1 - .../java/org/apache/hadoop/util/ZKUtil.java | 1 - .../hadoop/conf/TestReconfiguration.java | 2 +- .../org/apache/hadoop/ha/DummyHAService.java | 2 +- .../org/apache/hadoop/ha/TestNodeFencer.java | 3 +- .../hadoop/ha/TestShellCommandFencer.java | 6 +-- .../util/TestApplicationClassLoader.java | 1 - .../hadoop/util/TestDirectBufferPool.java | 2 - hadoop-common-project/hadoop-registry/pom.xml | 33 ++++++++++++++++- .../client/impl/zk/RegistrySecurity.java | 2 +- hadoop-tools/hadoop-aws/pom.xml | 32 ++++++++++++++++ .../org/apache/hadoop/fs/s3a/S3AUtils.java | 2 +- .../hadoop/fs/s3a/auth/RolePolicies.java | 2 +- .../hadoop/fs/s3a/impl/RenameOperation.java | 2 +- .../s3a/s3guard/DumpS3GuardDynamoTable.java | 5 +-- .../fs/s3a/s3guard/DynamoDBMetadataStore.java | 2 +- .../fs/s3a/ITestS3AFailureHandling.java | 2 +- .../fs/s3a/ITestS3GuardListConsistency.java | 2 +- .../fs/s3a/commit/ITestCommitOperations.java | 2 +- .../fs/s3a/commit/TestMagicCommitPaths.java | 2 +- .../s3a/commit/staging/StagingTestBase.java | 2 +- .../TestStagingPartitionedFileListing.java | 2 +- .../TestStagingPartitionedTaskCommit.java | 2 +- .../s3a/impl/TestPartialDeleteFailures.java | 2 +- .../s3guard/ITestDynamoDBMetadataStore.java | 2 +- .../s3a/s3guard/TestPathOrderComparators.java | 12 +++--- hadoop-tools/hadoop-azure/pom.xml | 33 +++++++++++++++++ .../ITestAbfsIdentityTransformer.java | 2 +- .../ITestAzureBlobFileSystemCheckAccess.java | 2 +- .../azurebfs/ITestAzureBlobFilesystemAcl.java | 2 +- .../fs/azurebfs/ITestCustomerProvidedKey.java | 2 +- hadoop-tools/hadoop-distcp/pom.xml | 32 ++++++++++++++++ .../hadoop/tools/CopyListingFileStatus.java | 2 +- .../hadoop/tools/SimpleCopyListing.java | 2 +- .../hadoop/tools/util/TestDistCpUtils.java | 2 +- .../hadoop-dynamometer-infra/pom.xml | 32 
++++++++++++++++ .../tools/dynamometer/ApplicationMaster.java | 2 +- .../hadoop/tools/dynamometer/Client.java | 2 +- .../hadoop-dynamometer-workload/pom.xml | 32 ++++++++++++++++ .../workloadgenerator/CreateFileMapper.java | 2 +- .../audit/AuditReplayMapper.java | 2 +- hadoop-tools/hadoop-kafka/pom.xml | 32 ++++++++++++++++ .../metrics2/impl/TestKafkaMetrics.java | 2 +- 60 files changed, 411 insertions(+), 65 deletions(-) diff --git a/hadoop-cloud-storage-project/hadoop-cloud-storage/pom.xml b/hadoop-cloud-storage-project/hadoop-cloud-storage/pom.xml index a8f45a7f3a222..699ce1abfc692 100644 --- a/hadoop-cloud-storage-project/hadoop-cloud-storage/pom.xml +++ b/hadoop-cloud-storage-project/hadoop-cloud-storage/pom.xml @@ -32,6 +32,43 @@ cloud-storage + + + + org.apache.maven.plugins + maven-enforcer-plugin + + + de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + + + + org.apache.hadoop diff --git a/hadoop-cloud-storage-project/hadoop-cos/pom.xml b/hadoop-cloud-storage-project/hadoop-cos/pom.xml index fa47e354c7998..b1f9ccb6e3e04 100644 --- a/hadoop-cloud-storage-project/hadoop-cos/pom.xml +++ b/hadoop-cloud-storage-project/hadoop-cos/pom.xml @@ -96,6 +96,38 @@ + + org.apache.maven.plugins + maven-enforcer-plugin + + + de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + diff --git a/hadoop-cloud-storage-project/hadoop-huaweicloud/pom.xml b/hadoop-cloud-storage-project/hadoop-huaweicloud/pom.xml index 43360c11cd9d2..9386152c5dd7d 100755 --- a/hadoop-cloud-storage-project/hadoop-huaweicloud/pom.xml +++ b/hadoop-cloud-storage-project/hadoop-huaweicloud/pom.xml @@ -92,6 +92,38 @@ + + org.apache.maven.plugins + maven-enforcer-plugin + + + de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + diff --git a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSCommonUtils.java b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSCommonUtils.java index ba7550bc0d647..d477cec186b0e 100644 --- a/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSCommonUtils.java +++ b/hadoop-cloud-storage-project/hadoop-huaweicloud/src/main/java/org/apache/hadoop/fs/obs/OBSCommonUtils.java @@ -19,7 +19,6 @@ package org.apache.hadoop.fs.obs; import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import com.obs.services.ObsClient; import com.obs.services.exception.ObsException; import com.obs.services.model.AbortMultipartUploadRequest; @@ -53,6 +52,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathIOException; import 
org.apache.hadoop.security.ProviderUtils; +import org.apache.hadoop.util.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index f49a18398fc04..f322345c13c90 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -647,6 +647,38 @@ + + org.apache.maven.plugins + maven-enforcer-plugin + + + de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java index 111e91b5c969e..64c754faa59d8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java @@ -26,13 +26,13 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.PerformanceAdvisory; import org.apache.hadoop.util.ReflectionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.thirdparty.com.google.common.base.Splitter; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_KEY; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java index 674b88083d3dc..d05f9550c59ce 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java @@ -24,7 +24,7 @@ import org.apache.hadoop.thirdparty.com.google.common.base.Objects; import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; /** * An AclStatus contains the ACL information of a specific file. AclStatus diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclUtil.java index 58b24f200429b..1447e80f3eb2b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclUtil.java @@ -23,7 +23,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; /** * AclUtil contains utility methods for manipulating ACLs. 
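A note on the pattern in the hunks above and below: org.apache.hadoop.util.Lists is intended as a drop-in replacement for the Guava factory class, so each hunk only moves the import while the call sites stay untouched. A minimal sketch of the result, assuming the varargs newArrayList factory matches Guava's (the test hunks later in this patch call Lists.newArrayList directly); the class name and values here are illustrative, not taken from the patch:

    import java.util.List;

    import org.apache.hadoop.util.Lists;

    public class ListsSwapSketch {
      public static void main(String[] args) {
        // Call shape is identical to Guava's Lists.newArrayList; only the
        // import line changes in the migration.
        List<String> entries = Lists.newArrayList("user::rwx", "group::r-x");
        System.out.println(entries);
      }
    }

The per-module maven-enforcer-plugin additions in this patch then keep the banned Guava import from creeping back in.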
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java index dcff0094eccf5..7a8a9a24da625 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java @@ -22,8 +22,6 @@ import java.util.LinkedList; import java.util.List; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; - import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -35,6 +33,7 @@ import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.ScopedAclEntries; +import org.apache.hadoop.util.Lists; /** * Acl related operations diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java index 7f4a0790a3bc1..fb78a4c47dcde 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java @@ -25,10 +25,10 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java index 9f7562d35aa2c..9f81eed76a730 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java @@ -57,7 +57,6 @@ import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import com.sun.jersey.spi.container.servlet.ServletContainer; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; @@ -82,6 +81,7 @@ import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory; import org.apache.hadoop.security.ssl.FileMonitoringTimerTask; import org.apache.hadoop.security.ssl.SSLFactory; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java index a297072d236d4..b32cbdca839b2 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java @@ -22,12 +22,11 @@ import javax.management.MBeanAttributeInfo; import javax.management.MBeanInfo; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; - import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsInfo; import org.apache.hadoop.metrics2.MetricsTag; import org.apache.hadoop.metrics2.MetricsVisitor; +import org.apache.hadoop.util.Lists; /** * Helper class to build MBeanInfo from metrics records diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsCollectorImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsCollectorImpl.java index 4b4b70bd8e607..cce55d4368198 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsCollectorImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsCollectorImpl.java @@ -22,12 +22,13 @@ import java.util.List; import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.MetricsInfo; import org.apache.hadoop.metrics2.MetricsCollector; import org.apache.hadoop.metrics2.MetricsFilter; +import org.apache.hadoop.util.Lists; + import static org.apache.hadoop.metrics2.lib.Interns.*; @InterfaceAudience.Private diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsRecordBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsRecordBuilderImpl.java index 19e4c3b6d4187..ef0f2b2a14f93 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsRecordBuilderImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsRecordBuilderImpl.java @@ -21,8 +21,6 @@ import java.util.Collections; import java.util.List; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; - import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsInfo; import org.apache.hadoop.metrics2.MetricsCollector; @@ -30,6 +28,7 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.MetricsTag; import org.apache.hadoop.metrics2.lib.Interns; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.Time; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java index a6edf08e5a717..535ee914ef870 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java @@ -30,7 +30,6 @@ import java.util.TimerTask; import javax.management.ObjectName; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.thirdparty.com.google.common.collect.Maps; import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting; 
import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.*; @@ -58,6 +57,7 @@ import org.apache.hadoop.metrics2.lib.MetricsSourceBuilder; import org.apache.hadoop.metrics2.lib.MutableStat; import org.apache.hadoop.metrics2.util.MBeans; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; import org.slf4j.Logger; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/Servers.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/Servers.java index 2bd49e9f211ba..e8d32876f1c08 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/Servers.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/Servers.java @@ -22,11 +22,10 @@ import java.net.InetSocketAddress; import java.util.List; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.net.NetUtils; +import org.apache.hadoop.util.Lists; /** * Helpers to handle server addresses diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ChunkedArrayList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ChunkedArrayList.java index ff7197ce52e4d..55b75634cb68d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ChunkedArrayList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ChunkedArrayList.java @@ -26,7 +26,6 @@ import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; import org.apache.hadoop.thirdparty.com.google.common.collect.Iterables; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; /** * Simplified List implementation which stores elements as a list diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java index a792993493bca..feb4f9b9d3f01 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java @@ -29,7 +29,6 @@ import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.thirdparty.com.google.common.base.Joiner; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.thirdparty.com.google.common.collect.Maps; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java index 8e4e67d1b61e0..6d38c606c8c4c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java @@ -29,7 +29,6 @@ import org.apache.hadoop.thirdparty.com.google.common.base.Charsets; import org.apache.hadoop.thirdparty.com.google.common.base.Splitter; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.thirdparty.com.google.common.io.Files; /** 
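For reference, the maven-enforcer-plugin block that each pom.xml hunk in this patch adds has roughly the following shape. This is a sketch reconstructed from the element text visible in the hunks (banned-illegal-imports, process-sources, enforce, the reason string, and the two banned import names); the restrictImports element and its implementation attribute follow the de.skuzzle restrict-imports rule's documented configuration and are assumptions beyond what the hunks themselves show:

    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-enforcer-plugin</artifactId>
      <dependencies>
        <dependency>
          <groupId>de.skuzzle.enforcer</groupId>
          <artifactId>restrict-imports-enforcer-rule</artifactId>
          <version>${restrict-imports.enforcer.version}</version>
        </dependency>
      </dependencies>
      <executions>
        <execution>
          <id>banned-illegal-imports</id>
          <phase>process-sources</phase>
          <goals>
            <goal>enforce</goal>
          </goals>
          <configuration>
            <rules>
              <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
                <includeTestCode>true</includeTestCode>
                <reason>Use hadoop-common provided Lists rather than Guava provided Lists</reason>
                <bannedImports>
                  <bannedImport>org.apache.hadoop.thirdparty.com.google.common.collect.Lists</bannedImport>
                  <bannedImport>com.google.common.collect.Lists</bannedImport>
                </bannedImports>
              </restrictImports>
            </rules>
          </configuration>
        </execution>
      </executions>
    </plugin>

Binding the check to process-sources makes an offending import fail the build before compilation even starts.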
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java index 4948df9b1f4cb..0216551ad9822 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java @@ -19,8 +19,8 @@ package org.apache.hadoop.conf; import java.util.function.Supplier; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.test.GenericTestUtils; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.Time; import org.apache.hadoop.conf.ReconfigurationUtil.PropertyChange; import org.junit.Test; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java index b5739f7935ed7..7cb2ab1318bf8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java @@ -33,9 +33,9 @@ import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.AccessControlException; +import org.apache.hadoop.util.Lists; import org.mockito.Mockito; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestNodeFencer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestNodeFencer.java index 972113eefa91f..be67848e2120a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestNodeFencer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestNodeFencer.java @@ -24,13 +24,12 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.Shell; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; - public class TestNodeFencer { private HAServiceTarget MOCK_TARGET; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestShellCommandFencer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestShellCommandFencer.java index dcff9e30cdba2..88afb35a8dd9a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestShellCommandFencer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestShellCommandFencer.java @@ -21,9 +21,9 @@ import java.lang.reflect.Method; import java.net.InetSocketAddress; +import java.util.Arrays; import java.util.List; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.util.Shell; @@ -223,8 +223,8 @@ public void testCommandAbbreviation() { */ private static class LogAnswer implements Answer { - private static final List DELEGATE_METHODS = Lists.asList("error", - new String[]{"warn", "info", "debug", "trace"}); + private static final List DELEGATE_METHODS = Arrays.asList( + 
"error", "warn", "info", "debug", "trace"); @Override public Object answer(InvocationOnMock invocation) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java index 0fb887676274a..4be74ba8e7e97 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java @@ -42,7 +42,6 @@ import org.junit.Test; import org.apache.hadoop.thirdparty.com.google.common.base.Splitter; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; public class TestApplicationClassLoader { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDirectBufferPool.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDirectBufferPool.java index d6da2f86cc3c4..592f40aa16c2d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDirectBufferPool.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDirectBufferPool.java @@ -26,8 +26,6 @@ import org.junit.Test; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; - public class TestDirectBufferPool { final org.apache.hadoop.util.DirectBufferPool pool = new org.apache.hadoop.util.DirectBufferPool(); diff --git a/hadoop-common-project/hadoop-registry/pom.xml b/hadoop-common-project/hadoop-registry/pom.xml index 0058832e6110b..8e8b1e064e7e7 100644 --- a/hadoop-common-project/hadoop-registry/pom.xml +++ b/hadoop-common-project/hadoop-registry/pom.xml @@ -260,7 +260,38 @@ - + + org.apache.maven.plugins + maven-enforcer-plugin + + + de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistrySecurity.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistrySecurity.java index 065cbe3296b09..945381022b127 100644 --- a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistrySecurity.java +++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistrySecurity.java @@ -20,7 +20,6 @@ import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; import org.apache.hadoop.thirdparty.com.google.common.base.Splitter; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.apache.commons.lang3.StringUtils; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -29,6 +28,7 @@ import org.apache.hadoop.security.authentication.util.KerberosUtil; import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.service.ServiceStateException; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.ZKUtil; import org.apache.zookeeper.Environment; import org.apache.zookeeper.ZooDefs; diff --git a/hadoop-tools/hadoop-aws/pom.xml 
b/hadoop-tools/hadoop-aws/pom.xml index 0cab5ada2169d..414e69780ea6c 100644 --- a/hadoop-tools/hadoop-aws/pom.xml +++ b/hadoop-tools/hadoop-aws/pom.xml @@ -467,6 +467,38 @@ + + org.apache.maven.plugins + maven-enforcer-plugin + + + de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java index b6af8e7f27c20..220355aaa3f33 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java @@ -54,7 +54,7 @@ import org.apache.hadoop.security.ProviderUtils; import org.apache.hadoop.util.VersionInfo; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/RolePolicies.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/RolePolicies.java index 7b632e2d90a7b..22ced94f6dacc 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/RolePolicies.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/RolePolicies.java @@ -23,7 +23,7 @@ import java.util.Collections; import java.util.List; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RenameOperation.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RenameOperation.java index 7b13d0d3c7c42..efc789ba322fa 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RenameOperation.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RenameOperation.java @@ -28,7 +28,7 @@ import com.amazonaws.AmazonClientException; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.transfer.model.CopyResult; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/DumpS3GuardDynamoTable.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/DumpS3GuardDynamoTable.java index 2a7cb4c1b7247..e4c9ef6de8ba3 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/DumpS3GuardDynamoTable.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/DumpS3GuardDynamoTable.java @@ -35,7 +35,6 @@ import java.util.List; import com.amazonaws.services.dynamodbv2.xspec.ExpressionSpecBuilder; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -265,8 +264,8 @@ public int execute() throws ServiceLaunchException, IOException { * @param type of queue */ private void 
pushAll(Deque queue, List entries) { - List reversed = Lists.reverse(entries); - for (T t : reversed) { + Collections.reverse(entries); + for (T t : entries) { queue.push(t); } } diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/DynamoDBMetadataStore.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/DynamoDBMetadataStore.java index e4542eb1bcfa6..b113d2017829a 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/DynamoDBMetadataStore.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/DynamoDBMetadataStore.java @@ -67,7 +67,7 @@ import org.apache.hadoop.fs.s3a.impl.InternalConstants; import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListeningExecutorService; import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.MoreExecutors;; import org.slf4j.Logger; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java index 71a5794371107..e395207589812 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java @@ -20,7 +20,7 @@ import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.MultiObjectDeleteException; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.assertj.core.api.Assertions; import org.junit.Assume; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3GuardListConsistency.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3GuardListConsistency.java index 09f66df4c2ec0..17dc450707016 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3GuardListConsistency.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3GuardListConsistency.java @@ -32,7 +32,7 @@ import org.apache.hadoop.fs.contract.s3a.S3AContract; import com.amazonaws.services.s3.model.S3ObjectSummary; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.assertj.core.api.Assertions; import org.junit.Assume; import org.junit.Test; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/ITestCommitOperations.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/ITestCommitOperations.java index e8c5d888d10dc..2bc6434ccd494 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/ITestCommitOperations.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/ITestCommitOperations.java @@ -25,7 +25,7 @@ import java.util.List; import com.amazonaws.services.s3.model.PartETag; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.assertj.core.api.Assertions; import org.junit.Test; import org.slf4j.Logger; diff --git 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/TestMagicCommitPaths.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/TestMagicCommitPaths.java index 073922cbc0e5f..fdc4ec8058a6c 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/TestMagicCommitPaths.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/TestMagicCommitPaths.java @@ -22,7 +22,7 @@ import java.util.Arrays; import java.util.List; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.junit.Assert; import org.junit.Test; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/StagingTestBase.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/StagingTestBase.java index 4e425583a3cfd..6e13fd0227a3b 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/StagingTestBase.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/StagingTestBase.java @@ -44,7 +44,7 @@ import com.amazonaws.services.s3.model.MultipartUploadListing; import com.amazonaws.services.s3.model.UploadPartRequest; import com.amazonaws.services.s3.model.UploadPartResult; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.thirdparty.com.google.common.collect.Maps; import org.junit.AfterClass; import org.junit.Assert; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedFileListing.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedFileListing.java index ce55480323872..76a0de225371e 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedFileListing.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedFileListing.java @@ -28,7 +28,7 @@ import java.util.UUID; import java.util.stream.Collectors; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.junit.After; import org.junit.Test; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedTaskCommit.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedTaskCommit.java index 9e98a226ef150..fb252102491d6 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedTaskCommit.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedTaskCommit.java @@ -25,7 +25,7 @@ import java.util.UUID; import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.Sets; import org.assertj.core.api.Assertions; import org.junit.BeforeClass; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestPartialDeleteFailures.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestPartialDeleteFailures.java index 0d9ba1d304490..f43860e1e8b36 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestPartialDeleteFailures.java +++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestPartialDeleteFailures.java @@ -28,7 +28,7 @@ import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.MultiObjectDeleteException; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.assertj.core.api.Assertions; import org.junit.Before; import org.junit.Test; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestDynamoDBMetadataStore.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestDynamoDBMetadataStore.java index 580386a09f6b0..93a9e33aaaf48 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestDynamoDBMetadataStore.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestDynamoDBMetadataStore.java @@ -46,7 +46,7 @@ import com.amazonaws.services.dynamodbv2.model.Tag; import com.amazonaws.services.dynamodbv2.model.TagResourceRequest; import com.amazonaws.services.dynamodbv2.model.UntagResourceRequest; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.assertj.core.api.Assertions; import org.apache.commons.collections.CollectionUtils; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/TestPathOrderComparators.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/TestPathOrderComparators.java index 9b8e5918efee1..03233df69e198 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/TestPathOrderComparators.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/TestPathOrderComparators.java @@ -21,11 +21,11 @@ import java.util.Comparator; import java.util.List; +import org.apache.hadoop.util.Lists; import org.junit.Test; import org.apache.hadoop.fs.Path; -import static org.apache.hadoop.thirdparty.com.google.common.collect.Lists.newArrayList; import static org.apache.hadoop.fs.s3a.s3guard.PathOrderComparators.TOPMOST_PATH_FIRST; import static org.apache.hadoop.fs.s3a.s3guard.PathOrderComparators.TOPMOST_PATH_LAST; import static org.assertj.core.api.Assertions.assertThat; @@ -119,13 +119,13 @@ public void testSortOrderConstant() throws Throwable { List sort1 = verifySorted(ROOT, DIR_A, DIR_B, DIR_A_FILE_1, DIR_A_FILE_2, DIR_B_FILE_3, DIR_B_FILE_4); - List sort2 = newArrayList(sort1); + List sort2 = Lists.newArrayList(sort1); assertSortsTo(sort2, sort1, true); } @Test public void testSortReverse() throws Throwable { - List sort1 = newArrayList( + List sort1 = Lists.newArrayList( ROOT, DIR_A, DIR_B, @@ -133,7 +133,7 @@ public void testSortReverse() throws Throwable { DIR_A_FILE_2, DIR_B_FILE_3, DIR_B_FILE_4); - List expected = newArrayList( + List expected = Lists.newArrayList( DIR_B_FILE_4, DIR_B_FILE_3, DIR_A_FILE_2, @@ -146,8 +146,8 @@ public void testSortReverse() throws Throwable { private List verifySorted(Path... 
paths) { - List original = newArrayList(paths); - List sorted = newArrayList(paths); + List original = Lists.newArrayList(paths); + List sorted = Lists.newArrayList(paths); assertSortsTo(original, sorted, true); return sorted; } diff --git a/hadoop-tools/hadoop-azure/pom.xml b/hadoop-tools/hadoop-azure/pom.xml index d5d0f1fcd7303..59aae51e60788 100644 --- a/hadoop-tools/hadoop-azure/pom.xml +++ b/hadoop-tools/hadoop-azure/pom.xml @@ -101,6 +101,39 @@ + + org.apache.maven.plugins + maven-enforcer-plugin + + + de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + + diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsIdentityTransformer.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsIdentityTransformer.java index f0473789cf161..5868d083e12e9 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsIdentityTransformer.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsIdentityTransformer.java @@ -22,7 +22,7 @@ import java.util.List; import java.util.UUID; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.fs.azurebfs.oauth2.IdentityTransformer; import org.apache.hadoop.fs.permission.AclEntry; import org.junit.Test; diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCheckAccess.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCheckAccess.java index e52071d92e574..ebd64812d45b9 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCheckAccess.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCheckAccess.java @@ -22,7 +22,7 @@ import java.lang.reflect.Field; import java.util.List; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.junit.Assume; import org.junit.Test; import org.mockito.Mockito; diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFilesystemAcl.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFilesystemAcl.java index 74cf02a4f1f68..245ae846e3682 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFilesystemAcl.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFilesystemAcl.java @@ -18,7 +18,7 @@ package org.apache.hadoop.fs.azurebfs; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import java.io.FileNotFoundException; import java.util.List; diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestCustomerProvidedKey.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestCustomerProvidedKey.java index 9229905b4623c..11165c8ceb723 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestCustomerProvidedKey.java +++ 
b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestCustomerProvidedKey.java @@ -36,7 +36,7 @@ import org.apache.hadoop.fs.contract.ContractTestUtils; import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.assertj.core.api.Assertions; import org.junit.Assume; import org.junit.Test; diff --git a/hadoop-tools/hadoop-distcp/pom.xml b/hadoop-tools/hadoop-distcp/pom.xml index 7e5aaebc08513..8ff7ebb5a5f1c 100644 --- a/hadoop-tools/hadoop-distcp/pom.xml +++ b/hadoop-tools/hadoop-distcp/pom.xml @@ -223,6 +223,38 @@ + + org.apache.maven.plugins + maven-enforcer-plugin + + + de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java index 02c623157d8a1..b4a74405c4f7e 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java @@ -41,7 +41,7 @@ import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.thirdparty.com.google.common.base.Objects; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.thirdparty.com.google.common.collect.Maps; /** diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java index fb7ace5f1ead5..2e0319867d5f1 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java @@ -18,7 +18,7 @@ package org.apache.hadoop.tools; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java index 7d17167c3e8df..0a1f88e378586 100644 --- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java +++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java @@ -43,7 +43,7 @@ import org.junit.BeforeClass; import org.junit.Test; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import java.io.FileNotFoundException; import java.io.IOException; diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/pom.xml b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/pom.xml index d6e3d4b228d91..4b4367c6e0e05 100644 --- a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/pom.xml +++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/pom.xml @@ -137,6 +137,38 @@ + + org.apache.maven.plugins + maven-enforcer-plugin + + 
+ de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/ApplicationMaster.java b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/ApplicationMaster.java index 094721b98d58a..e44f811f0db41 100644 --- a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/ApplicationMaster.java +++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/ApplicationMaster.java @@ -18,7 +18,7 @@ package org.apache.hadoop.tools.dynamometer; import org.apache.hadoop.thirdparty.com.google.common.base.Joiner; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints; import java.io.IOException; import java.nio.ByteBuffer; diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/Client.java b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/Client.java index 3c8baec15c74f..1731780af9cd0 100644 --- a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/Client.java +++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/Client.java @@ -21,7 +21,7 @@ import org.apache.hadoop.thirdparty.com.google.common.base.Joiner; import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; import org.apache.hadoop.thirdparty.com.google.common.base.Splitter; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import java.util.Optional; import java.util.function.Supplier; import org.apache.hadoop.tools.dynamometer.workloadgenerator.audit.AuditReplayMapper; diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/pom.xml b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/pom.xml index eb54de36d4d68..bd016a8bff414 100644 --- a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/pom.xml +++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/pom.xml @@ -83,6 +83,38 @@ + + org.apache.maven.plugins + maven-enforcer-plugin + + + de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/main/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/CreateFileMapper.java b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/main/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/CreateFileMapper.java index 64b8dc28e67d0..318b47d19ff6f 100644 --- 
a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/main/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/CreateFileMapper.java +++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/main/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/CreateFileMapper.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.tools.dynamometer.workloadgenerator; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import java.io.IOException; import java.io.OutputStream; import java.net.URI; diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/main/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/audit/AuditReplayMapper.java b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/main/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/audit/AuditReplayMapper.java index c46f720bd0a87..fdd8f20d85bd5 100644 --- a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/main/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/audit/AuditReplayMapper.java +++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/main/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/audit/AuditReplayMapper.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.tools.dynamometer.workloadgenerator.audit; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import java.util.Optional; import java.util.function.Function; diff --git a/hadoop-tools/hadoop-kafka/pom.xml b/hadoop-tools/hadoop-kafka/pom.xml index d8c01eda23bee..2b021643fe512 100644 --- a/hadoop-tools/hadoop-kafka/pom.xml +++ b/hadoop-tools/hadoop-kafka/pom.xml @@ -70,6 +70,38 @@ + + org.apache.maven.plugins + maven-enforcer-plugin + + + de.skuzzle.enforcer + restrict-imports-enforcer-rule + ${restrict-imports.enforcer.version} + + + + + banned-illegal-imports + process-sources + + enforce + + + + + true + Use hadoop-common provided Lists rather than Guava provided Lists + + org.apache.hadoop.thirdparty.com.google.common.collect.Lists + com.google.common.collect.Lists + + + + + + + diff --git a/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java b/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java index fb19172359a5f..03c479fba59c5 100644 --- a/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java +++ b/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java @@ -18,7 +18,7 @@ package org.apache.hadoop.metrics2.impl; -import org.apache.hadoop.thirdparty.com.google.common.collect.Lists; +import org.apache.hadoop.util.Lists; import org.apache.commons.configuration2.SubsetConfiguration; import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricType; From 8b9e76ebba72f6071e02193e1524e406741e206e Mon Sep 17 00:00:00 2001 From: Akira Ajisaka Date: Mon, 7 Jun 2021 14:37:30 +0900 Subject: [PATCH 8/9] HDFS-16050. Some dynamometer tests fail. 
(#3079)

Signed-off-by: Takanobu Asanuma
---
 .../hadoop-dynamometer-blockgen/pom.xml  | 2 +-
 .../hadoop-dynamometer-infra/pom.xml     | 2 +-
 .../hadoop-dynamometer-workload/pom.xml  | 5 +++++
 3 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-blockgen/pom.xml b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-blockgen/pom.xml
index cb593e73df17d..2b8c4294066f3 100644
--- a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-blockgen/pom.xml
+++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-blockgen/pom.xml
@@ -36,7 +36,7 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/pom.xml b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/pom.xml
index 4b4367c6e0e05..1e30247cbb1e9 100644
--- a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/pom.xml
+++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/pom.xml
@@ -74,7 +74,7 @@
     </dependency>
     <dependency>
      <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/pom.xml b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/pom.xml
index bd016a8bff414..cb7000555ee66 100644
--- a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/pom.xml
+++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/pom.xml
@@ -39,6 +39,11 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-minicluster</artifactId>

From b3a9f9663190a4b2190b42b5fbfffd714829d8ac Mon Sep 17 00:00:00 2001
From: Min Zhao
Date: Mon, 7 Jun 2021 13:26:58 +0800
Subject: [PATCH 9/9] HADOOP-17742. fix distcp fail when copying to ftp
 filesystem

---
 .../apache/hadoop/fs/ftp/FTPFileSystem.java | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
index dd070189e311f..5876b53daa412 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
@@ -623,8 +623,8 @@ private boolean isFile(FTPClient client, Path file) {
   }

   /*
-   * Assuming that parent of both source and destination is the same. Is the
-   * assumption correct or it is suppose to work like 'move' ?
+   * The parent of source and destination can be different. It is supposed to
+   * work like 'move'.
    */
   @Override
   public boolean rename(Path src, Path dst) throws IOException {
@@ -683,15 +683,15 @@ private boolean rename(FTPClient client, Path src, Path dst)
       throw new FileAlreadyExistsException("Destination path " + dst
           + " already exists");
     }

-    String parentSrc = absoluteSrc.getParent().toUri().toString();
-    if (isParentOf(absoluteSrc, absoluteDst)) {
-      throw new IOException("Cannot rename " + absoluteSrc + " under itself"
-          + " : "+ absoluteDst);
+
+    Path dstParentPath = absoluteDst.getParent();
+    if (!exists(dstParentPath)) {
+      // if the parent path of dst does not exist, create it first.
+      mkdirs(dstParentPath);
     }
-    String from = absoluteSrc.getName();
-    String to = absoluteDst.getName();
-    client.changeWorkingDirectory(parentSrc);
+    String from = absoluteSrc.toUri().getPath();
+    String to = absoluteDst.toUri().getPath();
     boolean renamed = client.rename(from, to);
     return renamed;
   }
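To make the behavioral change concrete, a usage sketch (not part of the patch; the FTP host, credentials and paths below are placeholders): with the rewritten rename, a cross-directory move on FTPFileSystem creates the missing destination parent and issues the server-side rename with absolute paths instead of first changing the working directory.

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class FtpRenameSketch {
      public static void main(String[] args) throws Exception {
        // Placeholder connection details; any real values are assumptions.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(
            URI.create("ftp://user:secret@ftp.example.com"), conf);

        // Source and destination live under different parents. Before this
        // patch, FTPFileSystem.rename() assumed both sides shared a parent,
        // which is what made DistCp's final rename into place fail.
        Path src = new Path("/staging/part-00000");
        Path dst = new Path("/archive/2021/part-00000");

        // With the patch, /archive/2021 is created if absent and the rename
        // uses absolute paths, so it behaves like a 'move'.
        boolean moved = fs.rename(src, dst);
        System.out.println("moved=" + moved);
      }
    }

Dropping the changeWorkingDirectory call also removes the dependency on the FTP session's current directory, which is what had confined renames to a single parent.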