From 4731c13f92f146d989f77d4beeefb94ed8968eae Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Sat, 30 Mar 2024 20:42:07 +0100 Subject: [PATCH 1/8] HADOOP-19081. Move some hadoop-common code to new hadoop-ftp module --- hadoop-common-project/hadoop-common/pom.xml | 19 - hadoop-common-project/hadoop-ftp/pom.xml | 143 ++++++++ .../hadoop-ftp/src/main/conf/log4j.properties | 337 ++++++++++++++++++ .../apache/hadoop/fs/ftp/FTPException.java | 0 .../apache/hadoop/fs/ftp/FTPFileSystem.java | 0 .../apache/hadoop/fs/ftp/FTPInputStream.java | 0 .../apache/hadoop/fs/ftp/FtpConfigKeys.java | 0 .../java/org/apache/hadoop/fs/ftp/FtpFs.java | 0 .../hadoop/fs/sftp/SFTPConnectionPool.java | 0 .../apache/hadoop/fs/sftp/SFTPFileSystem.java | 0 .../hadoop/fs/sftp/SFTPInputStream.java | 0 .../apache/hadoop/fs/sftp/package-info.java | 0 .../hadoop/fs/contract/ftp/FTPContract.java | 0 .../contract/ftp/TestFTPContractCreate.java | 0 .../contract/ftp/TestFTPContractDelete.java | 0 .../fs/contract/ftp/TestFTPContractMkdir.java | 0 .../fs/contract/ftp/TestFTPContractOpen.java | 0 .../contract/ftp/TestFTPContractRename.java | 0 .../hadoop/fs/contract/ftp/package.html | 0 .../hadoop/fs/contract/sftp/SFTPContract.java | 0 .../contract/sftp/TestSFTPContractSeek.java | 0 .../apache/hadoop/fs/ftp/FtpTestServer.java | 0 .../hadoop/fs/ftp/TestFTPFileSystem.java | 0 .../hadoop/fs/sftp/TestSFTPFileSystem.java | 0 .../src/test/resources/contract/ftp.xml | 0 .../src/test/resources/contract/sftp.xml | 0 hadoop-common-project/pom.xml | 1 + 27 files changed, 481 insertions(+), 19 deletions(-) create mode 100644 hadoop-common-project/hadoop-ftp/pom.xml create mode 100644 hadoop-common-project/hadoop-ftp/src/main/conf/log4j.properties rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/main/java/org/apache/hadoop/fs/ftp/FTPException.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/main/java/org/apache/hadoop/fs/sftp/SFTPInputStream.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/main/java/org/apache/hadoop/fs/sftp/package-info.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractCreate.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractDelete.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractMkdir.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractOpen.java (100%) rename 
hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/java/org/apache/hadoop/fs/contract/ftp/package.html (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/java/org/apache/hadoop/fs/contract/sftp/SFTPContract.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/java/org/apache/hadoop/fs/contract/sftp/TestSFTPContractSeek.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/java/org/apache/hadoop/fs/ftp/FtpTestServer.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/resources/contract/ftp.xml (100%) rename hadoop-common-project/{hadoop-common => hadoop-ftp}/src/test/resources/contract/sftp.xml (100%) diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index 9f6c91a36af9b..b383b8a05707a 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -289,10 +289,6 @@ hadoop-minikdc test - - com.jcraft - jsch - org.apache.curator curator-test @@ -311,21 +307,6 @@ jsr305 compile - - org.apache.sshd - sshd-core - test - - - org.apache.sshd - sshd-sftp - test - - - org.apache.ftpserver - ftpserver-core - test - org.apache.zookeeper diff --git a/hadoop-common-project/hadoop-ftp/pom.xml b/hadoop-common-project/hadoop-ftp/pom.xml new file mode 100644 index 0000000000000..2d73678c8f156 --- /dev/null +++ b/hadoop-common-project/hadoop-ftp/pom.xml @@ -0,0 +1,143 @@ + + + + 4.0.0 + + org.apache.hadoop + hadoop-project-dist + 3.5.0-SNAPSHOT + ../../hadoop-project-dist + + hadoop-ftp + 3.5.0-SNAPSHOT + Apache Hadoop FTP/SFTP support + Apache Hadoop FTP/SFTP support + jar + + + common + true + true + ../etc/hadoop + wsce-site.xml + + + + + org.apache.hadoop + hadoop-annotations + compile + + + org.apache.hadoop + hadoop-common + compile + + + commons-net + commons-net + compile + + + ch.qos.reload4j + reload4j + compile + + + com.jcraft + jsch + compile + + + org.slf4j + slf4j-api + compile + + + org.apache.hadoop + hadoop-common + test-jar + test + + + junit + junit + test + + + org.assertj + assertj-core + test + + + org.slf4j + slf4j-reload4j + compile + + + org.mockito + mockito-core + test + + + org.apache.sshd + sshd-core + test + + + org.apache.sshd + sshd-sftp + test + + + org.apache.ftpserver + ftpserver-core + test + + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + + ${runningWithNative} + + + + listener + org.apache.hadoop.test.TimedOutTestsListener + + + + + + + org.apache.rat + apache-rat-plugin + + + + + + + + + diff --git a/hadoop-common-project/hadoop-ftp/src/main/conf/log4j.properties b/hadoop-common-project/hadoop-ftp/src/main/conf/log4j.properties new file mode 100644 index 0000000000000..873e8366800a9 --- /dev/null +++ b/hadoop-common-project/hadoop-ftp/src/main/conf/log4j.properties @@ -0,0 +1,337 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Define some default values that can be overridden by system properties +hadoop.root.logger=INFO,console +hadoop.log.dir=. +hadoop.log.file=hadoop.log + +# Define the root logger to the system property "hadoop.root.logger". +log4j.rootLogger=${hadoop.root.logger} + +# Logging Threshold +log4j.threshold=ALL + +# Null Appender +log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender + +# +# Rolling File Appender - cap space usage at 5gb. +# +hadoop.log.maxfilesize=256MB +hadoop.log.maxbackupindex=20 +log4j.appender.RFA=org.apache.log4j.RollingFileAppender +log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file} + +log4j.appender.RFA.MaxFileSize=${hadoop.log.maxfilesize} +log4j.appender.RFA.MaxBackupIndex=${hadoop.log.maxbackupindex} + +log4j.appender.RFA.layout=org.apache.log4j.PatternLayout + +# Pattern format: Date LogLevel LoggerName LogMessage +log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n +# Debugging Pattern format +#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n + + +# +# Daily Rolling File Appender +# + +log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender +log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file} + +# Rollover at midnight +log4j.appender.DRFA.DatePattern=.yyyy-MM-dd + +log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout + +# Pattern format: Date LogLevel LoggerName LogMessage +log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n +# Debugging Pattern format +#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n + + +# +# console +# Add "console" to rootlogger above if you want to use this +# + +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.target=System.err +log4j.appender.console.layout=org.apache.log4j.PatternLayout +log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n + +# +# HDFS block state change log from block manager +# +# Uncomment the following to log normal block state change +# messages from BlockManager in NameNode. 
+#log4j.logger.BlockStateChange=DEBUG + +# +#Security appender +# +hadoop.security.logger=INFO,NullAppender +hadoop.security.log.maxfilesize=256MB +hadoop.security.log.maxbackupindex=20 +log4j.category.SecurityLogger=${hadoop.security.logger} +hadoop.security.log.file=SecurityAuth-${user.name}.audit +log4j.appender.RFAS=org.apache.log4j.RollingFileAppender +log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file} +log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout +log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n +log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize} +log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex} + +# +# Daily Rolling Security appender +# +log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender +log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file} +log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout +log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n +log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd + +# +# hadoop configuration logging +# + +# Uncomment the following line to turn off configuration deprecation warnings. +# log4j.logger.org.apache.hadoop.conf.Configuration.deprecation=WARN + +# +# hdfs audit logging +# +hdfs.audit.logger=INFO,NullAppender +hdfs.audit.log.maxfilesize=256MB +hdfs.audit.log.maxbackupindex=20 +log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger} +log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false +log4j.appender.RFAAUDIT=org.apache.log4j.RollingFileAppender +log4j.appender.RFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log +log4j.appender.RFAAUDIT.layout=org.apache.log4j.PatternLayout +log4j.appender.RFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n +log4j.appender.RFAAUDIT.MaxFileSize=${hdfs.audit.log.maxfilesize} +log4j.appender.RFAAUDIT.MaxBackupIndex=${hdfs.audit.log.maxbackupindex} + +# +# NameNode metrics logging. +# The default is to retain two namenode-metrics.log files up to 64MB each. +# +namenode.metrics.logger=INFO,NullAppender +log4j.logger.NameNodeMetricsLog=${namenode.metrics.logger} +log4j.additivity.NameNodeMetricsLog=false +log4j.appender.NNMETRICSRFA=org.apache.log4j.RollingFileAppender +log4j.appender.NNMETRICSRFA.File=${hadoop.log.dir}/namenode-metrics.log +log4j.appender.NNMETRICSRFA.layout=org.apache.log4j.PatternLayout +log4j.appender.NNMETRICSRFA.layout.ConversionPattern=%d{ISO8601} %m%n +log4j.appender.NNMETRICSRFA.MaxBackupIndex=1 +log4j.appender.NNMETRICSRFA.MaxFileSize=64MB + +# +# DataNode metrics logging. +# The default is to retain two datanode-metrics.log files up to 64MB each. 
+# +datanode.metrics.logger=INFO,NullAppender +log4j.logger.DataNodeMetricsLog=${datanode.metrics.logger} +log4j.additivity.DataNodeMetricsLog=false +log4j.appender.DNMETRICSRFA=org.apache.log4j.RollingFileAppender +log4j.appender.DNMETRICSRFA.File=${hadoop.log.dir}/datanode-metrics.log +log4j.appender.DNMETRICSRFA.layout=org.apache.log4j.PatternLayout +log4j.appender.DNMETRICSRFA.layout.ConversionPattern=%d{ISO8601} %m%n +log4j.appender.DNMETRICSRFA.MaxBackupIndex=1 +log4j.appender.DNMETRICSRFA.MaxFileSize=64MB + +# Custom Logging levels + +#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG +#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG +#log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=DEBUG + + +# AWS SDK & S3A FileSystem +#log4j.logger.com.amazonaws=ERROR +log4j.logger.com.amazonaws.http.AmazonHttpClient=ERROR +#log4j.logger.org.apache.hadoop.fs.s3a.S3AFileSystem=WARN + + +# +# shuffle connection log from shuffleHandler +# Uncomment the following line to enable logging of shuffle connections +# log4j.logger.org.apache.hadoop.mapred.ShuffleHandler.audit=DEBUG + +# +# Yarn ResourceManager Application Summary Log +# +# Set the ResourceManager summary log filename +yarn.server.resourcemanager.appsummary.log.file=rm-appsummary.log +# Set the ResourceManager summary log level and appender +yarn.server.resourcemanager.appsummary.logger=${hadoop.root.logger} +#yarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY + +# To enable AppSummaryLogging for the RM, +# set yarn.server.resourcemanager.appsummary.logger to +# ,RMSUMMARY in hadoop-env.sh + +# Appender for ResourceManager Application Summary Log +# Requires the following properties to be set +# - hadoop.log.dir (Hadoop Log directory) +# - yarn.server.resourcemanager.appsummary.log.file (resource manager app summary log filename) +# - yarn.server.resourcemanager.appsummary.logger (resource manager app summary log level and appender) + +log4j.logger.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$ApplicationSummary=${yarn.server.resourcemanager.appsummary.logger} +log4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$ApplicationSummary=false +log4j.appender.RMSUMMARY=org.apache.log4j.RollingFileAppender +log4j.appender.RMSUMMARY.File=${hadoop.log.dir}/${yarn.server.resourcemanager.appsummary.log.file} +log4j.appender.RMSUMMARY.MaxFileSize=256MB +log4j.appender.RMSUMMARY.MaxBackupIndex=20 +log4j.appender.RMSUMMARY.layout=org.apache.log4j.PatternLayout +log4j.appender.RMSUMMARY.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n + +# +# YARN ResourceManager audit logging +# +rm.audit.logger=INFO,NullAppender +rm.audit.log.maxfilesize=256MB +rm.audit.log.maxbackupindex=20 +log4j.logger.org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger=${rm.audit.logger} +log4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger=false +log4j.appender.RMAUDIT=org.apache.log4j.RollingFileAppender +log4j.appender.RMAUDIT.File=${hadoop.log.dir}/rm-audit.log +log4j.appender.RMAUDIT.layout=org.apache.log4j.PatternLayout +log4j.appender.RMAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n +log4j.appender.RMAUDIT.MaxFileSize=${rm.audit.log.maxfilesize} +log4j.appender.RMAUDIT.MaxBackupIndex=${rm.audit.log.maxbackupindex} + +# +# YARN NodeManager audit logging +# +nm.audit.logger=INFO,NullAppender +nm.audit.log.maxfilesize=256MB +nm.audit.log.maxbackupindex=20 +log4j.logger.org.apache.hadoop.yarn.server.nodemanager.NMAuditLogger=${nm.audit.logger} 
+log4j.additivity.org.apache.hadoop.yarn.server.nodemanager.NMAuditLogger=false +log4j.appender.NMAUDIT=org.apache.log4j.RollingFileAppender +log4j.appender.NMAUDIT.File=${hadoop.log.dir}/nm-audit.log +log4j.appender.NMAUDIT.layout=org.apache.log4j.PatternLayout +log4j.appender.NMAUDIT.layout.ConversionPattern=%d{ISO8601}%p %c{2}: %m%n +log4j.appender.NMAUDIT.MaxFileSize=${nm.audit.log.maxfilesize} +log4j.appender.NMAUDIT.MaxBackupIndex=${nm.audit.log.maxbackupindex} + +# +# YARN Router audit logging +# +router.audit.logger=INFO,NullAppender +router.audit.log.maxfilesize=256MB +router.audit.log.maxbackupindex=20 +log4j.logger.org.apache.hadoop.yarn.server.router.RouterAuditLogger=${router.audit.logger} +log4j.additivity.org.apache.hadoop.yarn.server.router.RouterAuditLogger=false +log4j.appender.ROUTERAUDIT=org.apache.log4j.RollingFileAppender +log4j.appender.ROUTERAUDIT.File=${hadoop.log.dir}/router-audit.log +log4j.appender.ROUTERAUDIT.layout=org.apache.log4j.PatternLayout +log4j.appender.ROUTERAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n +log4j.appender.ROUTERAUDIT.MaxFileSize=${router.audit.log.maxfilesize} +log4j.appender.ROUTERAUDIT.MaxBackupIndex=${router.audit.log.maxbackupindex} + +# HS audit log configs +#mapreduce.hs.audit.logger=INFO,HSAUDIT +#log4j.logger.org.apache.hadoop.mapreduce.v2.hs.HSAuditLogger=${mapreduce.hs.audit.logger} +#log4j.additivity.org.apache.hadoop.mapreduce.v2.hs.HSAuditLogger=false +#log4j.appender.HSAUDIT=org.apache.log4j.DailyRollingFileAppender +#log4j.appender.HSAUDIT.File=${hadoop.log.dir}/hs-audit.log +#log4j.appender.HSAUDIT.layout=org.apache.log4j.PatternLayout +#log4j.appender.HSAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n +#log4j.appender.HSAUDIT.DatePattern=.yyyy-MM-dd + +# Http Server Request Logs +#log4j.appender.AccessNNDRFA=org.apache.log4j.DailyRollingFileAppender +#log4j.appender.AccessNNDRFA.File=${hadoop.log.dir}/jetty-namenode.log +#log4j.appender.AccessNNDRFA.DatePattern=.yyyy-MM-dd +#log4j.appender.AccessNNDRFA.layout=org.apache.log4j.PatternLayout +#log4j.appender.AccessNNDRFA.layout.ConversionPattern=%m%n + +#log4j.logger.http.requests.namenode=INFO,AccessNNDRFA + +#log4j.appender.AccessDNDRFA=org.apache.log4j.DailyRollingFileAppender +#log4j.appender.AccessDNDRFA.File=${hadoop.log.dir}/jetty-datanode.log +#log4j.appender.AccessDNDRFA.DatePattern=.yyyy-MM-dd +#log4j.appender.AccessDNDRFA.layout=org.apache.log4j.PatternLayout +#log4j.appender.AccessDNDRFA.layout.ConversionPattern=%m%n + +#log4j.logger.http.requests.datanode=INFO,AccessDNDRFA + +#log4j.appender.AccessRMDRFA=org.apache.log4j.DailyRollingFileAppender +#log4j.appender.AccessRMDRFA.File=${hadoop.log.dir}/jetty-resourcemanager.log +#log4j.appender.AccessRMDRFA.DatePattern=.yyyy-MM-dd +#log4j.appender.AccessRMDRFA.layout=org.apache.log4j.PatternLayout +#log4j.appender.AccessRMDRFA.layout.ConversionPattern=%m%n + +#log4j.logger.http.requests.resourcemanager=INFO,AccessRMDRFA + +#log4j.appender.AccessJHDRFA=org.apache.log4j.DailyRollingFileAppender +#log4j.appender.AccessJHDRFA.File=${hadoop.log.dir}/jetty-jobhistory.log +#log4j.appender.AccessJHDRFA.DatePattern=.yyyy-MM-dd +#log4j.appender.AccessJHDRFA.layout=org.apache.log4j.PatternLayout +#log4j.appender.AccessJHDRFA.layout.ConversionPattern=%m%n + +#log4j.logger.http.requests.jobhistory=INFO,AccessJHDRFA + +#log4j.appender.AccessNMDRFA=org.apache.log4j.DailyRollingFileAppender +#log4j.appender.AccessNMDRFA.File=${hadoop.log.dir}/jetty-jobhistory.log 
+#log4j.appender.AccessNMDRFA.DatePattern=.yyyy-MM-dd +#log4j.appender.AccessNMDRFA.layout=org.apache.log4j.PatternLayout +#log4j.appender.AccessNMDRFA.layout.ConversionPattern=%m%n + +#log4j.logger.http.requests.nodemanager=INFO,AccessNMDRFA + +# WebHdfs request log on datanodes +# Specify -Ddatanode.webhdfs.logger=INFO,HTTPDRFA on datanode startup to +# direct the log to a separate file. +#datanode.webhdfs.logger=INFO,console +#log4j.logger.datanode.webhdfs=${datanode.webhdfs.logger} +#log4j.appender.HTTPDRFA=org.apache.log4j.DailyRollingFileAppender +#log4j.appender.HTTPDRFA.File=${hadoop.log.dir}/hadoop-datanode-webhdfs.log +#log4j.appender.HTTPDRFA.layout=org.apache.log4j.PatternLayout +#log4j.appender.HTTPDRFA.layout.ConversionPattern=%d{ISO8601} %m%n +#log4j.appender.HTTPDRFA.DatePattern=.yyyy-MM-dd + + +# Appender for viewing information for errors and warnings +yarn.ewma.cleanupInterval=300 +yarn.ewma.messageAgeLimitSeconds=86400 +yarn.ewma.maxUniqueMessages=250 +log4j.appender.EWMA=org.apache.hadoop.yarn.util.Log4jWarningErrorMetricsAppender +log4j.appender.EWMA.cleanupInterval=${yarn.ewma.cleanupInterval} +log4j.appender.EWMA.messageAgeLimitSeconds=${yarn.ewma.messageAgeLimitSeconds} +log4j.appender.EWMA.maxUniqueMessages=${yarn.ewma.maxUniqueMessages} + +# +# Fair scheduler state dump +# +# Use following logger to dump the state to a separate file + +#log4j.logger.org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler.statedump=DEBUG,FSSTATEDUMP +#log4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler.statedump=false +#log4j.appender.FSSTATEDUMP=org.apache.log4j.RollingFileAppender +#log4j.appender.FSSTATEDUMP.File=${hadoop.log.dir}/fairscheduler-statedump.log +#log4j.appender.FSSTATEDUMP.layout=org.apache.log4j.PatternLayout +#log4j.appender.FSSTATEDUMP.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n +#log4j.appender.FSSTATEDUMP.MaxFileSize=${hadoop.log.maxfilesize} +#log4j.appender.FSSTATEDUMP.MaxBackupIndex=${hadoop.log.maxbackupindex} + +# Log levels of third-party libraries +log4j.logger.org.apache.commons.beanutils=WARN diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPException.java b/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPException.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPException.java rename to hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPException.java diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java b/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java rename to hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java b/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java rename to hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java 
b/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java rename to hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java b/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java rename to hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java b/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java rename to hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java b/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java rename to hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPInputStream.java b/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPInputStream.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPInputStream.java rename to hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPInputStream.java diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/package-info.java b/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/package-info.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/package-info.java rename to hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/package-info.java diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractCreate.java b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractCreate.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractCreate.java rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractCreate.java diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractDelete.java b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractDelete.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractDelete.java rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractDelete.java diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractMkdir.java b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractMkdir.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractMkdir.java rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractMkdir.java diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractOpen.java b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractOpen.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractOpen.java rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractOpen.java diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/package.html b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/package.html similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/package.html rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/package.html diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/sftp/SFTPContract.java b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/SFTPContract.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/sftp/SFTPContract.java rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/SFTPContract.java diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/sftp/TestSFTPContractSeek.java b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/TestSFTPContractSeek.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/sftp/TestSFTPContractSeek.java rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/TestSFTPContractSeek.java diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/FtpTestServer.java 
b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/FtpTestServer.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/FtpTestServer.java rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/FtpTestServer.java diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java diff --git a/hadoop-common-project/hadoop-common/src/test/resources/contract/ftp.xml b/hadoop-common-project/hadoop-ftp/src/test/resources/contract/ftp.xml similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/resources/contract/ftp.xml rename to hadoop-common-project/hadoop-ftp/src/test/resources/contract/ftp.xml diff --git a/hadoop-common-project/hadoop-common/src/test/resources/contract/sftp.xml b/hadoop-common-project/hadoop-ftp/src/test/resources/contract/sftp.xml similarity index 100% rename from hadoop-common-project/hadoop-common/src/test/resources/contract/sftp.xml rename to hadoop-common-project/hadoop-ftp/src/test/resources/contract/sftp.xml diff --git a/hadoop-common-project/pom.xml b/hadoop-common-project/pom.xml index 40e3f9190bf23..930a956361a56 100644 --- a/hadoop-common-project/pom.xml +++ b/hadoop-common-project/pom.xml @@ -38,6 +38,7 @@ hadoop-minikdc hadoop-kms hadoop-registry + hadoop-ftp From 79d1b255d1bef88342b262a3614178da8d39fb9b Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Sat, 30 Mar 2024 20:45:59 +0100 Subject: [PATCH 2/8] Update pom.xml --- hadoop-common-project/hadoop-ftp/pom.xml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/hadoop-common-project/hadoop-ftp/pom.xml b/hadoop-common-project/hadoop-ftp/pom.xml index 2d73678c8f156..89ab6807e2ae6 100644 --- a/hadoop-common-project/hadoop-ftp/pom.xml +++ b/hadoop-common-project/hadoop-ftp/pom.xml @@ -29,14 +29,6 @@ Apache Hadoop FTP/SFTP support jar - - common - true - true - ../etc/hadoop - wsce-site.xml - - org.apache.hadoop From aed1b0129f38f12d9723608e05f1cbbe4c0b8b61 Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Sat, 30 Mar 2024 22:36:00 +0100 Subject: [PATCH 3/8] test issue --- .../conf/TestCommonConfigurationFields.java | 8 - .../conf/TestFtpConfigurationFields.java | 254 ++++++++++++++++++ 2 files changed, 254 insertions(+), 8 deletions(-) create mode 100644 hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/conf/TestFtpConfigurationFields.java diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java index 210b36f2dbd1f..66654dc0273d4 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java @@ -23,7 +23,6 @@ import org.apache.hadoop.crypto.key.kms.KMSClientProvider; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; -import org.apache.hadoop.fs.ftp.FtpConfigKeys; import org.apache.hadoop.fs.local.LocalConfigKeys; import org.apache.hadoop.ha.SshFenceByTcpPort; import org.apache.hadoop.ha.ZKFailoverController; @@ -40,7 +39,6 @@ * {@link org.apache.hadoop.fs.AbstractFileSystem} * {@link org.apache.hadoop.fs.CommonConfigurationKeys} * {@link org.apache.hadoop.fs.CommonConfigurationKeysPublic} - * {@link org.apache.hadoop.fs.ftp.FtpConfigKeys} * {@link org.apache.hadoop.fs.local.LocalConfigKeys} * {@link org.apache.hadoop.ha.SshFenceByTcpPort} * {@link org.apache.hadoop.http.HttpServer2} @@ -65,7 +63,6 @@ public void initializeMemberVariables() { CommonConfigurationKeys.class, CommonConfigurationKeysPublic.class, LocalConfigKeys.class, - FtpConfigKeys.class, SshFenceByTcpPort.class, LdapGroupsMapping.class, ZKFailoverController.class, @@ -85,11 +82,6 @@ public void initializeMemberVariables() { errorIfMissingXmlProps = false; // Lots of properties not in the above classes - xmlPropsToSkipCompare.add("fs.ftp.password.localhost"); - xmlPropsToSkipCompare.add("fs.ftp.user.localhost"); - xmlPropsToSkipCompare.add("fs.ftp.data.connection.mode"); - xmlPropsToSkipCompare.add("fs.ftp.transfer.mode"); - xmlPropsToSkipCompare.add("fs.ftp.timeout"); xmlPropsToSkipCompare.add("hadoop.tmp.dir"); xmlPropsToSkipCompare.add("nfs3.mountd.port"); xmlPropsToSkipCompare.add("nfs3.server.port"); diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/conf/TestFtpConfigurationFields.java b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/conf/TestFtpConfigurationFields.java new file mode 100644 index 0000000000000..1a70706e34a09 --- /dev/null +++ b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/conf/TestFtpConfigurationFields.java @@ -0,0 +1,254 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.conf; + +import java.util.HashSet; + +import org.apache.hadoop.crypto.key.kms.KMSClientProvider; +import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.apache.hadoop.fs.ftp.FtpConfigKeys; +import org.apache.hadoop.fs.local.LocalConfigKeys; +import org.apache.hadoop.ha.SshFenceByTcpPort; +import org.apache.hadoop.ha.ZKFailoverController; +import org.apache.hadoop.io.erasurecode.CodecUtil; +import org.apache.hadoop.security.CompositeGroupsMapping; +import org.apache.hadoop.security.HttpCrossOriginFilterInitializer; +import org.apache.hadoop.security.LdapGroupsMapping; +import org.apache.hadoop.security.RuleBasedLdapGroupsMapping; +import org.apache.hadoop.security.ssl.SSLFactory; + +/** + * Unit test class to compare the following Hadoop Configuration classes: + *
<p></p>
+ * {@link org.apache.hadoop.fs.AbstractFileSystem} + * {@link org.apache.hadoop.fs.CommonConfigurationKeys} + * {@link org.apache.hadoop.fs.CommonConfigurationKeysPublic} + * {@link org.apache.hadoop.fs.ftp.FtpConfigKeys} + * {@link org.apache.hadoop.fs.local.LocalConfigKeys} + * {@link org.apache.hadoop.ha.SshFenceByTcpPort} + * {@link org.apache.hadoop.http.HttpServer2} + * {@link org.apache.hadoop.security.LdapGroupsMapping} + * {@link org.apache.hadoop.security.http.CrossOriginFilter} + * {@link org.apache.hadoop.security.ssl.SSLFactory} + * {@link org.apache.hadoop.io.erasurecode.rawcoder.CoderUtil} + *
<p></p>
+ * against core-site.xml for missing properties. Currently only + * throws an error if the class is missing a property. + *
<p></p>
+ * Refer to {@link org.apache.hadoop.conf.TestConfigurationFieldsBase} + * for how this class works. + */ +public class TestFtpConfigurationFields extends TestConfigurationFieldsBase { + + @SuppressWarnings("deprecation") + @Override + public void initializeMemberVariables() { + xmlFilename = "core-default.xml"; + configurationClasses = new Class[] { + CommonConfigurationKeys.class, + CommonConfigurationKeysPublic.class, + LocalConfigKeys.class, + FtpConfigKeys.class, + SshFenceByTcpPort.class, + LdapGroupsMapping.class, + ZKFailoverController.class, + SSLFactory.class, + CompositeGroupsMapping.class, + CodecUtil.class, + RuleBasedLdapGroupsMapping.class + }; + + // Initialize used variables + xmlPropsToSkipCompare = new HashSet<>(); + xmlPrefixToSkipCompare = new HashSet<>(); + configurationPropsToSkipCompare = new HashSet<>(); + + // Set error modes + errorIfMissingConfigProps = true; + errorIfMissingXmlProps = false; + + // Lots of properties not in the above classes + xmlPropsToSkipCompare.add("fs.ftp.password.localhost"); + xmlPropsToSkipCompare.add("fs.ftp.user.localhost"); + xmlPropsToSkipCompare.add("fs.ftp.data.connection.mode"); + xmlPropsToSkipCompare.add("fs.ftp.transfer.mode"); + xmlPropsToSkipCompare.add("fs.ftp.timeout"); + xmlPropsToSkipCompare.add("hadoop.tmp.dir"); + xmlPropsToSkipCompare.add("nfs3.mountd.port"); + xmlPropsToSkipCompare.add("nfs3.server.port"); + xmlPropsToSkipCompare.add("fs.viewfs.rename.strategy"); + + // S3A properties are in a different subtree. + xmlPrefixToSkipCompare.add("fs.s3a."); + + // O3 properties are in a different subtree. + xmlPrefixToSkipCompare.add("fs.o3fs."); + + //ftp properties are in a different subtree. + // - org.apache.hadoop.fs.ftp.FTPFileSystem. + xmlPrefixToSkipCompare.add("fs.ftp.impl"); + + // WASB properties are in a different subtree. + // - org.apache.hadoop.fs.azure.NativeAzureFileSystem + xmlPrefixToSkipCompare.add("fs.wasb.impl"); + xmlPrefixToSkipCompare.add("fs.wasbs.impl"); + xmlPrefixToSkipCompare.add("fs.azure."); + xmlPrefixToSkipCompare.add("fs.abfs.impl"); + xmlPrefixToSkipCompare.add("fs.abfss.impl"); + + // ADL properties are in a different subtree + // - org.apache.hadoop.hdfs.web.ADLConfKeys + xmlPrefixToSkipCompare.add("adl."); + xmlPrefixToSkipCompare.add("fs.adl."); + xmlPropsToSkipCompare.add("fs.AbstractFileSystem.adl.impl"); + + // ViewfsOverloadScheme target fs impl property keys are dynamically + // constructed and they are advanced props. + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.abfs.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.abfss.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.file.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.ftp.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.gs.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.hdfs.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.http.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.https.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.ofs.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.o3fs.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.oss.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.s3a.impl"); + xmlPropsToSkipCompare. 
+ add("fs.viewfs.overload.scheme.target.swebhdfs.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.webhdfs.impl"); + xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.wasb.impl"); + + // Azure properties are in a different class + // - org.apache.hadoop.fs.azure.AzureNativeFileSystemStore + // - org.apache.hadoop.fs.azure.SASKeyGeneratorImpl + xmlPropsToSkipCompare.add("fs.azure.sas.expiry.period"); + xmlPropsToSkipCompare.add("fs.azure.local.sas.key.mode"); + xmlPropsToSkipCompare.add("fs.azure.secure.mode"); + xmlPropsToSkipCompare.add("fs.azure.authorization"); + xmlPropsToSkipCompare.add("fs.azure.authorization.caching.enable"); + xmlPropsToSkipCompare.add("fs.azure.saskey.usecontainersaskeyforallaccess"); + xmlPropsToSkipCompare.add("fs.azure.user.agent.prefix"); + + // Properties in enable callqueue overflow trigger failover for stateless servers. + xmlPropsToSkipCompare.add("ipc.[port_number].callqueue.overflow.trigger.failover"); + xmlPropsToSkipCompare.add("ipc.callqueue.overflow.trigger.failover"); + + // FairCallQueue configs that includes dynamic ports in its keys + xmlPropsToSkipCompare.add("ipc.[port_number].backoff.enable"); + xmlPropsToSkipCompare.add("ipc.backoff.enable"); + xmlPropsToSkipCompare.add("ipc.[port_number].callqueue.impl"); + xmlPropsToSkipCompare.add("ipc.callqueue.impl"); + xmlPropsToSkipCompare.add("ipc.[port_number].scheduler.impl"); + xmlPropsToSkipCompare.add("ipc.scheduler.impl"); + xmlPropsToSkipCompare.add("ipc.[port_number].scheduler.priority.levels"); + xmlPropsToSkipCompare.add("ipc.[port_number].callqueue.capacity.weights"); + xmlPropsToSkipCompare.add( + "ipc.[port_number].faircallqueue.multiplexer.weights"); + xmlPropsToSkipCompare.add("ipc.[port_number].identity-provider.impl"); + xmlPropsToSkipCompare.add("ipc.identity-provider.impl"); + xmlPropsToSkipCompare.add("ipc.[port_number].cost-provider.impl"); + xmlPropsToSkipCompare.add("ipc.cost-provider.impl"); + xmlPropsToSkipCompare.add("ipc.[port_number].decay-scheduler.period-ms"); + xmlPropsToSkipCompare.add("ipc.[port_number].decay-scheduler.decay-factor"); + xmlPropsToSkipCompare.add("ipc.[port_number].decay-scheduler.thresholds"); + xmlPropsToSkipCompare.add( + "ipc.[port_number].decay-scheduler.backoff.responsetime.enable"); + xmlPropsToSkipCompare.add( + "ipc.[port_number].decay-scheduler.backoff.responsetime.thresholds"); + xmlPropsToSkipCompare.add( + "ipc.[port_number].decay-scheduler.metrics.top.user.count"); + xmlPropsToSkipCompare.add( + "ipc.[port_number].decay-scheduler.service-users"); + xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.lockshared"); + xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.lockexclusive"); + xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.handler"); + xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.lockfree"); + xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.response"); + + // Deprecated properties. These should eventually be removed from the + // class. + configurationPropsToSkipCompare + .add(CommonConfigurationKeysPublic.IO_SORT_MB_KEY); + configurationPropsToSkipCompare + .add(CommonConfigurationKeysPublic.IO_SORT_FACTOR_KEY); + + // Irrelevant property + configurationPropsToSkipCompare.add("dr.who"); + + // XML deprecated properties. 
+ // - org.apache.hadoop.hdfs.client.HdfsClientConfigKeys + xmlPropsToSkipCompare + .add("io.bytes.per.checksum"); + + // Properties in other classes that aren't easily determined + // (not following naming convention, in a different project, not public, + // etc.) + // - org.apache.hadoop.http.HttpServer2.FILTER_INITIALIZER_PROPERTY + xmlPropsToSkipCompare.add("hadoop.http.filter.initializers"); + // - org.apache.hadoop.security.HttpCrossOriginFilterInitializer + xmlPrefixToSkipCompare.add(HttpCrossOriginFilterInitializer.PREFIX); + xmlPrefixToSkipCompare.add("fs.AbstractFileSystem."); + // - org.apache.hadoop.ha.SshFenceByTcpPort + xmlPrefixToSkipCompare.add("dfs.ha.fencing.ssh."); + // - org.apache.hadoop.classification.RegistryConstants + xmlPrefixToSkipCompare.add("hadoop.registry."); + // - org.apache.hadoop.security.AuthenticationFilterInitializer + xmlPrefixToSkipCompare.add("hadoop.http.authentication."); + // - org.apache.hadoop.crypto.key.kms.KMSClientProvider; + xmlPropsToSkipCompare.add(KMSClientProvider.AUTH_RETRY); + // - org.apache.hadoop.io.nativeio.NativeIO + xmlPropsToSkipCompare.add("hadoop.workaround.non.threadsafe.getpwuid"); + // - org.apache.hadoop.hdfs.DFSConfigKeys + xmlPropsToSkipCompare.add("dfs.ha.fencing.methods"); + // - org.apache.hadoop.fs.CommonConfigurationKeysPublic + xmlPrefixToSkipCompare + .add(CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX); + // - org.apache.hadoop.hdfs.server.datanode.DataNode + xmlPropsToSkipCompare.add("hadoop.common.configuration.version"); + // - org.apache.hadoop.fs.FileSystem + xmlPropsToSkipCompare.add("fs.har.impl.disable.cache"); + + // - package org.apache.hadoop.tracing.TraceUtils ? + xmlPropsToSkipCompare.add("hadoop.htrace.span.receiver.classes"); + // Private keys + // - org.apache.hadoop.ha.ZKFailoverController; + xmlPropsToSkipCompare.add("ha.zookeeper.parent-znode"); + xmlPropsToSkipCompare.add("ha.zookeeper.session-timeout.ms"); + // - Where is this used? 
+ xmlPrefixToSkipCompare + .add(CommonConfigurationKeys.FS_CLIENT_HTRACE_PREFIX); + // - org.apache.hadoop.security.UserGroupInformation + xmlPropsToSkipCompare.add("hadoop.kerberos.kinit.command"); + // - org.apache.hadoop.net.NetUtils + xmlPropsToSkipCompare + .add("hadoop.rpc.socket.factory.class.ClientProtocol"); + + // Keys with no corresponding variable + // - org.apache.hadoop.io.compress.bzip2.Bzip2Factory + xmlPropsToSkipCompare.add("io.compression.codec.bzip2.library"); + // - org.apache.hadoop.io.SequenceFile + xmlPropsToSkipCompare.add("io.seqfile.local.dir"); + + xmlPropsToSkipCompare.add("hadoop.http.sni.host.check.enabled"); + } +} From 8304d1ce9d264f1c2120374e319d64b8a36c36c0 Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Fri, 5 Apr 2024 18:40:45 +0200 Subject: [PATCH 4/8] try to fix tests --- .../hadoop/conf/TestCommonConfigurationFields.java | 14 ++++++++++++++ .../hadoop/fs/ftp}/TestDelegateToFileSystem.java | 9 ++++++--- 2 files changed, 20 insertions(+), 3 deletions(-) rename hadoop-common-project/{hadoop-common/src/test/java/org/apache/hadoop/fs => hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp}/TestDelegateToFileSystem.java (91%) diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java index 66654dc0273d4..797f26e171691 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java @@ -242,5 +242,19 @@ public void initializeMemberVariables() { xmlPropsToSkipCompare.add("io.seqfile.local.dir"); xmlPropsToSkipCompare.add("hadoop.http.sni.host.check.enabled"); + + // skip all FTP props (some were already skipped above) + xmlPropsToSkipCompare.add("fs.AbstractFileSystem.ftp.impl"); + xmlPropsToSkipCompare.add("fs.ftp.impl"); + xmlPropsToSkipCompare.add("fs.ftp.host"); + xmlPropsToSkipCompare.add("fs.ftp.host.port"); + xmlPropsToSkipCompare.add("fs.ftp.data.connection.mode"); + xmlPropsToSkipCompare.add("fs.ftp.transfer.mode"); + xmlPropsToSkipCompare.add("fs.ftp.timeout"); + xmlPropsToSkipCompare.add("ftp.stream-buffer-size"); + xmlPropsToSkipCompare.add("ftp.bytes-per-checksum"); + xmlPropsToSkipCompare.add("ftp.client-write-packet-size"); + xmlPropsToSkipCompare.add("ftp.blocksize"); + xmlPropsToSkipCompare.add("ftp.replication"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestDelegateToFileSystem.java similarity index 91% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java rename to hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestDelegateToFileSystem.java index 5de32861db68d..7f562f85b514f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java +++ b/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestDelegateToFileSystem.java @@ -15,15 +15,18 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.fs; - -import java.net.URI; +package org.apache.hadoop.fs.ftp; import org.apache.commons.net.ftp.FTP; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.AbstractFileSystem; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.UnsupportedFileSystemException; import org.junit.Assert; import org.junit.Test; +import java.net.URI; + public class TestDelegateToFileSystem { private static final String FTP_DUMMYHOST = "ftp://dummyhost"; From 18bc7e41aaf023e360120fd4387d2b5c5840a23b Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Fri, 5 Apr 2024 18:47:07 +0200 Subject: [PATCH 5/8] move to tools --- hadoop-common-project/pom.xml | 1 - .../hadoop-ftp/pom.xml | 12 ++++++------ .../hadoop-ftp/src/main/conf/log4j.properties | 0 .../java/org/apache/hadoop/fs/ftp/FTPException.java | 0 .../java/org/apache/hadoop/fs/ftp/FTPFileSystem.java | 0 .../org/apache/hadoop/fs/ftp/FTPInputStream.java | 0 .../java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java | 0 .../main/java/org/apache/hadoop/fs/ftp/FtpFs.java | 0 .../apache/hadoop/fs/sftp/SFTPConnectionPool.java | 0 .../org/apache/hadoop/fs/sftp/SFTPFileSystem.java | 0 .../org/apache/hadoop/fs/sftp/SFTPInputStream.java | 0 .../java/org/apache/hadoop/fs/sftp/package-info.java | 0 .../hadoop/conf/TestFtpConfigurationFields.java | 0 .../apache/hadoop/fs/contract/ftp/FTPContract.java | 0 .../fs/contract/ftp/TestFTPContractCreate.java | 0 .../fs/contract/ftp/TestFTPContractDelete.java | 0 .../hadoop/fs/contract/ftp/TestFTPContractMkdir.java | 0 .../hadoop/fs/contract/ftp/TestFTPContractOpen.java | 0 .../fs/contract/ftp/TestFTPContractRename.java | 0 .../org/apache/hadoop/fs/contract/ftp/package.html | 0 .../apache/hadoop/fs/contract/sftp/SFTPContract.java | 0 .../fs/contract/sftp/TestSFTPContractSeek.java | 0 .../java/org/apache/hadoop/fs/ftp/FtpTestServer.java | 0 .../hadoop/fs/ftp/TestDelegateToFileSystem.java | 0 .../org/apache/hadoop/fs/ftp/TestFTPFileSystem.java | 0 .../apache/hadoop/fs/sftp/TestSFTPFileSystem.java | 0 .../hadoop-ftp/src/test/resources/contract/ftp.xml | 0 .../hadoop-ftp/src/test/resources/contract/sftp.xml | 0 hadoop-tools/pom.xml | 1 + 29 files changed, 7 insertions(+), 7 deletions(-) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/pom.xml (98%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/main/conf/log4j.properties (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPException.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPInputStream.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/package-info.java (100%) rename {hadoop-common-project => 
hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/conf/TestFtpConfigurationFields.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractCreate.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractDelete.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractMkdir.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractOpen.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/package.html (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/SFTPContract.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/TestSFTPContractSeek.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/FtpTestServer.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestDelegateToFileSystem.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/resources/contract/ftp.xml (100%) rename {hadoop-common-project => hadoop-tools}/hadoop-ftp/src/test/resources/contract/sftp.xml (100%) diff --git a/hadoop-common-project/pom.xml b/hadoop-common-project/pom.xml index 930a956361a56..40e3f9190bf23 100644 --- a/hadoop-common-project/pom.xml +++ b/hadoop-common-project/pom.xml @@ -38,7 +38,6 @@ hadoop-minikdc hadoop-kms hadoop-registry - hadoop-ftp diff --git a/hadoop-common-project/hadoop-ftp/pom.xml b/hadoop-tools/hadoop-ftp/pom.xml similarity index 98% rename from hadoop-common-project/hadoop-ftp/pom.xml rename to hadoop-tools/hadoop-ftp/pom.xml index 89ab6807e2ae6..c5c142c53ad12 100644 --- a/hadoop-common-project/hadoop-ftp/pom.xml +++ b/hadoop-tools/hadoop-ftp/pom.xml @@ -45,11 +45,6 @@ commons-net compile
- - ch.qos.reload4j - reload4j - compile - com.jcraft jsch @@ -79,7 +74,12 @@ org.slf4j slf4j-reload4j - compile + test + + + ch.qos.reload4j + reload4j + test org.mockito diff --git a/hadoop-common-project/hadoop-ftp/src/main/conf/log4j.properties b/hadoop-tools/hadoop-ftp/src/main/conf/log4j.properties similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/main/conf/log4j.properties rename to hadoop-tools/hadoop-ftp/src/main/conf/log4j.properties diff --git a/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPException.java b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPException.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPException.java rename to hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPException.java diff --git a/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java rename to hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java diff --git a/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java rename to hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java diff --git a/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java rename to hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java diff --git a/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java rename to hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java diff --git a/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java rename to hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java diff --git a/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java rename to hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java diff --git a/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPInputStream.java b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPInputStream.java similarity index 100% rename from 
hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPInputStream.java rename to hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/SFTPInputStream.java diff --git a/hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/package-info.java b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/package-info.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/package-info.java rename to hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/sftp/package-info.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/conf/TestFtpConfigurationFields.java b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/conf/TestFtpConfigurationFields.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/conf/TestFtpConfigurationFields.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/conf/TestFtpConfigurationFields.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractCreate.java b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractCreate.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractCreate.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractCreate.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractDelete.java b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractDelete.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractDelete.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractDelete.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractMkdir.java b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractMkdir.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractMkdir.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractMkdir.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractOpen.java b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractOpen.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractOpen.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractOpen.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java 
b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/TestFTPContractRename.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/package.html b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/package.html similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/package.html rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/ftp/package.html diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/SFTPContract.java b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/SFTPContract.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/SFTPContract.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/SFTPContract.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/TestSFTPContractSeek.java b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/TestSFTPContractSeek.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/TestSFTPContractSeek.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/contract/sftp/TestSFTPContractSeek.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/FtpTestServer.java b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/FtpTestServer.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/FtpTestServer.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/FtpTestServer.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestDelegateToFileSystem.java b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestDelegateToFileSystem.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestDelegateToFileSystem.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestDelegateToFileSystem.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java b/hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java rename to hadoop-tools/hadoop-ftp/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java diff --git a/hadoop-common-project/hadoop-ftp/src/test/resources/contract/ftp.xml b/hadoop-tools/hadoop-ftp/src/test/resources/contract/ftp.xml similarity index 100% 
rename from hadoop-common-project/hadoop-ftp/src/test/resources/contract/ftp.xml rename to hadoop-tools/hadoop-ftp/src/test/resources/contract/ftp.xml diff --git a/hadoop-common-project/hadoop-ftp/src/test/resources/contract/sftp.xml b/hadoop-tools/hadoop-ftp/src/test/resources/contract/sftp.xml similarity index 100% rename from hadoop-common-project/hadoop-ftp/src/test/resources/contract/sftp.xml rename to hadoop-tools/hadoop-ftp/src/test/resources/contract/sftp.xml diff --git a/hadoop-tools/pom.xml b/hadoop-tools/pom.xml index 8c1256a177cc4..5ca5036ab7307 100644 --- a/hadoop-tools/pom.xml +++ b/hadoop-tools/pom.xml @@ -51,6 +51,7 @@ hadoop-azure-datalake hadoop-aliyun hadoop-fs2img + hadoop-ftp hadoop-benchmark hadoop-compat-bench From 73a9ecd4a007182eb47ca3c026d3c4563419b465 Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Fri, 5 Apr 2024 22:22:46 +0200 Subject: [PATCH 6/8] whitespace --- .../conf/TestCommonConfigurationFields.java | 94 +++++++++++-------- .../apache/hadoop/fs/ftp/FTPFileSystem.java | 18 ++-- .../apache/hadoop/fs/ftp/FtpConfigKeys.java | 13 ++- .../java/org/apache/hadoop/fs/ftp/FtpFs.java | 8 +- 4 files changed, 72 insertions(+), 61 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java index 797f26e171691..97330ece649ac 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java @@ -130,52 +130,13 @@ public void initializeMemberVariables() { xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.webhdfs.impl"); xmlPropsToSkipCompare.add("fs.viewfs.overload.scheme.target.wasb.impl"); - // Azure properties are in a different class - // - org.apache.hadoop.fs.azure.AzureNativeFileSystemStore - // - org.apache.hadoop.fs.azure.SASKeyGeneratorImpl - xmlPropsToSkipCompare.add("fs.azure.sas.expiry.period"); - xmlPropsToSkipCompare.add("fs.azure.local.sas.key.mode"); - xmlPropsToSkipCompare.add("fs.azure.secure.mode"); - xmlPropsToSkipCompare.add("fs.azure.authorization"); - xmlPropsToSkipCompare.add("fs.azure.authorization.caching.enable"); - xmlPropsToSkipCompare.add("fs.azure.saskey.usecontainersaskeyforallaccess"); - xmlPropsToSkipCompare.add("fs.azure.user.agent.prefix"); + addAzureSettings(); // Properties in enable callqueue overflow trigger failover for stateless servers. 
xmlPropsToSkipCompare.add("ipc.[port_number].callqueue.overflow.trigger.failover"); xmlPropsToSkipCompare.add("ipc.callqueue.overflow.trigger.failover"); - // FairCallQueue configs that includes dynamic ports in its keys - xmlPropsToSkipCompare.add("ipc.[port_number].backoff.enable"); - xmlPropsToSkipCompare.add("ipc.backoff.enable"); - xmlPropsToSkipCompare.add("ipc.[port_number].callqueue.impl"); - xmlPropsToSkipCompare.add("ipc.callqueue.impl"); - xmlPropsToSkipCompare.add("ipc.[port_number].scheduler.impl"); - xmlPropsToSkipCompare.add("ipc.scheduler.impl"); - xmlPropsToSkipCompare.add("ipc.[port_number].scheduler.priority.levels"); - xmlPropsToSkipCompare.add("ipc.[port_number].callqueue.capacity.weights"); - xmlPropsToSkipCompare.add( - "ipc.[port_number].faircallqueue.multiplexer.weights"); - xmlPropsToSkipCompare.add("ipc.[port_number].identity-provider.impl"); - xmlPropsToSkipCompare.add("ipc.identity-provider.impl"); - xmlPropsToSkipCompare.add("ipc.[port_number].cost-provider.impl"); - xmlPropsToSkipCompare.add("ipc.cost-provider.impl"); - xmlPropsToSkipCompare.add("ipc.[port_number].decay-scheduler.period-ms"); - xmlPropsToSkipCompare.add("ipc.[port_number].decay-scheduler.decay-factor"); - xmlPropsToSkipCompare.add("ipc.[port_number].decay-scheduler.thresholds"); - xmlPropsToSkipCompare.add( - "ipc.[port_number].decay-scheduler.backoff.responsetime.enable"); - xmlPropsToSkipCompare.add( - "ipc.[port_number].decay-scheduler.backoff.responsetime.thresholds"); - xmlPropsToSkipCompare.add( - "ipc.[port_number].decay-scheduler.metrics.top.user.count"); - xmlPropsToSkipCompare.add( - "ipc.[port_number].decay-scheduler.service-users"); - xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.lockshared"); - xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.lockexclusive"); - xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.handler"); - xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.lockfree"); - xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.response"); + addFairCallQueueSettings(); // Deprecated properties. These should eventually be removed from the // class. 
@@ -243,6 +204,57 @@ public void initializeMemberVariables() { xmlPropsToSkipCompare.add("hadoop.http.sni.host.check.enabled"); + addFtpSettings(); + } + + private void addAzureSettings() { + // Azure properties are in a different class + // - org.apache.hadoop.fs.azure.AzureNativeFileSystemStore + // - org.apache.hadoop.fs.azure.SASKeyGeneratorImpl + xmlPropsToSkipCompare.add("fs.azure.sas.expiry.period"); + xmlPropsToSkipCompare.add("fs.azure.local.sas.key.mode"); + xmlPropsToSkipCompare.add("fs.azure.secure.mode"); + xmlPropsToSkipCompare.add("fs.azure.authorization"); + xmlPropsToSkipCompare.add("fs.azure.authorization.caching.enable"); + xmlPropsToSkipCompare.add("fs.azure.saskey.usecontainersaskeyforallaccess"); + xmlPropsToSkipCompare.add("fs.azure.user.agent.prefix"); + } + + private void addFairCallQueueSettings() { + // FairCallQueue configs that includes dynamic ports in its keys + xmlPropsToSkipCompare.add("ipc.[port_number].backoff.enable"); + xmlPropsToSkipCompare.add("ipc.backoff.enable"); + xmlPropsToSkipCompare.add("ipc.[port_number].callqueue.impl"); + xmlPropsToSkipCompare.add("ipc.callqueue.impl"); + xmlPropsToSkipCompare.add("ipc.[port_number].scheduler.impl"); + xmlPropsToSkipCompare.add("ipc.scheduler.impl"); + xmlPropsToSkipCompare.add("ipc.[port_number].scheduler.priority.levels"); + xmlPropsToSkipCompare.add("ipc.[port_number].callqueue.capacity.weights"); + xmlPropsToSkipCompare.add( + "ipc.[port_number].faircallqueue.multiplexer.weights"); + xmlPropsToSkipCompare.add("ipc.[port_number].identity-provider.impl"); + xmlPropsToSkipCompare.add("ipc.identity-provider.impl"); + xmlPropsToSkipCompare.add("ipc.[port_number].cost-provider.impl"); + xmlPropsToSkipCompare.add("ipc.cost-provider.impl"); + xmlPropsToSkipCompare.add("ipc.[port_number].decay-scheduler.period-ms"); + xmlPropsToSkipCompare.add("ipc.[port_number].decay-scheduler.decay-factor"); + xmlPropsToSkipCompare.add("ipc.[port_number].decay-scheduler.thresholds"); + xmlPropsToSkipCompare.add( + "ipc.[port_number].decay-scheduler.backoff.responsetime.enable"); + xmlPropsToSkipCompare.add( + "ipc.[port_number].decay-scheduler.backoff.responsetime.thresholds"); + xmlPropsToSkipCompare.add( + "ipc.[port_number].decay-scheduler.metrics.top.user.count"); + xmlPropsToSkipCompare.add( + "ipc.[port_number].decay-scheduler.service-users"); + xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.lockshared"); + xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.lockexclusive"); + xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.handler"); + xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.lockfree"); + xmlPropsToSkipCompare.add("ipc.[port_number].weighted-cost.response"); + } + + private void addFtpSettings() { // skip all FTP props (some were already skipped above) xmlPropsToSkipCompare.add("fs.AbstractFileSystem.ftp.impl"); xmlPropsToSkipCompare.add("fs.ftp.impl"); diff --git a/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java index 7a93b34766107..a3fbe97bd859b 100644 --- a/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java +++ b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java @@ -134,7 +134,7 @@ public void initialize(URI uri, Configuration conf) throws IOException { // get /** * Connect to the FTP server using configuration parameters * - * + * * @return An FTPClient instance * @throws IOException */ @@ 
-241,7 +241,7 @@ void setDataConnectionMode(FTPClient client, Configuration conf) /** * Logout and disconnect the given FTPClient. * - * + * * @param client * @throws IOException */ @@ -260,8 +260,8 @@ private void disconnect(FTPClient client) throws IOException { } /** - * Resolve against given working directory. * - * + * Resolve against given working directory. + * * @param workDir * @param path * @return @@ -330,7 +330,7 @@ public FSDataOutputStream create(Path file, FsPermission permission, throw new FileAlreadyExistsException("File already exists: " + file); } } - + Path parent = absolute.getParent(); if (parent == null || !mkdirs(client, parent, FsPermission.getDirDefault())) { parent = (parent == null) ? new Path("/") : parent; @@ -381,7 +381,7 @@ public FSDataOutputStream append(Path f, int bufferSize, throw new UnsupportedOperationException("Append is not supported " + "by FTPFileSystem"); } - + /** * Convenience method, so that we don't open a new connection when using this * method from within another method. Otherwise every API invocation incurs @@ -548,8 +548,8 @@ private FileStatus getFileStatus(FTPClient client, Path file) } /** - * Convert the file information in FTPFile to a {@link FileStatus} object. * - * + * Convert the file information in FTPFile to a {@link FileStatus} object. + * * @param ftpFile * @param parentPath * @return FileStatus @@ -660,7 +660,7 @@ private boolean isParentOf(Path parent, Path child) { * Convenience method, so that we don't open a new connection when using this * method from within another method. Otherwise every API invocation incurs * the overhead of opening/closing a TCP connection. - * + * * @param client * @param src * @param dst diff --git a/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java index b522102e540a4..00ec34b50afad 100644 --- a/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java +++ b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java @@ -26,14 +26,14 @@ import org.apache.hadoop.fs.FsServerDefaults; import org.apache.hadoop.util.DataChecksum; -/** +/** * This class contains constants for configuration keys used * in the ftp file system. * - * Note that the settings for unimplemented features are ignored. + * Note that the settings for unimplemented features are ignored. * E.g. checksum related settings are just place holders. Even when * wrapped with {@link ChecksumFileSystem}, these settings are not - * used. + * used. 
*/ @InterfaceAudience.Private @InterfaceStability.Unstable @@ -42,10 +42,10 @@ public class FtpConfigKeys extends CommonConfigurationKeys { public static final long BLOCK_SIZE_DEFAULT = 4*1024; public static final String REPLICATION_KEY = "ftp.replication"; public static final short REPLICATION_DEFAULT = 1; - public static final String STREAM_BUFFER_SIZE_KEY = + public static final String STREAM_BUFFER_SIZE_KEY = "ftp.stream-buffer-size"; public static final int STREAM_BUFFER_SIZE_DEFAULT = 1024*1024; - public static final String BYTES_PER_CHECKSUM_KEY = + public static final String BYTES_PER_CHECKSUM_KEY = "ftp.bytes-per-checksum"; public static final int BYTES_PER_CHECKSUM_DEFAULT = 512; public static final String CLIENT_WRITE_PACKET_SIZE_KEY = @@ -56,7 +56,7 @@ public class FtpConfigKeys extends CommonConfigurationKeys { public static final DataChecksum.Type CHECKSUM_TYPE_DEFAULT = DataChecksum.Type.CRC32; public static final String KEY_PROVIDER_URI_DEFAULT = ""; - + protected static FsServerDefaults getServerDefaults() throws IOException { return new FsServerDefaults( BLOCK_SIZE_DEFAULT, @@ -70,4 +70,3 @@ protected static FsServerDefaults getServerDefaults() throws IOException { KEY_PROVIDER_URI_DEFAULT); } } - diff --git a/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java index 6d54e36f67414..ee1b93e5f9b7a 100644 --- a/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java +++ b/hadoop-tools/hadoop-ftp/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java @@ -41,22 +41,22 @@ public class FtpFs extends DelegateToFileSystem { /** * This constructor has the signature needed by * {@link AbstractFileSystem#createFileSystem(URI, Configuration)}. 
- * + * * @param theUri which must be that of localFs * @param conf * @throws IOException - * @throws URISyntaxException + * @throws URISyntaxException */ FtpFs(final URI theUri, final Configuration conf) throws IOException, URISyntaxException { super(theUri, new FTPFileSystem(), conf, FsConstants.FTP_SCHEME, true); } - + @Override public int getUriDefaultPort() { return FTP.DEFAULT_PORT; } - + @Override @Deprecated public FsServerDefaults getServerDefaults() throws IOException { From fbe70093a567cc86a02f5c42815eedefa454a746 Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Fri, 5 Apr 2024 23:17:38 +0200 Subject: [PATCH 7/8] build issue --- .../dev-support/findbugs-exclude.xml | 19 +++++++++++++++++++ hadoop-tools/hadoop-ftp/pom.xml | 5 ----- 2 files changed, 19 insertions(+), 5 deletions(-) create mode 100644 hadoop-tools/hadoop-ftp/dev-support/findbugs-exclude.xml diff --git a/hadoop-tools/hadoop-ftp/dev-support/findbugs-exclude.xml b/hadoop-tools/hadoop-ftp/dev-support/findbugs-exclude.xml new file mode 100644 index 0000000000000..6122191f6b8db --- /dev/null +++ b/hadoop-tools/hadoop-ftp/dev-support/findbugs-exclude.xml @@ -0,0 +1,19 @@ + + + + diff --git a/hadoop-tools/hadoop-ftp/pom.xml b/hadoop-tools/hadoop-ftp/pom.xml index c5c142c53ad12..cbc05e09a34fa 100644 --- a/hadoop-tools/hadoop-ftp/pom.xml +++ b/hadoop-tools/hadoop-ftp/pom.xml @@ -81,11 +81,6 @@ reload4j test - - org.mockito - mockito-core - test - org.apache.sshd sshd-core From 7097202abde43d4322a3bed79af870f17431e31c Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Sat, 6 Apr 2024 02:21:31 +0200 Subject: [PATCH 8/8] rename file --- .../dev-support/{findbugs-exclude.xml => findbugsExcludeFile.xml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename hadoop-tools/hadoop-ftp/dev-support/{findbugs-exclude.xml => findbugsExcludeFile.xml} (100%) diff --git a/hadoop-tools/hadoop-ftp/dev-support/findbugs-exclude.xml b/hadoop-tools/hadoop-ftp/dev-support/findbugsExcludeFile.xml similarity index 100% rename from hadoop-tools/hadoop-ftp/dev-support/findbugs-exclude.xml rename to hadoop-tools/hadoop-ftp/dev-support/findbugsExcludeFile.xml
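After this series the FTP/SFTP classes live in hadoop-tools/hadoop-ftp, but they keep their original packages, so callers reach them through the ordinary Hadoop FileSystem API exactly as before. The following minimal sketch is not part of the patch: it assumes a downstream project has added the new hadoop-ftp artifact to its classpath, and the host name, credentials, and remote path are placeholders. The configuration keys shown (fs.ftp.impl, fs.ftp.host, fs.ftp.user.<host>, fs.ftp.password.<host>) are the existing FTPFileSystem keys that the patch leaves untouched.

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class FtpReadExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Class names are unchanged by the module move; only the Maven artifact
    // providing them differs (hadoop-ftp instead of hadoop-common).
    conf.set("fs.ftp.impl", "org.apache.hadoop.fs.ftp.FTPFileSystem");
    conf.set("fs.ftp.host", "ftp.example.com");              // placeholder host
    conf.set("fs.ftp.user.ftp.example.com", "ftpuser");      // placeholder credentials
    conf.set("fs.ftp.password.ftp.example.com", "ftppassword");

    try (FileSystem fs = FileSystem.get(URI.create("ftp://ftp.example.com/"), conf);
         FSDataInputStream in = fs.open(new Path("/data/sample.txt"))) {
      // Stream the remote file to stdout; try-with-resources closes both streams.
      IOUtils.copyBytes(in, System.out, 4096, false);
    }
  }
}

Because the classes stay in org.apache.hadoop.fs.ftp and org.apache.hadoop.fs.sftp, existing core-site.xml settings and ftp:// / sftp:// URIs continue to work; the only user-visible change should be the extra dependency on the hadoop-ftp module.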