
Merge trunk into auto-HA branch

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-3042@1340622 13f79535-47bb-0310-9956-ffa450edef68
2 parents 3f8885c + be03127 · commit dc7603f49414f6d06eafdb28432b225ce411d927 · toddlipcon committed May 20, 2012
Showing with 959 additions and 258 deletions.
  1. +13 −0 hadoop-common-project/hadoop-common/CHANGES.txt
  2. +74 −0 hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
  3. +5 −0 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
  4. +25 −10 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
  5. +6 −2 ...on-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
  6. +1 −1 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/empty-configuration.xml
  7. +73 −0 ...op-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java
  8. +28 −0 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
  9. +1 −1 ...rib/bkjournal/src/main/java/org/apache/hadoop/contrib/bkjournal/BookKeeperEditLogInputStream.java
  10. +1 −1 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_impls_truncate.c
  11. +4 −0 ...roject/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolPB.java
  12. +16 −0 ...-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolServerSideTranslatorPB.java
  13. +11 −0 ...ect/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolTranslatorPB.java
  14. +16 −4 ...ect/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/InvalidateBlocks.java
  15. +2 −1 ...hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPOfferService.java
  16. +12 −9 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
  17. +1 −1 ...ct/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogBackupInputStream.java
  18. +1 −1 ...ject/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java
  19. +27 −1 ...dfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
  20. +8 −2 ...op-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
  21. +29 −20 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java
  22. +4 −1 ...-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java
  23. +2 −22 ...p-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
  24. +16 −8 ...dfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
  25. +3 −3 ...-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
  26. +9 −4 ...s-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
  27. +6 −0 ...s-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
  28. +8 −25 ...roject/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SaveNamespaceContext.java
  29. +30 −0 ...-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StreamLimiter.java
  30. +20 −70 ...project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BootstrapStandby.java
  31. +36 −16 ...ject/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyCheckpointer.java
  32. +7 −1 ...fs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocol.java
  33. +22 −2 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetGroups.java
  34. +51 −0 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/Canceler.java
  35. +19 −8 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightHashSet.java
  36. +1 −1 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightLinkedSet.java
  37. +17 −1 hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto
  38. +1 −1 ...op-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java
  39. +3 −3 ...s-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java
  40. +1 −1 ...op-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java
  41. +45 −0 ...project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSEditLogLoader.java
  42. +37 −3 ...project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java
  43. +5 −3 ...s-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSaveNamespace.java
  44. +14 −12 ...ect/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestBootstrapStandby.java
  45. +36 −4 ...t/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyCheckpoints.java
  46. +9 −7 ...project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyIsHot.java
  47. +43 −0 ...op-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestLightWeightHashSet.java
  48. +5 −0 hadoop-mapreduce-project/CHANGES.txt
  49. +5 −0 hadoop-mapreduce-project/pom.xml
  50. +5 −0 hadoop-project/pom.xml
  51. +131 −0 hadoop-tools/hadoop-gridmix/pom.xml
  52. 0 ...hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/AvgRecordFactory.java
  53. +1 −2 ...adoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/ClusterSummarizer.java
  54. 0 ...ools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/CompressionEmulationUtil.java
  55. 0 ...ools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/DistributedCacheEmulator.java
  56. 0 ...hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/EchoUserResolver.java
  57. 0 ...oop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java
  58. 0 ...x/src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/FilePool.java
  59. 0 .../src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/FileQueue.java
  60. 0 ...c → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/GenerateData.java
  61. 0 ...p-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/GenerateDistCacheData.java
  62. +2 −2 ...ix/src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/Gridmix.java
  63. 0 ...src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/GridmixJob.java
  64. 0 ...ls/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java
  65. 0 ...src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/GridmixKey.java
  66. 0 ... → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/GridmixRecord.java
  67. 0 ...c → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/GridmixSplit.java
  68. 0 ...c → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/InputStriper.java
  69. 0 ...ols/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/IntermediateRecordFactory.java
  70. 0 ...src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/JobCreator.java
  71. 0 ...src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/JobFactory.java
  72. 0 ...src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/JobMonitor.java
  73. 0 ...c → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/JobSubmitter.java
  74. 0 ...ix/src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/LoadJob.java
  75. 0 .../src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/LoadSplit.java
  76. 0 ...rc → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/Progressive.java
  77. 0 ... → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/PseudoLocalFs.java
  78. 0 ...hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/RandomAlgorithms.java
  79. 0 ...tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/RandomTextDataGenerator.java
  80. 0 ...adoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/ReadRecordFactory.java
  81. 0 ... → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/RecordFactory.java
  82. 0 ...hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/ReplayJobFactory.java
  83. 0 ...-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/RoundRobinUserResolver.java
  84. 0 ...hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/SerialJobFactory.java
  85. 0 ...x/src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/SleepJob.java
  86. 0 ...c → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/StatListener.java
  87. 0 ...src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/Statistics.java
  88. 0 ...hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/StressJobFactory.java
  89. 0 ...p-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/SubmitterUserResolver.java
  90. 0 ...src → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/Summarizer.java
  91. 0 ...c → hadoop-tools/hadoop-gridmix/src/main}/java/org/apache/hadoop/mapred/gridmix/UserResolver.java
  92. 0 ...va/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/CumulativeCpuUsageEmulatorPlugin.java
  93. 0 ...n}/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageEmulatorPlugin.java
  94. 0 ...src/main}/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageMatcher.java
  95. 0 ...}/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/TotalHeapUsageEmulatorPlugin.java
  96. 0 ... hadoop-tools/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/DebugJobFactory.java
  97. 0 ...hadoop-tools/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/DebugJobProducer.java
  98. +5 −4 ...hadoop-tools/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/GridmixTestUtils.java
  99. 0 ...hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationUtils.java
  100. 0 ...t → hadoop-tools/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestFilePool.java
  101. 0 ... → hadoop-tools/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestFileQueue.java
  102. 0 ...ls/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestGridmixMemoryEmulation.java
  103. 0 ...adoop-tools/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestGridmixRecord.java
  104. 0 ...doop-tools/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestGridmixSummary.java
  105. 0 ...→ hadoop-tools/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestHighRamJob.java
  106. 0 ...adoop-tools/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestPseudoLocalFs.java
  107. 0 ...oop-tools/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestRandomAlgorithm.java
  108. 0 ...s/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestRandomTextDataGenerator.java
  109. 0 ...adoop-tools/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestRecordFactory.java
  110. 0 ...ls/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestResourceUsageEmulators.java
  111. 0 ... hadoop-tools/hadoop-gridmix/src/test/java}/org/apache/hadoop/mapred/gridmix/TestUserResolve.java
  112. BIN ...contrib/gridmix/src/test → hadoop-tools/hadoop-gridmix/src/test/resources}/data/wordcount.json.gz
  113. +5 −0 hadoop-tools/hadoop-tools-dist/pom.xml
  114. +1 −0 hadoop-tools/pom.xml
@@ -67,6 +67,9 @@ Trunk (unreleased changes)
HADOOP-8297. Writable javadocs don't carry default constructor (harsh)
+ HADOOP-8360. empty-configuration.xml fails xml validation
+ (Radim Kolar via harsh)
+
BUG FIXES
HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
@@ -175,6 +178,16 @@ Release 2.0.1-alpha - UNRELEASED
HADOOP-8400. All commands warn "Kerberos krb5 configuration not found" when security is not enabled. (tucu)
+ HADOOP-8406. CompressionCodecFactory.CODEC_PROVIDERS iteration is
+ thread-unsafe (todd)
+
+ HADOOP-8287. etc/hadoop is missing hadoop-env.sh (eli)
+
+ HADOOP-8408. MR doesn't work with a non-default ViewFS mount table
+ and security enabled. (atm via eli)
+
+ HADOOP-8329. Build fails with Java 7. (eli)
+
Release 2.0.0-alpha - UNRELEASED
INCOMPATIBLE CHANGES
@@ -0,0 +1,74 @@
+# Copyright 2011 The Apache Software Foundation
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Set Hadoop-specific environment variables here.
+
+# The only required environment variable is JAVA_HOME. All others are
+# optional. When running a distributed configuration it is best to
+# set JAVA_HOME in this file, so that it is correctly defined on
+# remote nodes.
+
+# The java implementation to use.
+export JAVA_HOME=${JAVA_HOME}
+
+# The jsvc implementation to use. Jsvc is required to run secure datanodes.
+#export JSVC_HOME=${JSVC_HOME}
+
+export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop"}
+
+# Extra Java CLASSPATH elements. Automatically insert capacity-scheduler.
+for f in $HADOOP_HOME/contrib/capacity-scheduler/*.jar; do
+ if [ "$HADOOP_CLASSPATH" ]; then
+ export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f
+ else
+ export HADOOP_CLASSPATH=$f
+ fi
+done
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+#export HADOOP_HEAPSIZE=
+#export HADOOP_NAMENODE_INIT_HEAPSIZE=""
+
+# Extra Java runtime options. Empty by default.
+export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true $HADOOP_CLIENT_OPTS"
+
+# Command specific options appended to HADOOP_OPTS when specified
+export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_NAMENODE_OPTS"
+export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS $HADOOP_DATANODE_OPTS"
+
+export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_SECONDARYNAMENODE_OPTS"
+
+# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+export HADOOP_CLIENT_OPTS="-Xmx128m $HADOOP_CLIENT_OPTS"
+#HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData $HADOOP_JAVA_PLATFORM_OPTS"
+
+# On secure datanodes, user to run the datanode as after dropping privileges
+export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}
+
+# Where log files are stored. $HADOOP_HOME/logs by default.
+export HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$USER
+
+# Where log files are stored in the secure data environment.
+export HADOOP_SECURE_DN_LOG_DIR=${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}
+
+# The directory where pid files are stored. /tmp by default.
+export HADOOP_PID_DIR=${HADOOP_PID_DIR}
+export HADOOP_SECURE_DN_PID_DIR=${HADOOP_PID_DIR}
+
+# A string representing this instance of hadoop. $USER by default.
+export HADOOP_IDENT_STRING=$USER
@@ -233,6 +233,11 @@ public Path getTrashCanLocation(final Path f) throws FileNotFoundException {
fsState.resolve(getUriPath(f), true);
return res.isInternalDir() ? null : res.targetFileSystem.getHomeDirectory();
}
+
+ @Override
+ public String getCanonicalServiceName() {
+ return getUri().getHost();
+ }
@Override
public URI getUri() {
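
Note: the override above makes a viewfs URI with a named mount table report that name as its canonical service, which is what delegation-token lookup keys on (HADOOP-8408). A minimal sketch of the effect, assuming the configuration already defines a mount table named vfs-cluster (as the test further below does via ConfigUtil.addLink); the Credentials object here is hypothetical and not part of this patch:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.token.Token;

    Configuration conf = new Configuration();   // assumed to carry the vfs-cluster mount table
    FileSystem viewFs = FileSystem.get(new URI("viewfs://vfs-cluster/"), conf);
    // With the override, this is "vfs-cluster" rather than null.
    Text service = new Text(viewFs.getCanonicalServiceName());
    Credentials creds = new Credentials();      // hypothetical token store
    Token<?> token = creds.getToken(service);   // tokens are now cached under the mount table name
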
@@ -96,7 +96,7 @@
// The ServletContext attribute where the daemon Configuration
// gets stored.
public static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
- static final String ADMINS_ACL = "admins.acl";
+ public static final String ADMINS_ACL = "admins.acl";
public static final String SPNEGO_FILTER = "SpnegoFilter";
public static final String BIND_ADDRESS = "bind.address";
@@ -792,7 +792,7 @@ public static boolean isInstrumentationAccessAllowed(
*
* @param servletContext
* @param request
- * @param response
+ * @param response used to send the error response if user does not have admin access.
* @return true if admin-authorized, false otherwise
* @throws IOException
*/
@@ -814,18 +814,33 @@ public static boolean hasAdministratorAccess(
"authorized to access this page.");
return false;
}
+
+ if (servletContext.getAttribute(ADMINS_ACL) != null &&
+ !userHasAdministratorAccess(servletContext, remoteUser)) {
+ response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
+ + remoteUser + " is unauthorized to access this page.");
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * Get the admin ACLs from the given ServletContext and check if the given
+ * user is in the ACL.
+ *
+ * @param servletContext the context containing the admin ACL.
+ * @param remoteUser the remote user to check for.
+ * @return true if the user is present in the ACL, false if no ACL is set or
+ * the user is not present
+ */
+ public static boolean userHasAdministratorAccess(ServletContext servletContext,
+ String remoteUser) {
AccessControlList adminsAcl = (AccessControlList) servletContext
.getAttribute(ADMINS_ACL);
UserGroupInformation remoteUserUGI =
UserGroupInformation.createRemoteUser(remoteUser);
- if (adminsAcl != null) {
- if (!adminsAcl.isUserAllowed(remoteUserUGI)) {
- response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
- + remoteUser + " is unauthorized to access this page.");
- return false;
- }
- }
- return true;
+ return adminsAcl != null && adminsAcl.isUserAllowed(remoteUserUGI);
}
/**
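
Note: a minimal sketch of how an admin-only servlet might use the split introduced above — hasAdministratorAccess(...) sends the 401 response itself, while the new userHasAdministratorAccess(...) only consults the ACL stored under ADMINS_ACL. The servlet class below is hypothetical and not part of this patch:

    import java.io.IOException;
    import javax.servlet.ServletContext;
    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import org.apache.hadoop.http.HttpServer;

    public class AdminOnlyServlet extends HttpServlet {
      @Override
      protected void doGet(HttpServletRequest req, HttpServletResponse resp)
          throws IOException {
        ServletContext ctx = getServletContext();
        // Sends an error response and returns false if the caller lacks admin access.
        if (!HttpServer.hasAdministratorAccess(ctx, req, resp)) {
          return;
        }
        resp.getWriter().println("admin-only content");
      }
    }
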
@@ -109,8 +109,12 @@ public String toString() {
List<Class<? extends CompressionCodec>> result
= new ArrayList<Class<? extends CompressionCodec>>();
// Add codec classes discovered via service loading
- for (CompressionCodec codec : CODEC_PROVIDERS) {
- result.add(codec.getClass());
+ synchronized (CODEC_PROVIDERS) {
+ // CODEC_PROVIDERS is a lazy collection. Synchronize so it is
+ // thread-safe. See HADOOP-8406.
+ for (CompressionCodec codec : CODEC_PROVIDERS) {
+ result.add(codec.getClass());
+ }
}
// Add codec classes from configuration
String codecsString = conf.get("io.compression.codecs");
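
Note: the synchronized block above guards a shared java.util.ServiceLoader, which instantiates providers lazily during iteration and is not safe for concurrent use. A minimal standalone sketch of the same pattern, using a hypothetical Codec service interface rather than Hadoop's CompressionCodec:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.ServiceLoader;

    interface Codec {}  // hypothetical service interface

    final class CodecDiscovery {
      // Shared loader; ServiceLoader populates itself lazily and is not thread-safe.
      private static final ServiceLoader<Codec> PROVIDERS = ServiceLoader.load(Codec.class);

      static List<Class<? extends Codec>> providerClasses() {
        List<Class<? extends Codec>> result = new ArrayList<Class<? extends Codec>>();
        // Serialize iteration so two threads cannot drive the loader concurrently
        // (the failure mode addressed by HADOOP-8406).
        synchronized (PROVIDERS) {
          for (Codec codec : PROVIDERS) {
            result.add(codec.getClass());
          }
        }
        return result;
      }
    }
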
@@ -1,3 +1,4 @@
+<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@@ -14,7 +15,6 @@
See the License for the specific language governing permissions and
limitations under the License.
-->
-<?xml version="1.0"?>
<configuration>
</configuration>
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.viewfs;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsConstants;
+import org.junit.Test;
+
+/**
+ * Test ViewFileSystem's support for having delegation tokens fetched and cached
+ * for the file system.
+ */
+public class TestViewFileSystemDelegationTokenSupport {
+
+ private static final String MOUNT_TABLE_NAME = "vfs-cluster";
+
+ /**
+ * Ensure that a canonical service name can be determined for ViewFileSystem
+ * instances configured with a non-default mount table name.
+ *
+ * Regression test for HADOOP-8408.
+ */
+ @Test
+ public void testGetCanonicalServiceNameWithNonDefaultMountTable()
+ throws URISyntaxException, IOException {
+
+ Configuration conf = new Configuration();
+ ConfigUtil.addLink(conf, MOUNT_TABLE_NAME, "/user", new URI("file:///"));
+
+ FileSystem viewFs = FileSystem.get(new URI(FsConstants.VIEWFS_SCHEME +
+ "://" + MOUNT_TABLE_NAME), conf);
+
+ String serviceName = viewFs.getCanonicalServiceName();
+ assertNotNull(serviceName);
+ assertEquals(MOUNT_TABLE_NAME, serviceName);
+ }
+
+ @Test
+ public void testGetCanonicalServiceNameWithDefaultMountTable()
+ throws URISyntaxException, IOException {
+
+ Configuration conf = new Configuration();
+ ConfigUtil.addLink(conf, "/user", new URI("file:///"));
+
+ FileSystem viewFs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
+
+ String serviceName = viewFs.getCanonicalServiceName();
+ assertNull(serviceName);
+ }
+
+}
@@ -187,6 +187,12 @@ Release 2.0.1-alpha - UNRELEASED
HDFS-3419. Cleanup LocatedBlock. (eli)
+ HDFS-3440. More effectively limit stream memory consumption when reading
+ corrupt edit logs (Colin Patrick McCabe via todd)
+
+ HDFS-3438. BootstrapStandby should not require a rollEdits on active node
+ (todd)
+
OPTIMIZATIONS
BUG FIXES
@@ -206,6 +212,25 @@ Release 2.0.1-alpha - UNRELEASED
HDFS-3413. TestFailureToReadEdits timing out. (atm)
+ HDFS-3422. TestStandbyIsHot timeouts too aggressive (todd)
+
+ HDFS-3433. GetImageServlet should allow administrative requestors when
+ security is enabled. (atm)
+
+ HDFS-1153. dfsnodelist.jsp should handle invalid input parameters.
+ (Ravi Phulari via eli)
+
+ HDFS-3434. InvalidProtocolBufferException when visiting DN
+ browseDirectory.jsp (eli)
+
+ HDFS-2800. Fix cancellation of checkpoints in the standby node to be more
+ reliable. (todd)
+
+ HDFS-3391. Fix InvalidateBlocks to compare blocks including their
+ generation stamps. (todd)
+
+ HDFS-3444. hdfs groups command doesn't work with security enabled. (atm)
+
Release 2.0.0-alpha - UNRELEASED
INCOMPATIBLE CHANGES
@@ -697,6 +722,9 @@ Release 2.0.0-alpha - UNRELEASED
HDFS-3026. HA: Handle failure during HA state transition. (atm)
+ HDFS-860. fuse-dfs truncate behavior causes issues with scp.
+ (Brian Bockelman via eli)
+
BREAKDOWN OF HDFS-1623 SUBTASKS
HDFS-2179. Add fencing framework and mechanisms for NameNode HA. (todd)
@@ -75,7 +75,7 @@
tracker = new FSEditLogLoader.PositionTrackingInputStream(bin);
DataInputStream in = new DataInputStream(tracker);
- reader = new FSEditLogOp.Reader(in, logVersion);
+ reader = new FSEditLogOp.Reader(in, tracker, logVersion);
}
@Override
@@ -37,7 +37,7 @@ int dfs_truncate(const char *path, off_t size)
assert(dfs);
if (size != 0) {
- return -ENOTSUP;
+ return 0;
}
int ret = dfs_unlink(path);
@@ -20,9 +20,13 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService;
import org.apache.hadoop.ipc.ProtocolInfo;
+import org.apache.hadoop.security.KerberosInfo;
+@KerberosInfo(
+ serverPrincipal=CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY)
@ProtocolInfo(
protocolName = "org.apache.hadoop.tools.GetUserMappingsProtocol",
protocolVersion = 1)
@@ -32,6 +32,8 @@
import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.GetBlocksResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.GetEditLogManifestRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.GetEditLogManifestResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.GetMostRecentCheckpointTxIdRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.GetMostRecentCheckpointTxIdResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.GetTransactionIdRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.GetTransactionIdResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.RegisterRequestProto;
@@ -104,6 +106,20 @@ public GetTransactionIdResponseProto getTransactionId(RpcController unused,
}
return GetTransactionIdResponseProto.newBuilder().setTxId(txid).build();
}
+
+ @Override
+ public GetMostRecentCheckpointTxIdResponseProto getMostRecentCheckpointTxId(
+ RpcController unused, GetMostRecentCheckpointTxIdRequestProto request)
+ throws ServiceException {
+ long txid;
+ try {
+ txid = impl.getMostRecentCheckpointTxId();
+ } catch (IOException e) {
+ throw new ServiceException(e);
+ }
+ return GetMostRecentCheckpointTxIdResponseProto.newBuilder().setTxId(txid).build();
+ }
+
@Override
public RollEditLogResponseProto rollEditLog(RpcController unused,
