Permalink
Browse files

Merge r1346682 through r1354801 from trunk.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-3092@1354832 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information...
2 parents c78ed5c + 6883354 commit 6e42a23f8485d9fc4b52d1e45b88217d7accd06a Tsz-wo Sze committed Jun 28, 2012
Showing with 8,380 additions and 4,851 deletions.
  1. +2 −2 dev-support/test-patch.sh
  2. +60 −0 hadoop-assemblies/src/main/resources/assemblies/hadoop-raid-dist.xml
  3. +67 −0 hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
  4. +18 −0 ...-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/InterfaceAudience.java
  5. +12 −1 ...project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/InterfaceStability.java
  6. +1 −0 .../hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java
  7. +38 −33 ...oop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
  8. +28 −0 ...op-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationHandler.java
  9. +5 −5 ...doop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationToken.java
  10. +21 −0 ...src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
  11. +21 −0 ...h/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
  12. +5 −1 ...oop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java
  13. +154 −38 ...auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
  14. +81 −40 hadoop-common-project/hadoop-common/CHANGES.txt
  15. +18 −85 hadoop-common-project/hadoop-common/pom.xml
  16. +126 −0 hadoop-common-project/hadoop-common/src/CMakeLists.txt
  17. +10 −0 hadoop-common-project/hadoop-common/src/config.h.cmake
  18. +1 −1 hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
  19. +32 −0 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
  20. +3 −1 ...ommon-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
  21. +2 −3 ...doop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs}/DelegationTokenRenewer.java
  22. +8 −8 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
  23. +4 −4 ...-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java
  24. +20 −0 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
  25. +33 −0 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
  26. +28 −4 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
  27. +8 −9 ...common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
  28. +230 −12 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
  29. +6 −0 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicy.java
  30. +7 −4 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java
  31. +83 −39 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
  32. +19 −9 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
  33. +11 −8 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
  34. +3 −2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java
  35. +3 −1 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
  36. +7 −1 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
  37. +142 −29 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
  38. +398 −0 ...ommon-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
  39. +20 −4 .../src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
  40. +11 −2 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
  41. +0 −42 hadoop-common-project/hadoop-common/src/main/native/.autom4te.cfg
  42. +0 −66 hadoop-common-project/hadoop-common/src/main/native/Makefile.am
  43. +0 −28 hadoop-common-project/hadoop-common/src/main/native/acinclude.m4
  44. +0 −130 hadoop-common-project/hadoop-common/src/main/native/configure.ac
  45. +0 −47 hadoop-common-project/hadoop-common/src/main/native/lib/Makefile.am
  46. +1 −4 ...ommon-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c
  47. +1 −4 ...mon-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c
  48. +5 −31 ...project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
  49. +5 −31 ...oject/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
  50. +6 −35 ...n/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h
  51. +0 −53 ...p-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am
  52. +5 −27 ...mon-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
  53. +5 −27 ...n-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
  54. +6 −33 ...ommon/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h
  55. +1 −3 hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
  56. +1 −3 hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
  57. +3 −14 hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h
  58. +1 −1 hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-env.sh
  59. +9 −3 hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
  60. +20 −0 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
  61. +13 −1 ...mmon-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
  62. +43 −1 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java
  63. +9 −9 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
  64. +18 −12 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
  65. +43 −0 .../hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
  66. +38 −15 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java
  67. +86 −2 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestJarFinder.java
  68. +6 −0 hadoop-dist/pom.xml
  69. +82 −67 ...s-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
  70. +3 −4 ...doop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
  71. +6 −0 ...ct/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java
  72. +398 −0 ...t/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
  73. +0 −551 ...-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParams.java
  74. +325 −359 ...-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
  75. +3 −4 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java
  76. +2 −2 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java
  77. +2 −2 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
  78. +2 −2 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java
  79. +2 −2 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java
  80. +15 −8 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java
  81. +27 −28 ...apache/hadoop/lib/wsrs/TestEnumParam.java → main/java/org/apache/hadoop/lib/wsrs/Parameters.java}
  82. +107 −0 ...-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
  83. +2 −2 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java
  84. +9 −13 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java
  85. +1 −0 ...oject/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java
  86. +9 −9 ...-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java
  87. +0 −50 ...op-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestBooleanParam.java
  88. +0 −53 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestByteParam.java
  89. +0 −52 ...op-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestIntegerParam.java
  90. +0 −47 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestLongParam.java
  91. +120 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestParam.java
  92. +0 −53 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestShortParam.java
  93. +0 −64 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestStringParam.java
  94. +22 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/httpfs-log4j.properties
  95. +170 −0 hadoop-hdfs-project/hadoop-hdfs-raid/pom.xml
  96. 0 ...mapreduce-project/src/contrib/raid → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/conf/raid.xml
  97. 0 ...dfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/hdfs/DistributedRaidFileSystem.java
  98. 0 ...src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/hdfs/RaidDFSUtil.java
  99. 0 ...s-raid/src/main}/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyRaid.java
  100. +171 −123 ...oject/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java
  101. +1 −1 ...ject/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java
  102. +13 −4 .../src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/BlockFixer.java
  103. 0 ...c → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/ConfigManager.java
  104. +2 −2 ...aid/src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/Decoder.java
  105. +12 −4 ...adoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/DirectoryTraversal.java
  106. 0 ... → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/DistBlockFixer.java
  107. +1 −0 ...id/src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/DistRaid.java
  108. +4 −4 ...rc → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/DistRaidNode.java
  109. +0 −1 ...aid/src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/Encoder.java
  110. 0 ...src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/ErasureCode.java
  111. +46 −1 ...src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/GaloisField.java
  112. 0 ...id/src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/HarIndex.java
  113. +14 −1 .../src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/JobMonitor.java
  114. 0 ...→ hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/LocalBlockFixer.java
  115. 0 ...c → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/LocalRaidNode.java
  116. 0 ...hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/ParityInputStream.java
  117. +1 −1 ...fs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/RaidConfigurationException.java
  118. 0 .../src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/RaidFilter.java
  119. +9 −2 ...id/src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/RaidNode.java
  120. +17 −3 ...d/src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/RaidShell.java
  121. 0 ...d/src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/RaidUtils.java
  122. +77 −0 ...→ hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/ReedSolomonCode.java
  123. 0 ...adoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/ReedSolomonDecoder.java
  124. 0 ...adoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/ReedSolomonEncoder.java
  125. 0 .../src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/XORDecoder.java
  126. 0 .../src → hadoop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/XOREncoder.java
  127. 0 ...doop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/protocol/PolicyInfo.java
  128. 0 ...doop-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/protocol/PolicyList.java
  129. 0 ...op-hdfs-project/hadoop-hdfs-raid/src/main}/java/org/apache/hadoop/raid/protocol/RaidProtocol.java
  130. 0 ...c/contrib/raid/bin → hadoop-hdfs-project/hadoop-hdfs-raid/src/main/sbin}/start-raidnode-remote.sh
  131. 0 ...ject/src/contrib/raid/bin → hadoop-hdfs-project/hadoop-hdfs-raid/src/main/sbin}/start-raidnode.sh
  132. 0 ...rc/contrib/raid/bin → hadoop-hdfs-project/hadoop-hdfs-raid/src/main/sbin}/stop-raidnode-remote.sh
  133. 0 ...oject/src/contrib/raid/bin → hadoop-hdfs-project/hadoop-hdfs-raid/src/main/sbin}/stop-raidnode.sh
  134. +3 −2 ...est → hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/hdfs/TestRaidDfs.java
  135. 0 ...id/src/test/java}/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockPlacementPolicyRaid.java
  136. 0 .../hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidTestUtil.java
  137. +30 −4 ... → hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestBlockFixer.java
  138. 0 ...fs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestBlockFixerBlockFixDist.java
  139. 0 ...project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestBlockFixerDistConcurrency.java
  140. 0 ...ject/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestBlockFixerGeneratedBlockDist.java
  141. 0 ...ject/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestBlockFixerParityBlockFixDist.java
  142. +2 −2 ...p-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestDirectoryTraversal.java
  143. +51 −0 ... hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestErasureCodes.java
  144. 0 ...→ hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestGaloisField.java
  145. 0 ...adoop-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestHarIndexParser.java
  146. +1 −1 ... → hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestRaidFilter.java
  147. +20 −2 ...est → hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestRaidHar.java
  148. +68 −32 ...st → hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestRaidNode.java
  149. +22 −12 ...t → hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestRaidPurge.java
  150. +3 −2 ...t → hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestRaidShell.java
  151. +4 −4 ...hadoop-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestRaidShellFsck.java
  152. +1 −1 ...p-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestReedSolomonDecoder.java
  153. +1 −1 ...p-hdfs-project/hadoop-hdfs-raid/src/test/java}/org/apache/hadoop/raid/TestReedSolomonEncoder.java
  154. +112 −31 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
  155. +17 −80 hadoop-hdfs-project/hadoop-hdfs/pom.xml
  156. +126 −0 hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt
  157. +6 −0 hadoop-hdfs-project/hadoop-hdfs/src/config.h.cmake
  158. +53 −1 ...contrib/bkjournal/src/main/java/org/apache/hadoop/contrib/bkjournal/BookKeeperJournalManager.java
  159. +160 −0 ...trib/bkjournal/src/test/java/org/apache/hadoop/contrib/bkjournal/TestBookKeeperConfiguration.java
  160. +0 −27 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/Makefile.am
  161. +0 −270 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/acinclude.m4
  162. +0 −82 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/configure.ac
  163. +0 −164 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/pom.xml
  164. +73 −0 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/CMakeLists.txt
  165. +0 −22 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/Makefile.am
  166. +2 −6 hadoop-hdfs-project/hadoop-hdfs/src/contrib/fuse-dfs/src/fuse_dfs.h
  167. +1 −1 hadoop-hdfs-project/hadoop-hdfs/src/main/docs/src/documentation/content/xdocs/webhdfs.xml
  168. +6 −2 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
  169. +1 −1 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
  170. +1 −1 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java
  171. +107 −6 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java
  172. +87 −0 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/client/HdfsUtils.java
  173. +2 −2 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
  174. +3 −2 ...fs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfo.java
  175. +12 −40 ...project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
  176. +77 −0 ...hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicy.java
  177. +26 −13 ...hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyDefault.java
  178. +25 −3 ...ject/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
  179. +12 −0 ...roject/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/Host2NodesMap.java
  180. +16 −1 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Util.java
  181. +16 −0 ...fs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
  182. +4 −0 ...fs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
  183. +2 −1 ...ject/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java
  184. +2 −2 ...ect/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileOutputStream.java
  185. +1 −1 ...op-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
  186. +0 −12 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
  187. +8 −1 ...dfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
  188. +5 −2 ...op-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
  189. +253 −33 ...p-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
  190. +1 −8 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
  191. +4 −3 .../hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFileUnderConstruction.java
  192. +9 −6 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
  193. +3 −17 ...-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
  194. +1 −1 ...p-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
  195. +276 −0 ...hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
  196. +2 −17 ...s-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
  197. +5 −0 ...-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StreamLimiter.java
  198. +7 −1 ...dfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java
  199. +35 −14 ...fs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
  200. +6 −1 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
  201. +31 −16 ...-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsBinaryLoader.java
  202. +6 −6 ...hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsLoader.java
  203. +53 −18 ...hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsViewer.java
  204. +6 −10 ...oop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
  205. +7 −5 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/DataTransferThrottler.java
  206. +21 −5 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
  207. +4 −0 ...roject/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/InetSocketAddressParam.java
  208. +2 −2 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LongParam.java
  209. +0 −42 hadoop-hdfs-project/hadoop-hdfs/src/main/native/Makefile.am
  210. +0 −125 hadoop-hdfs-project/hadoop-hdfs/src/main/native/configure.ac
  211. +0 −41 hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apfunctions.m4
  212. +0 −142 hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apjava.m4
  213. +0 −168 hadoop-hdfs-project/hadoop-hdfs/src/main/native/m4/apsupport.m4
  214. +52 −29 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
  215. +175 −10 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java
  216. +2 −2 ...hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileLengthOnClusterRestart.java
  217. +68 −0 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestParallelLocalRead.java
  218. +10 −15 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPersistBlocks.java
  219. +55 −4 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
  220. +75 −0 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestSafeMode.java
  221. +6 −9 ...ect/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockManager.java
  222. +48 −2 ...adoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReplicationPolicy.java
  223. +9 −3 ...roject/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockReplacement.java
  224. +2 −2 ...hdfs/server/datanode/{TestMulitipleNNDataBlockScanner.java → TestMultipleNNDataBlockScanner.java}
  225. +1 −1 ...dfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NameNodeAdapter.java
  226. +162 −0 ...-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java
  227. +127 −12 ...op-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java
  228. +10 −0 ...hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLogFileOutputStream.java
  229. +52 −2 ...project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSEditLogLoader.java
  230. +16 −15 ...ject/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFileJournalManager.java
  231. +2 −1 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
  232. +5 −4 ...roject/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeRecovery.java
  233. +3 −0 ...op-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartup.java
  234. +28 −2 ...doop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestEditLogsDuringFailover.java
  235. +12 −0 ...t/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestFailureOfSharedDir.java
  236. +140 −0 ...s/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java
  237. +54 −10 ...op-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/TestOfflineEditsViewer.java
  238. +47 −0 ...s-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
  239. +8 −2 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java
  240. +165 −0 ...dfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
  241. +1 −1 hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
  242. +1 −1 hadoop-hdfs-project/pom.xml
  243. +55 −1 hadoop-mapreduce-project/CHANGES.txt
  244. +6 −0 ...hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/TaskAttempt.java
  245. +5 −1 ...mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobEventType.java
  246. +40 −0 ...e-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobUpdatedNodesEvent.java
  247. +37 −0 ...e-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/TaskAttemptKillEvent.java
  248. +95 −15 ...adoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
  249. +65 −13 ...preduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
  250. +56 −11 ...doop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
  251. +85 −28 ...apreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
  252. +0 −3 ...op-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersPage.java
  253. +0 −3 .../hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobPage.java
  254. +0 −4 ...hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
  255. +6 −0 ...client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
  256. +178 −4 ...lient/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java
  257. +147 −27 ...preduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java
  258. +6 −0 ...-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java
  259. +51 −8 ...-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java
  260. +12 −6 ...lient/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
  261. +20 −16 ...op-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java
  262. +2 −3 ...t/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
  263. +1 −80 ...-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
  264. +18 −8 ...nt/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmissionFiles.java
  265. +5 −2 ...-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
  266. +23 −15 ...mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleScheduler.java
  267. +67 −0 ...educe-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestShuffleScheduler.java
  268. +6 −0 ...oop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java
  269. +66 −1 ...op-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java
  270. +6 −2 ...oject/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeState.java
  271. +4 −7 ...edshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
  272. +4 −13 ...s-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java
  273. +3 −1 ...ct/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java
  274. +23 −41 hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
  275. +69 −0 ...apreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/CMakeLists.txt
  276. +6 −0 ...apreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/config.h.cmake
  277. +0 −3 ...r-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java
  278. +0 −3 ...ver-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java
  279. +0 −3 ...n-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java
  280. +0 −42 ...adoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.autom4te.cfg
  281. +0 −1 ...ver/hadoop-yarn-server-nodemanager/src/main/native/container-executor/.deps/container-executor.Po
  282. +0 −32 .../hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/Makefile.am
  283. +0 −54 ...hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/configure.ac
  284. +2 −3 .../hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c
  285. +3 −3 ...-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
  286. +14 −14 .../apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
  287. +15 −3 ...ger/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java
  288. +1 −1 ...r/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/ParentQueue.java
  289. +0 −2 ...r-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppPage.java
  290. +16 −16 .../java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java
  291. +6 −6 ...src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestLeafQueue.java
  292. +17 −10 ...c/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestParentQueue.java
  293. +2 −2 ...est/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesCapacitySched.java
  294. +2 −2 hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/CapacityScheduler.apt.vm
  295. +152 −1 hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/HDFSHighAvailability.apt.vm
  296. +0 −42 hadoop-mapreduce-project/src/c++/pipes/.autom4te.cfg
  297. +0 −31 hadoop-mapreduce-project/src/c++/pipes/Makefile.am
  298. +0 −57 hadoop-mapreduce-project/src/c++/pipes/configure.ac
  299. +0 −42 hadoop-mapreduce-project/src/c++/utils/.autom4te.cfg
  300. +0 −33 hadoop-mapreduce-project/src/c++/utils/Makefile.am
Sorry, we could not display the entire diff because too many files (353) changed.
View
4 dev-support/test-patch.sh
@@ -418,8 +418,8 @@ checkJavacWarnings () {
echo "======================================================================"
echo ""
echo ""
- echo "$MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
- $MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1
+ echo "$MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess -Pnative -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
+ $MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess -Pnative -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1
if [[ $? != 0 ]] ; then
JIRA_COMMENT="$JIRA_COMMENT
View
60 hadoop-assemblies/src/main/resources/assemblies/hadoop-raid-dist.xml
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<assembly>
+ <id>hadoop-raid-dist</id>
+ <formats>
+ <format>dir</format>
+ </formats>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <fileSets>
+ <!-- Configuration files -->
+ <fileSet>
+ <directory>${basedir}/src/main/conf</directory>
+ <outputDirectory>/etc/hadoop</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/src/main/sbin</directory>
+ <outputDirectory>/sbin</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/src/main/libexec</directory>
+ <outputDirectory>/libexec</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <!-- Documentation -->
+ <fileSet>
+ <directory>${project.build.directory}/site</directory>
+ <outputDirectory>/share/doc/hadoop/raid</outputDirectory>
+ </fileSet>
+ </fileSets>
+ <dependencySets>
+ <dependencySet>
+ <outputDirectory>/share/hadoop/${hadoop.component}/lib</outputDirectory>
+ <unpack>false</unpack>
+ <scope>runtime</scope>
+ <useProjectArtifact>true</useProjectArtifact>
+ </dependencySet>
+ </dependencySets>
+</assembly>
View
67 hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
@@ -0,0 +1,67 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+ <id>hadoop-tools</id>
+ <formats>
+ <format>dir</format>
+ </formats>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <fileSets>
+ <fileSet>
+ <directory>../hadoop-pipes/src/main/native/pipes/api/hadoop</directory>
+ <includes>
+ <include>*.hh</include>
+ </includes>
+ <outputDirectory>/include</outputDirectory>
+ </fileSet>
+ <fileSet>
+ <directory>../hadoop-pipes/src/main/native/utils/api/hadoop</directory>
+ <includes>
+ <include>*.hh</include>
+ </includes>
+ <outputDirectory>/include</outputDirectory>
+ </fileSet>
+ <fileSet>
+ <directory>../hadoop-pipes/target/native</directory>
+ <includes>
+ <include>*.a</include>
+ </includes>
+ <outputDirectory>lib/native</outputDirectory>
+ </fileSet>
+ </fileSets>
+ <dependencySets>
+ <dependencySet>
+ <outputDirectory>/share/hadoop/${hadoop.component}/lib</outputDirectory>
+ <unpack>false</unpack>
+ <scope>runtime</scope>
+ <useProjectArtifact>false</useProjectArtifact>
+ <!-- Exclude hadoop artifacts. They will be found via HADOOP* env -->
+ <excludes>
+ <exclude>org.apache.hadoop:hadoop-common</exclude>
+ <exclude>org.apache.hadoop:hadoop-hdfs</exclude>
+ <exclude>org.apache.hadoop:hadoop-mapreduce</exclude>
 + <!-- pipes is native stuff, this just keeps pom from being packaged-->
+ <exclude>org.apache.hadoop:hadoop-pipes</exclude>
+ <!-- use slf4j from common to avoid multiple binding warnings -->
+ <exclude>org.slf4j:slf4j-api</exclude>
+ <exclude>org.slf4j:slf4j-log4j12</exclude>
+ </excludes>
+ </dependencySet>
+ </dependencySets>
+</assembly>
View
18 .../hadoop-annotations/src/main/java/org/apache/hadoop/classification/InterfaceAudience.java
@@ -21,6 +21,24 @@
/**
* Annotation to inform users of a package, class or method's intended audience.
+ * Currently the audience can be {@link Public}, {@link LimitedPrivate} or
+ * {@link Private}. <br>
+ * All public classes must have InterfaceAudience annotation. <br>
+ * <ul>
+ * <li>Public classes that are not marked with this annotation must be
+ * considered by default as {@link Private}.</li>
+ *
+ * <li>External applications must only use classes that are marked
+ * {@link Public}. Avoid using non public classes as these classes
+ * could be removed or change in incompatible ways.</li>
+ *
+ * <li>Hadoop projects must only use classes that are marked
+ * {@link LimitedPrivate} or {@link Public}</li>
+ *
 * <li> Methods may have a different annotation that is more restrictive
+ * compared to the audience classification of the class. Example: A class
+ * might be {@link Public}, but a method may be {@link LimitedPrivate}
+ * </li></ul>
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
View
13 ...hadoop-annotations/src/main/java/org/apache/hadoop/classification/InterfaceStability.java
@@ -19,9 +19,20 @@
import java.lang.annotation.Documented;
+import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+
/**
* Annotation to inform users of how much to rely on a particular package,
- * class or method not changing over time.
+ * class or method not changing over time. Currently the stability can be
+ * {@link Stable}, {@link Evolving} or {@link Unstable}. <br>
+ *
+ * <ul><li>All classes that are annotated with {@link Public} or
+ * {@link LimitedPrivate} must have InterfaceStability annotation. </li>
+ * <li>Classes that are {@link Private} are to be considered unstable unless
+ * a different InterfaceStability annotation states otherwise.</li>
+ * <li>Incompatible changes must not be made to classes marked as stable.</li>
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
View
1 ...auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java
@@ -266,6 +266,7 @@ public static void extractToken(HttpURLConnection conn, Token token) throws IOEx
}
}
} else {
+ token.set(null);
throw new AuthenticationException("Authentication failed, status: " + conn.getResponseCode() +
", message: " + conn.getResponseMessage());
}
View
71 .../src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -341,45 +341,50 @@ public void doFilter(ServletRequest request, ServletResponse response, FilterCha
LOG.warn("AuthenticationToken ignored: " + ex.getMessage());
token = null;
}
- if (token == null) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Request [{}] triggering authentication", getRequestURL(httpRequest));
- }
- token = authHandler.authenticate(httpRequest, httpResponse);
- if (token != null && token != AuthenticationToken.ANONYMOUS) {
- token.setExpires(System.currentTimeMillis() + getValidity() * 1000);
- }
- newToken = true;
- }
- if (token != null) {
- unauthorizedResponse = false;
- if (LOG.isDebugEnabled()) {
- LOG.debug("Request [{}] user [{}] authenticated", getRequestURL(httpRequest), token.getUserName());
+ if (authHandler.managementOperation(token, httpRequest, httpResponse)) {
+ if (token == null) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Request [{}] triggering authentication", getRequestURL(httpRequest));
+ }
+ token = authHandler.authenticate(httpRequest, httpResponse);
+ if (token != null && token.getExpires() != 0 &&
+ token != AuthenticationToken.ANONYMOUS) {
+ token.setExpires(System.currentTimeMillis() + getValidity() * 1000);
+ }
+ newToken = true;
}
- final AuthenticationToken authToken = token;
- httpRequest = new HttpServletRequestWrapper(httpRequest) {
-
- @Override
- public String getAuthType() {
- return authToken.getType();
+ if (token != null) {
+ unauthorizedResponse = false;
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Request [{}] user [{}] authenticated", getRequestURL(httpRequest), token.getUserName());
}
+ final AuthenticationToken authToken = token;
+ httpRequest = new HttpServletRequestWrapper(httpRequest) {
- @Override
- public String getRemoteUser() {
- return authToken.getUserName();
- }
+ @Override
+ public String getAuthType() {
+ return authToken.getType();
+ }
- @Override
- public Principal getUserPrincipal() {
- return (authToken != AuthenticationToken.ANONYMOUS) ? authToken : null;
+ @Override
+ public String getRemoteUser() {
+ return authToken.getUserName();
+ }
+
+ @Override
+ public Principal getUserPrincipal() {
+ return (authToken != AuthenticationToken.ANONYMOUS) ? authToken : null;
+ }
+ };
+ if (newToken && !token.isExpired() && token != AuthenticationToken.ANONYMOUS) {
+ String signedToken = signer.sign(token.toString());
+ Cookie cookie = createCookie(signedToken);
+ httpResponse.addCookie(cookie);
}
- };
- if (newToken && token != AuthenticationToken.ANONYMOUS) {
- String signedToken = signer.sign(token.toString());
- Cookie cookie = createCookie(signedToken);
- httpResponse.addCookie(cookie);
+ filterChain.doFilter(httpRequest, httpResponse);
}
- filterChain.doFilter(httpRequest, httpResponse);
+ } else {
+ unauthorizedResponse = false;
}
} catch (AuthenticationException ex) {
unauthorizedMsg = ex.toString();
View
28 ...src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationHandler.java
@@ -59,6 +59,34 @@
public void destroy();
/**
+ * Performs an authentication management operation.
+ * <p/>
+ * This is useful for handling operations like get/renew/cancel
+ * delegation tokens which are being handled as operations of the
+ * service end-point.
+ * <p/>
+ * If the method returns <code>TRUE</code> the request will continue normal
+ * processing, this means the method has not produced any HTTP response.
+ * <p/>
+ * If the method returns <code>FALSE</code> the request will end, this means
+ * the method has produced the corresponding HTTP response.
+ *
+ * @param token the authentication token if any, otherwise <code>NULL</code>.
+ * @param request the HTTP client request.
+ * @param response the HTTP client response.
+ * @return <code>TRUE</code> if the request should be processed as a regular
+ * request,
+ * <code>FALSE</code> otherwise.
+ *
+ * @throws IOException thrown if an IO error occurred.
+ * @throws AuthenticationException thrown if an Authentication error occurred.
+ */
+ public boolean managementOperation(AuthenticationToken token,
+ HttpServletRequest request,
+ HttpServletResponse response)
+ throws IOException, AuthenticationException;
+
+ /**
* Performs an authentication step for the given HTTP client request.
* <p/>
* This method is invoked by the {@link AuthenticationFilter} only if the HTTP client request is
View
10 ...h/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationToken.java
@@ -115,10 +115,10 @@ public void setExpires(long expires) {
*/
private void generateToken() {
StringBuffer sb = new StringBuffer();
- sb.append(USER_NAME).append("=").append(userName).append(ATTR_SEPARATOR);
- sb.append(PRINCIPAL).append("=").append(principal).append(ATTR_SEPARATOR);
- sb.append(TYPE).append("=").append(type).append(ATTR_SEPARATOR);
- sb.append(EXPIRES).append("=").append(expires);
+ sb.append(USER_NAME).append("=").append(getUserName()).append(ATTR_SEPARATOR);
+ sb.append(PRINCIPAL).append("=").append(getName()).append(ATTR_SEPARATOR);
+ sb.append(TYPE).append("=").append(getType()).append(ATTR_SEPARATOR);
+ sb.append(EXPIRES).append("=").append(getExpires());
token = sb.toString();
}
@@ -165,7 +165,7 @@ public long getExpires() {
* @return if the token has expired.
*/
public boolean isExpired() {
- return expires != -1 && System.currentTimeMillis() > expires;
+ return getExpires() != -1 && System.currentTimeMillis() > getExpires();
}
/**
View
21 .../java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
@@ -233,6 +233,27 @@ protected String getKeytab() {
}
/**
+ * This is an empty implementation, it always returns <code>TRUE</code>.
+ *
+ *
+ *
+ * @param token the authentication token if any, otherwise <code>NULL</code>.
+ * @param request the HTTP client request.
+ * @param response the HTTP client response.
+ *
+ * @return <code>TRUE</code>
+ * @throws IOException it is never thrown.
+ * @throws AuthenticationException it is never thrown.
+ */
+ @Override
+ public boolean managementOperation(AuthenticationToken token,
+ HttpServletRequest request,
+ HttpServletResponse response)
+ throws IOException, AuthenticationException {
+ return true;
+ }
+
+ /**
 * It enforces the Kerberos SPNEGO authentication sequence returning an {@link AuthenticationToken} only
* after the Kerberos SPNEGO sequence has completed successfully.
* <p/>
View
21 ...in/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
@@ -94,6 +94,27 @@ public String getType() {
}
/**
+ * This is an empty implementation, it always returns <code>TRUE</code>.
+ *
+ *
+ *
+ * @param token the authentication token if any, otherwise <code>NULL</code>.
+ * @param request the HTTP client request.
+ * @param response the HTTP client response.
+ *
+ * @return <code>TRUE</code>
+ * @throws IOException it is never thrown.
+ * @throws AuthenticationException it is never thrown.
+ */
+ @Override
+ public boolean managementOperation(AuthenticationToken token,
+ HttpServletRequest request,
+ HttpServletResponse response)
+ throws IOException, AuthenticationException {
+ return true;
+ }
+
+ /**
* Authenticates an HTTP client request.
* <p/>
* It extracts the {@link PseudoAuthenticator#USER_NAME} parameter from the query string and creates
View
6 .../src/test/java/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java
@@ -13,6 +13,7 @@
*/
package org.apache.hadoop.security.authentication.client;
+import junit.framework.Assert;
import junit.framework.TestCase;
import org.mockito.Mockito;
@@ -100,11 +101,14 @@ public void testExtractTokenFail() throws Exception {
headers.put("Set-Cookie", cookies);
Mockito.when(conn.getHeaderFields()).thenReturn(headers);
+ AuthenticatedURL.Token token = new AuthenticatedURL.Token();
+ token.set("bar");
try {
- AuthenticatedURL.extractToken(conn, new AuthenticatedURL.Token());
+ AuthenticatedURL.extractToken(conn, token);
fail();
} catch (AuthenticationException ex) {
// Expected
+ Assert.assertFalse(token.isSet());
} catch (Exception ex) {
fail();
}
View
192 .../test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
@@ -71,7 +71,9 @@ public void testInitEmpty() throws Exception {
public static class DummyAuthenticationHandler implements AuthenticationHandler {
public static boolean init;
+ public static boolean managementOperationReturn;
public static boolean destroy;
+ public static boolean expired;
public static final String TYPE = "dummy";
@@ -83,6 +85,20 @@ public static void reset() {
@Override
public void init(Properties config) throws ServletException {
init = true;
+ managementOperationReturn =
+ config.getProperty("management.operation.return", "true").equals("true");
+ expired = config.getProperty("expired.token", "false").equals("true");
+ }
+
+ @Override
+ public boolean managementOperation(AuthenticationToken token,
+ HttpServletRequest request,
+ HttpServletResponse response)
+ throws IOException, AuthenticationException {
+ if (!managementOperationReturn) {
+ response.setStatus(HttpServletResponse.SC_ACCEPTED);
+ }
+ return managementOperationReturn;
}
@Override
@@ -102,7 +118,7 @@ public AuthenticationToken authenticate(HttpServletRequest request, HttpServletR
String param = request.getParameter("authenticated");
if (param != null && param.equals("true")) {
token = new AuthenticationToken("u", "p", "t");
- token.setExpires(System.currentTimeMillis() + 1000);
+ token.setExpires((expired) ? 0 : System.currentTimeMillis() + 1000);
} else {
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
}
@@ -170,10 +186,14 @@ public void testInit() throws Exception {
filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
+ Mockito.when(config.getInitParameter("management.operation.return")).
+ thenReturn("true");
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
- new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
+ new Vector<String>(
+ Arrays.asList(AuthenticationFilter.AUTH_TYPE,
+ "management.operation.return")).elements());
filter.init(config);
assertTrue(DummyAuthenticationHandler.init);
} finally {
@@ -201,10 +221,14 @@ public void testGetRequestURL() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
+ Mockito.when(config.getInitParameter("management.operation.return")).
+ thenReturn("true");
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
- new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
+ new Vector<String>(
+ Arrays.asList(AuthenticationFilter.AUTH_TYPE,
+ "management.operation.return")).elements());
filter.init(config);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
@@ -221,12 +245,16 @@ public void testGetToken() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
+ Mockito.when(config.getInitParameter("management.operation.return")).
+ thenReturn("true");
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
Mockito.when(config.getInitParameterNames()).thenReturn(
- new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
- AuthenticationFilter.SIGNATURE_SECRET)).elements());
+ new Vector<String>(
+ Arrays.asList(AuthenticationFilter.AUTH_TYPE,
+ AuthenticationFilter.SIGNATURE_SECRET,
+ "management.operation.return")).elements());
filter.init(config);
AuthenticationToken token = new AuthenticationToken("u", "p", DummyAuthenticationHandler.TYPE);
@@ -250,12 +278,15 @@ public void testGetTokenExpired() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
+ Mockito.when(config.getInitParameter("management.operation.return")).thenReturn("true");
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
Mockito.when(config.getInitParameterNames()).thenReturn(
- new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
- AuthenticationFilter.SIGNATURE_SECRET)).elements());
+ new Vector<String>(
+ Arrays.asList(AuthenticationFilter.AUTH_TYPE,
+ AuthenticationFilter.SIGNATURE_SECRET,
+ "management.operation.return")).elements());
filter.init(config);
AuthenticationToken token = new AuthenticationToken("u", "p", "invalidtype");
@@ -284,12 +315,16 @@ public void testGetTokenInvalidType() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
+ Mockito.when(config.getInitParameter("management.operation.return")).
+ thenReturn("true");
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
Mockito.when(config.getInitParameterNames()).thenReturn(
- new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
- AuthenticationFilter.SIGNATURE_SECRET)).elements());
+ new Vector<String>(
+ Arrays.asList(AuthenticationFilter.AUTH_TYPE,
+ AuthenticationFilter.SIGNATURE_SECRET,
+ "management.operation.return")).elements());
filter.init(config);
AuthenticationToken token = new AuthenticationToken("u", "p", "invalidtype");
@@ -318,10 +353,14 @@ public void testDoFilterNotAuthenticated() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
+ Mockito.when(config.getInitParameter("management.operation.return")).
+ thenReturn("true");
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
- new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
+ new Vector<String>(
+ Arrays.asList(AuthenticationFilter.AUTH_TYPE,
+ "management.operation.return")).elements());
filter.init(config);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
@@ -349,18 +388,26 @@ public Object answer(InvocationOnMock invocation) throws Throwable {
}
}
- private void _testDoFilterAuthentication(boolean withDomainPath, boolean invalidToken) throws Exception {
+ private void _testDoFilterAuthentication(boolean withDomainPath,
+ boolean invalidToken,
+ boolean expired) throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
+ Mockito.when(config.getInitParameter("management.operation.return")).
+ thenReturn("true");
+ Mockito.when(config.getInitParameter("expired.token")).
+ thenReturn(Boolean.toString(expired));
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TOKEN_VALIDITY)).thenReturn("1000");
Mockito.when(config.getInitParameter(AuthenticationFilter.SIGNATURE_SECRET)).thenReturn("secret");
Mockito.when(config.getInitParameterNames()).thenReturn(
new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
AuthenticationFilter.AUTH_TOKEN_VALIDITY,
- AuthenticationFilter.SIGNATURE_SECRET)).elements());
+ AuthenticationFilter.SIGNATURE_SECRET,
+ "management.operation.return",
+ "expired.token")).elements());
if (withDomainPath) {
Mockito.when(config.getInitParameter(AuthenticationFilter.COOKIE_DOMAIN)).thenReturn(".foo.com");
@@ -370,7 +417,8 @@ private void _testDoFilterAuthentication(boolean withDomainPath, boolean invalid
AuthenticationFilter.AUTH_TOKEN_VALIDITY,
AuthenticationFilter.SIGNATURE_SECRET,
AuthenticationFilter.COOKIE_DOMAIN,
- AuthenticationFilter.COOKIE_PATH)).elements());
+ AuthenticationFilter.COOKIE_PATH,
+ "management.operation.return")).elements());
}
filter.init(config);
@@ -416,52 +464,66 @@ public Object answer(InvocationOnMock invocation) throws Throwable {
filter.doFilter(request, response, chain);
- assertNotNull(setCookie[0]);
- assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
- assertTrue(setCookie[0].getValue().contains("u="));
- assertTrue(setCookie[0].getValue().contains("p="));
- assertTrue(setCookie[0].getValue().contains("t="));
- assertTrue(setCookie[0].getValue().contains("e="));
- assertTrue(setCookie[0].getValue().contains("s="));
- assertTrue(calledDoFilter[0]);
-
- Signer signer = new Signer("secret".getBytes());
- String value = signer.verifyAndExtract(setCookie[0].getValue());
- AuthenticationToken token = AuthenticationToken.parse(value);
- assertEquals(System.currentTimeMillis() + 1000 * 1000, token.getExpires(), 100);
-
- if (withDomainPath) {
- assertEquals(".foo.com", setCookie[0].getDomain());
- assertEquals("/bar", setCookie[0].getPath());
+ if (expired) {
+ Mockito.verify(response, Mockito.never()).
+ addCookie(Mockito.any(Cookie.class));
} else {
- assertNull(setCookie[0].getDomain());
- assertNull(setCookie[0].getPath());
+ assertNotNull(setCookie[0]);
+ assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
+ assertTrue(setCookie[0].getValue().contains("u="));
+ assertTrue(setCookie[0].getValue().contains("p="));
+ assertTrue(setCookie[0].getValue().contains("t="));
+ assertTrue(setCookie[0].getValue().contains("e="));
+ assertTrue(setCookie[0].getValue().contains("s="));
+ assertTrue(calledDoFilter[0]);
+
+ Signer signer = new Signer("secret".getBytes());
+ String value = signer.verifyAndExtract(setCookie[0].getValue());
+ AuthenticationToken token = AuthenticationToken.parse(value);
+ assertEquals(System.currentTimeMillis() + 1000 * 1000,
+ token.getExpires(), 100);
+
+ if (withDomainPath) {
+ assertEquals(".foo.com", setCookie[0].getDomain());
+ assertEquals("/bar", setCookie[0].getPath());
+ } else {
+ assertNull(setCookie[0].getDomain());
+ assertNull(setCookie[0].getPath());
+ }
}
} finally {
filter.destroy();
}
}
public void testDoFilterAuthentication() throws Exception {
- _testDoFilterAuthentication(false, false);
+ _testDoFilterAuthentication(false, false, false);
+ }
+
+ public void testDoFilterAuthenticationImmediateExpiration() throws Exception {
+ _testDoFilterAuthentication(false, false, true);
}
public void testDoFilterAuthenticationWithInvalidToken() throws Exception {
- _testDoFilterAuthentication(false, true);
+ _testDoFilterAuthentication(false, true, false);
}
public void testDoFilterAuthenticationWithDomainPath() throws Exception {
- _testDoFilterAuthentication(true, false);
+ _testDoFilterAuthentication(true, false, false);
}
public void testDoFilterAuthenticated() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
+ Mockito.when(config.getInitParameter("management.operation.return")).
+ thenReturn("true");
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
- new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
+ new Vector<String>(
+ Arrays.asList(AuthenticationFilter.AUTH_TYPE,
+ "management.operation.return")).elements());
filter.init(config);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
@@ -503,10 +565,14 @@ public void testDoFilterAuthenticatedExpired() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
+ Mockito.when(config.getInitParameter("management.operation.return")).
+ thenReturn("true");
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
- new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
+ new Vector<String>(
+ Arrays.asList(AuthenticationFilter.AUTH_TYPE,
+ "management.operation.return")).elements());
filter.init(config);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
@@ -563,10 +629,14 @@ public void testDoFilterAuthenticatedInvalidType() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = Mockito.mock(FilterConfig.class);
+ Mockito.when(config.getInitParameter("management.operation.return")).
+ thenReturn("true");
Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn(
DummyAuthenticationHandler.class.getName());
Mockito.when(config.getInitParameterNames()).thenReturn(
- new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements());
+ new Vector<String>(
+ Arrays.asList(AuthenticationFilter.AUTH_TYPE,
+ "management.operation.return")).elements());
filter.init(config);
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
@@ -618,4 +688,50 @@ public Object answer(InvocationOnMock invocation) throws Throwable {
}
}
+ public void testManagementOperation() throws Exception {
+ AuthenticationFilter filter = new AuthenticationFilter();
+ try {
+ FilterConfig config = Mockito.mock(FilterConfig.class);
+ Mockito.when(config.getInitParameter("management.operation.return")).
+ thenReturn("false");
+ Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).
+ thenReturn(DummyAuthenticationHandler.class.getName());
+ Mockito.when(config.getInitParameterNames()).thenReturn(
+ new Vector<String>(
+ Arrays.asList(AuthenticationFilter.AUTH_TYPE,
+ "management.operation.return")).elements());
+ filter.init(config);
+
+ HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+ Mockito.when(request.getRequestURL()).
+ thenReturn(new StringBuffer("http://foo:8080/bar"));
+
+ HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+
+ FilterChain chain = Mockito.mock(FilterChain.class);
+
+ filter.doFilter(request, response, chain);
+ Mockito.verify(response).setStatus(HttpServletResponse.SC_ACCEPTED);
+ Mockito.verifyNoMoreInteractions(response);
+
+ Mockito.reset(request);
+ Mockito.reset(response);
+
+ AuthenticationToken token = new AuthenticationToken("u", "p", "t");
+ token.setExpires(System.currentTimeMillis() + 1000);
+ Signer signer = new Signer("secret".getBytes());
+ String tokenSigned = signer.sign(token.toString());
+ Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
+ Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});
+
+ filter.doFilter(request, response, chain);
+
+ Mockito.verify(response).setStatus(HttpServletResponse.SC_ACCEPTED);
+ Mockito.verifyNoMoreInteractions(response);
+
+ } finally {
+ filter.destroy();
+ }
+ }
+
}
View
121 hadoop-common-project/hadoop-common/CHANGES.txt
@@ -9,11 +9,11 @@ Trunk (unreleased changes)
NEW FEATURES
- HADOOP-8135. Add ByteBufferReadable interface to FSDataInputStream. (Henry
- Robinson via atm)
+ HADOOP-8469. Make NetworkTopology class pluggable. (Junping Du via
+ szetszwo)
- HDFS-3042. Automatic failover support for NameNode HA (todd)
- (see dedicated section below for breakdown of subtasks)
+ HADOOP-8470. Add NetworkTopologyWithNodeGroup, a 4-layer implementation
+ of NetworkTopology. (Junping Du via szetszwo)
IMPROVEMENTS
@@ -55,9 +55,6 @@ Trunk (unreleased changes)
HADOOP-7994. Remove getProtocolVersion and getProtocolSignature from the
client side translator and server side implementation. (jitendra)
- HADOOP-8244. Improve comments on ByteBufferReadable.read. (Henry Robinson
- via atm)
-
HADOOP-7757. Test file reference count is at least 3x actual value (Jon
Eagles via bobby)
@@ -82,6 +79,9 @@ Trunk (unreleased changes)
HADOOP-7659. fs -getmerge isn't guaranteed to work well over non-HDFS
filesystems (harsh)
+ HADOOP-8059. Add javadoc to InterfaceAudience and InterfaceStability.
+ (Brandon Li via suresh)
+
BUG FIXES
HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
@@ -165,39 +165,7 @@ Trunk (unreleased changes)
HADOOP-7761. Improve the performance of raw comparisons. (todd)
- BREAKDOWN OF HDFS-3042 SUBTASKS
-
- HADOOP-8220. ZKFailoverController doesn't handle failure to become active
- correctly (todd)
-
- HADOOP-8228. Auto HA: Refactor tests and add stress tests. (todd)
-
- HADOOP-8215. Security support for ZK Failover controller (todd)
-
- HADOOP-8245. Fix flakiness in TestZKFailoverController (todd)
-
- HADOOP-8257. TestZKFailoverControllerStress occasionally fails with Mockito
- error (todd)
-
- HADOOP-8260. Replace ClientBaseWithFixes with our own modified copy of the
- class (todd)
-
- HADOOP-8246. Auto-HA: automatically scope znode by nameservice ID (todd)
-
- HADOOP-8247. Add a config to enable auto-HA, which disables manual
- FailoverController (todd)
-
- HADOOP-8306. ZKFC: improve error message when ZK is not running. (todd)
-
- HADOOP-8279. Allow manual failover to be invoked when auto-failover is
- enabled. (todd)
-
- HADOOP-8276. Auto-HA: add config for java options to pass to zkfc daemon
- (todd via eli)
-
- HADOOP-8405. ZKFC tests leak ZK instances. (todd)
-
-Release 2.0.1-alpha - UNRELEASED
+Branch-2 ( Unreleased changes )
INCOMPATIBLE CHANGES
@@ -206,6 +174,17 @@ Release 2.0.1-alpha - UNRELEASED
NEW FEATURES
+ HDFS-3042. Automatic failover support for NameNode HA (todd)
+ (see dedicated section below for breakdown of subtasks)
+
+ HADOOP-8135. Add ByteBufferReadable interface to FSDataInputStream. (Henry
+ Robinson via atm)
+
+ HADOOP-8458. Add management hook to AuthenticationHandler to enable
+ delegation token operations support (tucu)
+
+ HADOOP-8465. hadoop-auth should support ephemeral authentication (tucu)
+
IMPROVEMENTS
HADOOP-8340. SNAPSHOT build versions should compare as less than their eventual
@@ -228,6 +207,14 @@ Release 2.0.1-alpha - UNRELEASED
HADOOP-8450. Remove src/test/system. (eli)
+ HADOOP-8244. Improve comments on ByteBufferReadable.read. (Henry Robinson
+ via atm)
+
+  HADOOP-8368. Use CMake rather than autotools to build native code (cmccabe via tucu)
+
+ HADOOP-8524. Allow users to get source of a Configuration
+ parameter (harsh)
+
BUG FIXES
HADOOP-8372. NetUtils.normalizeHostName() incorrectly handles hostname
@@ -267,6 +254,54 @@ Release 2.0.1-alpha - UNRELEASED
HADOOP-8481. update BUILDING.txt to talk about cmake rather than autotools.
(Colin Patrick McCabe via eli)
+ HADOOP-8485. Don't hardcode "Apache Hadoop 0.23" in the docs. (eli)
+
+ HADOOP-8488. test-patch.sh gives +1 even if the native build fails.
+ (Colin Patrick McCabe via eli)
+
+ HADOOP-8507. Avoid OOM while deserializing DelegationTokenIdentifer.
+ (Colin Patrick McCabe via eli)
+
+ HADOOP-8433. Don't set HADOOP_LOG_DIR in hadoop-env.sh.
+ (Brahma Reddy Battula via eli)
+
+ HADOOP-8509. JarFinder duplicate entry: META-INF/MANIFEST.MF exception (tucu)
+
+ HADOOP-8512. AuthenticatedURL should reset the Token when the server returns
+ other than OK on authentication (tucu)
+
+ BREAKDOWN OF HDFS-3042 SUBTASKS
+
+ HADOOP-8220. ZKFailoverController doesn't handle failure to become active
+ correctly (todd)
+
+ HADOOP-8228. Auto HA: Refactor tests and add stress tests. (todd)
+
+ HADOOP-8215. Security support for ZK Failover controller (todd)
+
+ HADOOP-8245. Fix flakiness in TestZKFailoverController (todd)
+
+ HADOOP-8257. TestZKFailoverControllerStress occasionally fails with Mockito
+ error (todd)
+
+ HADOOP-8260. Replace ClientBaseWithFixes with our own modified copy of the
+ class (todd)
+
+ HADOOP-8246. Auto-HA: automatically scope znode by nameservice ID (todd)
+
+ HADOOP-8247. Add a config to enable auto-HA, which disables manual
+ FailoverController (todd)
+
+ HADOOP-8306. ZKFC: improve error message when ZK is not running. (todd)
+
+ HADOOP-8279. Allow manual failover to be invoked when auto-failover is
+ enabled. (todd)
+
+ HADOOP-8276. Auto-HA: add config for java options to pass to zkfc daemon
+ (todd via eli)
+
+ HADOOP-8405. ZKFC tests leak ZK instances. (todd)
+
Release 2.0.0-alpha - 05-23-2012
INCOMPATIBLE CHANGES
@@ -696,6 +731,12 @@ Release 0.23.3 - UNRELEASED
HADOOP-8373. Port RPC.getServerAddress to 0.23 (Daryn Sharp via bobby)
+ HADOOP-8495. Update Netty to avoid leaking file descriptors during shuffle
+ (Jason Lowe via tgraves)
+
+ HADOOP-8129. ViewFileSystemTestSetup setupForViewFileSystem is erring
+ (Ahmed Radwan and Ravi Prakash via bobby)
+
Release 0.23.2 - UNRELEASED
INCOMPATIBLE CHANGES
View
103 hadoop-common-project/hadoop-common/pom.xml
@@ -536,32 +536,11 @@
<snappy.prefix>/usr/local</snappy.prefix>
<snappy.lib>${snappy.prefix}/lib</snappy.lib>
<snappy.include>${snappy.prefix}/include</snappy.include>
+ <runas.home></runas.home>
</properties>
<build>
<plugins>
<plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
- <executions>
- <execution>
- <id>compile</id>
- <phase>compile</phase>
- <goals>
- <goal>run</goal>
- </goals>
- <configuration>
- <target>
- <mkdir dir="${project.build.directory}/native/javah"/>
- <copy toDir="${project.build.directory}/native">
- <fileset dir="${basedir}/src/main/native"/>
- </copy>
- <mkdir dir="${project.build.directory}/native/m4"/>
- </target>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>native-maven-plugin</artifactId>
<executions>
@@ -590,73 +569,27 @@
</executions>
</plugin>
<plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>make-maven-plugin</artifactId>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
- <id>compile</id>
+ <id>make</id>
<phase>compile</phase>
- <goals>
- <goal>autoreconf</goal>
- <goal>configure</goal>
- <goal>make-install</goal>
- </goals>
+ <goals><goal>run</goal></goals>
+ <configuration>
+ <target>
+ <exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
+ <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
+ <env key="CFLAGS" value="-I${snappy.include}"/>
+ <env key="LDFLAGS" value="-L${snappy.lib}"/>
+ </exec>
+ <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
+ <arg line="VERBOSE=1"/>
+ </exec>
+ </target>
+ </configuration>
</execution>
</executions>
- <configuration>
- <!-- autoreconf settings -->
- <workDir>${project.build.directory}/native</workDir>
- <arguments>
- <argument>-i</argument>
- <argument>-f</argument>
- </arguments>
-
- <!-- configure settings -->
- <configureEnvironment>
- <property>
- <name>OS_NAME</name>
- <value>${os.name}</value>
- </property>
- <property>
- <name>OS_ARCH</name>
- <value>${os.arch}</value>
- </property>
- <property>
- <name>JVM_DATA_MODEL</name>
- <value>${sun.arch.data.model}</value>
- </property>
- </configureEnvironment>
- <configureOptions>
- <configureOption>CPPFLAGS=-I${snappy.include}</configureOption>
- <configureOption>LDFLAGS=-L${snappy.lib}</configureOption>
- </configureOptions>
- <configureWorkDir>${project.build.directory}/native</configureWorkDir>
- <prefix>/usr/local</prefix>
-
- <!-- make settings -->
- <installEnvironment>
- <property>
- <name>OS_NAME</name>
- <value>${os.name}</value>
- </property>
- <property>
- <name>OS_ARCH</name>
- <value>${os.arch}</value>
- </property>
- <property>
- <name>JVM_DATA_MODEL</name>
- <value>${sun.arch.data.model}</value>
- </property>
- <property>
- <name>HADOOP_NATIVE_SRCDIR</name>
- <value>${project.build.directory}/native</value>
- </property>
- </installEnvironment>
-
- <!-- configure & make settings -->
- <destDir>${project.build.directory}/native/target</destDir>
-
- </configuration>
</plugin>
</plugins>
</build>
@@ -700,7 +633,7 @@
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
- <id>compile</id>
+ <id>kdc</id>
<phase>compile</phase>
<goals>
<goal>run</goal>
View
126 hadoop-common-project/hadoop-common/src/CMakeLists.txt
@@ -0,0 +1,126 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
+
+# Default to release builds
+set(CMAKE_BUILD_TYPE Release)
+
+# If JVM_ARCH_DATA_MODEL is 32, compile all binaries as 32-bit.
+# This variable is set by maven.
+if (JVM_ARCH_DATA_MODEL EQUAL 32)
+ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m32")
+ set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} -m32")
+ if (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
+ set(CMAKE_SYSTEM_PROCESSOR "i686")
+ endif ()
+endif (JVM_ARCH_DATA_MODEL EQUAL 32)
+
+# Compile a library with both shared and static variants
+function(add_dual_library LIBNAME)
+ add_library(${LIBNAME} SHARED ${ARGN})
+ add_library(${LIBNAME}_static STATIC ${ARGN})
+ set_target_properties(${LIBNAME}_static PROPERTIES OUTPUT_NAME ${LIBNAME})
+endfunction(add_dual_library)
+
+# Link both a static and a dynamic target against some libraries
+function(target_link_dual_libraries LIBNAME)
+ target_link_libraries(${LIBNAME} ${ARGN})
+ target_link_libraries(${LIBNAME}_static ${ARGN})
+endfunction(target_link_dual_libraries)
+
+function(output_directory TGT DIR)
+ SET_TARGET_PROPERTIES(${TGT} PROPERTIES
+ RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
+ SET_TARGET_PROPERTIES(${TGT} PROPERTIES
+ ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
+ SET_TARGET_PROPERTIES(${TGT} PROPERTIES
+ LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
+endfunction(output_directory TGT DIR)
+
+function(dual_output_directory TGT DIR)
+ output_directory(${TGT} "${DIR}")
+ output_directory(${TGT}_static "${DIR}")
+endfunction(dual_output_directory TGT DIR)
+
+if (NOT GENERATED_JAVAH)
+ # Must identify where the generated headers have been placed
+ MESSAGE(FATAL_ERROR "You must set the cmake variable GENERATED_JAVAH")
+endif (NOT GENERATED_JAVAH)
+find_package(JNI REQUIRED)
+find_package(ZLIB REQUIRED)
+
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2")
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT -D_FILE_OFFSET_BITS=64")
+set(D main/native/src/org/apache/hadoop)
+
+GET_FILENAME_COMPONENT(HADOOP_ZLIB_LIBRARY ${ZLIB_LIBRARIES} NAME)
+
+INCLUDE(CheckFunctionExists)
+INCLUDE(CheckCSourceCompiles)
+CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
+CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
+
+find_library(SNAPPY_LIBRARY NAMES snappy PATHS)
+find_path(SNAPPY_INCLUDE_DIR NAMES snappy.h PATHS)
+if (SNAPPY_LIBRARY)
+ GET_FILENAME_COMPONENT(HADOOP_SNAPPY_LIBRARY ${SNAPPY_LIBRARY} NAME)
+ set(SNAPPY_SOURCE_FILES
+ "${D}/io/compress/snappy/SnappyCompressor.c"
+ "${D}/io/compress/snappy/SnappyDecompressor.c")
+else (SNAPPY_LIBRARY)
+ set(SNAPPY_INCLUDE_DIR "")
+ set(SNAPPY_SOURCE_FILES "")
+endif (SNAPPY_LIBRARY)
+
+include_directories(
+ ${GENERATED_JAVAH}
+ main/native/src
+ ${CMAKE_CURRENT_SOURCE_DIR}
+ ${CMAKE_CURRENT_SOURCE_DIR}/src
+ ${CMAKE_BINARY_DIR}
+ ${JNI_INCLUDE_DIRS}
+ ${ZLIB_INCLUDE_DIRS}
+ ${SNAPPY_INCLUDE_DIR}
+)
+CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
+
+add_dual_library(hadoop
+ ${D}/io/compress/lz4/Lz4Compressor.c
+ ${D}/io/compress/lz4/Lz4Decompressor.c
+ ${D}/io/compress/lz4/lz4.c
+ ${SNAPPY_SOURCE_FILES}
+ ${D}/io/compress/zlib/ZlibCompressor.c
+ ${D}/io/compress/zlib/ZlibDecompressor.c
+ ${D}/io/nativeio/NativeIO.c
+ ${D}/io/nativeio/errno_enum.c
+ ${D}/io/nativeio/file_descriptor.c
+ ${D}/security/JniBasedUnixGroupsMapping.c
+ ${D}/security/JniBasedUnixGroupsNetgroupMapping.c
+ ${D}/security/getGroup.c
+ ${D}/util/NativeCrc32.c
+ ${D}/util/bulk_crc32.c
+)
+target_link_dual_libraries(hadoop
+ dl
+ ${JAVA_JVM_LIBRARY}
+)
+SET(LIBHADOOP_VERSION "1.0.0")
+SET_TARGET_PROPERTIES(hadoop PROPERTIES
+ SOVERSION ${LIBHADOOP_VERSION})
+dual_output_directory(hadoop target/usr/local/lib)
View
10 hadoop-common-project/hadoop-common/src/config.h.cmake
@@ -0,0 +1,10 @@
+#ifndef CONFIG_H
+#define CONFIG_H
+
+#cmakedefine HADOOP_ZLIB_LIBRARY "@HADOOP_ZLIB_LIBRARY@"
+#cmakedefine HADOOP_RUNAS_HOME "@HADOOP_RUNAS_HOME@"
+#cmakedefine HADOOP_SNAPPY_LIBRARY "@HADOOP_SNAPPY_LIBRARY@"
+#cmakedefine HAVE_SYNC_FILE_RANGE
+#cmakedefine HAVE_POSIX_FADVISE
+
+#endif
View
2 hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
@@ -61,7 +61,7 @@ export HADOOP_CLIENT_OPTS="-Xmx128m $HADOOP_CLIENT_OPTS"
export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}
# Where log files are stored. $HADOOP_HOME/logs by default.
-export HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$USER
+#export HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$USER
# Where log files are stored in the secure data environment.
export HADOOP_SECURE_DN_LOG_DIR=${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}
View
32 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -1071,6 +1071,38 @@ public void setPattern(String name, Pattern pattern) {
}
/**
+ * Gets the absolute path to the resource object (file, URL, etc.), for a given
+ * property name.
+ *
+ * @param name - The property name to get the source of.
+ * @return null - If the property or its source wasn't found or if the property
+ * was defined in code (i.e. in a Configuration instance, not from a physical
+ * resource). Otherwise, returns the absolute path of the resource that loaded
+ * the property name, as a String.
+ */
+ @InterfaceStability.Unstable
+ public synchronized String getPropertySource(String name) {
+ if (properties == null) {
+ // If properties is null, it means a resource was newly added
+ // but the props were cleared so as to load it upon future
+ // requests. So lets force a load by asking a properties list.
+ getProps();
+ }
+ // Return a null right away if our properties still
+ // haven't loaded or the resource mapping isn't defined
+ if (properties == null || updatingResource == null) {
+ return null;
+ } else {
+ String source = updatingResource.get(name);
+ if (source == null || source.equals(UNKNOWN_RESOURCE)) {
+ return null;
+ } else {
+ return source;
+ }
+ }
+ }
+
+ /**
* A class that represents a set of positive integer ranges. It parses
* strings of the form: "2-3,5,7-" where ranges are separated by comma and
* the lower/upper bounds are separated by dash. Either the lower or upper
View
4 ...oject/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -63,7 +63,9 @@
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY =
"net.topology.node.switch.mapping.impl";
-
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String NET_TOPOLOGY_IMPL_KEY =
+ "net.topology.impl";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY =
"net.topology.table.file.name";
View
5 ...en/delegation/DelegationTokenRenewer.java → ...che/hadoop/fs/DelegationTokenRenewer.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hdfs.security.token.delegation;
+package org.apache.hadoop.fs;
import java.io.IOException;
import java.lang.ref.WeakReference;
@@ -25,7 +25,6 @@
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
@@ -161,4 +160,4 @@ public void run() {
}
}
}
-}
+}
View
16 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
@@ -254,24 +254,24 @@ public void setSymlink(final Path p) {
// Writable
//////////////////////////////////////////////////
public void write(DataOutput out) throws IOException {
- Text.writeString(out, getPath().toString(), Text.ONE_MEGABYTE);
+ Text.writeString(out, getPath().toString(), Text.DEFAULT_MAX_LEN);
out.writeLong(getLen());
out.writeBoolean(isDirectory());
out.writeShort(getReplication());
out.writeLong(getBlockSize());
out.writeLong(getModificationTime());
out.writeLong(getAccessTime());
getPermission().write(out);
- Text.writeString(out, getOwner(), Text.ONE_MEGABYTE);
- Text.writeString(out, getGroup(), Text.ONE_MEGABYTE);
+ Text.writeString(out, getOwner(), Text.DEFAULT_MAX_LEN);
+ Text.writeString(out, getGroup(), Text.DEFAULT_MAX_LEN);
out.writeBoolean(isSymlink());
if (isSymlink()) {
- Text.writeString(out, getSymlink().toString(), Text.ONE_MEGABYTE);
+ Text.writeString(out, getSymlink().toString(), Text.DEFAULT_MAX_LEN);
}
}
public void readFields(DataInput in) throws IOException {
- String strPath = Text.readString(in, Text.ONE_MEGABYTE);
+ String strPath = Text.readString(in, Text.DEFAULT_MAX_LEN);
this.path = new Path(strPath);
this.length = in.readLong();
this.isdir = in.readBoolean();
@@ -280,10 +280,10 @@ public void readFields(DataInput in) throws IOException {
modification_time = in.readLong();
access_time = in.readLong();
permission.readFields(in);
- owner = Text.readString(in, Text.ONE_MEGABYTE);
- group = Text.readString(in, Text.ONE_MEGABYTE);
+ owner = Text.readString(in, Text.DEFAULT_MAX_LEN);
+ group = Text.readString(in, Text.DEFAULT_MAX_LEN);
if (in.readBoolean()) {
- this.symlink = new Path(Text.readString(in, Text.ONE_MEGABYTE));
+ this.symlink = new Path(Text.readString(in, Text.DEFAULT_MAX_LEN));
} else {
this.symlink = null;
}
View
8 ...project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java
@@ -84,8 +84,8 @@ public PermissionStatus applyUMask(FsPermission umask) {
/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
- username = Text.readString(in, Text.ONE_MEGABYTE);
- groupname = Text.readString(in, Text.ONE_MEGABYTE);
+ username = Text.readString(in, Text.DEFAULT_MAX_LEN);
+ groupname = Text.readString(in, Text.DEFAULT_MAX_LEN);
permission = FsPermission.read(in);
}
@@ -110,8 +110,8 @@ public static void write(DataOutput out,
String username,
String groupname,
FsPermission permission) throws IOException {
- Text.writeString(out, username, Text.ONE_MEGABYTE);
- Text.writeString(out, groupname, Text.ONE_MEGABYTE);
+ Text.writeString(out, username, Text.DEFAULT_MAX_LEN);
+ Text.writeString(out, groupname, Text.DEFAULT_MAX_LEN);
permission.write(out);
}
View
20 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
@@ -52,7 +52,9 @@
import org.apache.hadoop.jmx.JMXJsonServlet;
import org.apache.hadoop.log.LogLevel;
import org.apache.hadoop.metrics.MetricsServlet;
+import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.ReflectionUtils;
import org.mortbay.io.Buffer;
@@ -606,6 +608,24 @@ public void addSslListener(InetSocketAddress addr, Configuration sslConf,