Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

Merge trunk into HDFS-1073.

Resolved several conflicts due to merge of HDFS-2149 and HDFS-2212.
Changes during resolution were:
- move the writing of the transaction ID out of EditLogOutputStream to
  FSEditLogOp.Writer to match trunk's organization
- remove JSPOOL related FSEditLogOp subclasses, add LogSegmentOp subclasses
- modify TestEditLogJournalFailures to not keep trying to use streams after
  the simulated halt, since newer stricter assertions caused these writes to
  fail


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-1073@1152128 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information...
commit 254ca2a9fbe9c39a5732f65379b8f54b8df6ab87 1 parent 2caeb69
@toddlipcon toddlipcon authored
Showing with 1,920 additions and 8,561 deletions.
  1. +21 −0 hdfs/CHANGES.txt
  2. +0 −1  hdfs/build.xml
  3. +0 −6 hdfs/src/contrib/build.xml
  4. +0 −47 hdfs/src/contrib/hdfsproxy/README
  5. +0 −170 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy
  6. +0 −67 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-config.sh
  7. +0 −141 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-daemon.sh
  8. +0 −34 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-daemons.sh
  9. +0 −68 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-slaves.sh
  10. +0 −92 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-server.sh
  11. +0 −34 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-servers.sh
  12. +0 −68 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-slaves.sh
  13. +0 −152 hdfs/src/contrib/hdfsproxy/bin/proxy-util
  14. +0 −36 hdfs/src/contrib/hdfsproxy/bin/start-hdfsproxy-tomcat.sh
  15. +0 −37 hdfs/src/contrib/hdfsproxy/bin/start-hdfsproxy.sh
  16. +0 −28 hdfs/src/contrib/hdfsproxy/bin/stop-hdfsproxy-tomcat.sh
  17. +0 −28 hdfs/src/contrib/hdfsproxy/bin/stop-hdfsproxy.sh
  18. +0 −492 hdfs/src/contrib/hdfsproxy/build.xml
  19. +0 −24 hdfs/src/contrib/hdfsproxy/conf/configuration.xsl
  20. +0 −128 hdfs/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml
  21. +0 −44 hdfs/src/contrib/hdfsproxy/conf/hdfsproxy-env.sh
  22. +0 −44 hdfs/src/contrib/hdfsproxy/conf/hdfsproxy-env.sh.template
  23. +0 −1  hdfs/src/contrib/hdfsproxy/conf/hdfsproxy-hosts
  24. +0 −61 hdfs/src/contrib/hdfsproxy/conf/log4j.properties
  25. +0 −48 hdfs/src/contrib/hdfsproxy/conf/ssl-server.xml
  26. +0 −109 hdfs/src/contrib/hdfsproxy/conf/tomcat-forward-web.xml
  27. +0 −166 hdfs/src/contrib/hdfsproxy/conf/tomcat-web.xml
  28. +0 −32 hdfs/src/contrib/hdfsproxy/conf/user-certs.xml
  29. +0 −26 hdfs/src/contrib/hdfsproxy/conf/user-permissions.xml
  30. +0 −127 hdfs/src/contrib/hdfsproxy/ivy.xml
  31. +0 −18 hdfs/src/contrib/hdfsproxy/ivy/libraries.properties
  32. +0 −166 hdfs/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/AuthorizationFilter.java
  33. +0 −157 hdfs/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/HdfsProxy.java
  34. +0 −83 hdfs/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/KerberosAuthorizationFilter.java
  35. +0 −241 hdfs/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java
  36. +0 −69 hdfs/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
  37. +0 −43 hdfs/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java
  38. +0 −368 hdfs/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java
  39. +0 −110 hdfs/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java
  40. +0 −76 hdfs/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyHttpServer.java
  41. +0 −39 hdfs/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
  42. +0 −67 hdfs/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
  43. +0 −358 hdfs/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
  44. +0 −128 hdfs/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/DummyLdapContext.java
  45. +0 −87 hdfs/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/FindFreePort.java
  46. +0 −51 hdfs/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/SimpleServlet.java
  47. +0 −161 hdfs/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestAuthorizationFilter.java
  48. +0 −272 hdfs/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java
  49. +0 −113 hdfs/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestLdapIpDirFilter.java
  50. +0 −120 hdfs/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyFilter.java
  51. +0 −69 hdfs/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyForwardServlet.java
  52. +0 −49 hdfs/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUtil.java
  53. +0 −77 hdfs/src/contrib/hdfsproxy/src/test/resources/cactus-web.xml
  54. +0 −104 hdfs/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-default.xml
  55. +0 −1  hdfs/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-hosts
  56. +0 −15 hdfs/src/contrib/hdfsproxy/src/test/resources/proxy-config/hdfsproxy-site.xml
  57. +0 −76 hdfs/src/contrib/hdfsproxy/src/test/resources/proxy-config/log4j.properties
  58. +0 −57 hdfs/src/contrib/hdfsproxy/src/test/resources/proxy-config/ssl-client.xml
  59. +0 −47 hdfs/src/contrib/hdfsproxy/src/test/resources/proxy-config/ssl-server.xml
  60. +0 −85 hdfs/src/contrib/hdfsproxy/src/test/resources/proxy-config/user-certs.xml
  61. +0 −78 hdfs/src/contrib/hdfsproxy/src/test/resources/proxy-config/user-permissions.xml
  62. BIN  hdfs/src/contrib/hdfsproxy/src/test/resources/ssl-keys/client.keystore
  63. BIN  hdfs/src/contrib/hdfsproxy/src/test/resources/ssl-keys/proxy.keystore
  64. +0 −21 hdfs/src/contrib/hdfsproxy/src/test/resources/ssl-keys/test.crt
  65. +0 −60 hdfs/src/contrib/hdfsproxy/src/test/resources/tomcat-config/server.xml
  66. +0 −19 hdfs/src/contrib/hdfsproxy/src/test/resources/tomcat-config/tomcat-users.xml
  67. +0 −964 hdfs/src/contrib/hdfsproxy/src/test/resources/tomcat-config/web.xml
  68. +0 −154 hdfs/src/contrib/hdfsproxy/src/test/resources/tomcat-web.xml
  69. +0 −601 hdfs/src/docs/src/documentation/content/xdocs/hdfsproxy.xml
  70. +0 −1  hdfs/src/docs/src/documentation/content/xdocs/site.xml
  71. +21 −30 hdfs/src/java/org/apache/hadoop/hdfs/DFSClient.java
  72. +98 −38 hdfs/src/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
  73. +4 −1 hdfs/src/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicy.java
  74. +9 −9 hdfs/src/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyDefault.java
  75. +10 −4 hdfs/src/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java
  76. +250 −14 hdfs/src/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
  77. +2 −2 hdfs/src/java/org/apache/hadoop/hdfs/server/blockmanagement/DecommissionManager.java
  78. +19 −16 hdfs/src/java/org/apache/hadoop/hdfs/server/blockmanagement/PendingReplicationBlocks.java
  79. +2 −1  hdfs/src/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
  80. +88 −81 hdfs/src/java/org/apache/hadoop/hdfs/server/common/Storage.java
  81. +8 −9 hdfs/src/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceStorage.java
  82. +19 −39 hdfs/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
  83. +9 −11 hdfs/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
  84. +1 −2  hdfs/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
  85. +3 −2 hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/BackupImage.java
  86. +29 −47 hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/EditLogBackupOutputStream.java
  87. +21 −56 hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileOutputStream.java
  88. +11 −32 hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/EditLogOutputStream.java
  89. +150 −0 hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/EditsDoubleBuffer.java
  90. +109 −110 hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
  91. +732 −61 hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
  92. +9 −9 hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java
  93. +43 −307 hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
  94. +8 −9 hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java
  95. +36 −35 hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
  96. +19 −22 hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
  97. +1 −1  hdfs/src/test/aop/org/apache/hadoop/hdfs/TestFiPipelines.java
  98. +4 −1 hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java
  99. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestDatanodeDeath.java
  100. +2 −2 hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestDecommission.java
  101. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestFileAppend2.java
  102. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestFileAppend3.java
  103. +1 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestFileAppend4.java
  104. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestFileConcurrentReader.java
  105. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCorruption.java
  106. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreation.java
  107. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreationClient.java
  108. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreationDelete.java
  109. +2 −2 hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreationEmpty.java
  110. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestFileStatus.java
  111. +1 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestLeaseRecovery2.java
  112. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestMultiThreadedHflush.java
  113. +1 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestPipelines.java
  114. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestReadWhileWriting.java
  115. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestRenameWhileOpen.java
  116. +13 −18 hdfs/src/test/hdfs/org/apache/hadoop/hdfs/UpgradeUtilities.java
  117. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/balancer/TestBalancerWithMultipleNameNodes.java
  118. +21 −0 hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/blockmanagement/BlockManagerTestUtil.java
  119. +18 −15 ...rc/test/hdfs/org/apache/hadoop/hdfs/server/{namenode → blockmanagement}/TestComputeInvalidateWork.java
  120. +4 −8 hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/blockmanagement/TestHeartbeatHandling.java
  121. +2 −2 hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestBlockReport.java
  122. +2 −11 hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNodeAdapter.java
  123. +2 −1  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestClusterId.java
  124. +3 −7 hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestDeadDatanode.java
  125. +1 −6 hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestEditLogJournalFailures.java
  126. +81 −0 hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestEditsDoubleBuffer.java
  127. +2 −5 hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestSaveNamespace.java
  128. +1 −2  hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java
  129. +1 −1  hdfs/src/test/unit/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java
View
21 hdfs/CHANGES.txt
@@ -9,6 +9,8 @@ Trunk (unreleased changes)
HDFS-1536. Improve HDFS WebUI. (hairong)
+ HDFS-2210. Remove hdfsproxy. (eli)
+
NEW FEATURES
HDFS-1359. Add BlockPoolID to Block. (suresh)
@@ -600,6 +602,22 @@ Trunk (unreleased changes)
cause. (Ravi Prakash via atm)
HDFS-2180. Refactor NameNode HTTP server into new class. (todd)
+
+ HDFS-2198. Remove hardcoded configuration keys. (suresh)
+
+ HDFS-2149. Move EditLogOp serialization formats into FsEditLogOp
+ implementations. (Ivan Kelly via todd)
+
+ HDFS-2191. Move datanodeMap from FSNamesystem to DatanodeManager.
+ (szetszwo)
+
+ HDFS-2200. Change FSNamesystem.LOG to package private. (szetszwo)
+
+ HDFS-2195. Refactor StorageDirectory to not be an non-static inner class.
+ (todd via eli)
+
+ HDFS-2212. Refactor double-buffering code out of EditLogOutputStreams.
+ (todd via eli)
OPTIMIZATIONS
@@ -1370,6 +1388,9 @@ Release 0.22.0 - Unreleased
HDFS-2071. Use of isConnected() in DataXceiver is invalid. (Kihwal Lee
via todd)
+ HDFS-1981. NameNode does not saveNamespace() when editsNew is empty.
+ (Uma Maheswara Rao G via shv)
+
Release 0.21.1 - Unreleased
HDFS-1466. TestFcHdfsSymlink relies on /tmp/test not existing. (eli)
View
1  hdfs/build.xml
@@ -1397,7 +1397,6 @@
<exclude name="src/c++/libhdfs/install-sh" />
<exclude name="src/c++/libhdfs/ltmain.sh" />
<exclude name="src/c++/libhdfs/missing" />
- <exclude name="src/contrib/hdfsproxy/src/test/resources/" />
<exclude name="src/test/checkstyle-noframes-sorted.xsl" />
<exclude name="src/test/checkstyle.xml" />
<exclude name="src/test/findbugsExcludeFile.xml" />
View
6 hdfs/src/contrib/build.xml
@@ -48,12 +48,6 @@
<subant target="test">
<fileset dir="." includes="fuse-dfs/build.xml"/>
</subant>
-
- <!-- hdfsproxy tests failing due to HDFS-1666
- <subant target="test">
- <fileset dir="." includes="hdfsproxy/build.xml"/>
- </subant>
- -->
</target>
View
47 hdfs/src/contrib/hdfsproxy/README
@@ -1,47 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-HDFS Proxy is a proxy server through which a hadoop client (through HSFTP) or a standard
-HTTPS client (wget, curl, etc) can talk to a hadoop server and more importantly pull data
-from the server. It puts an access control layer in front of the hadoop namenode server and extends
-its functionalities to allow hadoop cross-version data transfer.
-
-HDFSPROXY can be configured/started via either Jetty or Tomcat with different supporting features.
-
-A) With Jetty-based Installation, supporting features include:
-> Single Hadoop source cluster data transfer
-> Single Hadoop version data transfer
-> Authenticate users via user SSL certificates with ProxyFilter installed
-> Enforce access control based on configuration files.
-
-B) With Tomcat-based Installation, supporting features include:
-> Multiple Hadoop source cluster data transfer
-> Multiple Hadoop version data transfer
-> Authenticate users via user SSL certificates with ProxyFilter installed
-> Authentication and authorization via LDAP with LdapIpDirFilter installed
-> Access control based on configuration files if ProxyFilter is installed.
-> Access control based on LDAP entries if LdapIpDirFilter is installed.
-> Standard HTTPS Get Support for file transfer
-
-The detailed configuration/set-up guide is in the Forrest
-documentation, which can be found at $HADOOP_PREFIX/docs. In order to build the
-documentation on your own from source please use the following command in
-the downloaded source folder:
-
-ant docs -Dforrest.home=path to forrest -Djava5.home= path to jdk5.
-
-The documentation so built would be under $HADOOP_PREFIX/build/docs
View
170 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy
@@ -1,170 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# The HdfsProxy command script
-#
-# Environment Variables
-#
-# JAVA_HOME The java implementation to use. Overrides JAVA_HOME.
-#
-# HDFSPROXY_CLASSPATH Extra Java CLASSPATH entries.
-#
-# HDFSPROXY_HEAPSIZE The maximum amount of heap to use, in MB.
-# Default is 1000.
-#
-# HDFSPROXY_OPTS Extra Java runtime options.
-#
-# HDFSPROXY_NAMENODE_OPTS These options are added to HDFSPROXY_OPTS
-# HDFSPROXY_CLIENT_OPTS when the respective command is run.
-# HDFSPROXY_{COMMAND}_OPTS etc HDFSPROXY_JT_OPTS applies to JobTracker
-# for e.g. HDFSPROXY_CLIENT_OPTS applies to
-# more than one command (fs, dfs, fsck,
-# dfsadmin etc)
-#
-# HDFSPROXY_CONF_DIR Alternate conf dir. Default is ${HDFSPROXY_HOME}/conf.
-#
-# HDFSPROXY_ROOT_LOGGER The root appender. Default is INFO,console
-#
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-cygwin=false
-case "`uname`" in
-CYGWIN*) cygwin=true;;
-esac
-
-if [ -f "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh" ]; then
- . "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh"
-fi
-
-# some Java parameters
-if [ "$JAVA_HOME" != "" ]; then
- #echo "run java in $JAVA_HOME"
- JAVA_HOME=$JAVA_HOME
-fi
-
-if [ "$JAVA_HOME" = "" ]; then
- echo "Error: JAVA_HOME is not set."
- exit 1
-fi
-
-JAVA=$JAVA_HOME/bin/java
-JAVA_HEAP_MAX=-Xmx1000m
-
-# check envvars which might override default args
-if [ "$HDFSPROXY_HEAPSIZE" != "" ]; then
- #echo "run with heapsize $HDFSPROXY_HEAPSIZE"
- JAVA_HEAP_MAX="-Xmx""$HDFSPROXY_HEAPSIZE""m"
- #echo $JAVA_HEAP_MAX
-fi
-
-# CLASSPATH initially contains $HDFSPROXY_CONF_DIR
-CLASSPATH="${HDFSPROXY_CONF_DIR}"
-CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
-
-# for developers, add HdfsProxy classes to CLASSPATH
-if [ -d "$HDFSPROXY_HOME/build/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/classes
-fi
-if [ -d "$HDFSPROXY_HOME/build/web/webapps" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/web
-fi
-if [ -d "$HDFSPROXY_HOME/build/test/hdfs/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/test/hdfs/classes
-fi
-
-# so that filenames w/ spaces are handled correctly in loops below
-IFS=
-
-# for releases, add hdfsproxy jar & webapps to CLASSPATH
-if [ -d "$HDFSPROXY_HOME/webapps" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME
-fi
-for f in $HDFSPROXY_HOME/hdfsproxy-*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
-done
-
-# add libs to CLASSPATH
-if [ -d "$HDFSPROXY_HOME/lib" ]; then
- for f in $HDFSPROXY_HOME/lib/*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
- done
-fi
-
-if [ -d "$HDFSPROXY_HOME/../../" ]; then
- for f in $HDFSPROXY_HOME/../../*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
- done
-fi
-if [ -d "$HDFSPROXY_HOME/../../lib" ]; then
- for f in $HDFSPROXY_HOME/../../lib/*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
- done
-fi
-if [ -d "$HDFSPROXY_HOME/../../lib/jsp-2.1" ]; then
- for f in $HDFSPROXY_HOME/../../lib/jsp-2.1/*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
- done
-fi
-
-
-# add user-specified CLASSPATH last
-if [ "$HDFSPROXY_CLASSPATH" != "" ]; then
- CLASSPATH=${CLASSPATH}:${HDFSPROXY_CLASSPATH}
-fi
-
-# default log directory & file
-if [ "$HDFSPROXY_LOG_DIR" = "" ]; then
- HDFSPROXY_LOG_DIR="$HDFSPROXY_HOME/logs"
-fi
-if [ "$HDFSPROXY_LOGFILE" = "" ]; then
- HDFSPROXY_LOGFILE='hdfsproxy.log'
-fi
-
-# restore ordinary behaviour
-unset IFS
-
-# figure out which class to run
-CLASS='org.apache.hadoop.hdfsproxy.HdfsProxy'
-
-# cygwin path translation
-if $cygwin; then
- CLASSPATH=`cygpath -p -w "$CLASSPATH"`
- HDFSPROXY_HOME=`cygpath -d "$HDFSPROXY_HOME"`
- HDFSPROXY_LOG_DIR=`cygpath -d "$HDFSPROXY_LOG_DIR"`
-fi
-
-# cygwin path translation
-if $cygwin; then
- JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
-fi
-
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.log.dir=$HDFSPROXY_LOG_DIR"
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.log.file=$HDFSPROXY_LOGFILE"
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.home.dir=$HDFSPROXY_HOME"
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.id.str=$HDFSPROXY_IDENT_STRING"
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.root.logger=${HDFSPROXY_ROOT_LOGGER:-INFO,console}"
-if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
- HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
-fi
-
-# run it
-exec "$JAVA" $JAVA_HEAP_MAX $HDFSPROXY_OPTS -classpath "$CLASSPATH" $CLASS "$@"
View
67 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-config.sh
@@ -1,67 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# included in all the hadoop scripts with source command
-# should not be executable directly
-# also should not be passed any arguments, since we need original $*
-
-# resolve links - $0 may be a softlink
-
-this="$0"
-while [ -h "$this" ]; do
- ls=`ls -ld "$this"`
- link=`expr "$ls" : '.*-> \(.*\)$'`
- if expr "$link" : '.*/.*' > /dev/null; then
- this="$link"
- else
- this=`dirname "$this"`/"$link"
- fi
-done
-
-# convert relative path to absolute path
-bin=`dirname "$this"`
-script=`basename "$this"`
-bin=`cd "$bin"; pwd`
-this="$bin/$script"
-
-# the root of the HdfsProxy installation
-export HDFSPROXY_HOME=`dirname "$this"`/..
-
-#check to see if the conf dir is given as an optional argument
-if [ $# -gt 1 ]
-then
- if [ "--config" = "$1" ]
- then
- shift
- confdir=$1
- shift
- HDFSPROXY_CONF_DIR=$confdir
- fi
-fi
-
-# Allow alternate conf dir location.
-HDFSPROXY_CONF_DIR="${HDFSPROXY_CONF_DIR:-$HDFSPROXY_HOME/conf}"
-
-#check to see it is specified whether to use the slaves file
-if [ $# -gt 1 ]
-then
- if [ "--hosts" = "$1" ]
- then
- shift
- slavesfile=$1
- shift
- export HDFSPROXY_SLAVES="${HDFSPROXY_CONF_DIR}/$slavesfile"
- fi
-fi
View
141 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-daemon.sh
@@ -1,141 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Runs a HdfsProxy as a daemon.
-#
-# Environment Variables
-#
-# HDFSPROXY_CONF_DIR Alternate conf dir. Default is ${HDFSPROXY_HOME}/conf.
-# HDFSPROXY_LOG_DIR Where log files are stored. PWD by default.
-# HDFSPROXY_MASTER host:path where hdfsproxy code should be rsync'd from
-# HDFSPROXY_PID_DIR The pid files are stored. /tmp by default.
-# HDFSPROXY_IDENT_STRING A string representing this instance of hdfsproxy. $USER by default
-# HDFSPROXY_NICENESS The scheduling priority for daemons. Defaults to 0.
-##
-
-usage="Usage: hdfsproxy-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) "
-
-# if no args specified, show usage
-if [ $# -le 1 ]; then
- echo $usage
- exit 1
-fi
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-# get arguments
-startStop=$1
-shift
-
-hdfsproxy_rotate_log ()
-{
- log=$1;
- num=5;
- if [ -n "$2" ]; then
- num=$2
- fi
- if [ -f "$log" ]; then # rotate logs
- while [ $num -gt 1 ]; do
- prev=`expr $num - 1`
- [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
- num=$prev
- done
- mv "$log" "$log.$num";
- fi
-}
-
-if [ -f "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh" ]; then
- . "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh"
-fi
-
-# get log directory
-if [ "$HDFSPROXY_LOG_DIR" = "" ]; then
- export HDFSPROXY_LOG_DIR="$HDFSPROXY_HOME/logs"
-fi
-mkdir -p "$HDFSPROXY_LOG_DIR"
-
-if [ "$HDFSPROXY_PID_DIR" = "" ]; then
- HDFSPROXY_PID_DIR=/tmp
-fi
-
-if [ "$HDFSPROXY_IDENT_STRING" = "" ]; then
- export HDFSPROXY_IDENT_STRING="$USER"
-fi
-
-# some variables
-export HDFSPROXY_LOGFILE=hdfsproxy-$HDFSPROXY_IDENT_STRING-$HOSTNAME.log
-export HDFSPROXY_ROOT_LOGGER="INFO,DRFA"
-log=$HDFSPROXY_LOG_DIR/hdfsproxy-$HDFSPROXY_IDENT_STRING-$HOSTNAME.out
-pid=$HDFSPROXY_PID_DIR/hdfsproxy-$HDFSPROXY_IDENT_STRING.pid
-
-# Set default scheduling priority
-if [ "$HDFSPROXY_NICENESS" = "" ]; then
- export HDFSPROXY_NICENESS=0
-fi
-
-case $startStop in
-
- (start)
-
- mkdir -p "$HDFSPROXY_PID_DIR"
-
- if [ -f $pid ]; then
- if kill -0 `cat $pid` > /dev/null 2>&1; then
- echo hdfsproxy running as process `cat $pid`. Stop it first.
- exit 1
- fi
- fi
-
- if [ "$HDFSPROXY_MASTER" != "" ]; then
- echo rsync from $HDFSPROXY_MASTER
- rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $HDFSPROXY_MASTER/ "$HDFSPROXY_HOME"
- fi
-
- hdfsproxy_rotate_log $log
- echo starting hdfsproxy, logging to $log
- cd "$HDFSPROXY_HOME"
- nohup nice -n $HDFSPROXY_NICENESS "$HDFSPROXY_HOME"/bin/hdfsproxy --config $HDFSPROXY_CONF_DIR "$@" > "$log" 2>&1 < /dev/null &
- echo $! > $pid
- sleep 1; head "$log"
- ;;
-
- (stop)
-
- if [ -f $pid ]; then
- if kill -0 `cat $pid` > /dev/null 2>&1; then
- echo stopping hdfsproxy
- kill `cat $pid`
- else
- echo no hdfsproxy to stop
- fi
- else
- echo no hdfsproxy to stop
- fi
- ;;
-
- (*)
- echo $usage
- exit 1
- ;;
-
-esac
-
-
View
34 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-daemons.sh
@@ -1,34 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Run a HdfsProxy command on all slave hosts.
-
-usage="Usage: hdfsproxy-daemons.sh [--config confdir] [--hosts hostlistfile] [start|stop] "
-
-# if no args specified, show usage
-if [ $# -le 1 ]; then
- echo $usage
- exit 1
-fi
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. $bin/hdfsproxy-config.sh
-
-exec "$bin/hdfsproxy-slaves.sh" --config $HDFSPROXY_CONF_DIR cd "$HDFSPROXY_HOME" \; "$bin/hdfsproxy-daemon.sh" --config $HDFSPROXY_CONF_DIR "$@"
View
68 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-slaves.sh
@@ -1,68 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Run a shell command on all slave hosts.
-#
-# Environment Variables
-#
-# HDFSPROXY_SLAVES File naming remote hosts.
-# Default is ${HDFSPROXY_CONF_DIR}/hdfsproxy-hosts.
-# HDFSPROXY_CONF_DIR Alternate conf dir. Default is ${HDFSPROXY_HOME}/conf.
-# HDFSPROXY_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
-# HDFSPROXY_SSH_OPTS Options passed to ssh when running remote commands.
-##
-
-usage="Usage: hdfsproxy-slaves.sh [--config confdir] command..."
-
-# if no args specified, show usage
-if [ $# -le 0 ]; then
- echo $usage
- exit 1
-fi
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-# If the slaves file is specified in the command line,
-# then it takes precedence over the definition in
-# hdfsproxy-env.sh. Save it here.
-HOSTLIST=$HDFSPROXY_SLAVES
-
-if [ -f "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh" ]; then
- . "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh"
-fi
-
-if [ "$HOSTLIST" = "" ]; then
- if [ "$HDFSPROXY_SLAVES" = "" ]; then
- export HOSTLIST="${HDFSPROXY_CONF_DIR}/hdfsproxy-hosts"
- else
- export HOSTLIST="${HDFSPROXY_SLAVES}"
- fi
-fi
-
-for slave in `cat "$HOSTLIST"`; do
- ssh $HDFSPROXY_SSH_OPTS $slave $"${@// /\\ }" \
- 2>&1 | sed "s/^/$slave: /" &
- if [ "$HDFSPROXY_SLAVE_SLEEP" != "" ]; then
- sleep $HDFSPROXY_SLAVE_SLEEP
- fi
-done
-
-wait
View
92 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-server.sh
@@ -1,92 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Runs a HdfsProxy as a daemon.
-#
-# Environment Variables
-#
-# HDFSPROXY_CONF_DIR Alternate conf dir. Default is ${HDFSPROXY_HOME}/conf.
-# HDFSPROXY_MASTER host:path where hdfsproxy code should be rsync'd from
-# HDFSPROXY_PID_DIR The pid files are stored. /tmp by default.
-# HDFSPROXY_IDENT_STRING A string representing this instance of hdfsproxy. $USER by default
-# HDFSPROXY_NICENESS The scheduling priority for daemons. Defaults to 0.
-# TOMCAT_HOME_DIR tomcat home directory.
-##
-
-usage="Usage: hdfsproxy-tomcat-server.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) "
-
-# if no args specified, show usage
-if [ $# -le 1 ]; then
- echo $usage
- exit 1
-fi
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-# get arguments
-startStop=$1
-shift
-
-
-if [ -f "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh" ]; then
- . "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh"
-fi
-
-
-if [ "$HDFSPROXY_IDENT_STRING" = "" ]; then
- export HDFSPROXY_IDENT_STRING="$USER"
-fi
-
-
-# Set default scheduling priority
-if [ "$HDFSPROXY_NICENESS" = "" ]; then
- export HDFSPROXY_NICENESS=0
-fi
-
-case $startStop in
-
- (start)
- if [ "$HDFSPROXY_MASTER" != "" ]; then
- echo rsync from $HDFSPROXY_MASTER
- rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $HDFSPROXY_MASTER/ "$HDFSPROXY_HOME"
- fi
-
- echo starting hdfsproxy tomcat server
- cd "$HDFSPROXY_HOME"
- nohup nice -n $HDFSPROXY_NICENESS "$TOMCAT_HOME_DIR"/bin/startup.sh >& /dev/null &
- sleep 1
- ;;
-
- (stop)
-
- echo stopping hdfsproxy tomcat server
- cd "$HDFSPROXY_HOME"
- nohup nice -n $HDFSPROXY_NICENESS "$TOMCAT_HOME_DIR"/bin/shutdown.sh >& /dev/null &
- ;;
-
- (*)
- echo $usage
- exit 1
- ;;
-
-esac
-
-
View
34 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-servers.sh
@@ -1,34 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Run a HdfsProxy command on all slave hosts.
-
-usage="Usage: hdfsproxy-tomcat-servers.sh [--config confdir] [--hosts hostlistfile] [start|stop] "
-
-# if no args specified, show usage
-if [ $# -le 1 ]; then
- echo $usage
- exit 1
-fi
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. $bin/hdfsproxy-config.sh
-
-exec "$bin/hdfsproxy-tomcat-slaves.sh" --config $HDFSPROXY_CONF_DIR cd "$HDFSPROXY_HOME" \; "$bin/hdfsproxy-tomcat-server.sh" --config $HDFSPROXY_CONF_DIR "$@"
View
68 hdfs/src/contrib/hdfsproxy/bin/hdfsproxy-tomcat-slaves.sh
@@ -1,68 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Run a shell command on all slave hosts.
-#
-# Environment Variables
-#
-# HDFSPROXY_SLAVES File naming remote hosts.
-# Default is ${HDFSPROXY_CONF_DIR}/hdfsproxy-hosts.
-# HDFSPROXY_CONF_DIR Alternate conf dir. Default is ${HDFSPROXY_HOME}/conf.
-# HDFSPROXY_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
-# HDFSPROXY_SSH_OPTS Options passed to ssh when running remote commands.
-##
-
-usage="Usage: hdfsproxy-tomcat-slaves.sh [--config confdir] command..."
-
-# if no args specified, show usage
-if [ $# -le 0 ]; then
- echo $usage
- exit 1
-fi
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-# If the slaves file is specified in the command line,
-# then it takes precedence over the definition in
-# hdfsproxy-env.sh. Save it here.
-HOSTLIST=$HDFSPROXY_SLAVES
-
-if [ -f "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh" ]; then
- . "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh"
-fi
-
-if [ "$HOSTLIST" = "" ]; then
- if [ "$HDFSPROXY_SLAVES" = "" ]; then
- export HOSTLIST="${HDFSPROXY_CONF_DIR}/hdfsproxy-hosts"
- else
- export HOSTLIST="${HDFSPROXY_SLAVES}"
- fi
-fi
-
-for slave in `cat "$HOSTLIST"`; do
- ssh $HDFSPROXY_SSH_OPTS $slave $"${@// /\\ }" \
- 2>&1 | sed "s/^/$slave: /" &
- if [ "$HDFSPROXY_SLAVE_SLEEP" != "" ]; then
- sleep $HDFSPROXY_SLAVE_SLEEP
- fi
-done
-
-wait
View
152 hdfs/src/contrib/hdfsproxy/bin/proxy-util
@@ -1,152 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# The Proxy command utility script
-#
-# Environment Variables
-#
-# JAVA_HOME The java implementation to use. Overrides JAVA_HOME.
-#
-# HDFSPROXY_CLASSPATH Extra Java CLASSPATH entries.
-#
-# HDFSPROXY_HEAPSIZE The maximum amount of heap to use, in MB.
-# Default is 1000.
-#
-# HDFSPROXY_OPTS Extra Java runtime options.
-#
-# HDFSPROXY_NAMENODE_OPTS These options are added to HDFSPROXY_OPTS
-# HDFSPROXY_CLIENT_OPTS when the respective command is run.
-# HDFSPROXY_{COMMAND}_OPTS etc HDFSPROXY_JT_OPTS applies to JobTracker
-# for e.g. HDFSPROXY_CLIENT_OPTS applies to
-# more than one command (fs, dfs, fsck,
-# dfsadmin etc)
-#
-# HDFSPROXY_CONF_DIR Alternate conf dir. Default is ${HDFSPROXY_HOME}/conf.
-#
-# HDFSPROXY_ROOT_LOGGER The root appender. Default is INFO,console
-#
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-cygwin=false
-case "`uname`" in
-CYGWIN*) cygwin=true;;
-esac
-
-if [ -f "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh" ]; then
- . "${HDFSPROXY_CONF_DIR}/hdfsproxy-env.sh"
-fi
-
-# some Java parameters
-if [ "$JAVA_HOME" != "" ]; then
- #echo "run java in $JAVA_HOME"
- JAVA_HOME=$JAVA_HOME
-fi
-
-if [ "$JAVA_HOME" = "" ]; then
- echo "Error: JAVA_HOME is not set."
- exit 1
-fi
-
-JAVA=$JAVA_HOME/bin/java
-JAVA_HEAP_MAX=-Xmx1000m
-
-# check envvars which might override default args
-if [ "$HDFSPROXY_HEAPSIZE" != "" ]; then
- #echo "run with heapsize $HDFSPROXY_HEAPSIZE"
- JAVA_HEAP_MAX="-Xmx""$HDFSPROXY_HEAPSIZE""m"
- #echo $JAVA_HEAP_MAX
-fi
-
-# CLASSPATH initially contains $HDFSPROXY_CONF_DIR
-CLASSPATH="${HADOOP_CONF_DIR}"
-CLASSPATH="${CLASSPATH}:${HDFSPROXY_CONF_DIR}"
-CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
-
-# for developers, add HdfsProxy classes to CLASSPATH
-if [ -d "$HDFSPROXY_HOME/build/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/classes
-fi
-if [ -d "$HDFSPROXY_HOME/build/web/webapps" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/web
-fi
-if [ -d "$HDFSPROXY_HOME/build/test/hdfs/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME/build/test/hdfs/classes
-fi
-
-# so that filenames w/ spaces are handled correctly in loops below
-IFS=
-
-# for releases, add hdfsproxy jar & webapps to CLASSPATH
-if [ -d "$HDFSPROXY_HOME/webapps" ]; then
- CLASSPATH=${CLASSPATH}:$HDFSPROXY_HOME
-fi
-for f in $HDFSPROXY_HOME/hdfsproxy-*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
-done
-
-# add libs to CLASSPATH
-for f in $HDFSPROXY_HOME/lib/*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
-done
-
-# add user-specified CLASSPATH last
-if [ "$HDFSPROXY_CLASSPATH" != "" ]; then
- CLASSPATH=${CLASSPATH}:${HDFSPROXY_CLASSPATH}
-fi
-
-# default log directory & file
-if [ "$HDFSPROXY_LOG_DIR" = "" ]; then
- HDFSPROXY_LOG_DIR="$HDFSPROXY_HOME/logs"
-fi
-if [ "$HDFSPROXY_LOGFILE" = "" ]; then
- HDFSPROXY_LOGFILE='proxy-util.log'
-fi
-
-# restore ordinary behaviour
-unset IFS
-
-# figure out which class to run
-CLASS='org.apache.hadoop.hdfsproxy.ProxyUtil'
-
-# cygwin path translation
-if $cygwin; then
- CLASSPATH=`cygpath -p -w "$CLASSPATH"`
- HDFSPROXY_HOME=`cygpath -d "$HDFSPROXY_HOME"`
- HDFSPROXY_LOG_DIR=`cygpath -d "$HDFSPROXY_LOG_DIR"`
-fi
-
-# cygwin path translation
-if $cygwin; then
- JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
-fi
-
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.log.dir=$HDFSPROXY_LOG_DIR"
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.log.file=$HDFSPROXY_LOGFILE"
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.home.dir=$HDFSPROXY_HOME"
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.id.str=$HDFSPROXY_IDENT_STRING"
-HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Dhdfsproxy.root.logger=${HDFSPROXY_ROOT_LOGGER:-INFO,console}"
-if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
- HDFSPROXY_OPTS="$HDFSPROXY_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
-fi
-
-# run it
-exec "$JAVA" $JAVA_HEAP_MAX $HDFSPROXY_OPTS -classpath "$CLASSPATH" $CLASS "$@"
View
36 hdfs/src/contrib/hdfsproxy/bin/start-hdfsproxy-tomcat.sh
@@ -1,36 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Start hdfsproxy tomcat servers.
-# Run this on master node.
-
-usage="Usage: start-hdfsproxy-tomcat.sh"
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-# get arguments
-if [ $# -ge 1 ]; then
- echo $usage
- exit 1
-fi
-
-# start hdfsproxy tomcat servers
-"$bin"/hdfsproxy-tomcat-servers.sh --config $HDFSPROXY_CONF_DIR --hosts hdfsproxy-hosts start
View
37 hdfs/src/contrib/hdfsproxy/bin/start-hdfsproxy.sh
@@ -1,37 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Start hdfsproxy daemons.
-# Run this on master node.
-
-usage="Usage: start-hdfsproxy.sh"
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-# get arguments
-if [ $# -ge 1 ]; then
- echo $usage
- exit 1
-fi
-
-# start hdfsproxy daemons
-# "$bin"/hdfsproxy-daemon.sh --config $HDFSPROXY_CONF_DIR start
-"$bin"/hdfsproxy-daemons.sh --config $HDFSPROXY_CONF_DIR --hosts hdfsproxy-hosts start
View
28 hdfs/src/contrib/hdfsproxy/bin/stop-hdfsproxy-tomcat.sh
@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Stop hdfsproxy tomcat servers. Run this on master node.
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-# "$bin"/hdfsproxy-daemon.sh --config $HDFSPROXY_CONF_DIR stop
-"$bin"/hdfsproxy-tomcat-servers.sh --config $HDFSPROXY_CONF_DIR --hosts hdfsproxy-hosts stop
-
View
28 hdfs/src/contrib/hdfsproxy/bin/stop-hdfsproxy.sh
@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# Stop hdfsproxy daemons. Run this on master node.
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
-
-. "$bin"/hdfsproxy-config.sh
-
-# "$bin"/hdfsproxy-daemon.sh --config $HDFSPROXY_CONF_DIR stop
-"$bin"/hdfsproxy-daemons.sh --config $HDFSPROXY_CONF_DIR --hosts hdfsproxy-hosts stop
-
View
492 hdfs/src/contrib/hdfsproxy/build.xml
@@ -1,492 +0,0 @@
-<?xml version="1.0" ?>
-
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<project name="hdfsproxy" default="jar" xmlns:ivy="antlib:org.apache.ivy.ant">
- <property name="hdfsproxyVersion" value="2.0"/>
- <property name="final.name" value="${ant.project.name}-${hdfsproxyVersion}"/>
- <property name="javac.debug" value="on"/>
- <property name="javac.optimize" value="on"/>
- <import file="../build-contrib.xml"/>
-
- <property name="bin.dir" value="${basedir}/bin"/>
- <property name="lib.dir" value="${basedir}/lib"/>
- <property name="hadoop.jars.dir" value="${basedir}/hadoopjars"/>
-
- <property name="docs.dir" value="${basedir}/docs"/>
- <property name="test.build.dir" value="${build.dir}/test"/>
- <property name="test.build.classes" value="${test.build.dir}/classes"/>
- <property name="src.test.resources" value="${basedir}/src/test/resources"/>
- <property name="ssl.keystore.proxy" value="${src.test.resources}/ssl-keys/proxy.keystore"/>
- <property name="ssl.keystore.client" value="${src.test.resources}/ssl-keys/client.keystore"/>
- <property name="ssl.client.cert" value="${src.test.resources}/ssl-keys/test.crt"/>
- <property name="proxy.conf.test" value="${src.test.resources}/proxy-config"/>
- <property name="tomcat.conf.test" value="${src.test.resources}/tomcat-config"/>
- <property name="target.dir" value="${build.dir}/target"/>
- <property name="logs.dir" value="${target.dir}/logs"/>
- <property name="reports.dir" value="${target.dir}/reports"/>
- <property name="tomcatconfig.dir" value="${target.dir}/tomcat-config"/>
- <property name="tomcat.container.id" value="tomcat5x"/>
- <property name="cargo.logging" value="high"/>
- <property name="cactus.formatter.type" value="xml"/>
- <property name="cactus.warfile.name" value="test"/>
-
- <available file="${hadoop.root}/build/classes" type="dir" property="test.available"/>
- <property environment="env"/>
- <!-- check if environment has been set -->
- <condition property="proxy.conf.dir" value="${env.HDFSPROXY_CONF_DIR}" else="${basedir}/conf">
- <and>
- <isset property="env.HDFSPROXY_CONF_DIR"/>
- <available file="${env.HDFSPROXY_CONF_DIR}/hdfsproxy-default.xml"/>
- </and>
- </condition>
-
- <condition property="startCactus">
- <and>
- <or>
- <equals arg1="${testcase}" arg2="TestProxyFilter" />
- <equals arg1="${testcase}" arg2="TestAuthorizationFilter" />
- <equals arg1="${testcase}" arg2="TestLdapIpDirFilter" />
- <equals arg1="${testcase}" arg2="TestProxyUtil" />
- <equals arg1="${testcase}" arg2="TestProxyForwardServlet" />
- <not>
- <isset property="testcase"/>
- </not>
- </or>
- <isset property="test.available"/>
- </and>
- </condition>
-
- <condition property="useClover">
- <and>
- <isset property="clover.home"/>
- <available file="${clover.home}/lib/clover.jar"/>
- </and>
- </condition>
-
- <property name="ivy.settings.file" location="${hadoop.root}/ivy/ivysettings.xml"/>
-
- <target name="ivy-init" depends="ivy-init-antlib">
- <ivy:settings id="${ant.project.name}.ivy.settings"/>
- </target>
-
- <!-- Define the Cactus tasks -->
- <target name="load-tasks" depends="ivy-retrieve-common">
- <taskdef resource="cactus.tasks"
- classpathref="cactus.classpath">
- </taskdef>
- </target>
-
-
- <target name="jar" depends="compile" description="Create jar">
- <echo>
- Building the .jar files.
- </echo>
- <jar jarfile="${build.dir}/${final.name}.jar" basedir="${build.classes}" includes="org/apache/hadoop/hdfsproxy/**/*.class" >
- <manifest>
- <section name="org/apache/hadoop/hdfsproxy">
- <attribute name="Implementation-Title" value="HdfsProxy"/>
- <attribute name="Implementation-Version" value="${hdfsproxyVersion}"/>
- <attribute name="Implementation-Vendor" value="Apache"/>
- </section>
- </manifest>
- </jar>
- </target>
-
-
- <!-- ================================================================== -->
- <!-- Make war file -->
- <!-- ================================================================== -->
-
- <target name="war" depends="compile" description="Create war">
- <echo>
- Building the .war file
- </echo>
- <war destfile="${build.dir}/${final.name}.war" webxml="${basedir}/conf/tomcat-web.xml">
- <lib dir="${common.ivy.lib.dir}">
- <include name="commons-logging-${commons-logging.version}.jar"/>
- <include name="junit-${junit.version}.jar"/>
- <include name="log4j-${log4j.version}.jar"/>
- <include name="slf4j-api-${slf4j-api.version}.jar"/>
- <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
- <include name="xmlenc-${xmlenc.version}.jar"/>
- <include name="core-${core.vesion}.jar"/>
- <include name="hadoop-common-${hadoop-common.version}.jar"/>
- </lib>
- <classes dir="${proxy.conf.dir}">
- <include name="hdfsproxy-default.xml"/>
- <include name="user-certs.xml"/>
- <include name="user-permissions.xml"/>
- </classes>
- <classes dir="${build.classes}"/>
- <classes dir="${hadoop.root}/build/classes"/>
- </war>
- </target>
-
- <target name="forward" depends="compile" description="Create forward war">
- <echo>
- Building the forward war file
- </echo>
- <war destfile="${build.dir}/${final.name}-forward.war" webxml="${basedir}/conf/tomcat-forward-web.xml">
- <lib dir="${common.ivy.lib.dir}">
- <include name="commons-logging-${commons-logging.version}.jar"/>
- <include name="junit-${junit.version}.jar"/>
- <include name="log4j-${log4j.version}.jar"/>
- <include name="slf4j-api-${slf4j-api.version}.jar"/>
- <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
- <include name="xmlenc-${xmlenc.version}.jar"/>
- <include name="core-${core.vesion}.jar"/>
- </lib>
- <lib dir="${hadoop.root}/lib">
- <include name="hadoop-common-${hadoop-common.version}.jar"/>
- </lib>
- <classes dir="${proxy.conf.dir}">
- <include name="hdfsproxy-default.xml"/>
- <include name="hdfsproxy-site.xml"/>
- <include name="user-certs.xml"/>
- <include name="user-permissions.xml"/>
- </classes>
- <classes dir="${build.classes}"/>
- <classes dir="${hadoop.root}/build/classes"/>
- </war>
- </target>
-
- <target name="testwar" depends="compile" description="Create testing war">
- <echo>
- Building the testing .war file
- </echo>
- <war destfile="${build.dir}/${final.name}-test.war" webxml="${src.test.resources}/tomcat-web.xml">
- <lib dir="${common.ivy.lib.dir}">
- <include name="commons-logging-${commons-logging.version}.jar"/>
- <include name="junit-${junit.version}.jar"/>
- <include name="log4j-${log4j.version}.jar"/>
- <include name="slf4j-api-${slf4j-api.version}.jar"/>
- <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
- <include name="xmlenc-${xmlenc.version}.jar"/>
- <include name="core-${core.vesion}.jar"/>
- <include name="hadoop-common-${hadoop-common.version}.jar"/>
- </lib>
- <classes dir="${proxy.conf.test}" excludes="**/*.template **/*.sh"/>
- <classes dir="${build.classes}"/>
- <classes dir="${hadoop.root}/build/classes"/>
- </war>
- </target>
-
- <target name="cactifywar" depends="testwar,load-tasks,cactifywar-pure,cactifywar-clover" description="To include clover coverage test use -Dclover.home ..."/>
-
- <target name="cactifywar-pure" depends="testwar,load-tasks" unless="useClover">
- <mkdir dir="${target.dir}" />
- <echo> no clover found ...</echo>
- <cactifywar srcfile="${build.dir}/${final.name}-test.war"
- destfile="${target.dir}/${cactus.warfile.name}.war"
- mergewebxml="${src.test.resources}/cactus-web.xml">
- <servletredirector/>
- <servletredirector name="ServletRedirectorSecure"
- mapping="/ServletRedirectorSecure" roles="test"/>
- <filterredirector mapping="/test/filterRedirector.jsp"/>
- <classes dir="${test.build.dir}"/>
- </cactifywar>
- </target>
-
- <target name="cactifywar-clover" depends="testwar,load-tasks" if="useClover">
- <mkdir dir="${target.dir}" />
- <echo> Including clover.jar in the war file ...</echo>
- <cactifywar srcfile="${build.dir}/${final.name}-test.war"
- destfile="${target.dir}/${cactus.warfile.name}.war"
- mergewebxml="${src.test.resources}/cactus-web.xml">
- <servletredirector/>
- <servletredirector name="ServletRedirectorSecure"
- mapping="/ServletRedirectorSecure" roles="test"/>
- <filterredirector mapping="/test/filterRedirector.jsp"/>
- <classes dir="${test.build.dir}"/>
- <lib dir="${clover.home}/lib">
- <include name="clover.jar"/>
- </lib>
- </cactifywar>
- </target>
-
- <target name="test" depends="compile,compile-test,test-junit,test-cactus" description="Automated Test Framework" if="test.available"/>
-
- <target name="test-junit" depends="compile,compile-test" if="test.available">
- <junit fork="yes" printsummary="yes" errorProperty="tests.failed" failureProperty="tests.failed">
- <classpath refid="test.classpath"/>
- <sysproperty key="test.build.data" value="${build.test}/data"/>
- <sysproperty key="build.test" value="${build.test}"/>
- <sysproperty key="user.dir" value="${build.test}/data"/>
- <sysproperty key="fs.default.name" value="${fs.default.name}"/>
- <sysproperty key="hadoop.test.localoutputfile" value="${hadoop.test.localoutputfile}"/>
- <sysproperty key="hadoop.log.dir" value="${hadoop.log.dir}"/>
- <sysproperty key="test.src.dir" value="${test.src.dir}"/>
- <sysproperty key="javax.net.ssl.trustStore" value="${ssl.keystore.proxy}"/>
- <sysproperty key="javax.net.ssl.trustStorePassword" value="changeme"/>
- <sysproperty key="javax.net.ssl.keyStore.proxy" value="${ssl.keystore.proxy}"/>
- <sysproperty key="javax.net.ssl.keyStore" value="${ssl.keystore.client}"/>
- <sysproperty key="javax.net.ssl.keyStorePassword" value="changeme"/>
- <sysproperty key="javax.net.ssl.keyPassword" value="changeme"/>
- <sysproperty key="javax.net.ssl.clientCert" value="${ssl.client.cert}"/>
- <formatter type="plain" />
- <batchtest todir="${test.build.dir}" unless="testcase">
- <fileset dir="${src.test}">
- <include name="**/TestHdfsProxy.java"/>
- <include name="**/TestProxyUgiManager.java"/>
- </fileset>
- </batchtest>
- <batchtest todir="${test.build.dir}" if="testcase">
- <fileset dir="${src.test}">
- <include name="**/${testcase}.java"/>
- <exclude name="**/TestProxyFilter.java"/>
- <exclude name="**/TestAuthorizationFilter.java"/>
- <exclude name="**/TestLdapIpDirFilter.java"/>
- <exclude name="**/TestProxyUtil.java"/>
- <exclude name="**/TestProxyForwardServlet.java"/>
- </fileset>
- </batchtest>
- </junit>
- <fail if="tests.failed">Tests failed!</fail>
- </target>
-
-
- <target name="test-cactus" depends="compile,compile-test,cactifywar" if="startCactus">
- <exec executable="${env.JAVA_HOME}/bin/java" outputproperty="cargo.servlet.admin.port">
- <arg line="-cp ${build.test} org.apache.hadoop.hdfsproxy.FindFreePort -random"/>
- </exec>
- <exec executable="${env.JAVA_HOME}/bin/java" outputproperty="cargo.servlet.http.port">
- <arg line="-cp ${build.test} org.apache.hadoop.hdfsproxy.FindFreePort ${cargo.servlet.admin.port}"/>
- </exec>
- <exec executable="${env.JAVA_HOME}/bin/java" outputproperty="cargo.servlet.https.port">
- <arg line="-cp ${build.test} org.apache.hadoop.hdfsproxy.FindFreePort ${cargo.servlet.http.port}"/>
- </exec>
-
- <echo> Free Ports: startup-${cargo.servlet.admin.port} / http-${cargo.servlet.http.port} / https-${cargo.servlet.https.port}</echo>
- <echo>Please take a deep breath while Cargo gets the Tomcat for running the servlet tests...</echo>
-
- <mkdir dir="${tomcatconfig.dir}" />
- <mkdir dir="${tomcatconfig.dir}/conf" />
- <mkdir dir="${tomcatconfig.dir}/webapps" />
- <mkdir dir="${tomcatconfig.dir}/temp" />
- <mkdir dir="${logs.dir}" />
- <mkdir dir="${reports.dir}" />
- <copy file="${tomcat.conf.test}/server.xml" tofile="${tomcatconfig.dir}/conf/server.xml" overwrite="true">
- <filterset>
- <filter token="ADMIN.PORT" value="${cargo.servlet.admin.port}"/>
- <filter token="HTTP.PORT" value="${cargo.servlet.http.port}"/>
- <filter token="HTTPS.PORT" value="${cargo.servlet.https.port}"/>
- </filterset>
- </copy>
- <copy file="${tomcat.conf.test}/web.xml" tofile="${tomcatconfig.dir}/conf/web.xml"/>
- <copy file="${tomcat.conf.test}/tomcat-users.xml" tofile="${tomcatconfig.dir}/conf/tomcat-users.xml"/>
-
- <cactus warfile="${target.dir}/${cactus.warfile.name}.war" fork="yes" haltonfailure="no" printsummary="yes" failureproperty="tests.failed">
- <classpath>
- <path refid="cactus.classpath"/>
- <pathelement location="${build.classes}"/>
- <pathelement location="${src.test.resources}"/>
- <pathelement location="${src.test.resources}/proxy-config"/>
- </classpath>
- <containerset>
- <cargo containerId="${tomcat.container.id}" timeout="30000" output="${logs.dir}/output.log" log="${logs.dir}/cargo.log">
- <zipUrlInstaller
- installUrl="http://archive.apache.org/dist/tomcat/tomcat-6/v6.0.24/bin/apache-tomcat-6.0.24.zip"
- installDir="${target.dir}/${tomcat.container.id}"/>
- <configuration type="existing" home="${tomcatconfig.dir}">
- <property name="cargo.servlet.port" value="${cargo.servlet.http.port}"/>
- <property name="cargo.logging" value="${cargo.logging}"/>
- <property name="cactus.toDir" value="${build.test}"/>
- <deployable type="war" file="${target.dir}/${cactus.warfile.name}.war"/>
- </configuration>
- </cargo>
- </containerset>
- <sysproperty key="test.build.data" value="${build.test}/data"/>
- <sysproperty key="build.test" value="${build.test}"/>
- <sysproperty key="build.target" value="${target.dir}"/>
- <sysproperty key="javax.net.ssl.trustStore" value="${ssl.keystore.proxy}"/>
- <sysproperty key="javax.net.ssl.trustStorePassword" value="changeme"/>
- <sysproperty key="javax.net.ssl.keyStore.proxy" value="${ssl.keystore.proxy}"/>
- <sysproperty key="javax.net.ssl.keyStore" value="${ssl.keystore.client}"/>
- <sysproperty key="javax.net.ssl.keyStorePassword" value="changeme"/>
- <sysproperty key="javax.net.ssl.keyPassword" value="changeme"/>
- <sysproperty key="javax.net.ssl.clientCert" value="${ssl.client.cert}"/>
- <sysproperty key="test.proxy.conf.dir" value="${proxy.conf.test}"/>
- <sysproperty key="test.proxy.https.port" value="${cargo.servlet.https.port}"/>
-
- <formatter type="${cactus.formatter.type}"/>
- <batchtest todir="${reports.dir}" unless="testcase">
- <fileset dir="${src.test}">
- <include name="**/Test*.java"/>
- <exclude name="**/TestHdfsProxy.java"/>
- <exclude name="**/TestProxyUgiManager.java"/>
- </fileset>
- </batchtest>
- <batchtest todir="${reports.dir}" if="testcase">
- <fileset dir="${src.test}">
- <include name="**/${testcase}.java"/>
- <exclude name="**/TestHdfsProxy.java"/>
- <exclude name="**/TestProxyUgiManager.java"/>
- </fileset>
- </batchtest>
- </cactus>
- <fail if="tests.failed">Tests failed!</fail>
- </target>
- <!-- ====================================================== -->
- <!-- Macro definitions -->
- <!-- ====================================================== -->
- <macrodef name="macro_tar" description="Worker Macro for tar">
- <attribute name="param.destfile"/>
- <element name="param.listofitems"/>
- <sequential>
- <tar compression="gzip" longfile="gnu"
- destfile="@{param.destfile}">
- <param.listofitems/>
- </tar>
- </sequential>
- </macrodef>
-
- <!-- ================================================================== -->
- <!-- D I S T R I B U T I O N -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
- <target name="local-package" depends="jar,war" description="Package in local build directory">
- <mkdir dir="${build.dir}/${final.name}"/>
- <mkdir dir="${build.dir}/${final.name}/logs"/>
- <copy todir="${build.dir}/${final.name}" includeEmptyDirs="false">
- <fileset dir="${build.dir}">
- <include name="*.jar" />
- <include name="*.war" />
- </fileset>
- </copy>
- <copy todir="${build.dir}/${final.name}/lib" includeEmptyDirs="false">
- <fileset dir="${common.ivy.lib.dir}">
- <include name="commons-logging-${commons-logging.version}.jar"/>
- <include name="commons-logging-api-${commons-logging-api.version}.jar"/>
- <include name="junit-${junit.version}.jar"/>
- <include name="log4j-${log4j.version}.jar"/>
- <include name="slf4j-api-${slf4j-api.version}.jar"/>
- <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
- <include name="xmlenc-${xmlenc.version}.jar"/>
- <include name="jetty-util-${jetty-util.version}.jar"/>
- <include name="jetty-${jetty.version}.jar"/>
- <include name="servlet-api-2.5-${servlet-api-2.5.version}.jar"/>
- <include name="core-${core.vesion}.jar"/>
- <!-- </fileset>
- <fileset dir="${hadoop.root}/lib/jsp-${jsp.version}"> -->
- <include name="jsp-${jsp.version}-${jetty.version}.jar"/>
- <include name="jsp-api-${jsp.version}-${jetty.version}.jar"/>
- </fileset>
- </copy>
-
- <copy todir="${build.dir}/${final.name}/lib" includeEmptyDirs="false">
- <fileset dir="${hadoop.root}/build">
- <include name="*-core.jar"/>
- <include name="*-tools.jar"/>
- </fileset>
- </copy>
-
- <copy todir="${build.dir}/${final.name}/bin">
- <fileset dir="${bin.dir}"/>
- </copy>
-
-
- <copy todir="${build.dir}/${final.name}/conf">
- <fileset dir="${proxy.conf.dir}"/>
- </copy>
-
-
- <copy todir="${build.dir}/${final.name}">
- <fileset dir="${basedir}">
- <include name="README" />
- <include name="build.xml" />
- <include name="*.txt" />
- </fileset>
- </copy>
-
- <copy todir="${build.dir}/${final.name}/src" includeEmptyDirs="true">
- <fileset dir="${src.dir}" excludes="**/*.template **/docs/build/**/*"/>
- </copy>
-
- <chmod perm="ugo+x" type="file" parallel="false">
- <fileset dir="${build.dir}/${final.name}/bin"/>
- </chmod>
-
- </target>
- <target name="package" depends="local-package" description="Build distribution">
- <mkdir dir="${dist.dir}/contrib/${name}"/>
- <copy todir="${dist.dir}/contrib/${name}">
- <fileset dir="${build.dir}/${final.name}">
- <exclude name="**/lib/**" />
- <exclude name="**/src/**" />
- <exclude name="*.war" />
- </fileset>
- </copy>
- <chmod dir="${dist.dir}/contrib/${name}/bin" perm="a+x" includes="*"/>
- </target>
-
- <!-- ================================================================== -->
- <!-- Make release tarball -->
- <!-- ================================================================== -->
- <target name="tar" depends="local-package,war" description="Make release tarball">
- <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
- <param.listofitems>
- <tarfileset dir="${build.dir}" mode="664">
- <exclude name="${final.name}/bin/*" />
- <include name="${final.name}/**" />
- </tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${final.name}/bin/*" />
- </tarfileset>
- </param.listofitems>
- </macro_tar>
- </target>
-
- <target name="binary" depends="local-package,war" description="Make tarball without source and documentation">
- <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
- <param.listofitems>
- <tarfileset dir="${build.dir}" mode="664">
- <exclude name="${final.name}/bin/*" />
- <exclude name="${final.name}/src/**" />
- <exclude name="${final.name}/docs/**" />
- <include name="${final.name}/**" />
- </tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${final.name}/bin/*" />
- </tarfileset>
- </param.listofitems>
- </macro_tar>
- </target>
-
-
- <!-- the unit test classpath -->
- <path id="test.classpath">
- <pathelement location="${proxy.conf.test}" />
- <pathelement location="${test.build.dir}" />
- <pathelement location="${hadoop.root}/build/test/hdfs/classes"/>
- <!--<pathelement location="${hadoop.root}/src/contrib/test"/>-->
- <pathelement location="${hadoop.root}/conf"/>
- <pathelement location="${hadoop.root}/build"/>
- <pathelement location="${hadoop.root}/build/classes"/>
- <pathelement location="${hadoop.root}/build/tools"/>
- <pathelement location="${build.examples}"/>
- <pathelement path="${clover.jar}"/>
- <path refid="contrib-classpath"/>
- </path>
-
- <path id="cactus.classpath">
- <path refid="test.classpath"/>
- </path>
-
-</project>
View
24 hdfs/src/contrib/hdfsproxy/conf/configuration.xsl
@@ -1,24 +0,0 @@
-<?xml version="1.0"?>
-<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
-<xsl:output method="html"/>
-<xsl:template match="configuration">
-<html>
-<body>
-<table border="1">
-<tr>
- <td>name</td>
- <td>value</td>
- <td>description</td>
-</tr>
-<xsl:for-each select="property">
-<tr>
- <td><a name="{name}"><xsl:value-of select="name"/></a></td>
- <td><xsl:value-of select="value"/></td>
- <td><xsl:value-of select="description"/></td>
-</tr>
-</xsl:for-each>
-</table>
-</body>
-</html>
-</xsl:template>
-</xsl:stylesheet>
View
128 hdfs/src/contrib/hdfsproxy/conf/hdfsproxy-default.xml
@@ -1,128 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!-- Put hdfsproxy specific properties in this file. -->
-
-<configuration>
-
-<property>
- <name>hdfsproxy.https.address</name>
- <value>0.0.0.0:8443</value>
- <description>the SSL port that hdfsproxy listens on
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.hosts</name>
- <value>hdfsproxy-hosts</value>
- <description>location of hdfsproxy-hosts file
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.dfs.namenode.address</name>
- <value>localhost:54321</value>
- <description>namenode address of the HDFS cluster being proxied
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.https.server.keystore.resource</name>
- <value>ssl-server.xml</value>
- <description>location of the resource from which ssl server keystore
- information will be extracted
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.user.permissions.file.location</name>
- <value>user-permissions.xml</value>
- <description>location of the user permissions file
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.user.certs.file.location</name>
- <value>user-certs.xml</value>
- <description>location of the user certs file
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.ugi.cache.ugi.lifetime</name>
- <value>15</value>
- <description> The lifetime (in minutes) of a cached ugi
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.ldap.initial.context.factory</name>
- <value>com.sun.jndi.ldap.LdapCtxFactory</value>
- <description> ldap initial context factory
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.ldap.provider.url</name>
- <value>ldap://localhost:389</value>
- <description> ldap server address
- </description>
-</property>
-
-<property>
- <name>hdfsproxy.ldap.role.base</name>
- <value>ou=proxyroles,dc=mycompany,dc=com</value>
- <description> ldap role base
- </description>
-</property>
-
-<property>
- <name>fs.default.name</name>
- <!-- cluster variant -->
- <value>hdfs://localhost:54321</value>
- <description>The name of the default file system. Either the
- literal string "local" or a host:port for NDFS.</description>
- <final>true</final>
- </property>
-
-<property>
- <name>dfs.blocksize</name>
- <value>134217728</value>
- <description>The default block size for new files.</description>
-</property>
-
-<property>
- <name>io.file.buffer.size</name>
- <value>131072</value>
- <description>The size of buffer for use in sequence files.
- The size of this buffer should probably be a multiple of hardware
- page size (4096 on Intel x86), and it determines how much data is
- buffered during read and write operations.</description>
-</property>
-
- <property>
- <name>hdfsproxy.kerberos.principal</name>
- <value>user@REALM</value>
- <description> kerberos principal to be used by hdfsproxy </description>
- </property>
-
- <property>
- <name>hdfsproxy.kerberos.keytab</name>
- <value>proxy.prod.headless.keytab</value>
- <description> kerberos keytab to be used by hdfsproxy </description>
- </property>
-
- <property>
- <name>hdfsproxy.kerberos.default.realm</name>
- <value>/instance@REALM</value>
- <description> kerberos default realm appended to non-qualified userIds </description>
- </property>
-
- <property>
- <name>dfs.namenode.kerberos.principal</name>
- <value>hdfs@REALM</value>
- <description> Namenode user name key. </description>
- </property>
-
-</configuration>
-
View
44 hdfs/src/contrib/hdfsproxy/conf/hdfsproxy-env.sh
@@ -1,44 +0,0 @@
-# Set HdfsProxy-specific environment variables here.
-
-# The only required environment variable is JAVA_HOME. All others are
-# optional. When running a distributed configuration it is best to
-# set JAVA_HOME in this file, so that it is correctly defined on
-# remote nodes.
-
-# The java implementation to use. Required.
-# export JAVA_HOME=/usr/lib/j2sdk1.5-sun
-
-# Extra Java CLASSPATH elements. Optional.
-# export HDFSPROXY_CLASSPATH=
-
-# The maximum amount of heap to use, in MB. Default is 1000.
-# export HDFSPROXY_HEAPSIZE=2000
-
-# Extra Java runtime options. Empty by default.
-# export HDFSPROXY_OPTS=
-
-# Extra ssh options. Empty by default.
-# export HDFSPROXY_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HDFSPROXY_CONF_DIR"
-
-# Where log files are stored. $HDFSPROXY_HOME/logs by default.
-# export HDFSPROXY_LOG_DIR=${HDFSPROXY_HOME}/logs
-
-# File naming remote slave hosts. $HDFSPROXY_HOME/conf/slaves by default.
-# export HDFSPROXY_SLAVES=${HDFSPROXY_HOME}/conf/slaves
-
-# host:path where hdfsproxy code should be rsync'd from. Unset by default.
-# export HDFSPROXY_MASTER=master:/home/$USER/src/hdfsproxy
-
-# Seconds to sleep between slave commands. Unset by default. This
-# can be useful in large clusters, where, e.g., slave rsyncs can
-# otherwise arrive faster than the master can service them.
-# export HDFSPROXY_SLAVE_SLEEP=0.1
-
-# The directory where pid files are stored. /tmp by default.
-# export HDFSPROXY_PID_DIR=/var/hdfsproxy/pids
-
-# A string representing this instance of hdfsproxy. $USER by default.
-# export HDFSPROXY_IDENT_STRING=$USER
-
-# The scheduling priority for daemon processes. See 'man nice'.
-# export HDFSPROXY_NICENESS=10
View
44 hdfs/src/contrib/hdfsproxy/conf/hdfsproxy-env.sh.template
@@ -1,44 +0,0 @@
-# Set HdfsProxy-specific environment variables here.
-
-# The only required environment variable is JAVA_HOME. All others are
-# optional. When running a distributed configuration it is best to
-# set JAVA_HOME in this file, so that it is correctly defined on
-# remote nodes.
-
-# The java implementation to use. Required.
-# export JAVA_HOME=/usr/lib/j2sdk1.5-sun
-
-# Extra Java CLASSPATH elements. Optional.
-# export HDFSPROXY_CLASSPATH=
-
-# The maximum amount of heap to use, in MB. Default is 1000.
-# export HDFSPROXY_HEAPSIZE=2000
-
-# Extra Java runtime options. Empty by default.
-# export HDFSPROXY_OPTS=
-
-# Extra ssh options. Empty by default.
-# export HDFSPROXY_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HDFSPROXY_CONF_DIR"
-
-# Where log files are stored. $HDFSPROXY_HOME/logs by default.
-# export HDFSPROXY_LOG_DIR=${HDFSPROXY_HOME}/logs
-
-# File naming remote slave hosts. $HDFSPROXY_HOME/conf/slaves by default.
-# export HDFSPROXY_SLAVES=${HDFSPROXY_HOME}/conf/slaves
-
-# host:path where hdfsproxy code should be rsync'd from. Unset by default.
-# export HDFSPROXY_MASTER=master:/home/$USER/src/hdfsproxy
-
-# Seconds to sleep between slave commands. Unset by default. This
-# can be useful in large clusters, where, e.g., slave rsyncs can
-# otherwise arrive faster than the master can service them.
-# export HDFSPROXY_SLAVE_SLEEP=0.1
-
-# The directory where pid files are stored. /tmp by default.
-# export HDFSPROXY_PID_DIR=/var/hdfsproxy/pids
-
-# A string representing this instance of hdfsproxy. $USER by default.
-# export HDFSPROXY_IDENT_STRING=$USER
-
-# The scheduling priority for daemon processes. See 'man nice'.
-# export HDFSPROXY_NICENESS=10
View
1  hdfs/src/contrib/hdfsproxy/conf/hdfsproxy-hosts
@@ -1 +0,0 @@
-localhost
View
61 hdfs/src/contrib/hdfsproxy/conf/log4j.properties
@@ -1,61 +0,0 @@
-# Define some default values that can be overridden by system properties
-hdfsproxy.root.logger=INFO,console
-hdfsproxy.log.dir=.
-hdfsproxy.log.file=hdfsproxy.log
-
-# Define the root logger to the system property "hdfsproxy.root.logger".
-log4j.rootLogger=${hdfsproxy.root.logger}
-
-# Logging Threshold
-log4j.threshhold=ALL
-
-#
-# Daily Rolling File Appender
-#
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hdfsproxy.log.dir}/${hdfsproxy.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#
-# Rolling File Appender
-#
-
-#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-#log4j.appender.RFA.File=${hdfsproxy.log.dir}/${hdfsproxy.log.file}
-
-# Logfile size and and 30-day backups
-#log4j.appender.RFA.MaxFileSize=1MB
-#log4j.appender.RFA.MaxBackupIndex=30
-
-#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-# Custom Logging levels
-
-#log4j.logger.org.apache.hadoop.hdfsproxy.HttpsProxy=DEBUG
-#log4j.logger.org.apache.hadoop.hdfsproxy.ProxyFilter=DEBUG
-
View
48 hdfs/src/contrib/hdfsproxy/conf/ssl-server.xml
@@ -1,48 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<configuration>
-
-<property>
- <name>ssl.server.truststore.location</name>
- <value>${javax.net.ssl.keyStore.proxy}</value>
-</property>
-
-<property>
- <name>ssl.server.truststore.password</name>
- <value>changeme</value>
-</property>
-
-<property>
- <name>ssl.server.keystore.location</name>
- <value>${javax.net.ssl.keyStore.proxy}</value>
-</property>
-
-<property>
- <name>ssl.server.keystore.password</name>
- <value>changeme</value>
-</property>
-
-<property>
- <name>ssl.server.keystore.keypassword</name>
- <value>changeme</value>
-</property>
-
-</configuration>
View
109 hdfs/src/contrib/hdfsproxy/conf/tomcat-forward-web.xml
@@ -1,109 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!DOCTYPE web-app
- PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
- "http://java.sun.com/dtd/web-app_2_3.dtd">
-
-<web-app>
-
-
- <!-- General description of your web application -->
-
- <display-name>HDFS Proxy</display-name>
- <description>
- get data from grid forward war
- </description>
-
- <context-param>
- <param-name>webmaster</param-name>
- <param-value>zhiyong1@yahoo-inc.com</param-value>
- <description>
- The EMAIL address of the administrator to whom questions
- and comments about this application should be addressed.
- </description>
- </context-param>
-
- <filter>
- <filter-name>ldapIpDirFilter</filter-name>
- <filter-class>org.apache.hadoop.hdfsproxy.LdapIpDirFilter</filter-class>
- </filter>
-
- <filter-mapping>
- <filter-name>ldapIpDirFilter</filter-name>
- <url-pattern>/*</url-pattern>
- </filter-mapping>
-
-
-
-
- <servlet>
- <servlet-name>proxyForward</servlet-name>
- <description>forward data access to specifc servlets</description>
- <servlet-class>org.apache.hadoop.hdfsproxy.ProxyForwardServlet</servlet-class>
- </servlet>
-
- <servlet-mapping>
- <servlet-name>proxyForward</servlet-name>
- <url-pattern>/listPaths/*</url-pattern>
- </servlet-mapping>
- <servlet-mapping>
- <servlet-name>proxyForward</servlet-name>
- <url-pattern>/data/*</url-pattern>
- </servlet-mapping>
- <servlet-mapping>
- <servlet-name>proxyForward</servlet-name>
- <url-pattern>/streamFile/*</url-pattern>
- </servlet-mapping>
-
- <servlet>
- <servlet-name>fileForward</servlet-name>
- <description>forward file data access to streamFile</description>
- <servlet-class>org.apache