
Merge trunk into branch

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-3077@1396918 13f79535-47bb-0310-9956-ffa450edef68
2 parents 89d4a39 + eba00d8, commit d22ca1d605afbbd30a38bf87384503266fd526d6, committed by toddlipcon on Oct 11, 2012
Showing 308 changed files with 16,797 additions and 7,271 deletions.
@@ -335,7 +335,7 @@ checkTests () {
echo "The patch appears to be a documentation patch that doesn't require tests."
JIRA_COMMENT="$JIRA_COMMENT
- +0 tests included. The patch appears to be a documentation patch that doesn't require tests."
+ {color:green}+0 tests included{color}. The patch appears to be a documentation patch that doesn't require tests."
return 0
fi
fi
@@ -681,12 +681,46 @@ runTests () {
failed_tests=""
modules=$(findModules)
- for module in $modules;
- do
+ #
+ # If we are building hadoop-hdfs-project, we must build the native component
+ # of hadoop-common-project first. In order to accomplish this, we move the
+ # hadoop-hdfs subprojects to the end of the list so that common will come
+ # first.
+ #
+ # Of course, we may not be building hadoop-common at all-- in this case, we
+ # explicitly insert a mvn compile -Pnative of common, to ensure that the
+ # native libraries show up where we need them.
+ #
+ building_common=0
+ for module in $modules; do
+ if [[ $module == hadoop-hdfs-project* ]]; then
+ hdfs_modules="$hdfs_modules $module"
+ elif [[ $module == hadoop-common-project* ]]; then
+ ordered_modules="$ordered_modules $module"
+ building_common=1
+ else
+ ordered_modules="$ordered_modules $module"
+ fi
+ done
+ if [ -n "$hdfs_modules" ]; then
+ ordered_modules="$ordered_modules $hdfs_modules"
+ if [[ $building_common -eq 0 ]]; then
+ echo " Building hadoop-common with -Pnative in order to provide \
+libhadoop.so to the hadoop-hdfs unit tests."
+ echo " $MVN compile -Pnative -D${PROJECT_NAME}PatchProcess"
+ if ! $MVN compile -Pnative -D${PROJECT_NAME}PatchProcess; then
+ JIRA_COMMENT="$JIRA_COMMENT
+ {color:red}-1 core tests{color}. Failed to build the native portion \
+of hadoop-common prior to running the unit tests in $ordered_modules"
+ return 1
+ fi
+ fi
+ fi
+ for module in $ordered_modules; do
cd $module
echo " Running tests in $module"
echo " $MVN clean install -fn -Pnative -D${PROJECT_NAME}PatchProcess"
- $MVN clean install -fn -Pnative -D${PROJECT_NAME}PatchProcess
+ $MVN clean install -fn -Pnative -Drequire.test.libhadoop -D${PROJECT_NAME}PatchProcess
module_failed_tests=`find . -name 'TEST*.xml' | xargs $GREP -l -E "<failure|<error" | sed -e "s|.*target/surefire-reports/TEST-| |g" | sed -e "s|\.xml||g"`
# With -fn mvn always exits with a 0 exit code. Because of this we need to
# find the errors instead of using the exit code. We assume that if the build
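The reordering above can be exercised on its own. Below is a minimal bash sketch with a made-up module list; in the real script the list comes from findModules:

    #!/usr/bin/env bash
    # Sketch of the reordering: hadoop-hdfs-project modules are deferred to
    # the end of the list so hadoop-common, which provides libhadoop.so,
    # builds first. The sample list is for illustration only.
    modules="hadoop-hdfs-project/hadoop-hdfs hadoop-common-project/hadoop-common hadoop-tools"
    ordered_modules=""
    hdfs_modules=""
    for module in $modules; do
      if [[ $module == hadoop-hdfs-project* ]]; then
        hdfs_modules="$hdfs_modules $module"
      else
        ordered_modules="$ordered_modules $module"
      fi
    done
    ordered_modules="$ordered_modules $hdfs_modules"
    echo "build order:$ordered_modules"
    # build order: hadoop-common-project/hadoop-common hadoop-tools hadoop-hdfs-project/hadoop-hdfs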
@@ -914,6 +948,7 @@ if [[ $RESULT != 0 ]] ; then
fi
buildWithPatch
checkAuthor
+(( RESULT = RESULT + $? ))
if [[ $JENKINS == "true" ]] ; then
cleanUpXml
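The added "(( RESULT = RESULT + $? ))" folds checkAuthor's exit status into the running total, which is what lets an @author -1 fail the overall result (see the HADOOP-8839 entry below). A minimal sketch of the accumulation pattern, with hypothetical check functions:

    #!/usr/bin/env bash
    # Each check returns 0 on success; nonzero statuses accumulate in RESULT,
    # so a single failing check is enough to mark the whole run as failed.
    # checkOne and checkTwo are hypothetical stand-ins for the real checks.
    RESULT=0
    checkOne() { return 0; }    # passes
    checkTwo() { return 1; }    # fails, e.g. an @author tag was found
    checkOne
    (( RESULT = RESULT + $? ))
    checkTwo
    (( RESULT = RESULT + $? ))
    echo "overall result: $RESULT"    # 1, so the run reports failure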
@@ -19,6 +19,8 @@
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import javax.security.auth.Subject;
import javax.security.auth.login.AppConfigurationEntry;
@@ -44,6 +46,9 @@
* sequence.
*/
public class KerberosAuthenticator implements Authenticator {
+
+ private static Logger LOG = LoggerFactory.getLogger(
+ KerberosAuthenticator.class);
/**
* HTTP header used by the SPNEGO server endpoint during an authentication sequence.
@@ -152,9 +157,18 @@ public void authenticate(URL url, AuthenticatedURL.Token token)
}
conn.setRequestMethod(AUTH_HTTP_METHOD);
conn.connect();
- if (isNegotiate()) {
+
+ if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
+ LOG.debug("JDK performed authentication on our behalf.");
+ // If the JDK already did the SPNEGO back-and-forth for
+ // us, just pull out the token.
+ AuthenticatedURL.extractToken(conn, token);
+ return;
+ } else if (isNegotiate()) {
+ LOG.debug("Performing our own SPNEGO sequence.");
doSpnegoSequence(token);
} else {
+ LOG.debug("Using fallback authenticator sequence.");
getFallBackAuthenticator().authenticate(url, token);
}
}
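The new first branch covers servers where the JDK's HttpURLConnection has already completed SPNEGO transparently, so the response is a 200 rather than a 401 Negotiate challenge; only in the challenge case does the class run its own sequence, and otherwise it falls back. A rough way to observe the two server behaviors from a shell, assuming a WebHDFS-style endpoint (host and port are placeholders):

    # Unauthenticated probe: a SPNEGO-protected endpoint answers 401 with a
    # Negotiate challenge, which is what isNegotiate() looks for.
    curl -i "http://namenode.example.com:50070/webhdfs/v1/tmp?op=LISTSTATUS"
    # HTTP/1.1 401 ...
    # WWW-Authenticate: Negotiate

    # With a Kerberos ticket (kinit), curl can drive the same SPNEGO
    # back-and-forth that doSpnegoSequence() implements.
    curl --negotiate -u : -i "http://namenode.example.com:50070/webhdfs/v1/tmp?op=LISTSTATUS"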
@@ -168,7 +182,11 @@ public void authenticate(URL url, AuthenticatedURL.Token token)
* @return the fallback {@link Authenticator}.
*/
protected Authenticator getFallBackAuthenticator() {
- return new PseudoAuthenticator();
+ Authenticator auth = new PseudoAuthenticator();
+ if (connConfigurator != null) {
+ auth.setConnectionConfigurator(connConfigurator);
+ }
+ return auth;
}
/*
@@ -197,11 +215,16 @@ private void doSpnegoSequence(AuthenticatedURL.Token token) throws IOException,
AccessControlContext context = AccessController.getContext();
Subject subject = Subject.getSubject(context);
if (subject == null) {
+ LOG.debug("No subject in context, logging in");
subject = new Subject();
LoginContext login = new LoginContext("", subject,
null, new KerberosConfiguration());
login.login();
}
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Using subject: " + subject);
+ }
Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
@Override
@@ -7,6 +7,8 @@ Trunk (Unreleased)
HADOOP-8124. Remove the deprecated FSDataOutputStream constructor,
FSDataOutputStream.sync() and Syncable.sync(). (szetszwo)
+ HADOOP-8886. Remove KFS support. (eli)
+
NEW FEATURES
HADOOP-8469. Make NetworkTopology class pluggable. (Junping Du via
@@ -117,6 +119,9 @@ Trunk (Unreleased)
HADOOP-8840. Fix the test-patch colorizer to cover all sorts of +1 lines.
(Harsh J via bobby)
+ HADOOP-8864. Addendum to HADOOP-8840: Add a coloring case for +0 results
+ too. (harsh)
+
BUG FIXES
HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
@@ -244,6 +249,16 @@ Trunk (Unreleased)
required context item is not configured
(Brahma Reddy Battula via harsh)
+ HADOOP-3957. Change MutableQuantiles to use a shared thread for rolling
+ over metrics. (Andrew Wang via todd)
+
+ HADOOP-8386. hadoop script doesn't work if 'cd' prints to stdout
+ (default behavior in some bash setups (esp. Ubuntu))
+ (Christopher Berner and Andy Isaacson via harsh)
+
+ HADOOP-8839. test-patch's -1 on @author tag presence doesn't cause
+ a -1 to the overall result (harsh)
+
OPTIMIZATIONS
HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -272,8 +287,24 @@ Release 2.0.3-alpha - Unreleased
HADOOP-8736. Add Builder for building RPC server. (Brandon Li via Suresh)
+ HADOOP-8851. Use -XX:+HeapDumpOnOutOfMemoryError JVM option in the forked
+ tests. (Ivan A. Veselovsky via atm)
+
+ HADOOP-8783. Improve RPC.Server's digest auth (daryn)
+
+ HADOOP-8889. Upgrade to Surefire 2.12.3 (todd)
+
+ HADOOP-8804. Improve Web UIs when the wildcard address is used.
+ (Senthil Kumar via eli)
+
+ HADOOP-8894. GenericTestUtils.waitFor should dump thread stacks on timeout
+ (todd)
+
OPTIMIZATIONS
+ HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang
+ via atm)
+
BUG FIXES
HADOOP-8795. BASH tab completion doesn't look in PATH, assumes path to
@@ -288,6 +319,14 @@ Release 2.0.3-alpha - Unreleased
HADOOP-8791. Fix rm command documentation to indicate it deletes
files and not directories. (Jing Zhao via suresh)
+ HADOOP-8616. ViewFS configuration requires a trailing slash. (Sandy Ryza
+ via atm)
+
+ HADOOP-8756. Fix SEGV when libsnappy is in java.library.path but
+ not LD_LIBRARY_PATH. (Colin Patrick McCabe via eli)
+
+ HADOOP-8881. FileBasedKeyStoresFactory initialization logging should be debug not info. (tucu)
+
Release 2.0.2-alpha - 2012-09-07
INCOMPATIBLE CHANGES
@@ -298,6 +337,8 @@ Release 2.0.2-alpha - 2012-09-07
HADOOP-8689. Make trash a server side configuration option. (eli)
HADOOP-8710. Remove ability for users to easily run the trash emptier. (eli)
+
+ HADOOP-8794. Rename YARN_HOME to HADOOP_YARN_HOME. (vinodkv via acmurthy)
NEW FEATURES
@@ -543,8 +584,6 @@ Release 2.0.2-alpha - 2012-09-07
HADOOP-8031. Configuration class fails to find embedded .jar resources;
should use URL.openStream() (genman via tucu)
- HADOOP-8738. junit JAR is showing up in the distro (tucu)
-
HADOOP-8737. cmake: always use JAVA_HOME to find libjvm.so, jni.h, jni_md.h.
(Colin Patrick McCabe via eli)
@@ -574,6 +613,8 @@ Release 2.0.2-alpha - 2012-09-07
HADOOP-8781. hadoop-config.sh should add JAVA_LIBRARY_PATH to LD_LIBRARY_PATH. (tucu)
+ HADOOP-8855. SSL-based image transfer does not work when Kerberos is disabled. (todd via eli)
+
BREAKDOWN OF HDFS-3042 SUBTASKS
HADOOP-8220. ZKFailoverController doesn't handle failure to become active
@@ -976,6 +1017,18 @@ Release 2.0.0-alpha - 05-23-2012
HADOOP-8655. Fix TextInputFormat for large delimiters. (Gelesh via
bobby)
+Release 0.23.5 - UNRELEASED
+
+ INCOMPATIBLE CHANGES
+
+ NEW FEATURES
+
+ IMPROVEMENTS
+
+ OPTIMIZATIONS
+
+ BUG FIXES
+
Release 0.23.4 - UNRELEASED
INCOMPATIBLE CHANGES
@@ -990,7 +1043,10 @@ Release 0.23.4 - UNRELEASED
BUG FIXES
-Release 0.23.3 - UNRELEASED
+ HADOOP-8843. Old trash directories are never deleted on upgrade
+ from 1.x (jlowe)
+
+Release 0.23.3
INCOMPATIBLE CHANGES
@@ -175,18 +175,6 @@
<Bug pattern="ES_COMPARING_STRINGS_WITH_EQ" />
</Match>
- <Match>
- <Class name="org.apache.hadoop.fs.kfs.KFSOutputStream" />
- <Field name="path" />
- <Bug pattern="URF_UNREAD_FIELD" />
- </Match>
-
- <Match>
- <Class name="org.apache.hadoop.fs.kfs.KosmosFileSystem" />
- <Method name="initialize" />
- <Bug pattern="DM_EXIT" />
- </Match>
-
<Match>
<Class name="org.apache.hadoop.io.Closeable" />
<Bug pattern="NM_SAME_SIMPLE_NAME_AS_INTERFACE" />
@@ -194,11 +194,6 @@
<artifactId>avro</artifactId>
<scope>compile</scope>
</dependency>
- <dependency>
- <groupId>net.sf.kosmosfs</groupId>
- <artifactId>kfs</artifactId>
- <scope>compile</scope>
- </dependency>
<dependency>
<groupId>org.apache.ant</groupId>
<artifactId>ant</artifactId>
@@ -123,6 +123,7 @@ add_dual_library(hadoop
${D}/security/JniBasedUnixGroupsMapping.c
${D}/security/JniBasedUnixGroupsNetgroupMapping.c
${D}/security/getGroup.c
+ ${D}/util/NativeCodeLoader.c
${D}/util/NativeCrc32.c
${D}/util/bulk_crc32.c
)
@@ -2,7 +2,6 @@
#define CONFIG_H
#cmakedefine HADOOP_ZLIB_LIBRARY "@HADOOP_ZLIB_LIBRARY@"
-#cmakedefine HADOOP_RUNAS_HOME "@HADOOP_RUNAS_HOME@"
#cmakedefine HADOOP_SNAPPY_LIBRARY "@HADOOP_SNAPPY_LIBRARY@"
#cmakedefine HAVE_SYNC_FILE_RANGE
#cmakedefine HAVE_POSIX_FADVISE
@@ -19,7 +19,7 @@
bin=`which $0`
bin=`dirname ${bin}`
-bin=`cd "$bin"; pwd`
+bin=`cd "$bin" > /dev/null; pwd`
DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
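The redirect matters because in some bash setups cd writes the resolved directory to stdout (CDPATH is the usual trigger, per the HADOOP-8386 entry above), which corrupts the backtick capture. A small demonstration in bash:

    # When cd resolves a name through CDPATH it prints the full path on
    # stdout, so the capture below ends up with two lines instead of one.
    export CDPATH=/tmp
    mkdir -p /tmp/demo
    cd /
    bin=`cd demo; pwd`                # "/tmp/demo" twice
    echo "$bin"
    bin=`cd demo > /dev/null; pwd`    # the redirect keeps only pwd's output
    echo "$bin"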
@@ -269,21 +269,21 @@ fi
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/$HDFS_DIR'/*'
# put yarn in classpath if present
-if [ "$YARN_HOME" = "" ]; then
+if [ "$HADOOP_YARN_HOME" = "" ]; then
if [ -d "${HADOOP_PREFIX}/$YARN_DIR" ]; then
- export YARN_HOME=$HADOOP_PREFIX
+ export HADOOP_YARN_HOME=$HADOOP_PREFIX
fi
fi
-if [ -d "$YARN_HOME/$YARN_DIR/webapps" ]; then
- CLASSPATH=${CLASSPATH}:$YARN_HOME/$YARN_DIR
+if [ -d "$HADOOP_YARN_HOME/$YARN_DIR/webapps" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/$YARN_DIR
fi
-if [ -d "$YARN_HOME/$YARN_LIB_JARS_DIR" ]; then
- CLASSPATH=${CLASSPATH}:$YARN_HOME/$YARN_LIB_JARS_DIR'/*'
+if [ -d "$HADOOP_YARN_HOME/$YARN_LIB_JARS_DIR" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/$YARN_LIB_JARS_DIR'/*'
fi
-CLASSPATH=${CLASSPATH}:$YARN_HOME/$YARN_DIR'/*'
+CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/$YARN_DIR'/*'
# put mapred in classpath if present AND different from YARN
if [ "$HADOOP_MAPRED_HOME" = "" ]; then
@@ -292,7 +292,7 @@ if [ "$HADOOP_MAPRED_HOME" = "" ]; then
fi
fi
-if [ "$HADOOP_MAPRED_HOME/$MAPRED_DIR" != "$YARN_HOME/$YARN_DIR" ] ; then
+if [ "$HADOOP_MAPRED_HOME/$MAPRED_DIR" != "$HADOOP_YARN_HOME/$YARN_DIR" ] ; then
if [ -d "$HADOOP_MAPRED_HOME/$MAPRED_DIR/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/$MAPRED_DIR
fi
@@ -33,6 +33,6 @@ if [ -f "${HADOOP_HDFS_HOME}"/sbin/start-dfs.sh ]; then
fi
# start yarn daemons if yarn is present
-if [ -f "${YARN_HOME}"/sbin/start-yarn.sh ]; then
- "${YARN_HOME}"/sbin/start-yarn.sh --config $HADOOP_CONF_DIR
+if [ -f "${HADOOP_YARN_HOME}"/sbin/start-yarn.sh ]; then
+ "${HADOOP_YARN_HOME}"/sbin/start-yarn.sh --config $HADOOP_CONF_DIR
fi
@@ -2,7 +2,7 @@
# See javadoc of package-info.java for org.apache.hadoop.metrics2 for details
*.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
-# default sampling period
+# default sampling period, in seconds
*.period=10
# The namenode-metrics.out will contain metrics from all context