Permalink
Browse files

HDFS-326 Merge with SVN_HEAD of 2010-01-08

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hdfs/branches/HDFS-326@897222 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information...
1 parent d8f594a commit 48eb6192c98d46912214e2678ee8f13d5b0a33c1 @steveloughran steveloughran committed Jan 8, 2010
Showing with 2,864 additions and 941 deletions.
  1. +9 −9 .eclipse.templates/.classpath
  2. +94 −21 CHANGES.txt
  3. +2 −0 build.xml
  4. +3 −2 ivy/ivysettings.xml
  5. +3 −0 src/contrib/build-contrib.xml
  6. +3 −3 src/contrib/fuse-dfs/build.xml
  7. +1 −2 src/contrib/fuse-dfs/src/Makefile.am
  8. +15 −7 src/contrib/fuse-dfs/src/fuse_dfs_wrapper.sh
  9. +1 −1 src/docs/src/documentation/content/xdocs/hdfs_user_guide.xml
  10. +2 −2 src/java/hdfs-default.xml
  11. +158 −72 src/java/org/apache/hadoop/hdfs/DFSClient.java
  12. +1 −0 src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
  13. +3 −0 src/java/org/apache/hadoop/hdfs/HDFSPolicyProvider.java
  14. +98 −0 src/java/org/apache/hadoop/hdfs/HftpFileSystem.java
  15. +0 −3 src/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
  16. +99 −5 src/java/org/apache/hadoop/hdfs/protocol/DataTransferProtocol.java
  17. +0 −1 src/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java
  18. +67 −1 src/java/org/apache/hadoop/hdfs/server/common/Util.java
  19. +86 −203 src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
  20. +7 −5 src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java
  21. +22 −10 src/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
  22. +1 −1 src/java/org/apache/hadoop/hdfs/server/namenode/BlockManager.java
  23. +0 −46 src/java/org/apache/hadoop/hdfs/server/namenode/BlockPlacementPolicy.java
  24. +0 −11 src/java/org/apache/hadoop/hdfs/server/namenode/BlockPlacementPolicyDefault.java
  25. +5 −3 src/java/org/apache/hadoop/hdfs/server/namenode/Checkpointer.java
  26. +69 −0 src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java
  27. +0 −1 src/java/org/apache/hadoop/hdfs/server/namenode/EditLogBackupOutputStream.java
  28. +11 −10 src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
  29. +8 −40 src/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java
  30. +27 −41 src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
  31. +27 −26 src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
  32. +8 −2 src/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
  33. +1 −0 src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
  34. +46 −5 src/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
  35. +121 −26 src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
  36. +4 −4 src/test/aop/org/apache/hadoop/fi/FiHFlushTestUtil.java
  37. +18 −17 src/test/aop/org/apache/hadoop/fi/FiTestUtil.java
  38. +7 −17 src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj
  39. +6 −6 src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java
  40. +31 −28 src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
  41. +12 −11 src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java
  42. +286 −0 src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol2.java
  43. +126 −0 src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiPipelineClose.java
  44. +40 −0 src/test/hdfs/org/apache/hadoop/cli/CmdFactoryDFS.java
  45. +18 −45 src/test/hdfs/org/apache/hadoop/cli/TestHDFSCLI.java
  46. +163 −16 src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml
  47. +19 −17 src/test/hdfs/org/apache/hadoop/fs/TestHDFSFileContextMainOperations.java
  48. +85 −1 src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java
  49. +31 −7 src/test/hdfs/org/apache/hadoop/hdfs/MiniDFSCluster.java
  50. +0 −59 src/test/hdfs/org/apache/hadoop/hdfs/TestDFSClientExcludedNodes.java
  51. +242 −11 src/test/hdfs/org/apache/hadoop/hdfs/TestDFSClientRetries.java
  52. +21 −7 src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java
  53. +8 −9 src/test/hdfs/org/apache/hadoop/hdfs/TestDataTransferProtocol.java
  54. +19 −12 src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java
  55. +22 −7 src/test/hdfs/org/apache/hadoop/hdfs/TestFileStatus.java
  56. +7 −3 src/test/hdfs/org/apache/hadoop/hdfs/TestHDFSServerPorts.java
  57. +60 −0 src/test/hdfs/org/apache/hadoop/hdfs/TestHFlush.java
  58. +13 −3 src/test/hdfs/org/apache/hadoop/hdfs/TestLeaseRecovery2.java
  59. +1 −1 src/test/hdfs/org/apache/hadoop/hdfs/TestListPathServlet.java
  60. +37 −27 src/test/hdfs/org/apache/hadoop/hdfs/TestReadWhileWriting.java
  61. +72 −0 src/test/hdfs/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java
  62. +92 −0 src/test/hdfs/org/apache/hadoop/hdfs/server/common/TestGetUriFromString.java
  63. +2 −2 src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestBackupNode.java
  64. +35 −20 src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStartup.java
  65. +11 −10 src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java
  66. +39 −19 src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
  67. +120 −0 src/test/hdfs/org/apache/hadoop/security/TestGroupMappingServiceRefresh.java
  68. +219 −23 src/test/unit/org/apache/hadoop/hdfs/server/namenode/TestNNLeaseRecovery.java
@@ -9,20 +9,19 @@
<classpathentry kind="src" path="src/contrib/thriftfs/src/java"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="var" path="ANT_HOME/lib/ant.jar"/>
- <classpathentry kind="lib" path="lib/hadoop-core-0.22.0-dev.jar"/>
- <classpathentry kind="lib" path="lib/hadoop-core-test-0.22.0-dev.jar"/>
+ <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/hadoop-core-0.22.0-SNAPSHOT.jar"/>
+ <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/test/hadoop-core-test-0.22.0-SNAPSHOT.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/commons-cli-1.2.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/commons-codec-1.3.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/commons-el-1.0.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/commons-httpclient-3.0.1.jar"/>
- <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/commons-logging-1.0.4.jar"/>
- <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/commons-logging-api-1.0.4.jar"/>
+ <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/commons-logging-1.1.1.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/commons-net-1.4.1.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/core-3.1.1.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/hsqldb-1.8.0.10.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/jasper-compiler-5.5.12.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/jasper-runtime-5.5.12.jar"/>
- <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/jets3t-0.6.1.jar"/>
+ <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/jets3t-0.7.1.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/jetty-6.1.14.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/jetty-util-6.1.14.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/jsp-2.1-6.1.14.jar"/>
@@ -32,16 +31,17 @@
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/log4j-1.2.15.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/oro-2.0.8.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/servlet-api-2.5-6.1.14.jar"/>
- <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/slf4j-api-1.4.3.jar"/>
- <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/slf4j-log4j12-1.4.3.jar"/>
+ <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/slf4j-api-1.5.8.jar"/>
+ <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/test/slf4j-log4j12-1.4.3.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/xmlenc-0.52.jar"/>
- <classpathentry kind="lib" path="build/ivy/lib/hdfsproxy/common/aspectjrt-1.5.3.jar"/>
+ <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/test/mockito-all-1.8.0.jar"/>
+ <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Hdfs/common/aspectjrt-1.6.5.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/hdfsproxy/common/cactus.core.framework.uberjar.javaEE.14-1.8.0.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/hdfsproxy/common/cactus.integration.ant-1.8.0.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/hdfsproxy/common/cactus.integration.shared.api-1.8.0.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/hdfsproxy/common/cargo-ant-0.9.jar"/>
<classpathentry kind="lib" path="build/ivy/lib/hdfsproxy/common/cargo-core-uberjar-0.9.jar"/>
- <classpathentry kind="lib" path="build/ivy/lib/hdfsproxy/common/standard-1.1.2.jar"/>
+ <classpathentry kind="lib" path="build/ivy/lib/hdfsproxy/common/standard-1.1.2.jar"/>
<classpathentry kind="lib" path="src/contrib/thriftfs/lib/hadoopthriftapi.jar"/>
<classpathentry kind="lib" path="src/contrib/thriftfs/lib/libthrift.jar"/>
<classpathentry kind="lib" path="build/test/classes"/>
View
@@ -18,29 +18,28 @@ Trunk (unreleased changes)
IMPROVEMENTS
- HDFS-704. Unify build property names to facilitate cross-projects
- modifications (cos)
-
HDFS-703. Replace current fault injection implementation with one
from (cos)
HDFS-754. Reduce ivy console output to observable level (cos)
- HDFS-699. Add unit tests framework (Mockito) (cos, Eli Collins)
+ HDFS-832. HDFS side of HADOOP-6222. (cos)
- HDFS-630 In DFSOutputStream.nextBlockOutputStream(), the client can
- exclude specific datanodes when locating the next block
- (Cosmin Lehene via Stack)
+ HDFS-840. Change tests to use FileContext test helper introduced in
+ HADOOP-6394. (Jitendra Nath Pandey via suresh)
- HDFS-519. Create new tests for lease recovery (cos)
+ HDFS-685. Use the user-to-groups mapping service in the NameNode. (boryas, acmurthy)
+
+ HDFS-755. Read multiple checksum chunks at once in DFSInputStream.
+ (Todd Lipcon via tomwhite)
+
+ HDFS-786. Implement getContentSummary in HftpFileSystem.
+ (Tsz Wo (Nicholas), SZE via cdouglas)
OPTIMIZATIONS
BUG FIXES
- HDFS-646. Fix test-patch failure by adding test-contrib ant target.
- (gkesavan)
-
HDFS-695. RaidNode should read in configuration from hdfs-site.xml.
(dhruba)
@@ -49,8 +48,6 @@ Trunk (unreleased changes)
HDFS-750. Fix build failure due to TestRename. (suresh)
- HDFS-733. TestBlockReport fails intermittently. (cos)
-
HDFS-712. Move libhdfs from mapreduce subproject to hdfs subproject.
(Eli Collins via dhruba)
@@ -62,13 +59,6 @@ Trunk (unreleased changes)
HDFS-751. Fix TestCrcCorruption to pick up the correct datablocks to
corrupt. (dhruba)
- HDFS-774. Intermittent race condition in TestFiPipelines (cos)
-
- HDFS-741. TestHFlush test doesn't seek() past previously written part of
- the file (cos, szetszwo)
-
- HDFS-706. Intermittent failures in TestFiHFlush (cos)
-
HDFS-763. Fix slightly misleading report from DataBlockScanner
about corrupted scans. (dhruba)
@@ -81,7 +71,21 @@ Trunk (unreleased changes)
HDFS-785. Add Apache license to several namenode unit tests.
(Ravi Phulari via jghoman)
- HDFS-791. Build is broken after HDFS-787 patch has been applied (cos)
+ HDFS-802. Update Eclipse configuration to match changes to Ivy
+ configuration (Edwin Chan via cos)
+
+ HDFS-423. Unbreak FUSE build and fuse_dfs_wrapper.sh (Eli Collins via cos)
+
+ HDFS-825. Build fails to pull latest hadoop-core-* artifacts (cos)
+
+ HDFS-94. The Heap Size printed in the NameNode WebUI is accurate.
+ (Dmytro Molkov via dhruba)
+
+ HDFS-767. An improved retry policy when the DFSClient is unable to fetch a
+ block from the datanode. (Ning Zhang via dhruba)
+
+ HDFS-187. Initialize secondary namenode http address in TestStartup.
+ (Todd Lipcon via szetszwo)
Release 0.21.0 - Unreleased
@@ -186,6 +190,8 @@ Release 0.21.0 - Unreleased
HDFS-631. Rename configuration keys towards API standardization and
backward compatibility. (Jitendra Nath Pandey via suresh)
+ HDFS-669. Add unit tests framework (Mockito) (cos, Eli Collins)
+
HDFS-731. Support new Syncable interface in HDFS. (hairong)
HDFS-702. Add HDFS implementation of AbstractFileSystem.
@@ -194,6 +200,9 @@ Release 0.21.0 - Unreleased
HDFS-758. Add decommissioning status page to Namenode Web UI.
(Jitendra Nath Pandey via suresh)
+ HDFS-814. Add an api to get the visible length of a DFSDataInputStream.
+ (szetszwo)
+
IMPROVEMENTS
HDFS-381. Remove blocks from DataNode maps when corresponding file
@@ -354,6 +363,9 @@ Release 0.21.0 - Unreleased
HDFS-680. Add new access method to a copy of a block's replica. (shv)
+ HDFS-704. Unify build property names to facilitate cross-projects
+ modifications (cos)
+
HDFS-705. Create an adapter to access some of package-private methods of
DataNode from tests (cos)
@@ -393,6 +405,12 @@ Release 0.21.0 - Unreleased
HDFS-787. Upgrade some libraries to be consistent with common and
mapreduce. (omalley)
+ HDFS-519. Create new tests for lease recovery (cos)
+
+ HDFS-804. New unit tests for concurrent lease recovery (cos)
+
+ HDFS-813. Enable the append test in TestReadWhileWriting. (szetszwo)
+
BUG FIXES
HDFS-76. Better error message to users when commands fail because of
@@ -540,6 +558,52 @@ Release 0.21.0 - Unreleased
HDFS-691. Fix an overflow error in DFSClient.DFSInputStream.available().
(szetszwo)
+ HDFS-733. TestBlockReport fails intermittently. (cos)
+
+ HDFS-774. Intermittent race condition in TestFiPipelines (cos)
+
+ HDFS-741. TestHFlush test doesn't seek() past previously written part of
+ the file (cos, szetszwo)
+
+ HDFS-706. Intermittent failures in TestFiHFlush (cos)
+
+ HDFS-646. Fix test-patch failure by adding test-contrib ant target.
+ (gkesavan)
+
+ HDFS-791. Build is broken after HDFS-787 patch has been applied (cos)
+
+ HDFS-792. TestHDFSCLI is failing. (Todd Lipcon via cos)
+
+ HDFS-781. Namenode metrics PendingDeletionBlocks is not decremented.
+ (Suresh)
+
+ HDFS-192. Fix TestBackupNode failures. (shv)
+
+ HDFS-797. TestHDFSCLI much slower after HDFS-265 merge. (Todd Lipcon via cos)
+
+ HDFS-824. Stop lease checker in TestReadWhileWriting. (szetszwo)
+
+ HDFS-823. CheckPointer should use addInternalServlet for image-fetching
+ servlet (jghoman)
+
+ HDFS-456. Fix URI generation for windows file paths. (shv)
+
+ HDFS-812. FSNamesystem#internalReleaseLease throws NullPointerException on
+ a single-block file's lease recovery. (cos)
+
+ HDFS-724. Pipeline hangs if one of the block receiver is not responsive.
+ (hairong)
+
+ HDFS-564. Adding pipeline tests 17-35. (hairong)
+
+ HDFS-849. TestFiDataTransferProtocol2#pipeline_Fi_18 sometimes fails.
+ (hairong)
+
+ HDFS-762. Balancer causes Null Pointer Exception.
+ (Cristian Ivascu via dhruba)
+
+ HDFS-868. Fix link to Hadoop Upgrade Wiki. (Chris A. Mattmann via shv)
+
Release 0.20.2 - Unreleased
IMPROVEMENTS
@@ -570,6 +634,15 @@ Release 0.20.2 - Unreleased
HDFS-596. Fix memory leak in hdfsFreeFileInfo() for libhdfs.
(Zhang Bingjun via dhruba)
+ HDFS-793. Data node should receive the whole packet ack message before it
+ constructs and sends its own ack message for the packet. (hairong)
+
+ HDFS-185. Disallow chown, chgrp, chmod, setQuota, and setSpaceQuota when
+ name-node is in safemode. (Ravi Phulari via shv)
+
+ HDFS-101. DFS write pipeline: DFSClient sometimes does not detect second
+ datanode failure. (hairong)
+
Release 0.20.1 - 2009-09-01
IMPROVEMENTS
View
@@ -1086,6 +1086,8 @@
<env key="JVM_ARCH" value="${jvm.arch}"/>
<arg value="install"/>
</exec>
+ <!-- Create a build platform-agnostic link to c++ libs -->
+ <symlink overwrite="true" link="${build.dir}/c++/lib" resource="${install.c++}/lib"/>
</target>
<target name="compile-ant-tasks" depends="compile-core">
View
@@ -39,14 +39,15 @@
<resolvers>
<ibiblio name="maven2" root="${repo.maven.org}" pattern="${maven2.pattern.ext}" m2compatible="true"/>
- <ibiblio name="apache-snapshot" root="${snapshot.apache.org}" m2compatible="true"/>
+ <ibiblio name="apache-snapshot" root="${snapshot.apache.org}" m2compatible="true"
+ checkmodified="true" changingPattern=".*SNAPSHOT"/>
<filesystem name="fs" m2compatible="true" force="true">
<artifact pattern="${repo.dir}/org/apache/hadoop/[module]/[revision]/[module]-[revision].[ext]"/>
<ivy pattern="${repo.dir}/org/apache/hadoop/[module]/[revision]/[module]-[revision].pom"/>
</filesystem>
- <chain name="default" dual="true">
+ <chain name="default" dual="true" checkmodified="true" changingPattern=".*SNAPSHOT">
<resolver ref="apache-snapshot"/>
<resolver ref="maven2"/>
</chain>
@@ -43,6 +43,9 @@
<property name="test.timeout" value="900000"/>
<property name="build.dir" location="${hadoop.root}/build/contrib/${name}"/>
<property name="build.classes" location="${build.dir}/classes"/>
+ <!-- NB: sun.arch.data.model is not supported on all platforms -->
+ <property name="build.platform"
+ value="${os.name}-${os.arch}-${sun.arch.data.model}"/>
<property name="build.test" location="${build.dir}/test"/>
<property name="build.examples" location="${build.dir}/examples"/>
<property name="hadoop.log.dir" location="${build.dir}/test/logs"/>
@@ -32,9 +32,9 @@
<target name="check-libhdfs-exists" if="fusedfs">
- <property name="libhdfs.lib" value="${hadoop.root}/build/libhdfs/libhdfs.so"/>
+ <property name="libhdfs.lib" value="${hadoop.root}/build/c++/${build.platform}/lib/libhdfs.so"/>
<available file="${libhdfs.lib}" property="libhdfs-exists"/>
- <fail message="libhdfs.so does not exist: ${libhdfs.lib}. Please check flags -Dlibhdfs=1 -Dfusedfs=1 are set or first try ant compile-libhdfs -Dlibhdfs=1">
+ <fail message="libhdfs.so does not exist: ${libhdfs.lib}. Please check flags -Dlibhdfs=1 -Dfusedfs=1 are set or first try ant compile -Dcompile.c++=true -Dlibhdfs=true">
<condition>
<not><isset property="libhdfs-exists"/></not>
</condition>
@@ -59,7 +59,7 @@
<env key="OS_ARCH" value="${os.arch}"/>
<env key="HADOOP_HOME" value="${hadoop.root}"/>
<env key="PACKAGE_VERSION" value="0.1.0"/>
-
+ <env key="BUILD_PLATFORM" value="${build.platform}" />
<env key="PERMS" value="${perms}"/>
</exec>
<mkdir dir="${build.dir}"/>
@@ -17,5 +17,4 @@
bin_PROGRAMS = fuse_dfs
fuse_dfs_SOURCES = fuse_dfs.c fuse_options.c fuse_trash.c fuse_stat_struct.c fuse_users.c fuse_init.c fuse_connect.c fuse_impls_access.c fuse_impls_chmod.c fuse_impls_chown.c fuse_impls_create.c fuse_impls_flush.c fuse_impls_getattr.c fuse_impls_mkdir.c fuse_impls_mknod.c fuse_impls_open.c fuse_impls_read.c fuse_impls_release.c fuse_impls_readdir.c fuse_impls_rename.c fuse_impls_rmdir.c fuse_impls_statfs.c fuse_impls_symlink.c fuse_impls_truncate.c fuse_impls_utimens.c fuse_impls_unlink.c fuse_impls_write.c
AM_CPPFLAGS= -DPERMS=$(PERMS) -D_FILE_OFFSET_BITS=64 -I$(JAVA_HOME)/include -I$(HADOOP_HOME)/src/c++/libhdfs/ -I$(JAVA_HOME)/include/linux/ -D_FUSE_DFS_VERSION=\"$(PACKAGE_VERSION)\" -DPROTECTED_PATHS=\"$(PROTECTED_PATHS)\" -I$(FUSE_HOME)/include
-AM_LDFLAGS= -L$(HADOOP_HOME)/build/libhdfs -lhdfs -L$(FUSE_HOME)/lib -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm
-
+AM_LDFLAGS= -L$(HADOOP_HOME)/build/c++/$(BUILD_PLATFORM)/lib -lhdfs -L$(FUSE_HOME)/lib -lfuse -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm
@@ -1,3 +1,4 @@
+#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
@@ -19,12 +20,6 @@ if [ "$HADOOP_HOME" = "" ]; then
export HADOOP_HOME=/usr/local/share/hadoop
fi
-export PATH=$HADOOP_HOME/contrib/fuse_dfs:$PATH
-
-for f in ls $HADOOP_HOME/lib/*.jar $HADOOP_HOME/*.jar ; do
-export CLASSPATH=$CLASSPATH:$f
-done
-
if [ "$OS_ARCH" = "" ]; then
export OS_ARCH=amd64
fi
@@ -37,4 +32,17 @@ if [ "$LD_LIBRARY_PATH" = "" ]; then
export LD_LIBRARY_PATH=$JAVA_HOME/jre/lib/$OS_ARCH/server:/usr/local/share/hdfs/libhdfs/:/usr/local/lib
fi
-./fuse_dfs $@
+# If dev build set paths accordingly
+if [ -d $HADOOP_HDFS_HOME/build ]; then
+ export HADOOP_HOME=$HADOOP_HDFS_HOME
+ for f in ${HADOOP_HOME}/build/*.jar ; do
+ export CLASSPATH=$CLASSPATH:$f
+ done
+ for f in $HADOOP_HOME/build/ivy/lib/Hadoop-Hdfs/common/*.jar ; do
+ export CLASSPATH=$CLASSPATH:$f
+ done
+ export PATH=$HADOOP_HOME/build/contrib/fuse-dfs:$PATH
+ export LD_LIBRARY_PATH=$HADOOP_HOME/build/c++/lib:$JAVA_HOME/jre/lib/$OS_ARCH/server
+fi
+
+fuse_dfs $@
@@ -530,7 +530,7 @@
of Hadoop and rollback the cluster to the state it was in
before
the upgrade. HDFS upgrade is described in more detail in
- <a href="http://wiki.apache.org/hadoop/Hadoop%20Upgrade">Hadoop Upgrade</a> Wiki page.
+ <a href="http://wiki.apache.org/hadoop/Hadoop_Upgrade">Hadoop Upgrade</a> Wiki page.
HDFS can have one such backup at a time. Before upgrading,
administrators need to remove existing backup using <code>bin/hadoop
dfsadmin -finalizeUpgrade</code> command. The following
@@ -169,7 +169,7 @@ creations/deletions), or "all".</description>
<property>
<name>dfs.namenode.name.dir</name>
- <value>${hadoop.tmp.dir}/dfs/name</value>
+ <value>file://${hadoop.tmp.dir}/dfs/name</value>
<description>Determines where on the local filesystem the DFS name node
should store the name table(fsimage). If this is a comma-delimited list
of directories then the name table is replicated in all of the
@@ -447,7 +447,7 @@ creations/deletions), or "all".</description>
<property>
<name>dfs.namenode.checkpoint.dir</name>
- <value>${hadoop.tmp.dir}/dfs/namesecondary</value>
+ <value>file://${hadoop.tmp.dir}/dfs/namesecondary</value>
<description>Determines where on the local filesystem the DFS secondary
name node should store the temporary images to merge.
If this is a comma-delimited list of directories then the image is
Oops, something went wrong.

0 comments on commit 48eb619

Please sign in to comment.