
CLOUDERA BUILD. Retrofitting HttpFS to work with Hadoop cdh3u4.

commit b0b478f46354db80f2b8f428bde6ce737248de32 1 parent be7eb59
Alejandro Abdelnur authored
30 README.txt
@@ -1,5 +1,10 @@
-----------------------------------------------------------------------------
-HttpFS - Hadoop HDFS over HTTP
+HttpFS backport for cdh3u4 - Hadoop HDFS over HTTP
+
+The HttpFS source for this backport has been taken from the following
+Apache Hadoop Subversion branch@revision:
+
+ https://svn.apache.org/repos/asf/hadoop/common/trunk@1363175
HttpFS is a server that provides a REST HTTP gateway to HDFS with full
filesystem read & write capabilities.
@@ -14,4 +19,27 @@ to cross the firewall into the cluster).
HttpFS can be used to access data in HDFS using HTTP utilities (such as curl
and wget) and HTTP libraries from languages other than Java, such as Perl. (A Java usage sketch follows this diff.)
+
+Requirements:
+
+ * Unix OS
+ * JDK 1.6.*
+ * Maven 3.*
+
+How to build:
+
+  Clone this Git repository. Check out the cdh3u4 branch.
+
+ Run 'mvn package -Pdist'.
+
+  The resulting TARBALL will be under the 'target/' directory.
+
+How to install:
+
+  Expand the built TARBALL.
+
+ Follow the setup instructions:
+
+ http://cloudera.github.com/httpfs/
+
-----------------------------------------------------------------------------
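
For readers wanting to exercise the gateway from Java rather than curl or wget: below is a minimal sketch of reading a file through HttpFS via the Hadoop FileSystem API. It assumes an HttpFS server on localhost:14000 (the default port) and assumes the client is registered under the 'fs.http.impl' configuration key; the path /user/foo/data.txt is illustrative.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HttpFSReadExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Assumption: register the HttpFS client for http:// URIs.
        conf.set("fs.http.impl",
            "org.apache.hadoop.fs.http.client.HttpFSFileSystem");
        FileSystem fs = FileSystem.get(new URI("http://localhost:14000"), conf);
        BufferedReader reader = new BufferedReader(
            new InputStreamReader(fs.open(new Path("/user/foo/data.txt"))));
        String line;
        while ((line = reader.readLine()) != null) {
          System.out.println(line);
        }
        reader.close();
        fs.close();
      }
    }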
350 pom.xml
@@ -20,31 +20,121 @@
http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-project</artifactId>
- <version>3.0.0-SNAPSHOT</version>
- <relativePath>../../hadoop-project</relativePath>
+ <groupId>com.cloudera.cdh</groupId>
+ <artifactId>cdh-root</artifactId>
+ <version>3.0-u4</version>
</parent>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs-httpfs</artifactId>
- <version>3.0.0-SNAPSHOT</version>
+ <version>0.20.2-cdh3u4</version>
<packaging>war</packaging>
- <name>Apache Hadoop HttpFS</name>
- <description>Apache Hadoop HttpFS</description>
+ <name>Apache Hadoop HttpFS for cdh3</name>
+ <description>Apache Hadoop HttpFS for cdh3</description>
+
+ <repositories>
+ <repository>
+ <id>cdh.repo</id>
+ <url>https://repository.cloudera.com/content/groups/cloudera-repos</url>
+ <name>Cloudera Repositories</name>
+ <snapshots>
+ <enabled>false</enabled>
+ </snapshots>
+ </repository>
+ <repository>
+ <id>cdh.snapshots.repo</id>
+ <url>https://repository.cloudera.com/content/repositories/snapshots</url>
+ <name>Cloudera Snapshots Repository</name>
+ <snapshots>
+ <enabled>true</enabled>
+ </snapshots>
+ <releases>
+ <enabled>false</enabled>
+ </releases>
+ </repository>
+ </repositories>
<properties>
+ <maven.test.redirectTestOutputToFile>true</maven.test.redirectTestOutputToFile>
<tomcat.version>6.0.32</tomcat.version>
<httpfs.source.repository>REPO NOT AVAIL</httpfs.source.repository>
<httpfs.source.revision>REVISION NOT AVAIL</httpfs.source.revision>
<maven.build.timestamp.format>yyyy-MM-dd'T'HH:mm:ssZ</maven.build.timestamp.format>
<httpfs.build.timestamp>${maven.build.timestamp}</httpfs.build.timestamp>
- <httpfs.tomcat.dist.dir>
- ${project.build.directory}/${project.artifactId}-${project.version}/share/hadoop/httpfs/tomcat
- </httpfs.tomcat.dist.dir>
+ <httpfs.tomcat.dist.dir>${project.build.directory}/${project.artifactId}-${project.version}/share/hadoop/httpfs/tomcat</httpfs.tomcat.dist.dir>
</properties>
+ <dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-client</artifactId>
+ <version>${cdh.hadoop.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-minicluster</artifactId>
+ <version>${cdh.hadoop.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.8.2</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>1.8.5</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-server</artifactId>
+ <version>1.8</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-core</artifactId>
+ <version>1.8</version>
+ </dependency>
+ <dependency>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ <version>2.5</version>
+ </dependency>
+ <dependency>
+ <groupId>javax.servlet.jsp</groupId>
+ <artifactId>jsp-api</artifactId>
+ <version>2.1</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-codec</groupId>
+ <artifactId>commons-codec</artifactId>
+ <version>1.4</version>
+ </dependency>
+ <dependency>
+ <groupId>org.jdom</groupId>
+ <artifactId>jdom</artifactId>
+ <version>1.1</version>
+ </dependency>
+ <dependency>
+ <groupId>com.googlecode.json-simple</groupId>
+ <artifactId>json-simple</artifactId>
+ <version>1.1</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ <version>6.1.14</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ <version>6.1.14</version>
+ </dependency>
+ </dependencies>
+ </dependencyManagement>
+
<dependencies>
<dependency>
<groupId>junit</groupId>
@@ -57,9 +147,14 @@
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-annotations</artifactId>
- <scope>provided</scope>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ <scope>test</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
@@ -93,145 +188,13 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <scope>compile</scope>
- <exclusions>
- <exclusion>
- <groupId>javax.xml.stream</groupId>
- <artifactId>stax-api</artifactId>
- </exclusion>
- <exclusion>
- <groupId>commons-cli</groupId>
- <artifactId>commons-cli</artifactId>
- </exclusion>
- <exclusion>
- <groupId>commons-httpclient</groupId>
- <artifactId>commons-httpclient</artifactId>
- </exclusion>
- <exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- </exclusion>
- <exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- </exclusion>
- <exclusion>
- <groupId>javax.servlet</groupId>
- <artifactId>servlet-api</artifactId>
- </exclusion>
- <exclusion>
- <groupId>javax.servlet</groupId>
- <artifactId>jsp-api</artifactId>
- </exclusion>
- <exclusion>
- <groupId>javax.servlet.jsp</groupId>
- <artifactId>jsp-api</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty-util</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jsp-api-2.1</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>servlet-api-2.5</artifactId>
- </exclusion>
- <exclusion>
- <groupId>net.java.dev.jets3t</groupId>
- <artifactId>jets3t</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.eclipse.jdt</groupId>
- <artifactId>core</artifactId>
- </exclusion>
- <exclusion>
- <groupId>commons-el</groupId>
- <artifactId>commons-el</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
+ <artifactId>hadoop-client</artifactId>
<scope>compile</scope>
- <exclusions>
- <exclusion>
- <groupId>commons-cli</groupId>
- <artifactId>commons-cli</artifactId>
- </exclusion>
- <exclusion>
- <groupId>commons-httpclient</groupId>
- <artifactId>commons-httpclient</artifactId>
- </exclusion>
- <exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- </exclusion>
- <exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- </exclusion>
- <exclusion>
- <groupId>javax.servlet</groupId>
- <artifactId>servlet-api</artifactId>
- </exclusion>
- <exclusion>
- <groupId>javax.servlet</groupId>
- <artifactId>jsp-api</artifactId>
- </exclusion>
- <exclusion>
- <groupId>javax.servlet.jsp</groupId>
- <artifactId>jsp-api</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty-util</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jsp-api-2.1</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>servlet-api-2.5</artifactId>
- </exclusion>
- <exclusion>
- <groupId>net.java.dev.jets3t</groupId>
- <artifactId>jets3t</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.eclipse.jdt</groupId>
- <artifactId>core</artifactId>
- </exclusion>
- <exclusion>
- <groupId>commons-el</groupId>
- <artifactId>commons-el</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <scope>test</scope>
- <type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
+ <artifactId>hadoop-minicluster</artifactId>
<scope>test</scope>
- <type>test-jar</type>
</dependency>
<dependency>
<groupId>log4j</groupId>
@@ -268,13 +231,80 @@
</resource>
</resources>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <artifactId>maven-clean-plugin</artifactId>
+ <version>2.4.1</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.3.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.1</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <version>1.6</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <version>2.10</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-install-plugin</artifactId>
+ <version>2.3.1</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>2.3.1</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.2.1</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-javadoc-plugin</artifactId>
+ <version>2.8.1</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-war-plugin</artifactId>
+ <version>2.1</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-project-info-reports-plugin</artifactId>
+ <version>2.4</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-eclipse-plugin</artifactId>
+ <version>2.6</version>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+
<plugins>
<plugin>
<!-- workaround for filtered/unfiltered resources in same directory -->
<!-- remove when maven-eclipse-plugin 2.9 is available -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
- <version>2.6</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -384,13 +414,6 @@
</execution>
</executions>
</plugin>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>findbugs-maven-plugin</artifactId>
- <configuration>
- <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
- </configuration>
- </plugin>
</plugins>
</build>
@@ -429,13 +452,6 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
- <dependencies>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-assemblies</artifactId>
- <version>${project.version}</version>
- </dependency>
- </dependencies>
<executions>
<execution>
<id>dist</id>
@@ -447,9 +463,9 @@
<finalName>${project.artifactId}-${project.version}</finalName>
<appendAssemblyId>false</appendAssemblyId>
<attach>false</attach>
- <descriptorRefs>
- <descriptorRef>hadoop-httpfs-dist</descriptorRef>
- </descriptorRefs>
+ <descriptors>
+ <descriptor>${basedir}/src/main/assemblies/hadoop-httpfs-dist.xml</descriptor>
+ </descriptors>
</configuration>
</execution>
</executions>
@@ -469,7 +485,7 @@
<target>
<mkdir dir="downloads"/>
<get
- src="http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz"
+ src="http://archive.cloudera.com/tarballs/apache-tomcat-${tomcat.version}.tar.gz"
dest="downloads/tomcat.tar.gz" verbose="true" skipexisting="true"/>
<delete dir="${project.build.directory}/tomcat.exp"/>
<mkdir dir="${project.build.directory}/tomcat.exp"/>
@@ -516,7 +532,7 @@
<goal>run</goal>
</goals>
<configuration>
- <target if="tar">
+ <target>
<!-- Using Unix script to preserve symlinks -->
<echo file="${project.build.directory}/dist-maketar.sh">
52 src/main/assemblies/hadoop-httpfs-dist.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<assembly>
+ <id>hadoop-httpfs-dist</id>
+ <formats>
+ <format>dir</format>
+ </formats>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <fileSets>
+ <!-- Configuration files -->
+ <fileSet>
+ <directory>${basedir}/src/main/conf</directory>
+ <outputDirectory>/etc/hadoop</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/src/main/sbin</directory>
+ <outputDirectory>/sbin</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/src/main/libexec</directory>
+ <outputDirectory>/libexec</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <!-- Documentation -->
+ <fileSet>
+ <directory>${project.build.directory}/site</directory>
+ <outputDirectory>/share/doc/hadoop/httpfs</outputDirectory>
+ </fileSet>
+ </fileSets>
+</assembly>
27 src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+/**
+ * Minimal backport from Hadoop 2.
+ */
+public class CommonConfigurationKeysPublic {
+
+ public static final String FS_DEFAULT_NAME_KEY = "fs.default.name";
+
+}
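
This shim exists only so code written against the Hadoop 2 constant compiles on cdh3, where the default filesystem is still configured via the classic 'fs.default.name' key. A minimal usage sketch (the hdfs://localhost:8020 value is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

    public class DefaultFsExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // FS_DEFAULT_NAME_KEY resolves to "fs.default.name" on cdh3
        conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY,
            "hdfs://localhost:8020");
        System.out.println(
            conf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
      }
    }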
14 src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
@@ -104,15 +104,12 @@
FILE, DIRECTORY, SYMLINK;
public static FILE_TYPE getType(FileStatus fileStatus) {
- if (fileStatus.isFile()) {
+ if (!fileStatus.isDir()) {
return FILE;
}
- if (fileStatus.isDirectory()) {
+ if (fileStatus.isDir()) {
return DIRECTORY;
}
- if (fileStatus.isSymlink()) {
- return SYMLINK;
- }
throw new IllegalArgumentException("Could not determine filetype for: " +
fileStatus.getPath());
}
@@ -813,11 +810,8 @@ private FileStatus createFileStatus(Path parent, JSONObject json) {
permission, owner, group, path);
break;
case SYMLINK:
- Path symLink = null;
- fileStatus = new FileStatus(len, false,
- replication, blockSize, mTime, aTime,
- permission, owner, group, symLink,
- path);
+ throw new IllegalArgumentException("SYMLINKs are not supported in cdh3 : " +
+ fileStatus.getPath());
}
return fileStatus;
}
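
For context on the change above: the Hadoop 2 FileStatus API distinguishes files, directories, and symlinks, while the cdh3 API only exposes isDir(). The type mapping therefore collapses to a two-way branch, sketched here:

    // cdh3's FileStatus has no isFile()/isSymlink(); anything that is
    // not a directory must be treated as a plain file.
    FILE_TYPE type = fileStatus.isDir() ? FILE_TYPE.DIRECTORY : FILE_TYPE.FILE;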
7 src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
@@ -21,7 +21,6 @@
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.GlobFilter;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
@@ -270,10 +269,10 @@ public FSCreate(InputStream is, String path, short perm, boolean override,
@Override
public Void execute(FileSystem fs) throws IOException {
if (replication == -1) {
- replication = fs.getDefaultReplication(path);
+ replication = fs.getDefaultReplication();
}
if (blockSize == -1) {
- blockSize = fs.getDefaultBlockSize(path);
+ blockSize = fs.getDefaultBlockSize();
}
FsPermission fsPermission = new FsPermission(permission);
int bufferSize = fs.getConf().getInt("httpfs.buffer.size", 4096);
@@ -427,7 +426,7 @@ public JSONObject execute(FileSystem fs) throws IOException {
*/
public FSListStatus(String path, String filter) throws IOException {
this.path = new Path(path);
- this.filter = (filter == null) ? this : new GlobFilter(filter);
+ this.filter = this;
}
/**
2  src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
@@ -439,7 +439,7 @@ protected URI createUploadRedirectionURL(UriInfo uriInfo, Enum<?> uploadOperatio
UriBuilder uriBuilder = uriInfo.getRequestUriBuilder();
uriBuilder = uriBuilder.replaceQueryParam(OperationParam.NAME, uploadOperation).
queryParam(DataParam.NAME, Boolean.TRUE);
- return uriBuilder.build(null);
+ return uriBuilder.build((Object)null);
}
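
The (Object) cast above is not cosmetic: UriBuilder.build takes Object... values, so a bare null is interpreted as a null varargs array rather than as a single null element. A self-contained sketch of the ambiguity:

    public class VarargsNull {
      static int count(Object... values) {
        return (values == null) ? -1 : values.length;
      }
      public static void main(String[] args) {
        System.out.println(count(null));          // -1: null passed as the array itself
        System.out.println(count((Object) null)); // 1: array with a single null element
      }
    }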
19 src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
@@ -18,9 +18,8 @@
package org.apache.hadoop.lib.wsrs;
-import org.apache.hadoop.util.StringUtils;
-
import java.util.Arrays;
+import java.util.Iterator;
public abstract class EnumParam<E extends Enum<E>> extends Param<E> {
Class<E> klass;
@@ -36,7 +35,21 @@ protected E parse(String str) throws Exception {
@Override
protected String getDomain() {
- return StringUtils.join(",", Arrays.asList(klass.getEnumConstants()));
+ return join(",", Arrays.asList(klass.getEnumConstants()));
+ }
+
+ //StringUtils does not have this method in cdh3
+ private static String join(CharSequence separator, Iterable<?> strings) {
+ Iterator<?> i = strings.iterator();
+ if (!i.hasNext()) {
+ return "";
+ }
+ StringBuilder sb = new StringBuilder(i.next().toString());
+ while (i.hasNext()) {
+ sb.append(separator);
+ sb.append(i.next().toString());
+ }
+ return sb.toString();
}
}
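
A quick standalone check of the backported join helper's behavior (the method is replicated here because it is private above; the enum-constant strings are illustrative):

    import java.util.Arrays;
    import java.util.Iterator;

    public class JoinExample {
      static String join(CharSequence separator, Iterable<?> strings) {
        Iterator<?> i = strings.iterator();
        if (!i.hasNext()) {
          return "";
        }
        StringBuilder sb = new StringBuilder(i.next().toString());
        while (i.hasNext()) {
          sb.append(separator);
          sb.append(i.next().toString());
        }
        return sb.toString();
      }
      public static void main(String[] args) {
        // mirrors what getDomain() produces for an enum's constants
        System.out.println(join(",", Arrays.asList("CREATE", "OPEN", "DELETE")));
        // prints: CREATE,OPEN,DELETE
      }
    }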
29 src/main/java/org/apache/hadoop/util/Time.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+/**
+ * Minimal backport from Hadoop 2.
+ */
+public class Time {
+
+ public static long now() {
+ return System.currentTimeMillis();
+ }
+
+}
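
Time.now() is just a seam over System.currentTimeMillis() so callers match the Hadoop 2 API; typical use is measuring elapsed wall-clock time:

    import org.apache.hadoop.util.Time;

    public class TimeExample {
      public static void main(String[] args) throws InterruptedException {
        long start = Time.now();   // milliseconds since the epoch
        Thread.sleep(100);         // stand-in for real work
        System.out.println("elapsed ms: " + (Time.now() - start));
      }
    }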
20 src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java
@@ -169,17 +169,15 @@ private void testAppend() throws Exception {
os.close();
fs.close();
fs = getHttpFileSystem();
- os = fs.append(new Path(path.toUri().getPath()));
- os.write(2);
- os.close();
- fs.close();
- fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
- InputStream is = fs.open(path);
- Assert.assertEquals(is.read(), 1);
- Assert.assertEquals(is.read(), 2);
- Assert.assertEquals(is.read(), -1);
- is.close();
- fs.close();
+ try {
+ os = fs.append(new Path(path.toUri().getPath()));
+ os.write(2);
+ os.close();
+ } catch (IOException ex) {
+ // cdh3u4 does not support appends.
+ } finally {
+ fs.close();
+ }
}
private void testRename() throws Exception {
5 src/test/java/org/apache/hadoop/fs/http/client/TestWebhdfsFileSystem.java
@@ -22,7 +22,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+//import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.test.TestJettyHelper;
import org.junit.Assert;
import org.junit.runner.RunWith;
@@ -35,6 +35,7 @@ public TestWebhdfsFileSystem(TestHttpFSFileSystem.Operation operation) {
super(operation);
}
+/* TODO: remove this when webhdfs is backported to cdh3u4, webhdfs:// will then be avail
@Override
protected FileSystem getHttpFileSystem() throws Exception {
Configuration conf = new Configuration();
@@ -51,5 +52,5 @@ protected void testGet() throws Exception {
Assert.assertEquals(fs.getUri(), uri);
fs.close();
}
-
+*/
}
2  src/test/java/org/apache/hadoop/lib/servlet/TestServerWebApp.java
@@ -30,11 +30,13 @@
@Test(expected = IllegalArgumentException.class)
public void getHomeDirNotDef() {
+ ServerWebApp.setHomeDirForCurrentThread(null);
ServerWebApp.getHomeDir("TestServerWebApp00");
}
@Test
public void getHomeDir() {
+ ServerWebApp.setHomeDirForCurrentThread(null);
System.setProperty("TestServerWebApp0.home.dir", "/tmp");
assertEquals(ServerWebApp.getHomeDir("TestServerWebApp0"), "/tmp");
assertEquals(ServerWebApp.getDir("TestServerWebApp0", ".log.dir", "/tmp/log"), "/tmp/log");
9 src/test/java/org/apache/hadoop/test/TestHdfsHelper.java
@@ -65,7 +65,7 @@ public void evaluate() throws Throwable {
Configuration conf = HadoopUsersConfTestHelper.getBaseConf();
if (Boolean.parseBoolean(System.getProperty(HADOOP_MINI_HDFS, "true"))) {
miniHdfs = startMiniHdfs(conf);
- conf = miniHdfs.getConfiguration(0);
+ conf = miniHdfs.getFileSystem().getConf();
}
try {
HDFS_CONF_TL.set(conf);
@@ -143,14 +143,15 @@ private static synchronized MiniDFSCluster startMiniHdfs(Configuration conf) thr
conf.set("dfs.block.access.token.enable", "false");
conf.set("dfs.permissions", "true");
conf.set("hadoop.security.authentication", "simple");
- MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
- builder.numDataNodes(2);
- MiniDFSCluster miniHdfs = builder.build();
+ MiniDFSCluster miniHdfs = new MiniDFSCluster(conf, 1, true, null);
FileSystem fileSystem = miniHdfs.getFileSystem();
fileSystem.mkdirs(new Path("/tmp"));
fileSystem.mkdirs(new Path("/user"));
+ fileSystem.mkdirs(new Path("/hadoop/mapred/system"));
fileSystem.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
fileSystem.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
+ fileSystem.setPermission(new Path("/hadoop/mapred/system"), FsPermission.valueOf("-rwx------"));
+ String nnURI = fileSystem.getUri().toString();
MINI_DFS = miniHdfs;
}
return MINI_DFS;
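
The MiniDFSCluster.Builder API used upstream does not exist in cdh3, so the helper falls back to the classic constructor seen in the diff; its cdh3-era signature is assumed here to be (Configuration conf, int numDataNodes, boolean format, String[] racks). A minimal sketch of spinning one up, assuming hadoop-minicluster is on the classpath:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class MiniClusterExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // format the namenode on startup; trailing null = no rack topology
        MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
        FileSystem fs = cluster.getFileSystem();
        System.out.println("mini HDFS at: " + fs.getUri());
        cluster.shutdown();
      }
    }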