Merge -r 1212059:1212060 from trunk to branch. FIXES: HDFS-2178

commit 3e94aebae97f2427bf6717de0d41021cee8d5f1a (1 parent: 594533c)
Authored by Alejandro Abdelnur
Showing with 14,685 additions and 1 deletion.
  1. +1 −0  .gitignore
  2. +60 −0 hadoop-assemblies/src/main/resources/assemblies/hadoop-httpfs-dist.xml
  3. +5 −0 hadoop-common-project/hadoop-common/pom.xml
  4. +1 −0  hadoop-dist/pom.xml
  5. +17 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/README.txt
  6. +530 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
  7. +41 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
  8. +35 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-log4j.properties
  9. +17 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-site.xml
  10. +863 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
  11. +41 −0 ...fs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpKerberosAuthenticator.java
  12. +45 −0 ...hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpPseudoAuthenticator.java
  13. +64 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java
  14. +717 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
  15. +91 −0 ...hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java
  16. +536 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParams.java
  17. +41 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSReleaseFilter.java
  18. +604 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
  19. +126 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java
  20. +96 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/RunnableCallable.java
  21. +134 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java
  22. +178 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java
  23. +766 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
  24. +90 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java
  25. +79 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java
  26. +41 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServiceException.java
  27. +42 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java
  28. +52 −0 ...-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccessException.java
  29. +28 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Groups.java
  30. +50 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Instrumentation.java
  31. +28 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/ProxyUser.java
  32. +30 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Scheduler.java
  33. +278 −0 ...-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
  34. +403 −0 .../hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java
  35. +129 −0 ...hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java
  36. +56 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/GroupsService.java
  37. +176 −0 ...-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java
  38. +110 −0 ...op-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java
  39. +91 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java
  40. +101 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java
  41. +159 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
  42. +199 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java
  43. +157 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java
  44. +43 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java
  45. +35 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java
  46. +42 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
  47. +67 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ExceptionProvider.java
  48. +52 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/InputStreamEntity.java
  49. +35 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java
  50. +62 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java
  51. +62 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java
  52. +35 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java
  53. +54 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java
  54. +35 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java
  55. +69 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java
  56. +79 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/UserProvider.java
  57. +167 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
  58. +20 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/default-log4j.properties
  59. +204 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
  60. +21 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs.properties
  61. +62 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
  62. +16 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/ROOT/WEB-INF/web.xml
  63. +21 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/ROOT/index.html
  64. +67 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/logging.properties
  65. +150 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/server.xml
  66. +88 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/WEB-INF/web.xml
  67. +121 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/apt/ServerSetup.apt.vm
  68. +91 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/apt/UsingHttpTools.apt.vm
  69. +88 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/apt/index.apt.vm
  70. +49 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/configuration.xsl
  71. +34 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/site.xml
  72. +485 −0 ...op-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java
  73. +55 −0 ...p-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestWebhdfsFileSystem.java
  74. +164 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
  75. +94 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/lang/TestRunnableCallable.java
  76. +62 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/lang/TestXException.java
  77. +68 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestBaseService.java
  78. +790 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestServer.java
  79. +76 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestServerConstructor.java
  80. +306 −0 ...ject/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/hadoop/TestFileSystemAccessService.java
  81. +404 −0 ...oop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/instrumentation/TestInstrumentationService.java
  82. +49 −0 ...-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/scheduler/TestSchedulerService.java
  83. +62 −0 ...hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestGroupsService.java
  84. +225 −0 ...s-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestProxyUserService.java
  85. +64 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestHostnameFilter.java
  86. +117 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestMDCFilter.java
  87. +76 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestServerWebApp.java
  88. +144 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestCheck.java
  89. +125 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/util/TestConfigurationUtils.java
  90. +50 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestBooleanParam.java
  91. +53 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestByteParam.java
  92. +52 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestEnumParam.java
  93. +47 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestInputStreamEntity.java
  94. +52 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestIntegerParam.java
  95. +45 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONMapProvider.java
  96. +44 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestJSONProvider.java
  97. +47 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestLongParam.java
  98. +53 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestShortParam.java
  99. +64 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestStringParam.java
  100. +91 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestUserProvider.java
  101. +28 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HFSTestCase.java
  102. +174 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HTestCase.java
  103. +177 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/HadoopUsersConfTestHelper.java
  104. +70 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/SysPropsForTestsLoader.java
  105. +34 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDir.java
  106. +149 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java
  107. +30 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestException.java
  108. +66 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestExceptionHelper.java
  109. +187 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java
  110. +154 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java
  111. +40 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfs.java
  112. +159 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java
  113. +40 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJetty.java
  114. +118 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
  115. +13 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/TestServerWebApp1.properties
  116. +15 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/TestServerWebApp2.properties
  117. +1 −0  hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/classutils.txt
  118. +22 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/default-log4j.properties
  119. +13 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/server.properties
  120. +20 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/testserver-default.xml
  121. +13 −0 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/testserver.properties
  122. +3 −0  hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
  123. +1 −0  hadoop-hdfs-project/pom.xml
  124. +17 −1 hadoop-project/pom.xml
1  .gitignore
@@ -8,3 +8,4 @@
.settings
target
hadoop-hdfs-project/hadoop-hdfs/downloads
+hadoop-hdfs-project/hadoop-hdfs-httpfs/downloads
60 hadoop-assemblies/src/main/resources/assemblies/hadoop-httpfs-dist.xml
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<assembly>
+ <id>hadoop-httpfs-dist</id>
+ <formats>
+ <format>dir</format>
+ </formats>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <fileSets>
+ <!-- Configuration files -->
+ <fileSet>
+ <directory>${basedir}/src/main/conf</directory>
+ <outputDirectory>/etc/hadoop</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ </fileSet>
+ <!-- Readme, licenses, etc. -->
+ <fileSet>
+ <directory>${basedir}</directory>
+ <outputDirectory>/</outputDirectory>
+ <includes>
+ <include>*.txt</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/src/main/sbin</directory>
+ <outputDirectory>/sbin</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <fileSet>
+ <directory>${basedir}/src/main/libexec</directory>
+ <outputDirectory>/libexec</outputDirectory>
+ <includes>
+ <include>*</include>
+ </includes>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <!-- Documentation -->
+ <fileSet>
+ <directory>${project.build.directory}/site</directory>
+ <outputDirectory>/share/doc/hadoop/httpfs</outputDirectory>
+ </fileSet>
+ </fileSets>
+</assembly>
5 hadoop-common-project/hadoop-common/pom.xml
@@ -264,6 +264,11 @@
<artifactId>hadoop-auth</artifactId>
<scope>compile</scope>
</dependency>
+ <dependency>
+ <groupId>com.googlecode.json-simple</groupId>
+ <artifactId>json-simple</artifactId>
+ <scope>compile</scope>
+ </dependency>
</dependencies>
<build>
1  hadoop-dist/pom.xml
@@ -112,6 +112,7 @@
run cd hadoop-${project.version}
run cp -r $ROOT/hadoop-common-project/hadoop-common/target/hadoop-common-${project.version}/* .
run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version}/* .
+ run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* .
run cp -r $ROOT/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}/* .
COMMON_LIB=share/hadoop/common/lib
MODULES=../../../../modules
17 hadoop-hdfs-project/hadoop-hdfs-httpfs/README.txt
@@ -0,0 +1,17 @@
+-----------------------------------------------------------------------------
+HttpFS - Hadoop HDFS over HTTP
+
+HttpFS is a server that provides a REST HTTP gateway to HDFS with full
+filesystem read & write capabilities.
+
+HttpFS can be used to transfer data between clusters running different
+versions of Hadoop (overcoming RPC versioning issues), for example using
+Hadoop DistCP.
+
+HttpFS can be used to access data in HDFS on a cluster behind a firewall
+(the HttpFS server acts as a gateway and is the only system that is allowed
+to cross the firewall into the cluster).
+
+HttpFS can be used to access data in HDFS using HTTP utilities (such as curl
+and wget) and HTTP libraries from languages other than Java (for example, Perl).
+-----------------------------------------------------------------------------
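As a quick illustration of the HTTP access the README describes, the following minimal sketch lists a directory through the HttpFS REST endpoint with plain Java. The /webhdfs/v1 prefix and the op query parameter come from the code added in this commit, and 14000 is the default port suggested in httpfs-env.sh; the host name, the /tmp path and the user.name parameter (pseudo authentication) are illustrative assumptions, not part of this change set.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class HttpFSListStatusExample {
  public static void main(String[] args) throws Exception {
    // Assumed deployment details: adjust host, path and user to your setup.
    URL url = new URL("http://httpfs-host:14000/webhdfs/v1/tmp"
        + "?op=LISTSTATUS&user.name=alice");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
      BufferedReader reader =
          new BufferedReader(new InputStreamReader(conn.getInputStream()));
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line); // JSON FileStatuses payload
      }
      reader.close();
    }
  }
}

Any HTTP client (curl, wget, an HTTP library in Perl, and so on) can issue the same request, which is the interoperability point the README makes.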
530 hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
@@ -0,0 +1,530 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+-->
+<project>
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-project</artifactId>
+ <version>0.23.1-SNAPSHOT</version>
+ <relativePath>../../hadoop-project</relativePath>
+ </parent>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs-httpfs</artifactId>
+ <version>0.23.1-SNAPSHOT</version>
+ <packaging>war</packaging>
+
+ <name>Apache Hadoop HttpFS</name>
+ <description>Apache Hadoop HttpFS</description>
+
+ <properties>
+ <tomcat.version>6.0.32</tomcat.version>
+ <httpfs.source.repository>REPO NOT AVAIL</httpfs.source.repository>
+ <httpfs.source.repository>REPO NOT AVAIL</httpfs.source.repository>
+ <httpfs.source.revision>REVISION NOT AVAIL</httpfs.source.revision>
+ <maven.build.timestamp.format>yyyy-MM-dd'T'HH:mm:ssZ</maven.build.timestamp.format>
+ <httpfs.build.timestamp>${maven.build.timestamp}</httpfs.build.timestamp>
+ <httpfs.tomcat.dist.dir>
+ ${project.build.directory}/${project.artifactId}-${project.version}/share/hadoop/httpfs/tomcat
+ </httpfs.tomcat.dist.dir>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-server</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>javax.servlet.jsp</groupId>
+ <artifactId>jsp-api</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-codec</groupId>
+ <artifactId>commons-codec</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jdom</groupId>
+ <artifactId>jdom</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.googlecode.json-simple</groupId>
+ <artifactId>json-simple</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <scope>compile</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>javax.xml.stream</groupId>
+ <artifactId>stax-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>commons-httpclient</groupId>
+ <artifactId>commons-httpclient</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>tomcat</groupId>
+ <artifactId>jasper-compiler</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>tomcat</groupId>
+ <artifactId>jasper-runtime</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>jsp-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet.jsp</groupId>
+ <artifactId>jsp-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jsp-api-2.1</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>servlet-api-2.5</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>net.java.dev.jets3t</groupId>
+ <artifactId>jets3t</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.eclipse.jdt</groupId>
+ <artifactId>core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>commons-el</groupId>
+ <artifactId>commons-el</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <scope>compile</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>commons-httpclient</groupId>
+ <artifactId>commons-httpclient</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>tomcat</groupId>
+ <artifactId>jasper-compiler</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>tomcat</groupId>
+ <artifactId>jasper-runtime</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>jsp-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet.jsp</groupId>
+ <artifactId>jsp-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jsp-api-2.1</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>servlet-api-2.5</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>net.java.dev.jets3t</groupId>
+ <artifactId>jets3t</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.eclipse.jdt</groupId>
+ <artifactId>core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>commons-el</groupId>
+ <artifactId>commons-el</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <scope>test</scope>
+ <type>test-jar</type>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <scope>test</scope>
+ <type>test-jar</type>
+ </dependency>
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <resources>
+ <resource>
+ <directory>src/main/resources</directory>
+ <filtering>true</filtering>
+ <includes>
+ <include>httpfs.properties</include>
+ </includes>
+ </resource>
+ <resource>
+ <directory>src/main/resources</directory>
+ <filtering>false</filtering>
+ <excludes>
+ <exclude>httpfs.properties</exclude>
+ </excludes>
+ </resource>
+ </resources>
+
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <threadCount>1</threadCount>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-javadoc-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>javadoc</goal>
+ </goals>
+ <phase>site</phase>
+ <configuration>
+ <linksource>true</linksource>
+ <quiet>true</quiet>
+ <verbose>false</verbose>
+ <source>${maven.compile.source}</source>
+ <charset>${maven.compile.encoding}</charset>
+ <groups>
+ <group>
+ <title>HttpFs API</title>
+ <packages>*</packages>
+ </group>
+ </groups>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-project-info-reports-plugin</artifactId>
+ <executions>
+ <execution>
+ <configuration>
+ <dependencyLocationsEnabled>false</dependencyLocationsEnabled>
+ </configuration>
+ <goals>
+ <goal>dependencies</goal>
+ </goals>
+ <phase>site</phase>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.rat</groupId>
+ <artifactId>apache-rat-plugin</artifactId>
+ <configuration>
+ <excludes>
+ </excludes>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>create-web-xmls</id>
+ <phase>generate-test-resources</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <target>
+ <mkdir dir="${project.build.directory}/test-classes/webapp"/>
+
+ <copy todir="${project.build.directory}/test-classes/webapp">
+ <fileset dir="${basedir}/src/main/webapp"/>
+ </copy>
+ </target>
+ </configuration>
+ </execution>
+ <execution>
+ <id>site</id>
+ <phase>site</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <target>
+ <xslt in="${basedir}/src/main/resources/httpfs-default.xml"
+ out="${project.build.directory}/site/httpfs-default.html"
+ style="${basedir}/src/site/configuration.xsl"/>
+ </target>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-war-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>default-war</id>
+ <phase>package</phase>
+ <goals>
+ <goal>war</goal>
+ </goals>
+ <configuration>
+ <warName>webhdfs</warName>
+ <webappDirectory>${project.build.directory}/webhdfs</webappDirectory>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+ <profiles>
+ <profile>
+ <id>docs</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-site-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>docs</id>
+ <phase>prepare-package</phase>
+ <goals>
+ <goal>site</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+
+ <profile>
+ <id>dist</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-assemblies</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+ <executions>
+ <execution>
+ <id>dist</id>
+ <phase>package</phase>
+ <goals>
+ <goal>single</goal>
+ </goals>
+ <configuration>
+ <finalName>${project.artifactId}-${project.version}</finalName>
+ <appendAssemblyId>false</appendAssemblyId>
+ <attach>false</attach>
+ <descriptorRefs>
+ <descriptorRef>hadoop-httpfs-dist</descriptorRef>
+ </descriptorRefs>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <!-- Downloading Tomcat TAR.GZ, using downloads/ dir to avoid downloading over and over -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>dist</id>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <phase>package</phase>
+ <configuration>
+ <target>
+ <mkdir dir="downloads"/>
+ <get
+ src="http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz"
+ dest="downloads/tomcat.tar.gz" verbose="true" skipexisting="true"/>
+ <delete dir="${project.build.directory}/tomcat.exp"/>
+ <mkdir dir="${project.build.directory}/tomcat.exp"/>
+
+ <!-- Using Unix script to preserve file permissions -->
+ <echo file="${project.build.directory}/tomcat-untar.sh">
+
+ which cygpath 2> /dev/null
+ if [ $? = 1 ]; then
+ BUILD_DIR="${project.build.directory}"
+ else
+ BUILD_DIR=`cygpath --unix '${project.build.directory}'`
+ fi
+ cd $BUILD_DIR/tomcat.exp
+ tar xzf ${basedir}/downloads/tomcat.tar.gz
+ </echo>
+ <exec executable="sh" dir="${project.build.directory}" failonerror="true">
+ <arg line="./tomcat-untar.sh"/>
+ </exec>
+
+ <move file="${project.build.directory}/tomcat.exp/apache-tomcat-${tomcat.version}"
+ tofile="${httpfs.tomcat.dist.dir}"/>
+ <delete dir="${project.build.directory}/tomcat.exp"/>
+ <delete dir="${httpfs.tomcat.dist.dir}/webapps"/>
+ <mkdir dir="${httpfs.tomcat.dist.dir}/webapps"/>
+ <delete file="${httpfs.tomcat.dist.dir}/conf/server.xml"/>
+ <copy file="${basedir}/src/main/tomcat/server.xml"
+ toDir="${httpfs.tomcat.dist.dir}/conf"/>
+ <copy file="${basedir}/src/main/tomcat/logging.properties"
+ toDir="${httpfs.tomcat.dist.dir}/conf"/>
+ <copy toDir="${httpfs.tomcat.dist.dir}/webapps/ROOT">
+ <fileset dir="${basedir}/src/main/tomcat/ROOT"/>
+ </copy>
+ <copy toDir="${httpfs.tomcat.dist.dir}/webapps/webhdfs">
+ <fileset dir="${project.build.directory}/webhdfs"/>
+ </copy>
+ </target>
+ </configuration>
+ </execution>
+ <execution>
+ <id>tar</id>
+ <phase>package</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <target if="tar">
+ <!-- Using Unix script to preserve symlinks -->
+ <echo file="${project.build.directory}/dist-maketar.sh">
+
+ which cygpath 2> /dev/null
+ if [ $? = 1 ]; then
+ BUILD_DIR="${project.build.directory}"
+ else
+ BUILD_DIR=`cygpath --unix '${project.build.directory}'`
+ fi
+ cd $BUILD_DIR
+ tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
+ </echo>
+ <exec executable="sh" dir="${project.build.directory}" failonerror="true">
+ <arg line="./dist-maketar.sh"/>
+ </exec>
+ </target>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
+</project>
41 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License. See accompanying LICENSE file.
+#
+
+# Set httpfs specific environment variables here.
+
+# Settings for the Embedded Tomcat that runs HttpFS
+# Java System properties for HttpFS should be specified in this variable
+#
+# export CATALINA_OPTS=
+
+# HttpFS logs directory
+#
+# export HTTPFS_LOG=${HTTPFS_HOME}/logs
+
+# HttpFS temporary directory
+#
+# export HTTPFS_TEMP=${HTTPFS_HOME}/temp
+
+# The HTTP port used by HttpFS
+#
+# export HTTPFS_HTTP_PORT=14000
+
+# The Admin port used by HttpFS
+#
+# export HTTPFS_ADMIN_PORT=`expr ${HTTPFS_HTTP_PORT} + 1`
+
+# The hostname HttpFS server runs on
+#
+# export HTTPFS_HTTP_HOSTNAME=`hostname -f`
35 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-log4j.properties
@@ -0,0 +1,35 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License. See accompanying LICENSE file.
+#
+
+# If the Java System property 'httpfs.log.dir' is not defined at HttpFS server start-up time,
+# its value is set to '${httpfs.home}/logs'
+
+log4j.appender.httpfs=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.httpfs.DatePattern='.'yyyy-MM-dd
+log4j.appender.httpfs.File=${httpfs.log.dir}/httpfs.log
+log4j.appender.httpfs.Append=true
+log4j.appender.httpfs.layout=org.apache.log4j.PatternLayout
+log4j.appender.httpfs.layout.ConversionPattern=%d{ISO8601} %5p %c{1} [%X{hostname}][%X{user}:%X{doAs}] %X{op} %m%n
+
+log4j.appender.httpfsaudit=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.httpfsaudit.DatePattern='.'yyyy-MM-dd
+log4j.appender.httpfsaudit.File=${httpfs.log.dir}/httpfs-audit.log
+log4j.appender.httpfsaudit.Append=true
+log4j.appender.httpfsaudit.layout=org.apache.log4j.PatternLayout
+log4j.appender.httpfsaudit.layout.ConversionPattern=%d{ISO8601} %5p [%X{hostname}][%X{user}:%X{doAs}] %X{op} %m%n
+
+log4j.logger.httpfsaudit=INFO, httpfsaudit
+
+log4j.logger.org.apache.hadoop.fs.http.server=INFO, httpfs
+log4j.logger.org.apache.hadoop.lib=INFO, httpfs
17 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-site.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration>
+
+</configuration>
863 ...t/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
@@ -0,0 +1,863 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.client;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileChecksum;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PositionedReadable;
+import org.apache.hadoop.fs.Seekable;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.Authenticator;
+import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.FileNotFoundException;
+import java.io.FilterInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.lang.reflect.Constructor;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * HttpFSServer implementation of the FileSystemAccess FileSystem.
+ * <p/>
+ * This implementation allows a user to access HDFS over HTTP via an HttpFSServer.
+ */
+public class HttpFSFileSystem extends FileSystem {
+
+ public static final String SERVICE_NAME = "/webhdfs";
+
+ public static final String SERVICE_VERSION = "/v1";
+
+ public static final String SERVICE_PREFIX = SERVICE_NAME + SERVICE_VERSION;
+
+ public static final String OP_PARAM = "op";
+ public static final String DO_AS_PARAM = "doas";
+ public static final String OVERWRITE_PARAM = "overwrite";
+ public static final String REPLICATION_PARAM = "replication";
+ public static final String BLOCKSIZE_PARAM = "blocksize";
+ public static final String PERMISSION_PARAM = "permission";
+ public static final String DESTINATION_PARAM = "destination";
+ public static final String RECURSIVE_PARAM = "recursive";
+ public static final String OWNER_PARAM = "owner";
+ public static final String GROUP_PARAM = "group";
+ public static final String MODIFICATION_TIME_PARAM = "modificationtime";
+ public static final String ACCESS_TIME_PARAM = "accesstime";
+ public static final String RENEWER_PARAM = "renewer";
+
+ public static final String DEFAULT_PERMISSION = "default";
+
+ public static final String RENAME_JSON = "boolean";
+
+ public static final String DELETE_JSON = "boolean";
+
+ public static final String MKDIRS_JSON = "boolean";
+
+ public static final String HOME_DIR_JSON = "Path";
+
+ public static final String SET_REPLICATION_JSON = "boolean";
+
+ public static enum FILE_TYPE {
+ FILE, DIRECTORY, SYMLINK;
+
+ public static FILE_TYPE getType(FileStatus fileStatus) {
+ if (fileStatus.isFile()) {
+ return FILE;
+ }
+ if (fileStatus.isDirectory()) {
+ return DIRECTORY;
+ }
+ if (fileStatus.isSymlink()) {
+ return SYMLINK;
+ }
+ throw new IllegalArgumentException("Could not determine filetype for: " +
+ fileStatus.getPath());
+ }
+ }
+
+ public static final String FILE_STATUSES_JSON = "FileStatuses";
+ public static final String FILE_STATUS_JSON = "FileStatus";
+ public static final String PATH_SUFFIX_JSON = "pathSuffix";
+ public static final String TYPE_JSON = "type";
+ public static final String LENGTH_JSON = "length";
+ public static final String OWNER_JSON = "owner";
+ public static final String GROUP_JSON = "group";
+ public static final String PERMISSION_JSON = "permission";
+ public static final String ACCESS_TIME_JSON = "accessTime";
+ public static final String MODIFICATION_TIME_JSON = "modificationTime";
+ public static final String BLOCK_SIZE_JSON = "blockSize";
+ public static final String REPLICATION_JSON = "replication";
+
+ public static final String FILE_CHECKSUM_JSON = "FileChecksum";
+ public static final String CHECKSUM_ALGORITHM_JSON = "algorithm";
+ public static final String CHECKSUM_BYTES_JSON = "bytes";
+ public static final String CHECKSUM_LENGTH_JSON = "length";
+
+ public static final String CONTENT_SUMMARY_JSON = "ContentSummary";
+ public static final String CONTENT_SUMMARY_DIRECTORY_COUNT_JSON = "directoryCount";
+ public static final String CONTENT_SUMMARY_FILE_COUNT_JSON = "fileCount";
+ public static final String CONTENT_SUMMARY_LENGTH_JSON = "length";
+ public static final String CONTENT_SUMMARY_QUOTA_JSON = "quota";
+ public static final String CONTENT_SUMMARY_SPACE_CONSUMED_JSON = "spaceConsumed";
+ public static final String CONTENT_SUMMARY_SPACE_QUOTA_JSON = "spaceQuota";
+
+ public static final String DELEGATION_TOKEN_JSON = "Token";
+ public static final String DELEGATION_TOKEN_URL_STRING_JSON = "urlString";
+
+ public static final String ERROR_JSON = "RemoteException";
+ public static final String ERROR_EXCEPTION_JSON = "exception";
+ public static final String ERROR_CLASSNAME_JSON = "javaClassName";
+ public static final String ERROR_MESSAGE_JSON = "message";
+
+ public static final int HTTP_TEMPORARY_REDIRECT = 307;
+
+
+ /**
+ * Get operations.
+ */
+ public enum GetOpValues {
+ OPEN, GETFILESTATUS, LISTSTATUS, GETHOMEDIR, GETCONTENTSUMMARY, GETFILECHECKSUM,
+ GETDELEGATIONTOKEN, GETFILEBLOCKLOCATIONS, INSTRUMENTATION
+ }
+
+ /**
+ * Post operations.
+ */
+ public static enum PostOpValues {
+ APPEND
+ }
+
+ /**
+ * Put operations.
+ */
+ public static enum PutOpValues {
+ CREATE, MKDIRS, RENAME, SETOWNER, SETPERMISSION, SETREPLICATION, SETTIMES,
+ RENEWDELEGATIONTOKEN, CANCELDELEGATIONTOKEN
+ }
+
+ /**
+ * Delete operations.
+ */
+ public static enum DeleteOpValues {
+ DELETE
+ }
+
+ private static final String HTTP_GET = "GET";
+ private static final String HTTP_PUT = "PUT";
+ private static final String HTTP_POST = "POST";
+ private static final String HTTP_DELETE = "DELETE";
+
+ private AuthenticatedURL.Token authToken = new AuthenticatedURL.Token();
+ private URI uri;
+ private Path workingDir;
+ private String doAs;
+
+ /**
+ * Convenience method that creates a <code>HttpURLConnection</code> for the
+ * HttpFSServer file system operations.
+ * <p/>
+ * This method performs authentication and injects any needed credentials
+ * via the {@link #getConnection(URL, String)} method.
+ *
+ * @param method the HTTP method.
+ * @param params the query string parameters.
+ * @param path the file path
+ * @param makeQualified whether the path should be 'makeQualified'.
+ *
+ * @return a <code>HttpURLConnection</code> for the HttpFSServer server,
+ * authenticated and ready to use for the specified path and file system operation.
+ *
+ * @throws IOException thrown if an IO error occurs.
+ */
+ private HttpURLConnection getConnection(String method, Map<String, String> params,
+ Path path, boolean makeQualified) throws IOException {
+ params.put(DO_AS_PARAM, doAs);
+ if (makeQualified) {
+ path = makeQualified(path);
+ }
+ URI uri = path.toUri();
+ StringBuilder sb = new StringBuilder();
+ sb.append(uri.getScheme()).append("://").append(uri.getAuthority()).
+ append(SERVICE_PREFIX).append(uri.getPath());
+
+ String separator = "?";
+ for (Map.Entry<String, String> entry : params.entrySet()) {
+ sb.append(separator).append(entry.getKey()).append("=").
+ append(URLEncoder.encode(entry.getValue(), "UTF8"));
+ separator = "&";
+ }
+ URL url = new URL(sb.toString());
+ return getConnection(url, method);
+ }
+
+ /**
+ * Convenience method that creates a <code>HttpURLConnection</code> for the specified URL.
+ * <p/>
+ * This method performs authentication and injects any needed credentials.
+ *
+ * @param url url to connect to.
+ * @param method the HTTP method.
+ *
+ * @return a <code>HttpURLConnection</code> for the HttpFSServer server, authenticated and ready to use for
+ * the specified path and file system operation.
+ *
+ * @throws IOException thrown if an IO error occurs.
+ */
+ private HttpURLConnection getConnection(URL url, String method) throws IOException {
+ Class<? extends Authenticator> klass =
+ getConf().getClass("httpfs.authenticator.class", HttpKerberosAuthenticator.class, Authenticator.class);
+ Authenticator authenticator = ReflectionUtils.newInstance(klass, getConf());
+ try {
+ HttpURLConnection conn = new AuthenticatedURL(authenticator).openConnection(url, authToken);
+ conn.setRequestMethod(method);
+ if (method.equals(HTTP_POST) || method.equals(HTTP_PUT)) {
+ conn.setDoOutput(true);
+ }
+ return conn;
+ } catch (Exception ex) {
+ throw new IOException(ex);
+ }
+ }
+
+ /**
+ * Convenience method that parses the JSON from the <code>InputStream</code> of an <code>HttpURLConnection</code>.
+ *
+ * @param conn the <code>HttpURLConnection</code>.
+ *
+ * @return the parsed JSON object.
+ *
+ * @throws IOException thrown if the <code>InputStream</code> could not be JSON parsed.
+ */
+ private static Object jsonParse(HttpURLConnection conn) throws IOException {
+ try {
+ JSONParser parser = new JSONParser();
+ return parser.parse(new InputStreamReader(conn.getInputStream()));
+ } catch (ParseException ex) {
+ throw new IOException("JSON parser error, " + ex.getMessage(), ex);
+ }
+ }
+
+ /**
+ * Validates the status of an <code>HttpURLConnection</code> against an expected HTTP
+ * status code. If the current status code is not the expected one it throws an exception
+ * with a detail message using Server side error messages if available.
+ *
+ * @param conn the <code>HttpURLConnection</code>.
+ * @param expected the expected HTTP status code.
+ *
+ * @throws IOException thrown if the current status code does not match the expected one.
+ */
+ private static void validateResponse(HttpURLConnection conn, int expected) throws IOException {
+ int status = conn.getResponseCode();
+ if (status != expected) {
+ try {
+ JSONObject json = (JSONObject) jsonParse(conn);
+ json = (JSONObject) json.get(ERROR_JSON);
+ String message = (String) json.get(ERROR_MESSAGE_JSON);
+ String exception = (String) json.get(ERROR_EXCEPTION_JSON);
+ String className = (String) json.get(ERROR_CLASSNAME_JSON);
+
+ try {
+ ClassLoader cl = HttpFSFileSystem.class.getClassLoader();
+ Class klass = cl.loadClass(className);
+ Constructor constr = klass.getConstructor(String.class);
+ throw (IOException) constr.newInstance(message);
+ } catch (IOException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw new IOException(MessageFormat.format("{0} - {1}", exception, message));
+ }
+ } catch (IOException ex) {
+ if (ex.getCause() instanceof IOException) {
+ throw (IOException) ex.getCause();
+ }
+ throw new IOException(MessageFormat.format("HTTP status [{0}], {1}", status, conn.getResponseMessage()));
+ }
+ }
+ }
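The error body this method unpacks follows the ERROR_*_JSON constants declared above (RemoteException, exception, javaClassName, message). Here is a minimal, standalone sketch of parsing such a payload with json-simple; the field values below are invented for illustration.

import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;

public class RemoteExceptionPayloadExample {
  public static void main(String[] args) throws Exception {
    // Hypothetical error body shaped after ERROR_JSON, ERROR_EXCEPTION_JSON,
    // ERROR_CLASSNAME_JSON and ERROR_MESSAGE_JSON used by validateResponse().
    String payload = "{\"RemoteException\":{"
        + "\"exception\":\"FileNotFoundException\","
        + "\"javaClassName\":\"java.io.FileNotFoundException\","
        + "\"message\":\"File /user/alice/missing.txt does not exist\"}}";
    JSONObject json = (JSONObject) new JSONParser().parse(payload);
    JSONObject error = (JSONObject) json.get("RemoteException");
    // validateResponse() rebuilds the named exception class from these fields
    // when possible, falling back to a plain IOException otherwise.
    System.out.println(error.get("javaClassName") + ": " + error.get("message"));
  }
}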
+
+ /**
+ * Called after a new FileSystem instance is constructed.
+ *
+ * @param name a uri whose authority section names the host, port, etc. for this FileSystem
+ * @param conf the configuration
+ */
+ @Override
+ public void initialize(URI name, Configuration conf) throws IOException {
+ UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+ doAs = ugi.getUserName();
+ super.initialize(name, conf);
+ try {
+ uri = new URI(name.getScheme() + "://" + name.getHost() + ":" + name.getPort());
+ } catch (URISyntaxException ex) {
+ throw new IOException(ex);
+ }
+ }
+
+ /**
+ * Returns a URI whose scheme and authority identify this FileSystem.
+ *
+ * @return the URI whose scheme and authority identify this FileSystem.
+ */
+ @Override
+ public URI getUri() {
+ return uri;
+ }
+
+ /**
+ * HttpFSServer subclass of the <code>FSDataInputStream</code>.
+ * <p/>
+ * This implementation does not support the
+ * <code>PositionedReadable</code> and <code>Seekable</code> methods.
+ */
+ private static class HttpFSDataInputStream extends FilterInputStream implements Seekable, PositionedReadable {
+
+ protected HttpFSDataInputStream(InputStream in, int bufferSize) {
+ super(new BufferedInputStream(in, bufferSize));
+ }
+
+ @Override
+ public int read(long position, byte[] buffer, int offset, int length) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void readFully(long position, byte[] buffer, int offset, int length) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void readFully(long position, byte[] buffer) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void seek(long pos) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public long getPos() throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public boolean seekToNewSource(long targetPos) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+ }
+
+ /**
+ * Opens an FSDataInputStream at the indicated Path.
+ * <p/>
+ * IMPORTANT: the returned <code>FSDataInputStream</code> does not support the
+ * <code>PositionedReadable</code> and <code>Seekable</code> methods.
+ *
+ * @param f the file name to open
+ * @param bufferSize the size of the buffer to be used.
+ */
+ @Override
+ public FSDataInputStream open(Path f, int bufferSize) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, GetOpValues.OPEN.toString());
+ HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ return new FSDataInputStream(new HttpFSDataInputStream(conn.getInputStream(), bufferSize));
+ }
+
+ /**
+ * HttpFSServer subclass of the <code>FSDataOutputStream</code>.
+ * <p/>
+ * This implementation closes the underlying HTTP connection, validating the HTTP response status
+ * at close time.
+ */
+ private static class HttpFSDataOutputStream extends FSDataOutputStream {
+ private HttpURLConnection conn;
+ private int closeStatus;
+
+ public HttpFSDataOutputStream(HttpURLConnection conn, OutputStream out, int closeStatus, Statistics stats)
+ throws IOException {
+ super(out, stats);
+ this.conn = conn;
+ this.closeStatus = closeStatus;
+ }
+
+ @Override
+ public void close() throws IOException {
+ try {
+ super.close();
+ } finally {
+ validateResponse(conn, closeStatus);
+ }
+ }
+
+ }
+
+ /**
+ * Converts a <code>FsPermission</code> to a Unix octal representation.
+ *
+ * @param p the permission.
+ *
+ * @return the Unix octal string representation.
+ */
+ public static String permissionToString(FsPermission p) {
+ return (p == null) ? DEFAULT_PERMISSION : Integer.toString(p.toShort(), 8);
+ }
+
+ /*
+ * Common handling for uploading data for create and append operations.
+ */
+ private FSDataOutputStream uploadData(String method, Path f, Map<String, String> params,
+ int bufferSize, int expectedStatus) throws IOException {
+ HttpURLConnection conn = getConnection(method, params, f, true);
+ conn.setInstanceFollowRedirects(false);
+ boolean exceptionAlreadyHandled = false;
+ try {
+ if (conn.getResponseCode() == HTTP_TEMPORARY_REDIRECT) {
+ exceptionAlreadyHandled = true;
+ String location = conn.getHeaderField("Location");
+ if (location != null) {
+ conn = getConnection(new URL(location), method);
+ conn.setRequestProperty("Content-Type", "application/octet-stream");
+ try {
+ OutputStream os = new BufferedOutputStream(conn.getOutputStream(), bufferSize);
+ return new HttpFSDataOutputStream(conn, os, expectedStatus, statistics);
+ } catch (IOException ex) {
+ validateResponse(conn, expectedStatus);
+ throw ex;
+ }
+ } else {
+ validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
+ throw new IOException("Missing HTTP 'Location' header for [" + conn.getURL() + "]");
+ }
+ } else {
+ throw new IOException(
+ MessageFormat.format("Expected HTTP status was [307], received [{0}]",
+ conn.getResponseCode()));
+ }
+ } catch (IOException ex) {
+ if (exceptionAlreadyHandled) {
+ throw ex;
+ } else {
+ validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
+ throw ex;
+ }
+ }
+ }
+
+
+ /**
+ * Opens an FSDataOutputStream at the indicated Path with write-progress
+ * reporting.
+ * <p/>
+ * IMPORTANT: The <code>Progressable</code> parameter is not used.
+ *
+ * @param f the file name to open.
+ * @param permission file permission.
+ * @param overwrite if a file with this name already exists, then if true,
+ * the file will be overwritten, and if false an error will be thrown.
+ * @param bufferSize the size of the buffer to be used.
+ * @param replication required block replication for the file.
+ * @param blockSize block size.
+ * @param progress progressable.
+ *
+ * @throws IOException
+ * @see #setPermission(Path, FsPermission)
+ */
+ @Override
+ public FSDataOutputStream create(Path f, FsPermission permission, boolean overwrite, int bufferSize,
+ short replication, long blockSize, Progressable progress) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, PutOpValues.CREATE.toString());
+ params.put(OVERWRITE_PARAM, Boolean.toString(overwrite));
+ params.put(REPLICATION_PARAM, Short.toString(replication));
+ params.put(BLOCKSIZE_PARAM, Long.toString(blockSize));
+ params.put(PERMISSION_PARAM, permissionToString(permission));
+ return uploadData(HTTP_PUT, f, params, bufferSize, HttpURLConnection.HTTP_CREATED);
+ }
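Seen from the client side, the methods above are reached through the standard Hadoop FileSystem API. A minimal usage sketch, not part of this change set: the fs.http.impl mapping (the usual fs.&lt;scheme&gt;.impl lookup), the host name and the paths are illustrative assumptions, and 14000 is the default port suggested in httpfs-env.sh.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;

public class HttpFSClientExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Assumed wiring: serve http:// URIs with HttpFSFileSystem.
    conf.setClass("fs.http.impl", HttpFSFileSystem.class, FileSystem.class);
    FileSystem fs = FileSystem.get(URI.create("http://httpfs-host:14000"), conf);

    // create() goes through uploadData(): a PUT that follows the 307 redirect;
    // close() then validates the connection against HTTP 201 (CREATED).
    Path file = new Path("/user/alice/hello.txt");
    FSDataOutputStream out = fs.create(file);
    out.write("hello httpfs".getBytes("UTF-8"));
    out.close();

    // getFileStatus() issues op=GETFILESTATUS and decodes the FileStatus JSON.
    FileStatus status = fs.getFileStatus(file);
    System.out.println(status.getPath() + " len=" + status.getLen());
  }
}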
+
+
+ /**
+ * Append to an existing file (optional operation).
+ * <p/>
+ * IMPORTANT: The <code>Progressable</code> parameter is not used.
+ *
+ * @param f the existing file to be appended.
+ * @param bufferSize the size of the buffer to be used.
+ * @param progress for reporting progress if it is not null.
+ *
+ * @throws IOException
+ */
+ @Override
+ public FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, PostOpValues.APPEND.toString());
+ return uploadData(HTTP_POST, f, params, bufferSize, HttpURLConnection.HTTP_OK);
+ }
+
+ /**
+ * Renames Path src to Path dst. Can take place on local fs
+ * or remote DFS.
+ */
+ @Override
+ public boolean rename(Path src, Path dst) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, PutOpValues.RENAME.toString());
+ params.put(DESTINATION_PARAM, dst.toString());
+ HttpURLConnection conn = getConnection(HTTP_PUT, params, src, true);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) jsonParse(conn);
+ return (Boolean) json.get(RENAME_JSON);
+ }
+
+ /**
+ * Delete a file.
+ *
+ * @deprecated Use delete(Path, boolean) instead
+ */
+ @SuppressWarnings({"deprecation"})
+ @Deprecated
+ @Override
+ public boolean delete(Path f) throws IOException {
+ return delete(f, false);
+ }
+
+ /**
+ * Delete a file.
+ *
+ * @param f the path to delete.
+ * @param recursive if the path is a directory and set to
+ * true, the directory is deleted, otherwise an exception is thrown. In
+ * the case of a file, recursive can be either true or false.
+ *
+ * @return true if delete is successful else false.
+ *
+ * @throws IOException
+ */
+ @Override
+ public boolean delete(Path f, boolean recursive) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, DeleteOpValues.DELETE.toString());
+ params.put(RECURSIVE_PARAM, Boolean.toString(recursive));
+ HttpURLConnection conn = getConnection(HTTP_DELETE, params, f, true);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) jsonParse(conn);
+ return (Boolean) json.get(DELETE_JSON);
+ }
+
+ /**
+ * List the statuses of the files/directories in the given path if the path is
+ * a directory.
+ *
+ * @param f given path
+ *
+ * @return the statuses of the files/directories in the given path
+ *
+ * @throws IOException
+ */
+ @Override
+ public FileStatus[] listStatus(Path f) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, GetOpValues.LISTSTATUS.toString());
+ HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) jsonParse(conn);
+ json = (JSONObject) json.get(FILE_STATUSES_JSON);
+ JSONArray jsonArray = (JSONArray) json.get(FILE_STATUS_JSON);
+ FileStatus[] array = new FileStatus[jsonArray.size()];
+ f = makeQualified(f);
+ for (int i = 0; i < jsonArray.size(); i++) {
+ array[i] = createFileStatus(f, (JSONObject) jsonArray.get(i));
+ }
+ return array;
+ }
+
+ /**
+ * Set the current working directory for the given file system. All relative
+ * paths will be resolved relative to it.
+ *
+ * @param newDir new directory.
+ */
+ @Override
+ public void setWorkingDirectory(Path newDir) {
+ workingDir = newDir;
+ }
+
+ /**
+ * Get the current working directory for the given file system
+ *
+ * @return the directory pathname
+ */
+ @Override
+ public Path getWorkingDirectory() {
+ if (workingDir == null) {
+ workingDir = getHomeDirectory();
+ }
+ return workingDir;
+ }
+
+ /**
+ * Make the given file and all non-existent parents into
+ * directories. Has the semantics of Unix 'mkdir -p'.
+ * Existence of the directory hierarchy is not an error.
+ */
+ @Override
+ public boolean mkdirs(Path f, FsPermission permission) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, PutOpValues.MKDIRS.toString());
+ params.put(PERMISSION_PARAM, permissionToString(permission));
+ HttpURLConnection conn = getConnection(HTTP_PUT, params, f, true);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) jsonParse(conn);
+ return (Boolean) json.get(MKDIRS_JSON);
+ }
+
+ /**
+ * Return a file status object that represents the path.
+ *
+ * @param f The path we want information from
+ *
+ * @return a FileStatus object
+ *
+ * @throws FileNotFoundException when the path does not exist;
+ * IOException see specific implementation
+ */
+ @Override
+ public FileStatus getFileStatus(Path f) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, GetOpValues.GETFILESTATUS.toString());
+ HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) jsonParse(conn);
+ json = (JSONObject) json.get(FILE_STATUS_JSON);
+ f = makeQualified(f);
+ return createFileStatus(f, json);
+ }
+
+ /**
+ * Return the current user's home directory in this filesystem.
+ * The default implementation returns "/user/$USER/".
+ */
+ @Override
+ public Path getHomeDirectory() {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, GetOpValues.GETHOMEDIR.toString());
+ try {
+ HttpURLConnection conn = getConnection(HTTP_GET, params, new Path(getUri().toString(), "/"), false);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) jsonParse(conn);
+ return new Path((String) json.get(HOME_DIR_JSON));
+ } catch (IOException ex) {
+ throw new RuntimeException(ex);
+ }
+ }
+
+ /**
+ * Set owner of a path (i.e. a file or a directory).
+ * The parameters username and groupname cannot both be null.
+ *
+ * @param p The path
+ * @param username If it is null, the original username remains unchanged.
+ * @param groupname If it is null, the original groupname remains unchanged.
+ */
+ @Override
+ public void setOwner(Path p, String username, String groupname) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, PutOpValues.SETOWNER.toString());
+ params.put(OWNER_PARAM, username);
+ params.put(GROUP_PARAM, groupname);
+ HttpURLConnection conn = getConnection(HTTP_PUT, params, p, true);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ }
+
+ /**
+ * Set permission of a path.
+ *
+ * @param p path.
+ * @param permission permission.
+ */
+ @Override
+ public void setPermission(Path p, FsPermission permission) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, PutOpValues.SETPERMISSION.toString());
+ params.put(PERMISSION_PARAM, permissionToString(permission));
+ HttpURLConnection conn = getConnection(HTTP_PUT, params, p, true);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ }
+
+ /**
+ * Set access time of a file
+ *
+ * @param p The path
+ * @param mtime Set the modification time of this file.
+ * The number of milliseconds since Jan 1, 1970.
+ * A value of -1 means that this call should not set modification time.
+ * @param atime Set the access time of this file.
+ * The number of milliseconds since Jan 1, 1970.
+ * A value of -1 means that this call should not set access time.
+ */
+ @Override
+ public void setTimes(Path p, long mtime, long atime) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, PutOpValues.SETTIMES.toString());
+ params.put(MODIFICATION_TIME_PARAM, Long.toString(mtime));
+ params.put(ACCESS_TIME_PARAM, Long.toString(atime));
+ HttpURLConnection conn = getConnection(HTTP_PUT, params, p, true);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ }
+
+ /**
+ * Set replication for an existing file.
+ *
+ * @param src file name
+ * @param replication new replication
+ *
+ * @return true if successful;
+ * false if file does not exist or is a directory
+ *
+ * @throws IOException
+ */
+ @Override
+ public boolean setReplication(Path src, short replication) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, PutOpValues.SETREPLICATION.toString());
+ params.put(REPLICATION_PARAM, Short.toString(replication));
+ HttpURLConnection conn = getConnection(HTTP_PUT, params, src, true);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) jsonParse(conn);
+ return (Boolean) json.get(SET_REPLICATION_JSON);
+ }
+
+ /**
+   * Creates a <code>FileStatus</code> object using a JSON file-status payload
+   * received from an HttpFSServer server.
+   *
+   * @param parent the parent path used to qualify the status' path.
+   * @param json a JSON file-status payload received from an HttpFSServer server
+   *
+   * @return the corresponding <code>FileStatus</code>
+ */
+ private FileStatus createFileStatus(Path parent, JSONObject json) {
+ String pathSuffix = (String) json.get(PATH_SUFFIX_JSON);
+ Path path = (pathSuffix.equals("")) ? parent : new Path(parent, pathSuffix);
+ FILE_TYPE type = FILE_TYPE.valueOf((String) json.get(TYPE_JSON));
+ long len = (Long) json.get(LENGTH_JSON);
+ String owner = (String) json.get(OWNER_JSON);
+ String group = (String) json.get(GROUP_JSON);
+ FsPermission permission =
+ new FsPermission(Short.parseShort((String) json.get(PERMISSION_JSON), 8));
+ long aTime = (Long) json.get(ACCESS_TIME_JSON);
+ long mTime = (Long) json.get(MODIFICATION_TIME_JSON);
+ long blockSize = (Long) json.get(BLOCK_SIZE_JSON);
+ short replication = ((Long) json.get(REPLICATION_JSON)).shortValue();
+ FileStatus fileStatus = null;
+
+ switch (type) {
+ case FILE:
+ case DIRECTORY:
+ fileStatus = new FileStatus(len, (type == FILE_TYPE.DIRECTORY),
+ replication, blockSize, mTime, aTime,
+ permission, owner, group, path);
+ break;
+ case SYMLINK:
+ Path symLink = null;
+ fileStatus = new FileStatus(len, false,
+ replication, blockSize, mTime, aTime,
+ permission, owner, group, symLink,
+ path);
+ }
+ return fileStatus;
+ }
+
+ @Override
+ public ContentSummary getContentSummary(Path f) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, GetOpValues.GETCONTENTSUMMARY.toString());
+ HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) ((JSONObject) jsonParse(conn)).get(CONTENT_SUMMARY_JSON);
+ return new ContentSummary((Long) json.get(CONTENT_SUMMARY_LENGTH_JSON),
+ (Long) json.get(CONTENT_SUMMARY_FILE_COUNT_JSON),
+ (Long) json.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON),
+ (Long) json.get(CONTENT_SUMMARY_QUOTA_JSON),
+ (Long) json.get(CONTENT_SUMMARY_SPACE_CONSUMED_JSON),
+ (Long) json.get(CONTENT_SUMMARY_SPACE_QUOTA_JSON)
+ );
+ }
+
+ @Override
+ public FileChecksum getFileChecksum(Path f) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, GetOpValues.GETFILECHECKSUM.toString());
+ HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+ validateResponse(conn, HttpURLConnection.HTTP_OK);
+ final JSONObject json = (JSONObject) ((JSONObject) jsonParse(conn)).get(FILE_CHECKSUM_JSON);
+ return new FileChecksum() {
+ @Override
+ public String getAlgorithmName() {
+ return (String) json.get(CHECKSUM_ALGORITHM_JSON);
+ }
+
+ @Override
+ public int getLength() {
+ return ((Long) json.get(CHECKSUM_LENGTH_JSON)).intValue();
+ }
+
+ @Override
+ public byte[] getBytes() {
+ return StringUtils.hexStringToByte((String) json.get(CHECKSUM_BYTES_JSON));
+ }
+
+ @Override
+ public void write(DataOutput out) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void readFields(DataInput in) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+ };
+ }
+
+}
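
Each FileSystem method above maps to an HTTP verb plus an op query parameter (PUT for CREATE/MKDIRS/RENAME/SET*, POST for APPEND, GET for LISTSTATUS/GETFILESTATUS/etc., DELETE for DELETE). The following is a minimal client sketch, not part of this change: the "httpfs" URI scheme binding (fs.httpfs.impl), host name, port and paths are assumptions for illustration only; substitute whatever scheme and endpoint your deployment maps to HttpFSFileSystem.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import org.apache.hadoop.fs.permission.FsPermission;

import java.net.URI;

public class HttpFSClientSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical scheme binding and endpoint; an HttpFS server typically listens on 14000.
    conf.set("fs.httpfs.impl", HttpFSFileSystem.class.getName());
    FileSystem fs = FileSystem.get(URI.create("httpfs://httpfs-host:14000"), conf);

    Path dir = new Path("/user/alice/demo");
    fs.mkdirs(dir, new FsPermission((short) 0755));   // PUT with op=MKDIRS

    Path file = new Path(dir, "hello.txt");
    FSDataOutputStream out = fs.create(file, true);   // PUT with op=CREATE, data is uploaded
    out.writeUTF("hello httpfs");
    out.close();

    for (FileStatus status : fs.listStatus(dir)) {    // GET with op=LISTSTATUS
      System.out.println(status.getPath() + " " + status.getLen());
    }

    fs.delete(dir, true);                             // DELETE with op=DELETE, recursive
    fs.close();
  }
}

Because the class extends FileSystem, any existing tool that speaks the FileSystem API (distcp, the fs shell, MapReduce jobs) can go through HttpFS without code changes once the scheme is configured.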
41 ...hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpKerberosAuthenticator.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.http.client;
+
+
+import org.apache.hadoop.security.authentication.client.Authenticator;
+import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
+
+/**
+ * A <code>KerberosAuthenticator</code> subclass that falls back to
+ * {@link HttpPseudoAuthenticator}.
+ */
+public class HttpKerberosAuthenticator extends KerberosAuthenticator {
+
+ /**
+ * Returns the fallback authenticator if the server does not use
+ * Kerberos SPNEGO HTTP authentication.
+ *
+ * @return a {@link HttpPseudoAuthenticator} instance.
+ */
+ @Override
+ protected Authenticator getFallBackAuthenticator() {
+ return new HttpPseudoAuthenticator();
+ }
+}
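
The hadoop-auth client API this authenticator plugs into can also be driven directly; the sketch below is illustrative only and assumes a reachable HttpFS URL (host, port, path and query string are made up). If the server does not issue a SPNEGO challenge, the fallback above routes the request through HttpPseudoAuthenticator instead.

import org.apache.hadoop.fs.http.client.HttpKerberosAuthenticator;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class SpnegoClientSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical HttpFS URL; path and query parameters are for illustration only.
    URL url = new URL("http://httpfs-host:14000/webhdfs/v1/user/alice?op=GETHOMEDIR");
    AuthenticatedURL.Token token = new AuthenticatedURL.Token();  // carries the auth cookie across calls
    HttpURLConnection conn =
        new AuthenticatedURL(new HttpKerberosAuthenticator()).openConnection(url, token);
    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
    System.out.println(reader.readLine());  // JSON response body
    reader.close();
  }
}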
45 ...p-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpPseudoAuthenticator.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.http.client;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
+
+import java.io.IOException;
+
+/**
+ * A <code>PseudoAuthenticator</code> subclass that uses Hadoop's
+ * <code>UserGroupInformation</code> to obtain the client user name (the UGI's login user).
+ */
+public class HttpPseudoAuthenticator extends PseudoAuthenticator {
+
+ /**
+ * Return the client user name.
+ *
+ * @return the client user name.
+ */
+ @Override
+ protected String getUserName() {
+ try {
+ return UserGroupInformation.getLoginUser().getUserName();
+ } catch (IOException ex) {
+ throw new SecurityException("Could not obtain current user, " + ex.getMessage(), ex);
+ }
+ }
+}
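
With pseudo authentication the user name returned here identifies the caller to the server (the base PseudoAuthenticator passes it as a user.name query parameter). A hypothetical variant, shown as a sketch only and not part of this change, pins the user name instead of deriving it from the Hadoop login user, which can be handy in tests:

import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;

/**
 * Hypothetical test-only variant: returns a fixed pseudo-authentication user
 * name instead of the login user used by HttpPseudoAuthenticator.
 */
public class FixedUserPseudoAuthenticator extends PseudoAuthenticator {
  private final String user;

  public FixedUserPseudoAuthenticator(String user) {
    this.user = user;
  }

  @Override
  protected String getUserName() {
    return user;
  }
}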
64 ...project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.server;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+
+import javax.servlet.FilterConfig;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Subclass of Alfredo's <code>AuthenticationFilter</code> that obtains its configuration
+ * from HttpFSServer's server configuration.
+ */
+public class AuthFilter extends AuthenticationFilter {
+ private static final String CONF_PREFIX = "httpfs.authentication.";
+
+ /**
+ * Returns the Alfredo configuration from HttpFSServer's configuration.
+ * <p/>
+ * It returns all of HttpFSServer's configuration properties whose names start
+ * with <code>httpfs.authentication.</code>; that prefix is removed from the
+ * returned property names.
+ *
+ * @param configPrefix parameter not used.
+ * @param filterConfig parameter not used.
+ *
+ * @return Alfredo configuration read from HttpFSServer's configuration.
+ */
+ @Override
+ protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) {
+ Properties props = new Properties();
+ Configuration conf = HttpFSServerWebApp.get().getConfig();
+
+ props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
+ for (Map.Entry<String, String> entry : conf) {
+ String name = entry.getKey();
+ if (name.startsWith(CONF_PREFIX)) {
+ String value = conf.get(name);
+ name = name.substring(CONF_PREFIX.length());
+ props.setProperty(name, value);
+ }
+ }
+ return props;
+ }
+
+
+}
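
To make the prefix stripping concrete, the snippet below replays the same loop over a standalone Configuration instead of HttpFSServerWebApp.get().getConfig(); the property names and values are illustrative assumptions, not a statement of which keys HttpFS ships with. A setting such as httpfs.authentication.type reaches Alfredo's AuthenticationFilter as plain type.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;

import java.util.Map;
import java.util.Properties;

public class AuthFilterConfigSketch {
  public static void main(String[] args) {
    // Standalone stand-in for the HttpFSServer configuration; keys/values are examples.
    Configuration conf = new Configuration(false);
    conf.set("httpfs.authentication.type", "simple");
    conf.set("httpfs.authentication.token.validity", "36000");
    conf.set("httpfs.buffer.size", "4096");  // no httpfs.authentication. prefix, so it is skipped

    Properties props = new Properties();
    props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
    for (Map.Entry<String, String> entry : conf) {
      String name = entry.getKey();
      if (name.startsWith("httpfs.authentication.")) {
        props.setProperty(name.substring("httpfs.authentication.".length()), conf.get(name));
      }
    }
    props.list(System.out);  // prints type=simple, token.validity=36000, plus the cookie path
  }
}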
717 ...oject/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
@@ -0,0 +1,717 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.server;
+
+import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FileChecksum;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.GlobFilter;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.lib.service.FileSystemAccess;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * FileSystem operation executors used by {@link HttpFSServer}.
+ */
+public class FSOperations {
+
+ /**
+   * Converts a Unix permission octal or symbolic representation
+   * (e.g. 655 or -rwxr--r--) into a FileSystemAccess permission.
+ *
+ * @param str Unix permission symbolic representation.
+ *
+ * @return the FileSystemAccess permission. If the given string was
+ * 'default', it returns <code>FsPermission.getDefault()</code>.
+ */
+ private static FsPermission getPermission(String str) {
+ FsPermission permission;
+ if (str.equals(HttpFSFileSystem.DEFAULT_PERMISSION)) {
+ permission = FsPermission.getDefault();
+ } else if (str.length() == 3) {
+ permission = new FsPermission(Short.parseShort(str, 8));
+ } else {
+ permission = FsPermission.valueOf(str);
+ }
+ return permission;
+ }
+
+ @SuppressWarnings({"unchecked", "deprecation"})
+ private static Map fileStatusToJSONRaw(FileStatus status, boolean emptyPathSuffix) {
+ Map json = new LinkedHashMap();
+ json.put(HttpFSFileSystem.PATH_SUFFIX_JSON, (emptyPathSuffix) ? "" : status.getPath().getName());
+ json.put(HttpFSFileSystem.TYPE_JSON, HttpFSFileSystem.FILE_TYPE.getType(status).toString());
+ json.put(HttpFSFileSystem.LENGTH_JSON, status.getLen());
+ json.put(HttpFSFileSystem.OWNER_JSON, status.getOwner());
+ json.put(HttpFSFileSystem.GROUP_JSON, status.getGroup());
+ json.put(HttpFSFileSystem.PERMISSION_JSON, HttpFSFileSystem.permissionToString(status.getPermission()));
+ json.put(HttpFSFileSystem.ACCESS_TIME_JSON, status.getAccessTime());
+ json.put(HttpFSFileSystem.MODIFICATION_TIME_JSON, status.getModificationTime());
+ json.put(HttpFSFileSystem.BLOCK_SIZE_JSON, status.getBlockSize());
+ json.put(HttpFSFileSystem.REPLICATION_JSON, status.getReplication());
+ return json;
+ }
+
+ /**
+ * Converts a FileSystemAccess <code>FileStatus</code> object into a JSON
+ * object.
+ *
+ * @param status FileSystemAccess file status.
+ *
+ * @return The JSON representation of the file status.
+ */
+ @SuppressWarnings({"unchecked", "deprecation"})
+ private static Map fileStatusToJSON(FileStatus status) {
+ Map json = new LinkedHashMap();
+ json.put(HttpFSFileSystem.FILE_STATUS_JSON, fileStatusToJSONRaw(status, true));
+ return json;
+ }
+
+ /**
+   * Converts a <code>FileChecksum</code> object into a JSON object.
+ *
+ * @param checksum file checksum.
+ *
+ * @return The JSON representation of the file checksum.
+ */
+ @SuppressWarnings({"unchecked"})
+ private static Map fileChecksumToJSON(FileChecksum checksum) {
+ Map json = new LinkedHashMap();
+ json.put(HttpFSFileSystem.CHECKSUM_ALGORITHM_JSON, checksum.getAlgorithmName());
+ json.put(HttpFSFileSystem.CHECKSUM_BYTES_JSON,
+ org.apache.hadoop.util.StringUtils.byteToHexString(checksum.getBytes()));
+ json.put(HttpFSFileSystem.CHECKSUM_LENGTH_JSON, checksum.getLength());
+ Map response = new LinkedHashMap();
+ response.put(HttpFSFileSystem.FILE_CHECKSUM_JSON, json);
+ return response;
+ }
+
+ /**
+   * Converts a <code>ContentSummary</code> object into a JSON object.
+ *
+ * @param contentSummary the content summary
+ *
+ * @return The JSON representation of the content summary.
+ */
+ @SuppressWarnings({"unchecked"})
+ private static Map contentSummaryToJSON(ContentSummary contentSummary) {
+ Map json = new LinkedHashMap();
+ json.put(HttpFSFileSystem.CONTENT_SUMMARY_DIRECTORY_COUNT_JSON, contentSummary.getDirectoryCount());
+ json.put(HttpFSFileSystem.CONTENT_SUMMARY_FILE_COUNT_JSON, contentSummary.getFileCount());
+ json.put(HttpFSFileSystem.CONTENT_SUMMARY_LENGTH_JSON, contentSummary.getLength());
+ json.put(HttpFSFileSystem.CONTENT_SUMMARY_QUOTA_JSON, contentSummary.getQuota());
+ json.put(HttpFSFileSystem.CONTENT_SUMMARY_SPACE_CONSUMED_JSON, contentSummary.getSpaceConsumed());
+ json.put(HttpFSFileSystem.CONTENT_SUMMARY_SPACE_QUOTA_JSON, contentSummary.getSpaceQuota());
+ Map response = new LinkedHashMap();
+ response.put(HttpFSFileSystem.CONTENT_SUMMARY_JSON, json);
+ return response;
+ }
+
+ /**
+ * Converts a FileSystemAccess <code>FileStatus</code> array into a JSON array
+ * object.
+ *
+   * @param status FileSystemAccess file status array.
+ *
+ * @return The JSON representation of the file status array.
+ */
+ @SuppressWarnings("unchecked")
+ private static Map fileStatusToJSON(FileStatus[] status) {
+ JSONArray json = new JSONArray();
+ if (status != null) {
+ for (FileStatus s : status) {
+ json.add(fileStatusToJSONRaw(s, false));
+ }
+ }
+ Map response = new LinkedHashMap();
+ Map temp = new LinkedHashMap();
+ temp.put(HttpFSFileSystem.FILE_STATUS_JSON, json);
+ response.put(HttpFSFileSystem.FILE_STATUSES_JSON, temp);
+ return response;
+ }
+
+ /**
+   * Converts an object into a JSON Map with one key-value entry.
+ * <p/>
+ * It assumes the given value is either a JSON primitive type or a
+ * <code>JsonAware</code> instance.
+ *
+ * @param name name for the key of the entry.
+ * @param value for the value of the entry.
+ *
+ * @return the JSON representation of the key-value pair.
+ */
+ @SuppressWarnings("unchecked")
+ private static JSONObject toJSON(String name, Object value) {
+ JSONObject json = new JSONObject();
+ json.put(name, value);
+ return json;
+ }
+
+ /**
+   * Executor that performs an append FileSystemAccess file system operation.
+ */
+ public static class FSAppend implements FileSystemAccess.FileSystemExecutor<Void> {
+ private InputStream is;
+ private Path path;
+
+ /**
+ * Creates an Append executor.
+ *
+ * @param is input stream to append.
+ * @param path path of the file to append.
+ */
+ public FSAppend(InputStream is, String path) {
+ this.is = is;
+ this.path = new Path(path);
+ }
+
+ /**
+ * Executes the filesystem operation.
+ *
+ * @param fs filesystem instance to use.
+ *
+ * @return void.
+ *
+     * @throws IOException thrown if an IO error occurred.
+ */
+ @Override
+ public Void execute(FileSystem fs) throws IOException {
+ int bufferSize = fs.getConf().getInt("httpfs.buffer.size", 4096);
+ OutputStream os = fs.append(path, bufferSize);
+ IOUtils.copyBytes(is, os, bufferSize, true);
+ os.close();
+ return null;
+ }
+
+ }
+
+ /**
+   * Executor that performs a content-summary FileSystemAccess file system operation.
+ */
+ public static class FSContentSummary implements FileSystemAccess.FileSystemExecutor<Map> {
+ private Path path;
+
+ /**
+ * Creates a content-summary executor.
+ *
+ * @param path the path to retrieve the content-summary.
+ */
+ public FSContentSummary(String path) {
+ this.path = new Path(path);
+ }
+
+ /**
+ * Executes the filesystem operation.
+ *
+ * @param fs filesystem instance to use.
+ *
+ * @return a Map object (JSON friendly) with the content-summary.
+ *
+     * @throws IOException thrown if an IO error occurred.
+ */
+ @Override
+ public Map execute(FileSystem fs) throws IOException {
+ ContentSummary contentSummary = fs.getContentSummary(path);
+ return contentSummaryToJSON(contentSummary);
+ }
+
+ }
+
+ /**
+   * Executor that performs a create FileSystemAccess file system operation.
+ */
+ public static class FSCreate implements FileSystemAccess.FileSystemExecutor<Void> {
+ private InputStream is;
+ private Path path;
+ private String permission;
+ private boolean override;
+ private short replication;
+ private long blockSize;
+
+ /**
+ * Creates a Create executor.
+ *
+     * @param is input stream for the file to create.
+     * @param path path of the file to create.
+     * @param perm permission for the file.
+     * @param override if the file should be overwritten if it already exists.
+ * @param repl the replication factor for the file.
+ * @param blockSize the block size for the file.
+ */
+ public FSCreate(InputStream is, String path, String perm, boolean override, short repl, long blockSize) {
+ this.is = is;
+ this.path = new Path(path);
+ this.permission = perm;
+ this.override = override;
+ this.replication = repl;
+ this.blockSize = blockSize;
+ }
+
+ /**
+ * Executes the filesystem operation.
+ *
+ * @param fs filesystem instance to use.
+ *
+     * @return void.
+ *
+     * @throws IOException thrown if an IO error occurred.
+ */
+ @Override
+ public Void execute(FileSystem fs) throws IOException {
+ if (replication == -1) {
+ replication = (short) fs.getConf().getInt("dfs.replication", 3);
+ }
+ if (blockSize == -1) {
+ blockSize = fs.getConf().getInt("dfs.block.size", 67108864);
+ }
+ FsPermission fsPermission = getPermission(permission);
+ int bufferSize = fs.getConf().getInt("httpfs.buffer.size", 4096);
+ OutputStream os = fs.create(path, fsPermission, override, bufferSize, replication, blockSize, null);
+ IOUtils.copyBytes(is, os, bufferSize, true);
+ os.close();
+ return null;
+ }
+
+ }
+
+ /**
+   * Executor that performs a delete FileSystemAccess file system operation.
+ */
+ public static class FSDelete implements FileSystemAccess.FileSystemExecutor<JSONObject> {
+ private Path path;
+ private boolean recursive;
+
+ /**
+ * Creates a Delete executor.
+ *
+ * @param path path to delete.
+ * @param recursive if the delete should be recursive or not.
+ */
+ public FSDelete(String path, boolean recursive) {
+ this.path = new Path(path);
+ this.recursive = recursive;
+ }
+
+ /**
+ * Executes the filesystem operation.
+ *
+ * @param fs filesystem instance to use.
+ *
+ * @return <code>true</code> if the delete operation was successful,
+ * <code>false</code> otherwise.
+ *
+     * @throws IOException thrown if an IO error occurred.
+ */
+ @Override
+ public JSONObject execute(FileSystem fs) throws IOException {
+ boolean deleted = fs.delete(path, recursive);
+ return toJSON(HttpFSFileSystem.DELETE_JSON.toLowerCase(), deleted);
+ }
+
+ }
+
+ /**
+   * Executor that performs a file-checksum FileSystemAccess file system operation.
+ */
+ public static class FSFileChecksum implements FileSystemAccess.FileSystemExecutor<Map> {
+ private Path path;
+
+ /**
+ * Creates a file-checksum executor.
+ *
+ * @param path the path to retrieve the checksum.
+ */
+ public FSFileChecksum(String path) {
+ this.path = new Path(path);
+ }
+
+ /**
+ * Executes the filesystem operation.
+ *
+ * @param fs filesystem instance to use.
+ *
+ * @return a Map object (JSON friendly) with the file checksum.
+ *
+     * @throws IOException thrown if an IO error occurred.
+ */
+ @Override
+ public Map execute(FileSystem fs) throws IOException {
+ FileChecksum checksum = fs.getFileChecksum(path);
+ return fileChecksumToJSON(checksum);
+ }
+
+ }
+
+ /**
+   * Executor that performs a file-status FileSystemAccess file system operation.
+ */
+ public static class FSFileStatus implements FileSystemAccess.FileSystemExecutor<Map> {
+ private Path path;
+
+ /**
+ * Creates a file-status executor.
+ *