Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

HADOOP-371. Include contrib jars and site documentation in distributions. Contributed by Nigel.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@492649 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information...
commit 631f603beb6bd269b9cd80d54e4ecf74fcd8a182 1 parent 7f1391a
@cutting cutting authored
View
4 CHANGES.txt
@@ -177,6 +177,10 @@ Trunk (unreleased changes)
50. HADOOP-853. Rename 'site' to 'docs', in preparation for inclusion
in releases. (cutting)
+51. HADOOP-371. Include contrib jars and site documentation in
+ distributions. Also add contrib and example documentation to
+ distributed javadoc, in separate sections. (Nigel Daley via cutting)
+
Release 0.9.2 - 2006-12-15
View
24 build.xml
@@ -417,6 +417,7 @@
<mkdir dir="${build.javadoc}"/>
<javadoc
overview="${src.dir}/overview.html"
+ packagenames="org.apache.hadoop.*"
destdir="${build.javadoc}"
author="true"
version="true"
@@ -427,11 +428,18 @@
>
<packageset dir="${src.dir}"/>
<packageset dir="${examples.dir}"/>
+
+ <packageset dir="src/contrib/streaming/src/java"/>
+ <packageset dir="src/contrib/smallJobsBenchmark/src/java"/>
+
<link href="${javadoc.link.java}"/>
<classpath refid="classpath"/>
- <group title="Core" packages="org.apache.hadoop.*"/>
- <group title="Examples" packages="org.apache.hadoop.examples"/>
+ <group title="Core" packages="org.apache.*"/>
+ <group title="Examples" packages="org.apache.hadoop.examples*"/>
+
+ <group title="contrib: Streaming" packages="org.apache.hadoop.streaming*"/>
+ <group title="contrib: Small Jobs Benchmark" packages="org.apache.hadoop.benchmarks.mapred*"/>
</javadoc>
</target>
@@ -439,6 +447,9 @@
<target name="default-doc">
<style basedir="${conf.dir}" destdir="${build.docs}"
includes="hadoop-default.xml" style="conf/configuration.xsl"/>
+ <copy todir="${build.docs}">
+ <fileset dir="${docs.dir}" />
+ </copy>
</target>
<!-- ================================================================== -->
@@ -446,9 +457,10 @@
<!-- ================================================================== -->
<!-- -->
<!-- ================================================================== -->
- <target name="package" depends="jar, javadoc, examples, compile-core-test">
+ <target name="package" depends="jar, javadoc, examples, compile-core-test, deploy-contrib">
<mkdir dir="${dist.dir}"/>
<mkdir dir="${dist.dir}/lib"/>
+ <mkdir dir="${dist.dir}/contrib"/>
<mkdir dir="${dist.dir}/bin"/>
<mkdir dir="${dist.dir}/docs"/>
<mkdir dir="${dist.dir}/docs/api"/>
@@ -466,6 +478,12 @@
<arg line="${native.src.dir}/packageNativeHadoop.sh"/>
</exec>
+ <copy todir="${dist.dir}/contrib" includeEmptyDirs="false" flatten="true">
+ <fileset dir="build/contrib">
+ <include name="*/*.jar"/>
+ </fileset>
+ </copy>
+
<copy todir="${dist.dir}/webapps">
<fileset dir="${build.webapps}"/>
</copy>
View
2  docs/about.html
@@ -81,7 +81,7 @@
<a href="hdfs_design.html">Hadoop File System</a>
</div>
<div class="menuitem">
-<a href="docs/api/">API Docs</a>
+<a href="api/index.html">API Docs</a>
</div>
</div>
<div onclick="SwitchMenu('menu_1.3', 'skin/')" id="menu_1.3Title" class="menutitle">Resources</div>
View
2  docs/credits.html
@@ -81,7 +81,7 @@
<a href="hdfs_design.html">Hadoop File System</a>
</div>
<div class="menuitem">
-<a href="docs/api/">API Docs</a>
+<a href="api/index.html">API Docs</a>
</div>
</div>
<div onclick="SwitchMenu('menu_1.3', 'skin/')" id="menu_1.3Title" class="menutitle">Resources</div>
View
4 docs/hdfs_design.html
@@ -83,7 +83,7 @@
<div class="menupagetitle">Hadoop File System</div>
</div>
<div class="menuitem">
-<a href="docs/api/">API Docs</a>
+<a href="api/index.html">API Docs</a>
</div>
</div>
<div onclick="SwitchMenu('menu_1.3', 'skin/')" id="menu_1.3Title" class="menutitle">Resources</div>
@@ -532,7 +532,7 @@ <h2 class="h3"> References </h2>
<div class="section">
<p>
-<a href="http://lucene.apache.org/hadoop/docs/api/">
+<a href="http://lucene.apache.org/hadoop/api/">
Browse the HDFS Java Interface
</a>
View
BIN  docs/hdfs_design.pdf
Binary file not shown
View
4 docs/index.html
@@ -81,7 +81,7 @@
<a href="hdfs_design.html">Hadoop File System</a>
</div>
<div class="menuitem">
-<a href="docs/api/">API Docs</a>
+<a href="api/index.html">API Docs</a>
</div>
</div>
<div onclick="SwitchMenu('menu_1.3', 'skin/')" id="menu_1.3Title" class="menutitle">Resources</div>
@@ -324,7 +324,7 @@ <h3 class="h4">6 February, 2006: nightly builds</h3>
<a href="http://cvs.apache.org/dist/lucene/hadoop/nightly/">downloadable version of Hadoop every
night</a>. All unit tests must pass, or a message is sent to
the developers mailing list and no new version is created. This
- also updates the <a href="docs/api/">javadoc</a>.</p>
+ also updates the <a href="api/index.html">javadoc</a>.</p>
<a name="N10172"></a><a name="3+February%2C+2006%3A+Hadoop+code+moved+out+of+Nutch"></a>
<h3 class="h4">3 February, 2006: Hadoop code moved out of Nutch</h3>
<p>The Hadoop code has now been moved into its own Subversion
View
BIN  docs/index.pdf
Binary file not shown
View
2  docs/issue_tracking.html
@@ -81,7 +81,7 @@
<a href="hdfs_design.html">Hadoop File System</a>
</div>
<div class="menuitem">
-<a href="docs/api/">API Docs</a>
+<a href="api/index.html">API Docs</a>
</div>
</div>
<div onclick="SwitchMenu('menu_selected_1.3', 'skin/')" id="menu_selected_1.3Title" class="menutitle" style="background-image: url('skin/images/chapter_open.gif');">Resources</div>
View
4 docs/linkmap.html
@@ -81,7 +81,7 @@
<a href="hdfs_design.html">Hadoop File System</a>
</div>
<div class="menuitem">
-<a href="docs/api/">API Docs</a>
+<a href="api/index.html">API Docs</a>
</div>
</div>
<div onclick="SwitchMenu('menu_1.3', 'skin/')" id="menu_1.3Title" class="menutitle">Resources</div>
@@ -196,7 +196,7 @@ <h2 class="h3">Table of Contents</h2>
<ul>
<li>
-<a href="docs/api/">API Docs</a>&nbsp;&nbsp;&nbsp;_________________________&nbsp;&nbsp;<em>apidocs</em>
+<a href="api/index.html">API Docs</a>&nbsp;&nbsp;&nbsp;_________________________&nbsp;&nbsp;<em>apidocs</em>
</li>
</ul>
View
2  docs/mailing_lists.html
@@ -81,7 +81,7 @@
<a href="hdfs_design.html">Hadoop File System</a>
</div>
<div class="menuitem">
-<a href="docs/api/">API Docs</a>
+<a href="api/index.html">API Docs</a>
</div>
</div>
<div onclick="SwitchMenu('menu_selected_1.3', 'skin/')" id="menu_selected_1.3Title" class="menutitle" style="background-image: url('skin/images/chapter_open.gif');">Resources</div>
View
2  docs/version_control.html
@@ -81,7 +81,7 @@
<a href="hdfs_design.html">Hadoop File System</a>
</div>
<div class="menuitem">
-<a href="docs/api/">API Docs</a>
+<a href="api/index.html">API Docs</a>
</div>
</div>
<div onclick="SwitchMenu('menu_selected_1.3', 'skin/')" id="menu_selected_1.3Title" class="menutitle" style="background-image: url('skin/images/chapter_open.gif');">Resources</div>
View
8 index.html
@@ -0,0 +1,8 @@
+<html>
+<head>
+ <title>Redirecting to docs...</title>
+ <meta http-equiv=refresh content="0; URL=docs/index.html">
+</head>
+<body>
+</body>
+</html>
View
12 src/contrib/streaming/src/java/org/apache/hadoop/streaming/UTF8ByteArrayUtils.java
@@ -32,7 +32,7 @@
public class UTF8ByteArrayUtils {
/**
* Find the first occured tab in a UTF-8 encoded string
- * @param utf: a byte array containing a UTF-8 encoded string
+ * @param utf a byte array containing a UTF-8 encoded string
* @return position that first tab occures otherwise -1
*/
public static int findTab(byte [] utf) {
@@ -47,11 +47,11 @@ public static int findTab(byte [] utf) {
/**
* split a UTF-8 byte array into key and value
* assuming that the delimilator is at splitpos.
- * @param ut: utf-8 encoded string
- * @param key: contains key upon the method is returned
- * @param val: contains value upon the method is returned
- * @param splitPos: the split pos
- * @throws IOException: when
+ * @param utf utf-8 encoded string
+ * @param key contains key upon the method is returned
+ * @param val contains value upon the method is returned
+ * @param splitPos the split pos
+ * @throws IOException
*/
public static void splitKeyVal(byte[] utf, Text key, Text val, int splitPos)
throws IOException {
View
2  src/docs/src/documentation/content/xdocs/hdfs_design.xml
@@ -334,7 +334,7 @@ A user can Undelete a file after deleting it as long as it remains in the /trash
<section>
<title> References </title>
<p>
- <a href="http://lucene.apache.org/hadoop/docs/api/">
+ <a href="http://lucene.apache.org/hadoop/api/">
Browse the HDFS Java Interface
</a>
</p>
View
2  src/docs/src/documentation/content/xdocs/index.xml
@@ -190,7 +190,7 @@
<a href="ext:nightly">downloadable version of Hadoop every
night</a>. All unit tests must pass, or a message is sent to
the developers mailing list and no new version is created. This
- also updates the <a href="docs/api/">javadoc</a>.</p>
+ also updates the <a href="api/index.html">javadoc</a>.</p>
</section>
<section>
View
2  src/docs/src/documentation/content/xdocs/site.xml
@@ -27,7 +27,7 @@ See http://forrest.apache.org/docs/linking.html for more info.
<faq label="FAQ" href="ext:faq" />
<wiki label="Wiki" href="ext:wiki" />
<HDFS label="Hadoop File System" href="hdfs_design.html" />
- <apidocs label="API Docs" href="docs/api/" />
+ <apidocs label="API Docs" href="api/index.html" />
</docs>
<resources label="Resources">
View
7 src/examples/org/apache/hadoop/examples/ExampleDriver.java
@@ -34,10 +34,9 @@ public static void main(String argv[]){
pgd.addClass("grep", Grep.class,
"A map/reduce program that counts the matches of a regex in the input.");
pgd.addClass("randomwriter", RandomWriter.class,
- "A random writer benchmark that writes 10GB per node.");
- pgd.addClass("sort", Sort.class, "A sort benchmark that sorts the data written by the random writer.");
- pgd.addClass("pi", PiBenchmark.class, "A benchmark that estimates Pi using monte-carlo method.");
- pgd.addClass("nnbench", NNBench.class, "A benchmark that stresses the namenode");
+ "A map/reduce program that writes 10GB of random data per node.");
+ pgd.addClass("sort", Sort.class, "A map/reduce program that sorts the data written by the random writer.");
+ pgd.addClass("pi", PiEstimator.class, "A map/reduce program that estimates Pi using monte-carlo method.");
pgd.driver(argv);
}
catch(Throwable e){
View
4 ...mples/org/apache/hadoop/examples/PiBenchmark.java → ...mples/org/apache/hadoop/examples/PiEstimator.java
@@ -37,7 +37,7 @@
*
* @author Milind Bhandarkar
*/
-public class PiBenchmark {
+public class PiEstimator {
/**
* Mappper class for Pi estimation.
@@ -144,7 +144,7 @@ static double launch(int numMaps, long numPoints, String jt, String dfs)
throws IOException {
Configuration conf = new Configuration();
- JobConf jobConf = new JobConf(conf, PiBenchmark.class);
+ JobConf jobConf = new JobConf(conf, PiEstimator.class);
if (jt != null) { jobConf.set("mapred.job.tracker", jt); }
if (dfs != null) { jobConf.set("fs.default.name", dfs); }
jobConf.setJobName("test-mini-mr");
View
2  src/examples/org/apache/hadoop/examples/NNBench.java → src/test/org/apache/hadoop/dfs/NNBench.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.examples;
+package org.apache.hadoop.dfs;
import java.io.IOException;
import java.util.Date;
View
2  src/test/org/apache/hadoop/test/AllTestDriver.java
@@ -23,6 +23,7 @@
import org.apache.hadoop.mapred.TestTextInputFormat;
import org.apache.hadoop.mapred.TestSequenceFileInputFormat;
import org.apache.hadoop.dfs.ClusterTestDFS;
+import org.apache.hadoop.dfs.NNBench;
import org.apache.hadoop.fs.TestFileSystem;
import org.apache.hadoop.io.TestArrayFile;
import org.apache.hadoop.io.TestSetFile;
@@ -43,6 +44,7 @@
public static void main(String argv[]){
ProgramDriver pgd = new ProgramDriver();
try {
+ pgd.addClass("nnbench", NNBench.class, "A benchmark that stresses the namenode.");
pgd.addClass("mapredtest", TestMapRed.class, "A map/reduce test check.");
pgd.addClass("clustertestdfs", ClusterTestDFS.class, "A pseudo distributed test for DFS.");
pgd.addClass("testfilesystem", TestFileSystem.class, "A test for FileSystem read/write.");
Please sign in to comment.
Something went wrong with that request. Please try again.