
HIVE-82 Support for building tarballs and javadocs

(Ashish Thusoo via rmurthy)



git-svn-id: https://svn.apache.org/repos/asf/hadoop/hive/trunk@763877 13f79535-47bb-0310-9956-ffa450edef68
1 parent bc5ae00 · commit 5ba213e1719cea616fa130576d9c1f35b14106cc
Raghotham Murthy committed Apr 10, 2009
Showing with 815 additions and 148 deletions.
  1. +3 −0 CHANGES.txt
  2. +0 −4 ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
  3. +14 −19 build-common.xml
  4. +14 −0 build.properties
  5. +175 −3 build.xml
  6. +170 −0 docs/changes/ChangesFancyStyle.css
  7. +49 −0 docs/changes/ChangesSimpleStyle.css
  8. +282 −0 docs/changes/changes2html.pl
  9. BIN docs/images/hive-logo.jpg
  10. +1 −1 hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java
  11. +4 −4 hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java
  12. +1 −1 metastore/src/java/org/apache/hadoop/hive/metastore/FileStore.java
  13. +24 −27 metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
  14. +14 −16 metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
  15. +1 −1 metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreClient.java
  16. +11 −11 metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreServer.java
  17. +7 −9 metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
  18. +1 −1 metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
  19. +0 −1 metastore/src/java/org/apache/hadoop/hive/metastore/ROTable.java
  20. +1 −3 metastore/src/java/org/apache/hadoop/hive/metastore/RWTable.java
  21. +1 −1 metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
  22. +1 −1 metastore/src/model/org/apache/hadoop/hive/metastore/model/MFieldSchema.java
  23. +2 −2 ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
  24. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
  25. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/exec/UDAF.java
  26. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/exec/UDF.java
  27. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/io/FlatFileInputFormat.java
  28. +2 −3 ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java
  29. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java
  30. +0 −1 ql/src/java/org/apache/hadoop/hive/ql/lib/PreOrderWalker.java
  31. +7 −9 ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
  32. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
  33. +5 −5 ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
  34. +1 −3 ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
  35. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcessor.java
  36. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
  37. +2 −2 ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
  38. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
  39. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
  40. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
  41. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java
  42. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java
  43. +2 −1 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java
  44. +2 −1 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java
  45. +1 −1 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java
  46. +1 −1 serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
  47. +1 −1 serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyInteger.java
  48. +1 −1 serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyLong.java
  49. +1 −1 serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyMap.java
3 CHANGES.txt
@@ -49,6 +49,9 @@ Release 0.3.0 - Unreleased
HIVE-299. Include php packages for thrift service.
(Raghotham Murthy via zshao)
+ HIVE-82 Support for building tarballs and javadocs
+ (Ashish Thusoo via rmurthy)
+
IMPROVEMENTS
HIVE-132. Show table and describe results to be read via FetchTask.
4 ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
@@ -156,10 +156,6 @@ public String getQueryFile() {
return this.queryFile;
}
- /**
- * Invoke {@link org.apache.hadoop.fs.FsShell#doMain FsShell.doMain} after a
- * few cursory checks of the configuration.
- */
public void execute() throws BuildException {
if (templatePath == null) {
33 build-common.xml
@@ -20,25 +20,21 @@
<project xmlns:ivy="antlib:org.apache.ivy.ant" name="hivecommon" default="jar">
- <property name="name" value="${ant.project.name}"/>
-
<property name="hive.root" location="${basedir}/.."/>
+ <property file="${hive.root}/build.properties"/>
+ <property file="${user.home}/build.properties" />
+ <property file="${basedir}/build.properties" />
+
<property name="hive.conf.dir" value="${hive.root}/conf"/>
<property name="dist.dir" location="${hive.root}"/>
- <property name="src.dir.hive" location="${hive.root}"/>
<property name="build.dir.hive" location="${hive.root}/build"/>
<property name="build.dir.hadoop" location="${build.dir.hive}/hadoopcore"/>
- <property name="build.dir" location="${build.dir.hive}/${name}"/>
+ <property name="build.dir" location="${build.dir.hive}/${ant.project.name}"/>
<property name="build.classes" location="${build.dir}/classes"/>
<property name="build.encoding" value="ISO-8859-1"/>
<property name="deploy.dir" location="${build.dir.hive}"/>
- <property name="hadoop.mirror" value="http://archive.apache.org/dist"/>
- <property name="hadoop.version" value="0.19.0"/>
- <property name="hadoop.root.default" location="${build.dir.hadoop}/hadoop-${hadoop.version}"/>
- <property name="hadoop.root" value="${hadoop.root.default}"/>
- <property name="hadoop.jar" location="${hadoop.root}/hadoop-${hadoop.version}-core.jar"/>
<property name="hadoop.conf.dir" location="${hadoop.root}/conf"/>
<property name="javac.debug" value="on"/>
@@ -50,13 +46,12 @@
<!-- configuration needed for tests -->
<property name="test.src.dir" value="${basedir}/src/test"/>
- <property name="test.src.data.dir" value="${src.dir.hive}/data"/>
+ <property name="test.src.data.dir" value="${hive.root}/data"/>
<property name="test.build.dir" value="${build.dir}/test"/>
<property name="test.log.dir" value="${test.build.dir}/logs"/>
<property name="test.data.dir" value="${test.build.dir}/data"/>
<property name="test.build.src" value="${test.build.dir}/src"/>
<property name="test.build.classes" value="${test.build.dir}/classes"/>
- <property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
<property name="test.include" value="Test*"/>
<property name="test.classpath.id" value="test.classpath"/>
<property name="test.output" value="true"/>
@@ -107,9 +102,9 @@
<!-- I am not sure whether we need this target any more since that package does what is needed -->
<target name="deploy" depends="jar">
- <echo message="hive: ${name}"/>
+ <echo message="hive: ${ant.project.name}"/>
<mkdir dir="${deploy.dir}"/>
- <copy file="${build.dir}/hive_${name}.jar"
+ <copy file="${build.dir}/hive_${ant.project.name}.jar"
todir="${deploy.dir}"/>
</target>
@@ -118,8 +113,8 @@
<pathelement location="${hadoop.jar}"/>
<pathelement location="${build.dir.hive}/classes"/>
<fileset dir="${build.dir.hive}" includes="hive_*.jar"/>
- <fileset dir="${src.dir.hive}/lib" includes="*.jar"/>
- <fileset dir="${src.dir.hive}/ql/lib" includes="*.jar"/>
+ <fileset dir="${hive.root}/lib" includes="*.jar"/>
+ <fileset dir="${hive.root}/ql/lib" includes="*.jar"/>
</path>
<path id="classpath">
@@ -158,7 +153,7 @@
</target>
<target name="compile" depends="init, install-hadoopcore">
- <echo message="Compiling: ${name}"/>
+ <echo message="Compiling: ${ant.project.name}"/>
<javac
encoding="${build.encoding}"
srcdir="${src.dir}"
@@ -172,9 +167,9 @@
</target>
<target name="jar" depends="compile">
- <echo message="Jar: ${name}"/>
+ <echo message="Jar: ${ant.project.name}"/>
<jar
- jarfile="${build.dir}/hive_${name}.jar"
+ jarfile="${build.dir}/hive_${ant.project.name}.jar"
basedir="${build.classes}"
/>
</target>
@@ -293,7 +288,7 @@
</target>
<target name="clean">
- <echo message="Cleaning: ${name}"/>
+ <echo message="Cleaning: ${ant.project.name}"/>
<delete dir="${build.dir}"/>
</target>
14 build.properties
@@ -0,0 +1,14 @@
+Name=Hive
+name=hive
+version=0.4.0
+year=2009
+
+hadoop.version=0.19.0
+hadoop.mirror=http://archive.apache.org/dist
+
+build.dir.hive=${hive.root}/build
+build.dir.hadoop=${build.dir.hive}/hadoopcore
+
+hadoop.root.default=${build.dir.hadoop}/hadoop-${hadoop.version}
+hadoop.root=${hadoop.root.default}
+hadoop.jar=${hadoop.root}/hadoop-${hadoop.version}-core.jar
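Ant properties are write-once, so the first definition wins: with the load order used in build-common.xml and build.xml (the repo-level build.properties above, then ${user.home}/build.properties, then the module's own), the repo-level values take precedence over per-user files, and a -Dhadoop.version=... on the ant command line beats all three. A minimal sketch of the pattern (hypothetical demo project, not part of this commit):

<project name="props-demo" default="show">
  <!-- Write-once semantics: a command-line -Dhadoop.version=0.20.0
       overrides every file below; among the files, the first one
       that defines a property wins. -->
  <property name="hive.root" location="."/>
  <property file="${hive.root}/build.properties"/>
  <property file="${user.home}/build.properties"/>
  <property file="${basedir}/build.properties"/>

  <target name="show">
    <echo message="${Name} ${version} against Hadoop ${hadoop.version}"/>
  </target>
</project>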
178 build.xml
@@ -21,7 +21,10 @@
<project name="hive" default="deploy" >
<property name="hive.root" location="${basedir}"/>
- <property name="build.dir.hive" location="${hive.root}/build"/>
+ <property file="${hive.root}/build.properties"/>
+ <property file="${user.home}/build.properties" />
+ <property file="${basedir}/build.properties" />
+
<property name="target.dir" location="${build.dir.hive}/dist"/>
<property name="target.lib.dir" location="${target.dir}/lib"/>
<property name="target.conf.dir" location="${target.dir}/conf"/>
@@ -30,6 +33,52 @@
<property name="ql.test.query.dir" location="${basedir}/ql/src/test/queries"/>
<property name="test.data.dir" location="${basedir}/data"/>
<property name="test.build.dir" value="${build.dir.hive}/test"/>
+ <property name="build.docs" value="${target.dir}/docs"/>
+ <property name="build.javadoc" value="${build.docs}/api"/>
+ <property name="docs.src" value="${hive.root}/docs"/>
+ <property name="changes.src" value="${docs.src}/changes"/>
+ <property name="images.src" value="${docs.src}/images"/>
+ <property name="javadoc.link.java"
+ value="http://java.sun.com/javase/6/docs/api/"/>
+ <property name="final.name" value="${name}-${version}-hadoop-${hadoop.version}"/>
+ <property name="dev.final.name" value="${final.name}-dev"/>
+ <property name="bin.final.name" value="${final.name}-bin"/>
+
+ <!-- ====================================================== -->
+ <!-- Macro definitions -->
+ <!-- ====================================================== -->
+ <macrodef name="macro_tar" description="Worker Macro for tar">
+ <attribute name="param.destfile"/>
+ <element name="param.listofitems"/>
+ <sequential>
+ <tar compression="gzip" longfile="gnu"
+ destfile="@{param.destfile}">
+ <param.listofitems/>
+ </tar>
+ </sequential>
+ </macrodef>
+
+ <!-- the normal classpath -->
+ <path id="common-classpath">
+ <pathelement location="${hadoop.jar}"/>
+ <pathelement location="${build.dir.hive}/classes"/>
+ <fileset dir="${hive.root}" includes="hive_*.jar"/>
+ <fileset dir="${hive.root}/lib" includes="*.jar"/>
+ <fileset dir="${hive.root}/ql/lib" includes="*.jar"/>
+ </path>
+
+ <path id="classpath">
+ <pathelement location="${build.dir.hive}/common/classes"/>
+ <pathelement location="${build.dir.hive}/serde/classes"/>
+ <pathelement location="${build.dir.hive}/metastore/classes"/>
+ <pathelement location="${build.dir.hive}/ql/classes"/>
+ <pathelement location="${build.dir.hive}/cli/classes"/>
+ <fileset dir="${hive.root}/data" includes="files/*.jar"/>
+ <fileset dir="${hive.root}/ql" includes="lib/*.jar"/>
+ <fileset dir="${hive.root}/cli" includes="lib/*.jar"/>
+ <fileset dir="${hive.root}/service" includes="lib/*.jar"/>
+ <path refid="common-classpath"/>
+ </path>
<!-- ====================================================== -->
<!-- Initialize for running junit tests -->
@@ -111,7 +160,7 @@
</target>
<!-- ====================================================== -->
- <!-- Test all the contribs. -->
+ <!-- Test everything. -->
<!-- ====================================================== -->
<target name="test" depends="clean-test,deploy">
<subant target="test">
@@ -137,7 +186,7 @@
</target>
<!-- ====================================================== -->
- <!-- Clean all the contribs. -->
+ <!-- Clean everything. -->
<!-- ====================================================== -->
<target name="clean">
<subant target="clean">
@@ -212,6 +261,9 @@
</chmod>
</target>
+ <!-- ====================================================== -->
+ <!-- Generate files for eclipse. -->
+ <!-- ====================================================== -->
<target name="eclipse-files" depends="init"
description="Generate files for Eclipse">
@@ -252,4 +304,124 @@
</delete>
</target>
+ <!-- ================================================================== -->
+ <!-- Documentation -->
+ <!-- ================================================================== -->
+
+ <target name="docs">
+ <antcall target="changes-to-html"/>
+ </target>
+
+ <target name="changes-to-html" description="Convert CHANGES.txt into an html file">
+ <mkdir dir="${build.docs}"/>
+ <exec executable="perl" input="CHANGES.txt" output="${build.docs}/changes.html" failonerror="true">
+ <arg value="${changes.src}/changes2html.pl"/>
+ </exec>
+ <copy todir="${build.docs}">
+ <fileset dir="${changes.src}" includes="*.css"/>
+ </copy>
+ <copy todir="${build.docs}/images">
+ <fileset dir="${images.src}" includes="*.jpg"/>
+ </copy>
+ </target>
+
+ <target name="javadoc" depends="package" description="Generate javadoc">
+
+ <mkdir dir="${build.javadoc}"/>
+ <javadoc
+ packagenames="org.apache.hadoop.hive.*"
+ destdir="${build.javadoc}"
+ author="true"
+ version="true"
+ use="true"
+ windowtitle="${Name} ${version} API"
+ doctitle="${Name} ${version} API"
+ bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
+ >
+ <packageset dir="ant/src"/>
+ <packageset dir="hwi/src/java"/>
+ <packageset dir="hwi/src/test"/>
+ <packageset dir="common/src/java"/>
+ <packageset dir="service/src/java"/>
+ <packageset dir="service/src/test"/>
+ <packageset dir="service/src/gen-javabean"/>
+ <packageset dir="serde/src/java"/>
+ <packageset dir="serde/src/test"/>
+ <packageset dir="serde/src/gen-java"/>
+ <packageset dir="jdbc/src/java"/>
+ <packageset dir="jdbc/src/test"/>
+ <packageset dir="metastore/src/java"/>
+ <packageset dir="metastore/src/test"/>
+ <packageset dir="metastore/src/gen-javabean"/>
+ <packageset dir="metastore/src/model"/>
+ <packageset dir="cli/src/java"/>
+ <packageset dir="${build.dir.hive}/ql/java"/>
+ <packageset dir="${build.dir.hive}/ql/gen-java"/>
+
+ <link href="${javadoc.link.java}"/>
+
+ <classpath >
+ <fileset dir="${hadoop.root}/lib">
+ <include name="**/*.jar" />
+ <exclude name="**/excluded/" />
+ </fileset>
+ <path refid="classpath" />
+ <pathelement path="${java.class.path}"/>
+ </classpath>
+
+ <group title="Hive" packages="org.apache.*"/>
+ </javadoc>
+
+ </target>
+
+ <!-- ================================================================== -->
+ <!-- Make release tarball -->
+ <!-- ================================================================== -->
+ <target name="tar" depends="package, docs, javadoc" description="Make release tarball">
+ <macro_tar param.destfile="${build.dir.hive}/${dev.final.name}.tar.gz">
+ <param.listofitems>
+ <tarfileset dir="${build.dir.hive}/dist" mode="755" prefix="${dev.final.name}">
+ <include name="bin/**"/>
+ </tarfileset>
+ <tarfileset dir="${build.dir.hive}/dist" mode="755" prefix="${dev.final.name}">
+ <include name="lib/py/**/*-remote"/>
+ </tarfileset>
+ <tarfileset dir="${build.dir.hive}/dist" mode="664" prefix="${dev.final.name}">
+ <include name="**"/>
+ <exclude name="bin/**"/>
+ <exclude name="lib/py/**/*-remote"/>
+ </tarfileset>
+ <tarfileset dir="${hive.root}" mode="664" prefix="${dev.final.name}/src">
+ <exclude name="build/**" />
+ <exclude name="bin/**" />
+ <exclude name="**/py/**/*-remote" />
+ </tarfileset>
+ <tarfileset dir="${hive.root}" mode="755" prefix="${dev.final.name}/src">
+ <exclude name="build/**" />
+ <include name="bin/**" />
+ <include name="**/py/**/*-remote" />
+ </tarfileset>
+ </param.listofitems>
+ </macro_tar>
+ </target>
+
+ <target name="binary" depends="package, docs, javadoc" description="Make tarball without source and documentation">
+ <macro_tar param.destfile="${build.dir.hive}/${bin.final.name}.tar.gz">
+ <param.listofitems>
+ <tarfileset dir="${build.dir.hive}/dist" mode="755" prefix="${bin.final.name}">
+ <include name="bin/**"/>
+ </tarfileset>
+ <tarfileset dir="${build.dir.hive}/dist" mode="755" prefix="${bin.final.name}">
+ <include name="lib/py/**/*-remote"/>
+ </tarfileset>
+ <tarfileset dir="${build.dir.hive}/dist" mode="664" prefix="${bin.final.name}">
+ <include name="**"/>
+ <exclude name="bin/**"/>
+ <exclude name="docs/**"/>
+ <exclude name="lib/py/**/*-remote"/>
+ </tarfileset>
+ </param.listofitems>
+ </macro_tar>
+ </target>
+
</project>
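Both release targets funnel through the macro_tar macrodef defined near the top of this file, so gzip compression and GNU long-file handling are configured once. A hedged sketch of reusing the macro for a hypothetical source-only archive (not part of this commit; assumes the properties defined above):

<target name="src-tar" depends="init"
        description="Hypothetical source-only tarball">
  <macro_tar param.destfile="${build.dir.hive}/${final.name}-src.tar.gz">
    <param.listofitems>
      <!-- Everything under the repo root except build output and jars -->
      <tarfileset dir="${hive.root}" mode="664" prefix="${final.name}-src">
        <exclude name="build/**"/>
        <exclude name="**/*.jar"/>
      </tarfileset>
    </param.listofitems>
  </macro_tar>
</target>

With the targets as defined, ant tar and ant binary should leave ${dev.final.name}.tar.gz and ${bin.final.name}.tar.gz under build/.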
170 docs/changes/ChangesFancyStyle.css
@@ -0,0 +1,170 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+/**
+ * General
+ */
+
+img { border: 0; }
+
+#content table {
+ border: 0;
+ width: 100%;
+}
+/*Hack to get IE to render the table at 100%*/
+* html #content table { margin-left: -3px; }
+
+#content th,
+#content td {
+ margin: 0;
+ padding: 0;
+ vertical-align: top;
+}
+
+.clearboth {
+ clear: both;
+}
+
+.note, .warning, .fixme {
+ border: solid black 1px;
+ margin: 1em 3em;
+}
+
+.note .label {
+ background: #369;
+ color: white;
+ font-weight: bold;
+ padding: 5px 10px;
+}
+.note .content {
+ background: #F0F0FF;
+ color: black;
+ line-height: 120%;
+ font-size: 90%;
+ padding: 5px 10px;
+}
+.warning .label {
+ background: #C00;
+ color: white;
+ font-weight: bold;
+ padding: 5px 10px;
+}
+.warning .content {
+ background: #FFF0F0;
+ color: black;
+ line-height: 120%;
+ font-size: 90%;
+ padding: 5px 10px;
+}
+.fixme .label {
+ background: #C6C600;
+ color: black;
+ font-weight: bold;
+ padding: 5px 10px;
+}
+.fixme .content {
+ padding: 5px 10px;
+}
+
+/**
+ * Typography
+ */
+
+body {
+ font-family: verdana, "Trebuchet MS", arial, helvetica, sans-serif;
+ font-size: 100%;
+}
+
+#content {
+ font-family: Georgia, Palatino, Times, serif;
+ font-size: 95%;
+}
+#tabs {
+ font-size: 70%;
+}
+#menu {
+ font-size: 80%;
+}
+#footer {
+ font-size: 70%;
+}
+
+h1, h2, h3, h4, h5, h6 {
+ font-family: "Trebuchet MS", verdana, arial, helvetica, sans-serif;
+ font-weight: bold;
+ margin-top: 1em;
+ margin-bottom: .5em;
+}
+
+h1 {
+ margin-top: 0;
+ margin-bottom: 1em;
+ font-size: 1.4em;
+ background-color: 73CAFF
+}
+#content h1 {
+ font-size: 160%;
+ margin-bottom: .5em;
+}
+#menu h1 {
+ margin: 0;
+ padding: 10px;
+ background: #336699;
+ color: white;
+}
+h2 {
+ font-size: 120%;
+ background-color: 73CAFF
+}
+h3 { font-size: 100%; }
+h4 { font-size: 90%; }
+h5 { font-size: 80%; }
+h6 { font-size: 75%; }
+
+p {
+ line-height: 120%;
+ text-align: left;
+ margin-top: .5em;
+ margin-bottom: 1em;
+}
+
+#content li,
+#content th,
+#content td,
+#content li ul,
+#content li ol{
+ margin-top: .5em;
+ margin-bottom: .5em;
+}
+
+
+#content li li,
+#minitoc-area li{
+ margin-top: 0em;
+ margin-bottom: 0em;
+}
+
+#content .attribution {
+ text-align: right;
+ font-style: italic;
+ font-size: 85%;
+ margin-top: 1em;
+}
+
+.codefrag {
+ font-family: "Courier New", Courier, monospace;
+ font-size: 110%;
+}
49 docs/changes/ChangesSimpleStyle.css
@@ -0,0 +1,49 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+body {
+ font-family: Courier New, monospace;
+ font-size: 10pt;
+}
+
+h1 {
+ font-family: Courier New, monospace;
+ font-size: 10pt;
+}
+
+h2 {
+ font-family: Courier New, monospace;
+ font-size: 10pt;
+}
+
+h3 {
+ font-family: Courier New, monospace;
+ font-size: 10pt;
+}
+
+a:link {
+ color: blue;
+}
+
+a:visited {
+ color: purple;
+}
+
+li {
+ margin-top: 1em;
+ margin-bottom: 1em;
+}
282 docs/changes/changes2html.pl
@@ -0,0 +1,282 @@
+#!/usr/bin/perl
+#
+# Transforms Lucene Java's CHANGES.txt into Changes.html
+#
+# Input is on STDIN, output is to STDOUT
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+use strict;
+use warnings;
+
+my $jira_url_prefix = 'http://issues.apache.org/jira/browse/';
+my $title = undef;
+my $release = undef;
+my $sections = undef;
+my $items = undef;
+my $first_relid = undef;
+my $second_relid = undef;
+my @releases = ();
+
+my @lines = <>; # Get all input at once
+
+#
+# Parse input and build hierarchical release structure in @releases
+#
+for (my $line_num = 0 ; $line_num <= $#lines ; ++$line_num) {
+ $_ = $lines[$line_num];
+ next unless (/\S/); # Skip blank lines
+
+ unless ($title) {
+ if (/\S/) {
+ s/^\s+//; # Trim leading whitespace
+ s/\s+$//; # Trim trailing whitespace
+ }
+ $title = $_;
+ next;
+ }
+
+ if (/^(Release)|(Trunk)/) { # Release headings
+ $release = $_;
+ $sections = [];
+ push @releases, [ $release, $sections ];
+ ($first_relid = lc($release)) =~ s/\s+/_/g if ($#releases == 0);
+ ($second_relid = lc($release)) =~ s/\s+/_/g if ($#releases == 1);
+ $items = undef;
+ next;
+ }
+
+ # Section heading: 2 leading spaces, words all capitalized
+ if (/^ ([A-Z]+)\s*/) {
+ my $heading = $_;
+ $items = [];
+ push @$sections, [ $heading, $items ];
+ next;
+ }
+
+ # Handle earlier releases without sections - create a headless section
+ unless ($items) {
+ $items = [];
+ push @$sections, [ undef, $items ];
+ }
+
+ my $type;
+ if (@$items) { # A list item has been encountered in this section before
+ $type = $items->[0]; # 0th position of items array is list type
+ } else {
+ $type = get_list_type($_);
+ push @$items, $type;
+ }
+
+ if ($type eq 'numbered') { # The modern items list style
+ # List item boundary is another numbered item or an unindented line
+ my $line;
+ my $item = $_;
+ $item =~ s/^(\s{0,2}\d+\.\s*)//; # Trim the leading item number
+ my $leading_ws_width = length($1);
+ $item =~ s/\s+$//; # Trim trailing whitespace
+ $item .= "\n";
+
+ while ($line_num < $#lines
+ and ($line = $lines[++$line_num]) !~ /^(?:\s{0,2}\d+\.\s*\S|\S)/) {
+ $line =~ s/^\s{$leading_ws_width}//; # Trim leading whitespace
+ $line =~ s/\s+$//; # Trim trailing whitespace
+ $item .= "$line\n";
+ }
+ $item =~ s/\n+\Z/\n/; # Trim trailing blank lines
+ push @$items, $item;
+ --$line_num unless ($line_num == $#lines);
+ } elsif ($type eq 'paragraph') { # List item boundary is a blank line
+ my $line;
+ my $item = $_;
+ $item =~ s/^(\s+)//;
+ my $leading_ws_width = defined($1) ? length($1) : 0;
+ $item =~ s/\s+$//; # Trim trailing whitespace
+ $item .= "\n";
+
+ while ($line_num < $#lines and ($line = $lines[++$line_num]) =~ /\S/) {
+ $line =~ s/^\s{$leading_ws_width}//; # Trim leading whitespace
+ $line =~ s/\s+$//; # Trim trailing whitespace
+ $item .= "$line\n";
+ }
+ push @$items, $item;
+ --$line_num unless ($line_num == $#lines);
+ } else { # $type is one of the bulleted types
+ # List item boundary is another bullet or a blank line
+ my $line;
+ my $item = $_;
+ $item =~ s/^(\s*$type\s*)//; # Trim the leading bullet
+ my $leading_ws_width = length($1);
+ $item =~ s/\s+$//; # Trim trailing whitespace
+ $item .= "\n";
+
+ while ($line_num < $#lines
+ and ($line = $lines[++$line_num]) !~ /^\s*(?:$type|\Z)/) {
+ $line =~ s/^\s{$leading_ws_width}//; # Trim leading whitespace
+ $line =~ s/\s+$//; # Trim trailing whitespace
+ $item .= "$line\n";
+ }
+ push @$items, $item;
+ --$line_num unless ($line_num == $#lines);
+ }
+}
+
+#
+# Print HTML-ified version to STDOUT
+#
+print<<"__HTML_HEADER__";
+<!--
+**********************************************************
+** WARNING: This file is generated from CHANGES.txt by the
+** Perl script 'changes2html.pl'.
+** Do *not* edit this file!
+**********************************************************
+
+****************************************************************************
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+****************************************************************************
+-->
+<html>
+<head>
+ <title>$title</title>
+ <link rel="stylesheet" href="ChangesFancyStyle.css" title="Fancy">
+ <link rel="alternate stylesheet" href="ChangesSimpleStyle.css" title="Simple">
+ <META http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
+ <SCRIPT>
+ function toggleList(e) {
+ element = document.getElementById(e).style;
+ element.display == 'none' ? element.display = 'block' : element.display='none';
+ }
+ function collapse() {
+ for (var i = 0; i < document.getElementsByTagName("ul").length; i++) {
+ var list = document.getElementsByTagName("ul")[i];
+ if (list.id != '$first_relid' && list.id != '$second_relid') {
+ list.style.display = "none";
+ }
+ }
+ for (var i = 0; i < document.getElementsByTagName("ol").length; i++) {
+ document.getElementsByTagName("ol")[i].style.display = "none";
+ }
+ }
+ window.onload = collapse;
+ </SCRIPT>
+</head>
+<body>
+
+<a href="http://hadoop.apache.org/hive/"><img class="logoImage" alt="Hive" src="images/hive-logo.jpg" title="SQL and Data Warehousing Platform on Hadoop"></a>
+<h1>$title</h1>
+
+__HTML_HEADER__
+
+my $heading;
+my $relcnt = 0;
+my $header = 'h2';
+for my $rel (@releases) {
+ if (++$relcnt == 3) {
+ $header = 'h3';
+ print "<h2><a href=\"javascript:toggleList('older')\">";
+ print "Older Releases";
+ print "</a></h2>\n";
+ print "<ul id=\"older\">\n"
+ }
+
+ ($release, $sections) = @$rel;
+
+ # The first section heading is undefined for the older sectionless releases
+ my $has_release_sections = $sections->[0][0];
+
+ (my $relid = lc($release)) =~ s/\s+/_/g;
+ print "<$header><a href=\"javascript:toggleList('$relid')\">";
+ print "$release";
+ print "</a></$header>\n";
+ print "<ul id=\"$relid\">\n"
+ if ($has_release_sections);
+
+ for my $section (@$sections) {
+ ($heading, $items) = @$section;
+ (my $sectid = lc($heading)) =~ s/\s+/_/g;
+ my $numItemsStr = $#{$items} > 0 ? "($#{$items})" : "(none)";
+
+ print " <li><a href=\"javascript:toggleList('$relid.$sectid')\">",
+ ($heading || ''), "</a>&nbsp;&nbsp;&nbsp;$numItemsStr\n"
+ if ($has_release_sections);
+
+ my $list_type = $items->[0] || '';
+ my $list = ($has_release_sections || $list_type eq 'numbered' ? 'ol' : 'ul');
+ my $listid = $sectid ? "$relid.$sectid" : $relid;
+ print " <$list id=\"$listid\">\n";
+
+ for my $itemnum (1..$#{$items}) {
+ my $item = $items->[$itemnum];
+ $item =~ s:&:&amp;:g; # Escape HTML metachars
+ $item =~ s:<:&lt;:g;
+ $item =~ s:>:&gt;:g;
+
+ $item =~ s:\s*(\([^)"]+?\))\s*$:<br />$1:; # Separate attribution
+ $item =~ s:\n{2,}:\n<p/>\n:g; # Keep paragraph breaks
+ $item =~ s{(?:${jira_url_prefix})?(HADOOP-\d+)} # Link to JIRA
+ {<a href="${jira_url_prefix}$1">$1</a>}g;
+ print " <li>$item</li>\n";
+ }
+ print " </$list>\n";
+ print " </li>\n" if ($has_release_sections);
+ }
+ print "</ul>\n" if ($has_release_sections);
+}
+print "</ul>\n" if ($relcnt > 3);
+print "</body>\n</html>\n";
+
+
+#
+# Subroutine: get_list_type
+#
+# Takes one parameter:
+#
+# - The first line of a sub-section/point
+#
+# Returns one scalar:
+#
+# - The list type: 'numbered'; or one of the bulleted types '-', or '.' or
+# 'paragraph'.
+#
+sub get_list_type {
+ my $first_list_item_line = shift;
+ my $type = 'paragraph'; # Default to paragraph type
+
+ if ($first_list_item_line =~ /^\s{0,2}\d+\.\s+\S+/) {
+ $type = 'numbered';
+ } elsif ($first_list_item_line =~ /^\s*([-.])\s+\S+/) {
+ $type = $1;
+ }
+ return $type;
+}
+
+1;
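The docs target in build.xml drives this script through Ant's exec task, with CHANGES.txt on stdin and the generated HTML on stdout. The same invocation works standalone; a minimal sketch (hypothetical wrapper, paths relative to the repo root):

<project name="changes-demo" default="changes-to-html">
  <target name="changes-to-html">
    <mkdir dir="build/docs"/>
    <!-- Mirrors the docs target: input becomes the script's STDIN,
         output captures its STDOUT. -->
    <exec executable="perl" input="CHANGES.txt"
          output="build/docs/changes.html" failonerror="true">
      <arg value="docs/changes/changes2html.pl"/>
    </exec>
  </target>
</project>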
BIN docs/images/hive-logo.jpg
2 hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java
@@ -196,7 +196,7 @@ protected void killIt() {
*
* @param wanted
* a ConfVar
- * @return
+ * @return Value of the configuration variable.
*/
public String getHiveConfVar(HiveConf.ConfVars wanted) throws HWIException {
String result = null;
8 hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java
@@ -119,10 +119,10 @@ protected void setItems(TreeMap<HWIAuth, Set<HWISessionItem>> items) {
* because we need to set parameters the client is not aware of. One such
* parameter is the command line arguments the server was started with.
*
- * @param sessionname
+ * @param a
+ * Authenticated user
+ * @param sessionName
* Represents the session name
- * @param sessionpass
- * the session password, can be ""
* @return a new SessionItem or null if a session with that name already
* exists
*/
@@ -201,4 +201,4 @@ public HWISessionItem findSessionItemByName(HWIAuth auth, String sessionname) {
return this.items.get(auth);
}
-}
+}
2 metastore/src/java/org/apache/hadoop/hive/metastore/FileStore.java
@@ -178,7 +178,7 @@ public void drop(DB db, String tableName) throws IOException {
*
* Looks at metastore directories
*
- * @param db
+ * @param parent
* @param tablePattern
* @return the list of tables
* @exception MetaException
51 metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -198,7 +198,7 @@ public void dropTable(String tableName, boolean deleteData) throws MetaException
/**
* @param new_part
- * @return
+ * @return the added partition
* @throws InvalidObjectException
* @throws AlreadyExistsException
* @throws MetaException
@@ -214,7 +214,7 @@ public Partition add_partition(Partition new_part) throws InvalidObjectException
* @param table_name
* @param db_name
* @param part_vals
- * @return
+ * @return the appended partition
* @throws InvalidObjectException
* @throws AlreadyExistsException
* @throws MetaException
@@ -229,7 +229,7 @@ public Partition appendPartition(String db_name, String table_name, List<String>
/**
* @param name
* @param location_uri
- * @return
+ * @return true or false
* @throws AlreadyExistsException
* @throws MetaException
* @throws TException
@@ -254,7 +254,7 @@ public void createTable(Table tbl) throws AlreadyExistsException, InvalidObjectE
/**
* @param type
- * @return
+ * @return true or false
* @throws AlreadyExistsException
* @throws InvalidObjectException
* @throws MetaException
@@ -268,7 +268,7 @@ public boolean createType(Type type) throws AlreadyExistsException, InvalidObjec
/**
* @param name
- * @return
+ * @return true or false
* @throws MetaException
* @throws TException
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_database(java.lang.String)
@@ -281,11 +281,11 @@ public boolean dropDatabase(String name) throws MetaException, TException {
* @param tbl_name
* @param db_name
* @param part_vals
- * @return
+ * @return true or false
* @throws NoSuchObjectException
* @throws MetaException
* @throws TException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_partition(java.lang.String, java.lang.String, java.util.List)
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_partition(java.lang.String, java.lang.String, java.util.List, boolean)
*/
public boolean dropPartition(String db_name, String tbl_name, List<String> part_vals)
throws NoSuchObjectException, MetaException, TException {
@@ -297,11 +297,11 @@ public boolean dropPartition(String db_name, String tbl_name, List<String> part_
* @param tbl_name
* @param part_vals
* @param deleteData delete the underlying data or just delete the table in metadata
- * @return
+ * @return true or false
* @throws NoSuchObjectException
* @throws MetaException
* @throws TException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_partition(java.lang.String, java.lang.String, java.util.List)
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_partition(java.lang.String, java.lang.String, java.util.List, boolean)
*/
public boolean dropPartition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData)
throws NoSuchObjectException, MetaException, TException {
@@ -311,12 +311,11 @@ public boolean dropPartition(String db_name, String tbl_name, List<String> part_
/**
* @param name
* @param dbname
- * @return
* @throws NoSuchObjectException
* @throws ExistingDependentsException
* @throws MetaException
* @throws TException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_table(java.lang.String, java.lang.String)
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_table(java.lang.String, java.lang.String, boolean)
*/
public void dropTable(String dbname, String name) throws NoSuchObjectException,
ExistingDependentsException, MetaException, TException {
@@ -327,12 +326,11 @@ public void dropTable(String dbname, String name) throws NoSuchObjectException,
* @param dbname
* @param name
* @param deleteData delete the underlying data or just delete the table in metadata
- * @return
* @throws NoSuchObjectException
* @throws ExistingDependentsException
* @throws MetaException
* @throws TException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_table(java.lang.String, java.lang.String)
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_table(java.lang.String, java.lang.String, boolean)
*/
public void dropTable(String dbname, String name, boolean deleteData, boolean ignoreUknownTab) throws
ExistingDependentsException, MetaException, TException, NoSuchObjectException {
@@ -347,7 +345,7 @@ public void dropTable(String dbname, String name, boolean deleteData, boolean ig
/**
* @param type
- * @return
+ * @return true if the type is dropped
* @throws MetaException
* @throws TException
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_type(java.lang.String)
@@ -358,7 +356,7 @@ public boolean dropType(String type) throws MetaException, TException {
/**
* @param name
- * @return
+ * @return map of types
* @throws MetaException
* @throws TException
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_type_all(java.lang.String)
@@ -368,10 +366,10 @@ public boolean dropType(String type) throws MetaException, TException {
}
/**
- * @return
+ * @return the list of databases
* @throws MetaException
* @throws TException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#list_databases()
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_databases()
*/
public List<String> getDatabases() throws MetaException, TException {
return client.get_databases();
@@ -381,11 +379,10 @@ public boolean dropType(String type) throws MetaException, TException {
* @param tbl_name
* @param db_name
* @param max_parts
- * @return
+ * @return list of partitions
* @throws NoSuchObjectException
* @throws MetaException
* @throws TException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#list_partitions(java.lang.String, java.lang.String, short)
*/
public List<Partition> listPartitions(String db_name, String tbl_name, short max_parts)
throws NoSuchObjectException, MetaException, TException {
@@ -394,11 +391,11 @@ public boolean dropType(String type) throws MetaException, TException {
/**
* @param name
- * @return
+ * @return the database
* @throws NoSuchObjectException
* @throws MetaException
* @throws TException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#lookup_database(java.lang.String)
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_database(java.lang.String)
*/
public Database getDatabase(String name) throws NoSuchObjectException, MetaException,
TException {
@@ -409,10 +406,10 @@ public Database getDatabase(String name) throws NoSuchObjectException, MetaExcep
* @param tbl_name
* @param db_name
* @param part_vals
- * @return
+ * @return the partition
* @throws MetaException
* @throws TException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#lookup_partition(java.lang.String, java.lang.String, java.util.List)
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_partition(java.lang.String, java.lang.String, java.util.List)
*/
public Partition getPartition(String db_name, String tbl_name, List<String> part_vals)
throws MetaException, TException {
@@ -422,23 +419,23 @@ public Partition getPartition(String db_name, String tbl_name, List<String> part
/**
* @param name
* @param dbname
- * @return
+ * @return the table
* @throws NoSuchObjectException
* @throws MetaException
* @throws TException
* @throws NoSuchObjectException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#lookup_table(java.lang.String, java.lang.String)
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_table(java.lang.String, java.lang.String)
*/
public Table getTable(String dbname, String name) throws MetaException, TException, NoSuchObjectException {
return client.get_table(dbname, name);
}
/**
* @param name
- * @return
+ * @return the type
* @throws MetaException
* @throws TException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#lookup_type(java.lang.String)
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_type(java.lang.String)
*/
public Type getType(String name) throws MetaException, TException {
return client.get_type(name);
30 metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -58,7 +58,7 @@ public void dropTable(String tableName, boolean deleteData)
/**
* Drop the table.
- * @param dbName The database for this table
+ * @param dbname The database for this table
* @param tableName The table to drop
* @throws MetaException Could not drop table properly.
* @throws NoSuchObjectException The table wasn't found.
@@ -98,10 +98,10 @@ public Table getTable(String dbName, String tableName)
throws MetaException, TException, NoSuchObjectException;
/**
- * @param table_name
- * @param db_name
- * @param part_vals
- * @return
+ * @param tableName
+ * @param dbName
+ * @param partVals
+ * @return the partition object
* @throws InvalidObjectException
* @throws AlreadyExistsException
* @throws MetaException
@@ -125,13 +125,13 @@ public Partition add_partition(Partition partition)
MetaException, TException;
/**
- * @param tbl_name
- * @param db_name
- * @param part_vals
- * @return
+ * @param tblName
+ * @param dbName
+ * @param partVals
+ * @return the partition object
* @throws MetaException
* @throws TException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#lookup_partition(java.lang.String, java.lang.String, java.util.List)
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_partition(java.lang.String, java.lang.String, java.util.List)
*/
public Partition getPartition(String tblName, String dbName, List<String> partVals)
throws MetaException, TException ;
@@ -140,11 +140,10 @@ public Partition getPartition(String tblName, String dbName, List<String> partVa
* @param tbl_name
* @param db_name
* @param max_parts
- * @return
+ * @return the list of partitions
* @throws NoSuchObjectException
* @throws MetaException
* @throws TException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#list_partitions(java.lang.String, java.lang.String, short)
*/
public List<Partition> listPartitions(String db_name, String tbl_name, short max_parts)
throws NoSuchObjectException, MetaException, TException;
@@ -153,7 +152,6 @@ public Partition getPartition(String tblName, String dbName, List<String> partVa
throws MetaException, TException;
/**
* @param tbl
- * @return
* @throws AlreadyExistsException
* @throws InvalidObjectException
* @throws MetaException
@@ -173,12 +171,12 @@ public void createTable(Table tbl) throws AlreadyExistsException, InvalidObjectE
* @param tbl_name
* @param part_vals
* @param deleteData delete the underlying data or just delete the table in metadata
- * @return
+ * @return true or false
* @throws NoSuchObjectException
* @throws MetaException
* @throws TException
- * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_partition(java.lang.String, java.lang.String, java.util.List)
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_partition(java.lang.String, java.lang.String, java.util.List, boolean)
*/
public boolean dropPartition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData)
throws NoSuchObjectException, MetaException, TException;
-}
+}
2 metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreClient.java
@@ -453,7 +453,7 @@ public boolean dropPartition(String db_name, String tbl_name, List<String> part_
/**
* This operation is unsupported in this metastore.
- * @see MetaStoreClient#dropTable(String, String)
+ * @see IMetaStoreClient#dropTable(String, String, boolean, boolean)
*/
public void dropTable(String dbname, String name, boolean deleteData, boolean ignoreUknownTab) throws
ExistingDependentsException, MetaException, TException, NoSuchObjectException {
22 metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreServer.java
@@ -186,9 +186,8 @@ public ThriftMetaStoreHandler(String name, Configuration configuration) {
*
* return a table's partitions
*
- * @param tableName - a valid table name
- * @param low - low index
- * @param high - high index
+ * @param dbName the database name
+ * @param tableName a valid table name
* @return a string (including '\n's) of the rows
* @exception TException if thrift problem
* @exception MetaException if internal problem or bad input
@@ -235,8 +234,9 @@ public boolean table_exists(String dbName, String tableName) throws TException,
*
* For a columnsetSerDe table, add column names to it
*
- * @param tableName - a valid existing table name
- * @param columns - ordered list of column names
+ * @param dbName the database
+ * @param tableName a valid existing table name
+ * @param schema The schema information for the table
* @exception TException if thrift problem
* @exception MetaException if internal problem or bad input
* @exception UnknownTableException if table does not exist already
@@ -267,8 +267,8 @@ public void alter_table(String dbName, String tableName, Map<String, String> s
*
* Create names columns for a columnset type table
*
+ * @param dbName a valid database name
* @param tableName - a valid table name
- * @param columns - ordered list of column names
* @exception TException if thrift problem
* @exception MetaException if internal problem or bad input
*/
@@ -364,8 +364,8 @@ public void create_table(String dbName, String tableName, Map<String, String> sc
*
* drop a table
*
- * @param tableName - a valid existing table name
- * @param delete_data - should the store auto delete the data.
+ * @param dbName the name of a database
+ * @param tableName a valid existing table name
* @exception TException if thrift problem
* @exception MetaException if internal problem or bad input
* @exception UnknownTableException if table does not exist already
@@ -391,8 +391,8 @@ public void drop_table(String dbName, String tableName) throws TException,MetaE
*
* drop a table
*
- * @param tableName - a valid existing table name
- * @param delete_data - should the store auto delete the data.
+ * @param dbName a valid database name
+ * @param tableName a valid existing table name
* @exception TException if thrift problem
* @exception MetaException if internal problem or bad input
* @exception UnknownTableException if table does not exist already
@@ -418,7 +418,7 @@ public void truncate_table(String dbName, String tableName, String partition) t
*
* Gets the (opaque) schema which is currently represented as a key=>value map.
*
- * @param name - the name of the table
+ * @param tableName - the name of the table
* @return the key/value of the opaque schema
* @exception MetaException if internal problem
* @exception UnknownTableException if the table doesn't exist
16 metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -138,9 +138,8 @@ static public void recursiveDelete(File f) throws IOException {
*
* Get the Deserializer for a table given its name and properties.
*
- * @param name the name of the table
- * @param conf - hadoop config
- * @param p - the properties to use to instantiate the schema
+ * @param conf hadoop config
+ * @param schema the properties to use to instantiate the deserializer
* @return the Deserializer
* @exception MetaException if any problems instantiating the Deserializer
*
@@ -163,11 +162,10 @@ static public Deserializer getDeserializer(Configuration conf, Properties schema
/**
* getDeserializer
*
- * Get the Deserializer for a table given its name and properties.
+ * Get the Deserializer for a table.
*
- * @param name the name of the table
* @param conf - hadoop config
- * @param p - SerDe info
+ * @param table the table
* @return the Deserializer
* @exception MetaException if any problems instantiating the Deserializer
*
@@ -237,8 +235,8 @@ static public void deleteWHDirectory(Path path,Configuration conf, boolean use_t
* Checks the name conforms to our standars which are: "[a-zA-z_0-9]+".
* checks this is just characters and numbers and _
*
- * @param tableName the name to validate
- * @return none
+ * @param name the name to validate
+ * @return true or false depending on conformance
* @exception MetaException if it doesn't match the pattern.
*/
static public boolean validateName(String name) {
@@ -541,7 +539,7 @@ static void logAndThrowMetaException(Exception e) throws MetaException {
/**
* @param tableName
* @param deserializer
- * @return
+ * @return the list of fields
* @throws SerDeException
* @throws MetaException
*/
2 metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -221,7 +221,7 @@ public boolean openTransaction() {
/**
* if this is the commit of the first open call then an actual commit is called.
- * @return
+ * @return Always returns true
*/
@SuppressWarnings("nls")
public boolean commitTransaction() {
1 metastore/src/java/org/apache/hadoop/hive/metastore/ROTable.java
@@ -99,7 +99,6 @@ public Path getPath() {
* Scan the file system and find all the partitions of this table
* Not recursive right now - needs to be!
*
- * @param the table name
* @return a list of partitions - not full paths
* @exception MetaException if gneneral problem or this table does not exist.
*/
4 metastore/src/java/org/apache/hadoop/hive/metastore/RWTable.java
@@ -104,7 +104,6 @@ public void drop() throws MetaException {
* delete the data, but not the schema
* Can be applied on a partition by partition basis
*
- * @param partition partition in that table or "" or null
* @exception MetaException if any problems instantiating this object
*
*/
@@ -135,8 +134,7 @@ public void truncate(String partition) throws MetaException {
*
* Add column names to a column set ser de table.
*
- * @param tableName the name of the table to alter
- * @param columns the name of the columns
+ * @param schema the property value pairs for the table
* @exception MetaException if any problems altering the table
*
*/
2 metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -43,7 +43,7 @@
/**
* if this is the commit of the first open call then an actual commit is called.
- * @return
+ * @return true or false
*/
public abstract boolean commitTransaction();
2 metastore/src/model/org/apache/hadoop/hive/metastore/model/MFieldSchema.java
@@ -71,7 +71,7 @@ public String getType() {
return type;
}
/**
- * @param type the type to set
+ * @param field the type to set
*/
public void setType(String field) {
this.type = field;
4 ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -277,7 +277,7 @@ public static boolean implicitConvertable(Class<?> from, Class<?> to) {
* Get the UDF method for the name and argumentClasses.
* @param name the name of the UDF
* @param argumentClasses
- * @return
+ * @return The UDF method
*/
public static Method getUDFMethod(String name, List<Class<?>> argumentClasses) {
Class<? extends UDF> udf = getUDFClass(name);
@@ -298,7 +298,7 @@ public static Method getUDFMethod(String name, List<Class<?>> argumentClasses) {
* Get the UDAF evaluator for the name and argumentClasses.
* @param name the name of the UDAF
* @param argumentClasses
- * @return
+ * @return The UDAF evaluator
*/
public static Class<? extends UDAFEvaluator> getUDAFEvaluator(String name, List<Class<?>> argumentClasses) {
Class<? extends UDAF> udf = getUDAF(name);
2 ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
@@ -307,7 +307,7 @@ public void close(boolean abort) throws HiveException {
* jobClose is called from the jobclient side once the job has completed
*
* @param conf Configuration with with which job was submitted
- * @param succes whether the job was completed successfully or not
+ * @param success whether the job was completed successfully or not
*/
public void jobClose(Configuration conf, boolean success) throws HiveException {
if(childOperators == null)
2 ql/src/java/org/apache/hadoop/hive/ql/exec/UDAF.java
@@ -81,7 +81,7 @@ public UDAF(UDAFEvaluatorResolver rslv) {
/**
* Sets the resolver
*
- * @param The method resolver to use for method resolution.
+ * @param rslv The method resolver to use for method resolution.
*/
public void setResolver(UDAFEvaluatorResolver rslv) {
this.rslv = rslv;
2 ql/src/java/org/apache/hadoop/hive/ql/exec/UDF.java
@@ -60,7 +60,7 @@ protected UDF(UDFMethodResolver rslv) {
/**
* Sets the resolver
*
- * @param The method resolver to use for method resolution.
+ * @param rslv The method resolver to use for method resolution.
*/
public void setResolver(UDFMethodResolver rslv) {
this.rslv = rslv;
2 ql/src/java/org/apache/hadoop/hive/ql/io/FlatFileInputFormat.java
@@ -46,7 +46,7 @@
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.util.ReflectionUtils;
-/** An {@link InputFormat} for Plain files with {@link Deserializer} records */
+/** An {@link org.apache.hadoop.mapred.InputFormat} for Plain files with {@link Deserializer} records */
public class FlatFileInputFormat<T> extends FileInputFormat<Void, FlatFileInputFormat.RowContainer<T>> {
/**
5 ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java
@@ -43,7 +43,6 @@
/**
* Constructor
- * @param ctx graph of operators to walk
* @param disp dispatcher to call for each op encountered
*/
public DefaultGraphWalker(Dispatcher disp) {
@@ -67,8 +66,8 @@ public DefaultGraphWalker(Dispatcher disp) {
/**
* Dispatch the current operator
- * @param op operator being walked
- * @param opStack stack of operators encountered
+ * @param nd node being walked
+ * @param ndStack stack of nodes encountered
* @throws SemanticException
*/
public void dispatch(Node nd, Stack<Node> ndStack) throws SemanticException {
2 ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java
@@ -31,7 +31,7 @@
/**
* Dispatcher function.
* @param nd operator to process.
- * @param Stack operator stack to process.
+ * @param stack operator stack to process.
* @param nodeOutputs The argument list of outputs from processing other nodes that are
* passed to this dispatcher from the walker.
* @return Object The return object from the processing call.
1 ql/src/java/org/apache/hadoop/hive/ql/lib/PreOrderWalker.java
@@ -42,7 +42,6 @@
/**
* Constructor
- * @param ctx graph of operators to walk
* @param disp dispatcher to call for each op encountered
*/
public PreOrderWalker(Dispatcher disp) {
16 ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -87,11 +87,11 @@ public synchronized void remove() {
};
/**
- * Returns hive object for the current thread. If one is not initialized then a new one is created
+ * Gets hive object for the current thread. If one is not initialized then a new one is created
* If the new configuration is different in metadata conf vars then a new one is created.
* @param c new Hive Configuration
* @return Hive object for current thread
- * @exception
+ * @throws HiveException
*
*/
public static Hive get(HiveConf c) throws HiveException {
@@ -114,7 +114,7 @@ public static Hive get(HiveConf c) throws HiveException {
* get a connection to metastore. see get(HiveConf) function for comments
* @param c new conf
* @param needsRefresh if true then creates a new one
- * @return
+ * @return The connection to the metastore
* @throws HiveException
*/
public static Hive get(HiveConf c, boolean needsRefresh) throws HiveException {
@@ -471,8 +471,8 @@ public Table getTable(final String dbName, final String tableName,
/**
* @param name
- * @param location_uri
- * @return
+ * @param locationUri
+ * @return true or false
* @throws AlreadyExistsException
* @throws MetaException
* @throws TException
@@ -485,7 +485,7 @@ protected boolean createDatabase(String name, String locationUri) throws Already
/**
* @param name
- * @return
+ * @return true or false
* @throws MetaException
* @throws TException
* @see org.apache.hadoop.hive.metastore.HiveMetaStoreClient#dropDatabase(java.lang.String)
@@ -500,7 +500,6 @@ protected boolean dropDatabase(String name) throws MetaException, TException {
* - If he partition does not exist - one is created
* - files in loadPath are moved into Hive. But the directory itself is not removed.
*
- * @param jc Job configuration
* @param loadPath Directory containing files to load into Table
* @param tableName name of table to be loaded.
* @param partSpec defines which partition needs to be loaded
@@ -524,7 +523,6 @@ public void loadPartition(Path loadPath, String tableName,
* - If table does not exist - an exception is thrown
* - files in loadPath are moved into Hive. But the directory itself is not removed.
*
- * @param jc Job configuration
* @param loadPath Directory containing files to load into Table
* @param tableName name of table to be loaded.
* @param replace if true - replace files in the table, otherwise add files to table
@@ -583,7 +581,7 @@ public Partition createPartition(Table tbl, Map<String, String> partSpec,
/**
* Returns partition metadata
- * @param tableName name of the partition's table
+ * @param tbl the partition's table
* @param partSpec partition keys and values
* @param forceCreate if this is true and partition doesn't exist then a partition is created
* @return result partition object or null if there is no partition
2 ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
@@ -62,7 +62,7 @@
private LinkedHashMap<String, String> spec;
/**
- * @return
+ * @return The values of the partition
* @see org.apache.hadoop.hive.metastore.api.Partition#getValues()
*/
public List<String> getValues() {
10 ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -83,7 +83,7 @@ protected Table() throws HiveException {
*
* @param name the name of this table in the metadb
* @param schema an object that represents the schema that this SerDe must know
- * @param serDe a Class to be used for serializing and deserializing the data
+ * @param deserializer a Class to be used for deserializing the data
* @param dataLocation where is the table ? (e.g., dfs://hadoop001.sf2p.facebook.com:9000/user/facebook/warehouse/example) NOTE: should not be hardcoding this, but ok for now
*
* @exception HiveException on internal error. Note not possible now, but in the future reserve the right to throw an exception
@@ -317,7 +317,7 @@ public void setSchema(Properties schema) {
}
/**
- * @param serDe the serDe to set
+ * @param deserializer the deserializer to set
*/
public void setDeserializer(Deserializer deserializer) {
this.deserializer = deserializer;
@@ -501,23 +501,23 @@ public void setNumBuckets(int nb) {
}
/**
- * @return
+ * @return The owner of the table.
* @see org.apache.hadoop.hive.metastore.api.Table#getOwner()
*/
public String getOwner() {
return tTable.getOwner();
}
/**
- * @return
+ * @return The table parameters.
* @see org.apache.hadoop.hive.metastore.api.Table#getParameters()
*/
public Map<String, String> getParameters() {
return tTable.getParameters();
}
/**
- * @return
+ * @return The retention on the table.
* @see org.apache.hadoop.hive.metastore.api.Table#getRetention()
*/
public int getRetention() {
4 ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
@@ -103,8 +103,6 @@ public String getCurrAliasId() {
* @param rootTasks root tasks for the plan
* @param mvTask the final move task
* @param scratchDir directory for temp destinations
- * @param randomId identifier used for temp destinations
- * @param pathId identifier used for temp destinations
* @param mapCurrCtx operator to task mappings
*/
public GenMRProcContext (
@@ -240,7 +238,7 @@ public int getRandomId() {
}
/**
- * @param randomId identifier used for temp destinations
+ * @param randomid identifier used for temp destinations
*/
public void setRandomId(int randomid) {
this.randomid = randomid;
2 ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcessor.java
@@ -53,7 +53,7 @@ public UnionProcessor() { }
/**
* Transform the query tree. For each union, store the fact whether both the
* sub-queries are map-only
- * @param pactx the current parse context
+ * @param pCtx the current parse context
*/
public ParseContext transform(ParseContext pCtx) throws SemanticException {
// create a walker which walks the tree in a DFS manner while maintaining the operator stack.
2 ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -374,7 +374,7 @@ public static String getFunctionText(ASTNode expr, boolean isFunction) {
* Get the exprNodeDesc
* @param name
* @param children
- * @return
+ * @return The expression node descriptor
*/
public static exprNodeDesc getFuncExprNodeDesc(String name, exprNodeDesc... children) {
return getFuncExprNodeDesc(name, Arrays.asList(children));
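The one-line body shown here is the standard varargs-to-list delegation idiom: the varargs overload is call-site sugar, and the List overload does the work. A generic, self-contained illustration (all names here are invented for the sketch):

import java.util.Arrays;
import java.util.List;

public class VarargsDelegation {
  // Varargs overload: convenient when the caller has a fixed number of args.
  public static String join(String sep, String... parts) {
    return join(sep, Arrays.asList(parts));
  }

  // List overload: the single place the real work happens.
  public static String join(String sep, List<String> parts) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < parts.size(); i++) {
      if (i > 0) sb.append(sep);
      sb.append(parts.get(i));
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    System.out.println(join("+", "a", "b", "c")); // prints a+b+c
  }
}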
4 ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
@@ -126,7 +126,7 @@ public void addConvertedNode(exprNodeDesc oldNode, exprNodeDesc newNode) {
/**
* Returns true if the specified expression is pushdown candidate else false
* @param expr
- * @return
+ * @return true or false
*/
public boolean isCandidate(exprNodeDesc expr) {
ExprInfo ei = exprInfoMap.get(expr);
@@ -151,7 +151,7 @@ public void setIsCandidate(exprNodeDesc expr, boolean b) {
/**
* Returns the alias of the specified expr
* @param expr
- * @return
+ * @return The alias of the expression
*/
public String getAlias(exprNodeDesc expr) {
ExprInfo ei = exprInfoMap.get(expr);
2 ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
@@ -226,7 +226,7 @@ public static ExprWalkerInfo extractPushdownPreds(OpWalkerInfo opContext,
* @param opContext operator context used for resolving column references
* @param op operator of the predicates being processed
* @param preds
- * @return
+ * @return The expression walker information
* @throws SemanticException
*/
public static ExprWalkerInfo extractPushdownPreds(OpWalkerInfo opContext,
2 ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
@@ -279,7 +279,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
/**
* @param nd
- * @param extractPushdownPreds
+ * @param ewi
*/
protected void logExpr(Node nd, ExprWalkerInfo ewi) {
for (Entry<String, List<exprNodeFuncDesc>> e : ewi.getFinalCandidates().entrySet()) {
2 ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -187,7 +187,7 @@ public static SessionState get() {
/**
 * get hiveHistory object which does structured logging
- * @return
+ * @return The hive history object
*/
public HiveHistory getHiveHistory(){
return hiveHist;
2 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java
@@ -25,7 +25,7 @@
/**
* This constructor sets the resolver to be used for comparison operators.
- * See {@link UDFMethodResolver}
+ * See {@link org.apache.hadoop.hive.ql.exec.UDFMethodResolver}
*/
public UDFBaseCompare() {
super(null);
2 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java
@@ -30,7 +30,7 @@
/**
* Constructor.
* This constructor sets the resolver to be used for comparison operators.
- * See {@link UDFMethodResolver}
+ * See {@link org.apache.hadoop.hive.ql.exec.UDFMethodResolver}
*/
public UDFBaseNumericOp() {
super(null);
3 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateAdd.java
@@ -46,7 +46,8 @@ public UDFDateAdd() {
* NOTE: This is a subset of what MySQL offers as:
* http://dev.mysql.com/doc/refman/5.1/en/date-and-time-functions.html#function_date-add
*
- * @param date1 the date string in the format of "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd".
+ * @param dateString1 the date string in the format of "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd".
+ * @param days The number of days to add.
* @return the date in the format of "yyyy-MM-dd".
*/
public String evaluate(String dateString1, int days) {
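The corrected @param tags describe the accepted input formats. A standalone sketch of the documented behavior, written with SimpleDateFormat and Calendar rather than the UDF's actual internals:

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;

public class DateAddSketch {
  // Accepts "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd", per the javadoc above.
  private static final SimpleDateFormat WITH_TIME =
      new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
  private static final SimpleDateFormat DATE_ONLY =
      new SimpleDateFormat("yyyy-MM-dd");

  public static String dateAdd(String dateString, int days) {
    try {
      Calendar cal = Calendar.getInstance();
      try {
        cal.setTime(WITH_TIME.parse(dateString));
      } catch (ParseException e) {
        cal.setTime(DATE_ONLY.parse(dateString));
      }
      cal.add(Calendar.DAY_OF_MONTH, days);
      // Result is always "yyyy-MM-dd", matching the @return doc.
      return DATE_ONLY.format(cal.getTime());
    } catch (ParseException e) {
      return null; // Hive UDFs conventionally return NULL on bad input.
    }
  }

  public static void main(String[] args) {
    System.out.println(dateAdd("2009-04-10", 5));  // 2009-04-15
    System.out.println(dateAdd("2009-04-10", -5)); // date_sub equivalent
  }
}

UDFDateSub, covered by the next hunk, documents the mirror-image semantics: the same input formats with the day count subtracted instead of added.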
3 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateSub.java
@@ -46,7 +46,8 @@ public UDFDateSub() {
* NOTE: This is a subset of what MySQL offers as:
* http://dev.mysql.com/doc/refman/5.1/en/date-and-time-functions.html#function_date-sub
*
- * @param date1 the date string in the format of "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd".
+ * @param dateString1 the date string in the format of "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd".
+ * @param days the number of days to subtract.
* @return the date in the format of "yyyy-MM-dd".
*/
public String evaluate(String dateString1, int days) {
2 ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java
@@ -55,7 +55,7 @@ public UDFJson() {
* Syntax not supported that's worth noticing:
* '' : Zero length string as key
* .. : Recursive descent
- * @ : Current object/element
+ * &#064; : Current object/element
* () : Script expression
* ?() : Filter (script) expression.
* [,] : Union operator
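For orientation, a hypothetical call exercising the supported subset of the path syntax; this assumes UDFJson exposes a two-argument evaluate(String json, String path), which is not shown in this hunk:

import org.apache.hadoop.hive.ql.udf.UDFJson;

public class JsonPathExample {
  public static void main(String[] args) {
    UDFJson udf = new UDFJson();
    String json = "{\"store\":{\"book\":[{\"title\":\"Hive\"}]}}";

    // Supported: $ root, . child access, [n] subscript.
    System.out.println(udf.evaluate(json, "$.store.book[0].title"));

    // Unsupported per the list above: @ (current object), ..
    // (recursive descent), () script and ?() filter expressions.
  }
}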
2 serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
@@ -125,7 +125,7 @@ public void initialize(Configuration job, Properties tbl) throws SerDeException
/**
* Split the row into columns.
* @param limit up to limit columns will be produced (the last column takes all the rest), -1 for unlimited.
- * @return
+ * @return The ColumnSet object
* @throws Exception
*/
public static Object deserialize(ColumnSet c, String row,
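The limit contract documented here matches java.lang.String.split(String, int), so the behavior is easy to demonstrate standalone (an illustration only, not the serde's actual code path):

import java.util.Arrays;

public class SplitLimitExample {
  public static void main(String[] args) {
    String row = "a,b,c,d"; // stand-in for a separator-delimited row

    // limit = 3: at most three columns; the last takes all the rest.
    System.out.println(Arrays.toString(row.split(",", 3)));
    // prints [a, b, c,d]

    // limit = -1: unlimited columns.
    System.out.println(Arrays.toString(row.split(",", -1)));
    // prints [a, b, c, d]
  }
}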