Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

HIVE-2935 : Implement HiveServer2 Core code changes (4th patch of 4) …

…(Carl Steinbach and others via Ashutosh Chauhan)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1455659 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information...
commit 6b27df548a0bd3f7f444dc79f74aefef3cb5a194 1 parent fa8dec3
@ashutoshc ashutoshc authored
Showing with 22,019 additions and 624 deletions.
  1. +32 −9 ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
  2. +2 −3 bin/ext/beeline.sh
  3. +33 −0 bin/ext/hiveserver2.sh
  4. +21 −0 bin/hiveserver2
  5. +38 −3 build-common.xml
  6. +13 −4 build.properties
  7. +134 −115 build.xml
  8. +5 −0 cli/build.xml
  9. +5 −0 common/build.xml
  10. +7 −0 common/src/gen/org/apache/hive/common/package-info.java
  11. +48 −0 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
  12. +78 −0 common/src/java/org/apache/hive/common/HiveVersionAnnotation.java
  13. +810 −0 common/src/java/org/apache/hive/common/util/HiveStringUtils.java
  14. +126 −0 common/src/java/org/apache/hive/common/util/HiveVersionInfo.java
  15. +205 −0 common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
  16. +74 −0 common/src/scripts/saveVersion.sh
  17. +66 −0 common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java
  18. +90 −4 conf/hive-default.xml.template
  19. +6 −3 contrib/build.xml
  20. +25 −0 data/files/types/primitives/090101.txt
  21. +25 −0 data/files/types/primitives/090201.txt
  22. +25 −0 data/files/types/primitives/090301.txt
  23. +25 −0 data/files/types/primitives/090401.txt
  24. +10 −0 data/scripts/q_test_cleanup.sql
  25. +132 −0 data/scripts/q_test_init.sql
  26. +51 −0 eclipse-templates/BeeLine.launchtemplate
  27. +3 −3 eclipse-templates/{HiveBeeLine.launchtemplate → HiveServer2.launchtemplate}
  28. +43 −0 eclipse-templates/TestBeeLineDriver.launchtemplate
  29. +43 −0 eclipse-templates/TestEmbeddedThriftCLIService.launchtemplate
  30. +43 −0 eclipse-templates/TestHiveServer.launchtemplate
  31. +44 −0 eclipse-templates/TestJdbc2.launchtemplate
  32. +43 −0 eclipse-templates/TestRemoteThriftCLIService.launchtemplate
  33. +6 −3 hbase-handler/build.xml
  34. +1 −1  ivy/ivysettings.xml
  35. +4 −2 ivy/libraries.properties
  36. +1,106 −0 jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
  37. +2,442 −0 jdbc/src/java/org/apache/hive/jdbc/HiveCallableStatement.java
  38. +743 −0 jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
  39. +131 −0 jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java
  40. +1,097 −0 jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java
  41. +297 −0 jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
  42. +53 −0 jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java
  43. +1,280 −0 jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
  44. +277 −0 jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
  45. +195 −0 jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java
  46. +566 −0 jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
  47. +182 −0 jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
  48. +59 −0 jdbc/src/java/org/apache/hive/jdbc/JdbcTable.java
  49. +252 −0 jdbc/src/java/org/apache/hive/jdbc/Utils.java
  50. +0 −112 jdbc/src/java/org/apache/hive/jdbc/beeline/HiveBeeline.java
  51. +0 −266 jdbc/src/java/org/apache/hive/jdbc/beeline/OptionsProcessor.java
  52. +1,255 −0 jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
  53. +0 −9 metastore/build.xml
  54. +31 −17 ql/build.xml
  55. +2 −0  ql/ivy.xml
  56. +6 −1 ql/src/java/org/apache/hadoop/hive/ql/Driver.java
  57. +2 −3 ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
  58. +24 −0 ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
  59. +18 −10 ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
  60. +8 −3 ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
  61. +0 −21 serde/build.xml
  62. +0 −9 service/build.xml
  63. +997 −0 service/if/TCLIService.thrift
  64. +184 −0 service/src/java/org/apache/hive/service/AbstractService.java
  65. +121 −0 service/src/java/org/apache/hive/service/BreakableService.java
  66. +133 −0 service/src/java/org/apache/hive/service/CompositeService.java
  67. +83 −0 service/src/java/org/apache/hive/service/FilterService.java
  68. +122 −0 service/src/java/org/apache/hive/service/Service.java
  69. +38 −0 service/src/java/org/apache/hive/service/ServiceException.java
  70. +141 −0 service/src/java/org/apache/hive/service/ServiceOperations.java
  71. +46 −0 service/src/java/org/apache/hive/service/ServiceStateChangeListener.java
  72. +31 −0 service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
  73. +64 −0 service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
  74. +48 −0 service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
  75. +129 −0 service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
  76. +77 −0 service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
  77. +70 −0 service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
  78. +37 −0 service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
  79. +141 −0 service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
  80. +189 −0 service/src/java/org/apache/hive/service/auth/PlainSaslServer.java
  81. +78 −0 service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
  82. +65 −0 service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java
  83. +328 −0 service/src/java/org/apache/hive/service/cli/CLIService.java
  84. +152 −0 service/src/java/org/apache/hive/service/cli/CLIServiceClient.java
  85. +69 −0 service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
  86. +97 −0 service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java
  87. +168 −0 service/src/java/org/apache/hive/service/cli/ColumnValue.java
  88. +179 −0 service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java
  89. +54 −0 service/src/java/org/apache/hive/service/cli/FetchOrientation.java
  90. +96 −0 service/src/java/org/apache/hive/service/cli/GetInfoType.java
  91. +82 −0 service/src/java/org/apache/hive/service/cli/GetInfoValue.java
  92. +78 −0 service/src/java/org/apache/hive/service/cli/Handle.java
  93. +113 −0 service/src/java/org/apache/hive/service/cli/HandleIdentifier.java
  94. +124 −0 service/src/java/org/apache/hive/service/cli/HiveSQLException.java
  95. +90 −0 service/src/java/org/apache/hive/service/cli/ICLIService.java
  96. +95 −0 service/src/java/org/apache/hive/service/cli/OperationHandle.java
  97. +92 −0 service/src/java/org/apache/hive/service/cli/OperationState.java
  98. +58 −0 service/src/java/org/apache/hive/service/cli/OperationType.java
  99. +47 −0 service/src/java/org/apache/hive/service/cli/PatternOrIdentifier.java
  100. +78 −0 service/src/java/org/apache/hive/service/cli/Row.java
  101. +124 −0 service/src/java/org/apache/hive/service/cli/RowSet.java
  102. +58 −0 service/src/java/org/apache/hive/service/cli/SessionHandle.java
  103. +94 −0 service/src/java/org/apache/hive/service/cli/TableSchema.java
  104. +391 −0 service/src/java/org/apache/hive/service/cli/Type.java
  105. +71 −0 service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
  106. +38 −0 service/src/java/org/apache/hive/service/cli/operation/AddResourceOperation.java
  107. +38 −0 service/src/java/org/apache/hive/service/cli/operation/DeleteResourceOperation.java
  108. +38 −0 service/src/java/org/apache/hive/service/cli/operation/DfsOperation.java
  109. +59 −0 service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java
  110. +68 −0 service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
  111. +198 −0 service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
  112. +120 −0 service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
  113. +89 −0 service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
  114. +81 −0 service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
  115. +115 −0 service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
  116. +135 −0 service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
  117. +202 −0 service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
  118. +99 −0 service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
  119. +127 −0 service/src/java/org/apache/hive/service/cli/operation/Operation.java
  120. +172 −0 service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
  121. +256 −0 service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
  122. +38 −0 service/src/java/org/apache/hive/service/cli/operation/SetOperation.java
  123. +167 −0 service/src/java/org/apache/hive/service/cli/session/HiveSession.java
  124. +344 −0 service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
  125. +167 −0 service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
  126. +87 −0 service/src/java/org/apache/hive/service/cli/session/HiveSessionProxy.java
  127. +153 −0 service/src/java/org/apache/hive/service/cli/session/SessionManager.java
  128. +37 −0 service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftCLIService.java
  129. +423 −0 service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
  130. +373 −0 service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java
  131. +94 −0 service/src/java/org/apache/hive/service/server/HiveServer2.java
  132. +116 −0 service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
  133. +60 −0 service/src/test/org/apache/hive/service/cli/TestEmbeddedThriftCLIService.java
  134. +103 −0 service/src/test/org/apache/hive/service/server/TestHiveServer2Concurrency.java
  135. +23 −2 shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
  136. +32 −5 shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
  137. +54 −14 shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
  138. +32 −2 shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
  139. +7 −0 shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
View
41 ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
@@ -108,6 +108,8 @@ public boolean accept(File filePath) {
}
private List<String> templatePaths = new ArrayList<String>();
+
+ private String hiveRootDirectory;
private String outputDirectory;
@@ -185,6 +187,18 @@ public String getTemplate() {
return template;
}
+ public void setHiveRootDirectory(File hiveRootDirectory) {
+ try {
+ this.hiveRootDirectory = hiveRootDirectory.getCanonicalPath();
+ } catch (IOException ioe) {
+ throw new BuildException(ioe);
+ }
+ }
+
+ public String getHiveRootDirectory() {
+ return hiveRootDirectory;
+ }
+
public void setTemplatePath(String templatePath) throws Exception {
templatePaths.clear();
for (String relativePath : templatePath.split(",")) {
@@ -302,14 +316,15 @@ public void execute() throws BuildException {
List<File> qFiles = new ArrayList<File>();
HashMap<String, String> qFilesMap = new HashMap<String, String>();
+ File hiveRootDir = null;
+ File queryDir = null;
File outDir = null;
File resultsDir = null;
File logDir = null;
try {
- File inpDir = null;
if (queryDirectory != null) {
- inpDir = new File(queryDirectory);
+ queryDir = new File(queryDirectory);
}
if (queryFile != null && !queryFile.equals("")) {
@@ -318,31 +333,37 @@ public void execute() throws BuildException {
if (includeOnly != null && !includeOnly.contains(qFile)) {
continue;
}
- if (null != inpDir) {
- qFiles.add(new File(inpDir, qFile));
+ if (null != queryDir) {
+ qFiles.add(new File(queryDir, qFile));
} else {
qFiles.add(new File(qFile));
}
}
} else if (queryFileRegex != null && !queryFileRegex.equals("")) {
- qFiles.addAll(Arrays.asList(inpDir.listFiles(
+ qFiles.addAll(Arrays.asList(queryDir.listFiles(
new QFileRegexFilter(queryFileRegex, includeOnly))));
} else if (runDisabled != null && runDisabled.equals("true")) {
- qFiles.addAll(Arrays.asList(inpDir.listFiles(new DisabledQFileFilter(includeOnly))));
+ qFiles.addAll(Arrays.asList(queryDir.listFiles(new DisabledQFileFilter(includeOnly))));
} else {
- qFiles.addAll(Arrays.asList(inpDir.listFiles(new QFileFilter(includeOnly))));
+ qFiles.addAll(Arrays.asList(queryDir.listFiles(new QFileFilter(includeOnly))));
}
if (excludeQueryFile != null && !excludeQueryFile.equals("")) {
// Exclude specified query files, comma separated
for (String qFile : excludeQueryFile.split(",")) {
- if (null != inpDir) {
- qFiles.remove(new File(inpDir, qFile));
+ if (null != queryDir) {
+ qFiles.remove(new File(queryDir, qFile));
} else {
qFiles.remove(new File(qFile));
}
}
}
+
+ hiveRootDir = new File(hiveRootDirectory);
+ if (!hiveRootDir.exists()) {
+ throw new BuildException("Hive Root Directory "
+ + hiveRootDir.getCanonicalPath() + " does not exist");
+ }
Collections.sort(qFiles);
for (File qFile : qFiles) {
@@ -397,6 +418,8 @@ public void execute() throws BuildException {
// For each of the qFiles generate the test
VelocityContext ctx = new VelocityContext();
ctx.put("className", className);
+ ctx.put("hiveRootDir", getEscapedCanonicalPath(hiveRootDir));
+ ctx.put("queryDir", getEscapedCanonicalPath(queryDir));
ctx.put("qfiles", qFiles);
ctx.put("qfilesMap", qFilesMap);
ctx.put("resultsDir", getEscapedCanonicalPath(resultsDir));
View
5 bin/ext/beeline.sh
@@ -18,13 +18,12 @@ THISSERVICE=beeline
export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
beeline () {
-
- CLASS=org.apache.hive.jdbc.beeline.HiveBeeline;
+ CLASS=org.apache.hive.beeline.BeeLine;
execHiveCmd $CLASS "$@"
}
beeline_help () {
- CLASS=org.apache.hive.jdbc.beeline.HiveBeeline;
+ CLASS=org.apache.hive.beeline.BeeLine;
execHiveCmd $CLASS "--help"
}
View
33 bin/ext/hiveserver2.sh
@@ -0,0 +1,33 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=hiveserver2
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+hiveserver2() {
+ echo "Starting HiveServer2"
+ CLASS=org.apache.hive.service.server.HiveServer2
+ if $cygwin; then
+ HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+ fi
+ JAR=${HIVE_LIB}/hive-service-*.jar
+
+ exec $HADOOP jar $JAR $CLASS "$@"
+}
+
+hiveserver2_help() {
+ hiveserver2 -h
+}
+
View
21 bin/hiveserver2
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hive --service hiveserver2 "$@"
View
41 build-common.xml
@@ -38,7 +38,9 @@
<property name="build.classes" location="${build.dir}/classes"/>
<property name="build.encoding" value="ISO-8859-1"/>
+ <!-- Thrift codegen properties -->
<property name="thrift.args" value="-I ${thrift.home} --gen java:beans,hashcode --gen cpp --gen php --gen py --gen rb"/>
+ <property name="thrift.gen.dir" value="${basedir}/src/gen/thrift"/>
<property name="hadoop.conf.dir" location="${hadoop.root}/conf"/>
@@ -77,6 +79,7 @@
<pathelement location="" />
<pathelement location="${test.src.data.dir}/conf"/>
<pathelement location="${hive.conf.dir}"/>
+ <pathelement location="${build.dir.hive}/beeline/test/classes"/>
<pathelement location="${build.dir.hive}/cli/test/classes"/>
<pathelement location="${build.dir.hive}/common/test/classes"/>
<pathelement location="${build.dir.hive}/hbase-handler/test/classes"/>
@@ -97,6 +100,7 @@
<!-- test directory may contain hadoop jars used by tests only (e.g. mini cluster) -->
<fileset dir="${hive.root}/build/ivy/lib/test" includes="*.jar" erroronmissingdir="false"
excludes="**/hive_*.jar,**/hive-*.jar"/>
+    <fileset dir="${hive.root}/build/ivy/lib/test" includes="hive-testutils*.jar" erroronmissingdir="false"/>
<!-- we strip out hadoop jars present in places other than the hadoop shimmed dir-->
<fileset dir="${hive.root}/build/ivy/lib/default" includes="*.jar" erroronmissingdir="false"
@@ -181,6 +185,7 @@
<pathelement location="${build.dir.hive}/classes"/>
<fileset dir="${build.dir.hive}" includes="*/*.jar"/>
<fileset dir="${hive.root}/lib" includes="*.jar"/>
+ <fileset dir="${build.ivy.lib.dir}/default" includes="junit*.jar" />
<fileset dir="${build.ivy.lib.dir}/hadoop0.${hadoop.mr.rev}.shim" includes="*.jar" erroronmissingdir="false" />
<fileset dir="${build.ivy.lib.dir}/default" includes="*.jar"
excludes="**/hadoop-*.jar"
@@ -193,6 +198,7 @@
<pathelement location="${build.dir.hive}/serde/classes"/>
<pathelement location="${build.dir.hive}/metastore/classes"/>
<pathelement location="${build.dir.hive}/ql/classes"/>
+ <pathelement location="${build.dir.hive}/beeline/classes"/>
<pathelement location="${build.dir.hive}/cli/classes"/>
<pathelement location="${build.dir.hive}/shims/classes"/>
<pathelement location="${build.dir.hive}/hwi/classes"/>
@@ -226,6 +232,7 @@
<echo message="Project: ${ant.project.name}"/>
<mkdir dir="${test.data.dir}"/>
<mkdir dir="${test.log.dir}/clientpositive"/>
+ <mkdir dir="${test.log.dir}/beelinepositive"/>
<mkdir dir="${test.log.dir}/clientnegative"/>
<mkdir dir="${test.log.dir}/positive"/>
<mkdir dir="${test.log.dir}/negative"/>
@@ -280,7 +287,7 @@
<javac
encoding="${build.encoding}"
srcdir="${test.src.dir}"
- includes="org/apache/hadoop/**/*.java"
+ includes="org/apache/**/hive/**/*.java"
excludes="**/TestSerDe.java"
destdir="${test.build.classes}"
debug="${javac.debug}"
@@ -295,7 +302,7 @@
<javac
encoding="${build.encoding}"
srcdir="${test.build.src}"
- includes="org/apache/hadoop/**/*.java"
+ includes="org/apache/**/hive/**/*.java"
destdir="${test.build.classes}"
debug="${javac.debug}"
optimize="${javac.optimize}"
@@ -352,6 +359,12 @@
</not>
</condition>
+ <condition property="disableserver" value="false">
+ <not>
+ <isset property="disableserver"/>
+ </not>
+ </condition>
+
<condition property="clustermode" value="">
<not>
<isset property="clustermode"/>
@@ -413,8 +426,10 @@
</then>
</if>
<junit showoutput="${test.output}" printsummary="yes" haltonfailure="no"
- fork="yes" maxmemory="512m" dir="${basedir}" timeout="${test.junit.timeout}"
+ fork="yes" maxmemory="${test.junit.maxmemory}" dir="${basedir}" timeout="${test.junit.timeout}"
errorProperty="tests.failed" failureProperty="tests.failed" filtertrace="off">
+ <jvmarg value="-XX:+HeapDumpOnOutOfMemoryError"/>
+ <jvmarg value="-XX:HeapDumpPath=${hive.root}"/>
<env key="LANG" value="${test.lang}"/>
<env key="HIVE_HADOOP_TEST_CLASSPATH" value="${hadoop.testcp}"/>
<env key="HADOOP_HOME" value="${hadoop.root}"/>
@@ -422,6 +437,7 @@
<env key="TZ" value="US/Pacific"/>
<sysproperty key="test.output.overwrite" value="${overwrite}"/>
<sysproperty key="test.service.standalone.server" value="${standalone}"/>
+ <sysproperty key="test.service.disable.server" value="${disableserver}"/>
<sysproperty key="log4j.configuration" value="file:///${test.src.data.dir}/conf/hive-log4j.properties"/>
<sysproperty key="derby.stream.error.file" value="${test.build.dir}/derby.log"/>
<sysproperty key="hive.aux.jars.path" value="file:///${test.build.dir}/test-udfs.jar"/>
@@ -440,9 +456,11 @@
<sysproperty key="build.dir.hive" value="${build.dir.hive}"/>
<sysproperty key="build.ivy.lib.dir" value="${build.ivy.lib.dir}"/>
<sysproperty key="derby.version" value="${derby.version}"/>
+ <sysproperty key="hive.root" value="${hive.root}"/>
<sysproperty key="hive.version" value="${version}"/>
<sysproperty key="java.net.preferIPv4Stack" value="${java.net.preferIPv4Stack}"/>
<sysproperty key="hadoop.bin.path" value="${test.hadoop.bin.path}${junit.script.extension}"/>
+ <sysproperty key="test.concurrency.num.threads" value="${test.concurrency.num.threads}"/>
<jvmarg line="${jvm.args}"/>
<classpath refid="test.local.classpath"/>
@@ -486,6 +504,23 @@
</condition>
</target>
+ <target name="thriftif" depends="check-thrift-home">
+ <echo message="Project: ${ant.project.name}"/>
+ <delete dir="${thrift.gen.dir}"/>
+ <mkdir dir="${thrift.gen.dir}"/>
+ <for param="thrift.file">
+ <path>
+ <fileset dir="." includes="if/*.thrift,if/test/*.thrift" />
+ </path>
+ <sequential>
+ <echo message="Generating Thrift code for @{thrift.file}"/>
+ <exec executable="${thrift.home}/bin/thrift" failonerror="true" dir=".">
+ <arg line="${thrift.args} -I ${basedir}/include -I ${basedir}/.. -o ${thrift.gen.dir} @{thrift.file} " />
+ </exec>
+ </sequential>
+ </for>
+ </target>
+
<target name="check-ivy" depends="ivy-init-settings">
<echo message="Project: ${ant.project.name}"/>
<available file="${basedir}/ivy.xml" property="ivy.present"/>
View
17 build.properties
@@ -72,8 +72,8 @@ jsp.test.jar=${hadoop.root}/lib/jetty-ext/jsp-api.jar
common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
# module names needed for build process
-iterate.hive.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,hwi,hbase-handler,pdk,builtins
-iterate.hive.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,hwi,hbase-handler,pdk,builtins
+iterate.hive.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,pdk,builtins,testutils
+iterate.hive.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,pdk,builtins,testutils
iterate.hive.tests=ql,contrib,hbase-handler,hwi,jdbc,metastore,odbc,serde,service
iterate.hive.thrift=ql,service,metastore,serde
iterate.hive.protobuf=ql
@@ -92,7 +92,16 @@ test.junit.timeout=43200000
# Use this property to selectively disable tests from the command line:
# ant test -Dtest.junit.exclude="**/TestCliDriver.class"
# ant test -Dtest.junit.exclude="**/Test*CliDriver.class,**/TestPartitions.class"
-test.junit.exclude=
+test.junit.exclude="**/TestBeeLineDriver.class, **/TestHiveServer2Concurrency.class"
+test.continue.on.failure=false
+
+test.submodule.exclude=
+test.junit.maxmemory=512m
+
+test.concurrency.num.threads=1
+#test.beelinepositive.exclude=add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rename.q,exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_overwrite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_column.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q
+
+
#
# Ivy Properties
@@ -108,7 +117,7 @@ ivy.changingPattern=.*SNAPSHOT
ivy.publish.pattern=[artifact]-[revision].[ext]
ivy.artifact.retrieve.pattern=[conf]/[artifact]-[revision](-[classifier]).[ext]
ivysettings.xml=${ivy.conf.dir}/ivysettings.xml
-ivyresolvelog=download-only
+ivyresolvelog=default
ivy.mvn.repo=http://repo2.maven.org/maven2
ivy_repo_url=${ivy.mvn.repo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar
hive.ivy.org=org.apache.hive
View
249 build.xml
@@ -138,6 +138,7 @@
<pathelement location="${build.dir.hive}/metastore/classes"/>
<pathelement location="${build.dir.hive}/ql/classes"/>
<pathelement location="${build.dir.hive}/cli/classes"/>
+ <pathelement location="${build.dir.hive}/beeline/classes"/>
<fileset dir="${hive.root}/data" includes="files/*.jar"/>
<fileset dir="${hive.root}/ql" includes="lib/*.jar"/>
<fileset dir="${hive.root}/cli" includes="lib/*.jar"/>
@@ -391,16 +392,18 @@
<mkdir dir="${target.lib.dir}/php"/>
<copy file="${hive.root}/bin/hive" todir="${target.bin.dir}"/>
<copy file="${hive.root}/bin/metatool" todir="${target.bin.dir}"/>
+ <copy file="${hive.root}/bin/beeline" todir="${target.bin.dir}"/>
+ <copy file="${hive.root}/bin/hiveserver2" todir="${target.bin.dir}"/>
+ <copy file="${hive.root}/bin/hive-config.sh" todir="${target.bin.dir}"/>
+ <copy todir="${target.bin.dir}/ext">
+ <fileset dir="${hive.root}/bin/ext" excludes="${vcs.excludes}"/>
+ </copy>
<copy todir="${target.scripts.dir}/metastore/upgrade">
<fileset dir="${hive.root}/metastore/scripts/upgrade" excludes="${vcs.excludes}"/>
</copy>
<copy todir="${target.scripts.dir}/pdk">
<fileset dir="${hive.root}/pdk/scripts" excludes="${vcs.excludes},README"/>
</copy>
- <copy todir="${target.bin.dir}/ext">
- <fileset dir="${hive.root}/bin/ext" excludes="${vcs.excludes}"/>
- </copy>
- <copy file="${hive.root}/bin/hive-config.sh" todir="${target.bin.dir}"/>
<copy file="${basedir}/conf/hive-default.xml.template" todir="${target.conf.dir}">
<filterset>
<filter token="VERSION" value="${version}"/>
@@ -447,6 +450,9 @@
<copy todir="${target.lib.dir}/py/hive_service">
<fileset dir="${hive.root}/service/src/gen/thrift/gen-py/hive_service" excludes="${vcs.excludes}"/>
</copy>
+ <copy todir="${target.lib.dir}/py/TCLIService">
+ <fileset dir="${hive.root}/service/src/gen/thrift/gen-py/TCLIService" excludes="${vcs.excludes}"/>
+ </copy>
<copy todir="${target.lib.dir}/py/queryplan">
<fileset dir="${hive.root}/ql/src/gen/thrift/gen-py/queryplan" excludes="${vcs.excludes}"/>
</copy>
@@ -460,14 +466,15 @@
<exclude name="build/hadoopcore/*.jar"/>
<exclude name="**/ant-contrib*.jar"/>
<exclude name="**/hive-anttasks*.jar"/>
+ <exclude name="**/hive-testutils*.jar"/>
</fileset>
+ <fileset file="${build.dir.hive}/beeline/hive-beeline-${version}.jar"/>
<fileset file="${build.dir.hive}/cli/hive-cli-${version}.jar"/>
<fileset file="${build.dir.hive}/common/hive-common-${version}.jar"/>
<fileset file="${build.dir.hive}/ql/hive-exec-${version}.jar"/>
<fileset file="${build.dir.hive}/metastore/hive-metastore-${version}.jar"/>
<fileset file="${build.dir.hive}/hwi/hive-hwi-${version}.war"/>
<fileset file="${build.dir.hive}/contrib/hive-contrib-${version}.jar"/>
- <fileset file="${build.dir.hive}/contrib/hive-hbase_handler-${version}.jar"/>
<fileset dir="${build.dir.hive}/ivy/lib/default">
<include name="*.jar"/>
<exclude name="*.tar.gz"/>
@@ -660,6 +667,7 @@
<packageset dir="metastore/src/gen/thrift/gen-javabean"/>
<packageset dir="metastore/src/model"/>
<packageset dir="cli/src/java"/>
+ <packageset dir="beeline/src/java"/>
<packageset dir="ql/src/java"/>
<packageset dir="ql/src/test"/>
<packageset dir="ql/src/gen/thrift/gen-javabean"/>
@@ -884,89 +892,91 @@
</available>
</target>
- <!-- ================================================================== -->
- <!-- Findbugs -->
- <!-- ================================================================== -->
-
-
- <target name="ivy-resolve-findbugs" depends="ivy-init-settings">
- <echo message="Project: ${ant.project.name}" />
- <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="findbugs" log="${ivyresolvelog}" />
- </target>
-
- <property name="ivy.findbugs.retrieve.pattern" value="[conf]/[artifact].[ext]" />
-
- <target name="ivy-retrieve-findbugs" depends="ivy-resolve-findbugs" description="Retrieve Ivy-managed artifacts for the checkstyle configurations">
- <echo message="Project: ${ant.project.name}" />
- <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" pattern="${build.ivy.lib.dir}/${ivy.findbugs.retrieve.pattern}" log="${ivyresolvelog}" />
- </target>
-
- <target name="check-for-findbugs">
- <echo message="Project: ${ant.project.name}" />
- <path id="findbugs.classpath">
- <fileset dir="${build.ivy.lib.dir}/findbugs">
- <include name="*.jar" />
- </fileset>
- </path>
- <pathconvert property="findbugs.classpath" refid="findbugs.classpath" />
- </target>
-
- <target name="findbugs" depends="init,ivy-retrieve-findbugs,check-for-findbugs" description="Run findbugs on source files">
- <echo message="Project: ${ant.project.name}" />
-
- <property name="findbugs.conf.dir" location="${hive.root}/findbugs" />
- <property name="findbugs.build.dir" location="${build.dir.hive}/findbugs" />
- <property name="findbugs.exclude.file" value="${findbugs.conf.dir}/findbugs-exclude.xml"/>
- <property name="findbugs.report.htmlfile" value="${findbugs.build.dir}/findbugs-report.html"/>
- <property name="findbugs.report.xmlfile" value="${findbugs.build.dir}/findbugs-report.xml"/>
-
- <mkdir dir="${findbugs.build.dir}" />
- <taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask" classpath="${build.ivy.lib.dir}/findbugs/findbugs-ant.jar" />
-
- <findbugs classpath="${findbugs.classpath}" pluginList="" effort="max" output="xml" outputFile="${findbugs.report.xmlfile}" excludeFilter="${findbugs.exclude.file}">
-
- <auxClasspath>
- <fileset dir="${build.dir.hive}">
- <include name="**/*.jar" />
- </fileset>
- </auxClasspath>
-
- <sourcePath path="${hive.root}/ant" />
- <sourcePath path="${hive.root}/builtins" />
- <sourcePath path="${hive.root}/cli" />
- <sourcePath path="${hive.root}/common" />
- <sourcePath path="${hive.root}/contrib" />
- <sourcePath path="${hive.root}/hbase-handler" />
- <sourcePath path="${hive.root}/hwi" />
- <sourcePath path="${hive.root}/jdbc" />
- <sourcePath path="${hive.root}/metastore" />
- <sourcePath path="${hive.root}/odbc" />
- <sourcePath path="${hive.root}/pdk" />
- <sourcePath path="${hive.root}/ql" />
- <sourcePath path="${hive.root}/serde" />
- <sourcePath path="${hive.root}/service" />
- <sourcePath path="${hive.root}/shims" />
- <class location="${build.dir.hive}/anttasks/classes" />
- <class location="${build.dir.hive}/builtins/classes" />
- <class location="${build.dir.hive}/cli/classes" />
- <class location="${build.dir.hive}/common/classes" />
- <class location="${build.dir.hive}/contrib/classes" />
- <class location="${build.dir.hive}/hbase-handler/classes" />
- <class location="${build.dir.hive}/hwi/classes" />
- <class location="${build.dir.hive}/jdbc/classes" />
- <class location="${build.dir.hive}/metastore/classes" />
- <class location="${build.dir.hive}/pdk/classes" />
- <class location="${build.dir.hive}/ql/classes" />
- <class location="${build.dir.hive}/serde/classes" />
- <class location="${build.dir.hive}/service/classes" />
- <class location="${build.dir.hive}/shims/classes" />
- </findbugs>
- <!--
+ <!-- ================================================================== -->
+ <!-- Findbugs -->
+ <!-- ================================================================== -->
+
+
+ <target name="ivy-resolve-findbugs" depends="ivy-init-settings">
+ <echo message="Project: ${ant.project.name}" />
+ <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="findbugs" log="${ivyresolvelog}" />
+ </target>
+
+ <property name="ivy.findbugs.retrieve.pattern" value="[conf]/[artifact].[ext]" />
+
+ <target name="ivy-retrieve-findbugs" depends="ivy-resolve-findbugs" description="Retrieve Ivy-managed artifacts for the checkstyle configurations">
+ <echo message="Project: ${ant.project.name}" />
+ <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings" pattern="${build.ivy.lib.dir}/${ivy.findbugs.retrieve.pattern}" log="${ivyresolvelog}" />
+ </target>
+
+ <target name="check-for-findbugs">
+ <echo message="Project: ${ant.project.name}" />
+ <path id="findbugs.classpath">
+ <fileset dir="${build.ivy.lib.dir}/findbugs">
+ <include name="*.jar" />
+ </fileset>
+ </path>
+ <pathconvert property="findbugs.classpath" refid="findbugs.classpath" />
+ </target>
+
+ <target name="findbugs" depends="init,ivy-retrieve-findbugs,check-for-findbugs" description="Run findbugs on source files">
+ <echo message="Project: ${ant.project.name}" />
+
+ <property name="findbugs.conf.dir" location="${hive.root}/findbugs" />
+ <property name="findbugs.build.dir" location="${build.dir.hive}/findbugs" />
+ <property name="findbugs.exclude.file" value="${findbugs.conf.dir}/findbugs-exclude.xml"/>
+ <property name="findbugs.report.htmlfile" value="${findbugs.build.dir}/findbugs-report.html"/>
+ <property name="findbugs.report.xmlfile" value="${findbugs.build.dir}/findbugs-report.xml"/>
+
+ <mkdir dir="${findbugs.build.dir}" />
+ <taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask" classpath="${build.ivy.lib.dir}/findbugs/findbugs-ant.jar" />
+
+ <findbugs classpath="${findbugs.classpath}" pluginList="" effort="max" output="xml" outputFile="${findbugs.report.xmlfile}" excludeFilter="${findbugs.exclude.file}">
+
+ <auxClasspath>
+ <fileset dir="${build.dir.hive}">
+ <include name="**/*.jar" />
+ </fileset>
+ </auxClasspath>
+
+ <sourcePath path="${hive.root}/ant" />
+ <sourcePath path="${hive.root}/beeline" />
+ <sourcePath path="${hive.root}/builtins" />
+ <sourcePath path="${hive.root}/cli" />
+ <sourcePath path="${hive.root}/common" />
+ <sourcePath path="${hive.root}/contrib" />
+ <sourcePath path="${hive.root}/hbase-handler" />
+ <sourcePath path="${hive.root}/hwi" />
+ <sourcePath path="${hive.root}/jdbc" />
+ <sourcePath path="${hive.root}/metastore" />
+ <sourcePath path="${hive.root}/odbc" />
+ <sourcePath path="${hive.root}/pdk" />
+ <sourcePath path="${hive.root}/ql" />
+ <sourcePath path="${hive.root}/serde" />
+ <sourcePath path="${hive.root}/service" />
+ <sourcePath path="${hive.root}/shims" />
+ <class location="${build.dir.hive}/anttasks/classes" />
+ <class location="${build.dir.hive}/beeline/classes" />
+ <class location="${build.dir.hive}/builtins/classes" />
+ <class location="${build.dir.hive}/cli/classes" />
+ <class location="${build.dir.hive}/common/classes" />
+ <class location="${build.dir.hive}/contrib/classes" />
+ <class location="${build.dir.hive}/hbase-handler/classes" />
+ <class location="${build.dir.hive}/hwi/classes" />
+ <class location="${build.dir.hive}/jdbc/classes" />
+ <class location="${build.dir.hive}/metastore/classes" />
+ <class location="${build.dir.hive}/pdk/classes" />
+ <class location="${build.dir.hive}/ql/classes" />
+ <class location="${build.dir.hive}/serde/classes" />
+ <class location="${build.dir.hive}/service/classes" />
+ <class location="${build.dir.hive}/shims/classes" />
+ </findbugs>
+ <!--
<xslt style="${findbugs.conf.dir}/default.xsl" in="${findbugs.report.xmlfile}" out="${findbugs.report.htmlfile}"/>
-->
- </target>
+ </target>
<target name="ivy-docs" depends="ivy-init-settings"
- description="Resolve, Retrieve Ivy-managed artifacts for docs configuration">
+ description="Resolve, Retrieve Ivy-managed artifacts for docs configuration">
<echo message="Project: ${ant.project.name}"/>
<ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="docs"/>
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
@@ -1038,6 +1048,10 @@
<!-- copy the jars -->
<copy file="${build.dir.hive}/anttasks/hive-anttasks-${version}.jar"
todir="${mvn.jar.dir}" />
+ <copy file="${build.dir.hive}/beeline/hive-beeline-${version}.jar"
+ todir="${mvn.jar.dir}" />
+ <copy file="${build.dir.hive}/builtins/hive-builtins-${version}.jar"
+ todir="${mvn.jar.dir}" />
<copy file="${build.dir.hive}/cli/hive-cli-${version}.jar"
todir="${mvn.jar.dir}" />
<copy file="${build.dir.hive}/common/hive-common-${version}.jar"
@@ -1052,6 +1066,8 @@
todir="${mvn.jar.dir}" />
<copy file="${build.dir.hive}/metastore/hive-metastore-${version}.jar"
todir="${mvn.jar.dir}" />
+ <copy file="${build.dir.hive}/pdk/hive-pdk-${version}.jar"
+ todir="${mvn.jar.dir}" />
<copy file="${build.dir.hive}/ql/hive-exec-${version}.jar"
todir="${mvn.jar.dir}" />
<copy file="${build.dir.hive}/serde/hive-serde-${version}.jar"
@@ -1060,14 +1076,14 @@
todir="${mvn.jar.dir}" />
<copy file="${build.dir.hive}/shims/hive-shims-${version}.jar"
todir="${mvn.jar.dir}" />
- <copy file="${build.dir.hive}/pdk/hive-pdk-${version}.jar"
- todir="${mvn.jar.dir}" />
- <copy file="${build.dir.hive}/builtins/hive-builtins-${version}.jar"
- todir="${mvn.jar.dir}" />
<!-- copy over maven pom files created using the make-pom target and rename to maven convention -->
<copy file="${build.dir.hive}/anttasks/pom.xml"
tofile="${mvn.pom.dir}/hive-anttasks-${version}.pom" />
+ <copy file="${build.dir.hive}/beeline/pom.xml"
+ tofile="${mvn.pom.dir}/hive-beeline-${version}.pom" />
+ <copy file="${build.dir.hive}/builtins/pom.xml"
+ tofile="${mvn.pom.dir}/hive-builtins-${version}.pom" />
<copy file="${build.dir.hive}/cli/pom.xml"
tofile="${mvn.pom.dir}/hive-cli-${version}.pom" />
<copy file="${build.dir.hive}/common/pom.xml"
@@ -1082,6 +1098,8 @@
tofile="${mvn.pom.dir}/hive-jdbc-${version}.pom" />
<copy file="${build.dir.hive}/metastore/pom.xml"
tofile="${mvn.pom.dir}/hive-metastore-${version}.pom" />
+ <copy file="${build.dir.hive}/pdk/pom.xml"
+ tofile="${mvn.pom.dir}/hive-pdk-${version}.pom" />
<copy file="${build.dir.hive}/ql/pom.xml"
tofile="${mvn.pom.dir}/hive-exec-${version}.pom" />
<copy file="${build.dir.hive}/serde/pom.xml"
@@ -1090,10 +1108,8 @@
tofile="${mvn.pom.dir}/hive-service-${version}.pom" />
<copy file="${build.dir.hive}/shims/pom.xml"
tofile="${mvn.pom.dir}/hive-shims-${version}.pom" />
- <copy file="${build.dir.hive}/pdk/pom.xml"
- tofile="${mvn.pom.dir}/hive-pdk-${version}.pom" />
- <copy file="${build.dir.hive}/builtins/pom.xml"
- tofile="${mvn.pom.dir}/hive-builtins-${version}.pom" />
+
+
<!-- copy over licence -->
<copy file="${hive.root}/LICENSE" todir="${mvn.license.dir}" />
@@ -1169,6 +1185,12 @@
<param name="hive.project" value="anttasks" />
</antcall>
<antcall target="maven-publish-artifact">
+ <param name="hive.project" value="beeline" />
+ </antcall>
+ <antcall target="maven-publish-artifact">
+ <param name="hive.project" value="builtins" />
+ </antcall>
+ <antcall target="maven-publish-artifact">
<param name="hive.project" value="cli" />
</antcall>
<antcall target="maven-publish-artifact">
@@ -1187,18 +1209,15 @@
<param name="hive.project" value="hwi" />
</antcall>
<antcall target="maven-publish-artifact">
- <param name="hive.project" value="pdk" />
- </antcall>
- <antcall target="maven-publish-artifact">
- <param name="hive.project" value="builtins" />
- </antcall>
- <antcall target="maven-publish-artifact">
<param name="hive.project" value="jdbc" />
</antcall>
<antcall target="maven-publish-artifact">
<param name="hive.project" value="metastore" />
</antcall>
<antcall target="maven-publish-artifact">
+ <param name="hive.project" value="pdk" />
+ </antcall>
+ <antcall target="maven-publish-artifact">
<param name="hive.project" value="serde" />
</antcall>
<antcall target="maven-publish-artifact">
@@ -1240,6 +1259,14 @@
output.file="${mvn.pom.dir}/hive-anttasks-${version}.pom.asc"
gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact
+ input.file="${mvn.jar.dir}/hive-beeline-${version}.jar"
+ output.file="${mvn.jar.dir}/hive-beeline-${version}.jar.asc"
+ gpg.passphrase="${gpg.passphrase}"/>
+ <sign-artifact
+ input.file="${mvn.pom.dir}/hive-beeline-${version}.pom"
+ output.file="${mvn.pom.dir}/hive-beeline-${version}.pom.asc"
+ gpg.passphrase="${gpg.passphrase}"/>
+ <sign-artifact
input.file="${mvn.jar.dir}/hive-cli-${version}.jar"
output.file="${mvn.jar.dir}/hive-cli-${version}.jar.asc"
gpg.passphrase="${gpg.passphrase}"/>
@@ -1264,6 +1291,14 @@
output.file="${mvn.pom.dir}/hive-contrib-${version}.pom.asc"
gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact
+ input.file="${mvn.jar.dir}/hive-exec-${version}.jar"
+ output.file="${mvn.jar.dir}/hive-exec-${version}.jar.asc"
+ gpg.passphrase="${gpg.passphrase}"/>
+ <sign-artifact
+ input.file="${mvn.pom.dir}/hive-exec-${version}.pom"
+ output.file="${mvn.pom.dir}/hive-exec-${version}.pom.asc"
+ gpg.passphrase="${gpg.passphrase}"/>
+ <sign-artifact
input.file="${mvn.jar.dir}/hive-hbase-handler-${version}.jar"
output.file="${mvn.jar.dir}/hive-hbase-handler-${version}.jar.asc"
gpg.passphrase="${gpg.passphrase}"/>
@@ -1296,14 +1331,6 @@
output.file="${mvn.pom.dir}/hive-metastore-${version}.pom.asc"
gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact
- input.file="${mvn.jar.dir}/hive-exec-${version}.jar"
- output.file="${mvn.jar.dir}/hive-exec-${version}.jar.asc"
- gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact
- input.file="${mvn.pom.dir}/hive-exec-${version}.pom"
- output.file="${mvn.pom.dir}/hive-exec-${version}.pom.asc"
- gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact
input.file="${mvn.jar.dir}/hive-pdk-${version}.jar"
output.file="${mvn.jar.dir}/hive-pdk-${version}.jar.asc"
gpg.passphrase="${gpg.passphrase}"/>
@@ -1312,14 +1339,6 @@
output.file="${mvn.pom.dir}/hive-pdk-${version}.pom.asc"
gpg.passphrase="${gpg.passphrase}"/>
<sign-artifact
- input.file="${mvn.jar.dir}/hive-builtins-${version}.jar"
- output.file="${mvn.jar.dir}/hive-builtins-${version}.jar.asc"
- gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact
- input.file="${mvn.pom.dir}/hive-builtins-${version}.pom"
- output.file="${mvn.pom.dir}/hive-builtins-${version}.pom.asc"
- gpg.passphrase="${gpg.passphrase}"/>
- <sign-artifact
input.file="${mvn.jar.dir}/hive-serde-${version}.jar"
output.file="${mvn.jar.dir}/hive-serde-${version}.jar.asc"
gpg.passphrase="${gpg.passphrase}"/>
View
5 cli/build.xml
@@ -40,6 +40,11 @@ to call at top-level: ant deploy-contrib compile-core-test
<compilerarg line="${javac.args} ${javac.args.warnings}" />
<classpath refid="classpath"/>
</javac>
+ <copy todir="${build.classes}" failonerror="false">
+ <fileset dir="${src.dir}">
+ <include name="**/*.properties"/>
+ </fileset>
+ </copy>
</target>
</project>
View
5 common/build.xml
@@ -29,6 +29,11 @@ to call at top-level: ant deploy-contrib compile-core-test
<target name="compile" depends="init, setup, ivy-retrieve">
<echo message="Project: ${ant.project.name}"/>
+ <exec executable="bash" failonerror="true">
+ <arg value="${basedir}/src/scripts/saveVersion.sh"/>
+ <arg value="${version}"/>
+ <arg value="${basedir}/src"/>
+ </exec>
<javac
encoding="${build.encoding}"
srcdir="${src.dir}"
View
7 common/src/gen/org/apache/hive/common/package-info.java
@@ -0,0 +1,7 @@
+/*
+ * Generated by saveVersion.sh
+ */
+@HiveVersionAnnotation(version="0.11.0-SNAPSHOT", revision="1455560", branch="trunk",
+ user="hortonas", date="Tue Mar 12 18:15:21 UTC 2013", url="https://svn.apache.org/repos/asf/hive/trunk",
+ srcChecksum="adaf44e8486f911a7ef879a83d8b2ea7")
+package org.apache.hive.common;
View
48 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -24,8 +24,10 @@
import java.io.InputStream;
import java.io.PrintStream;
import java.net.URL;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
+import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
@@ -57,6 +59,7 @@
private static byte[] confVarByteArray = null;
private static final Map<String, ConfVars> vars = new HashMap<String, ConfVars>();
+ private final List<String> restrictList = new ArrayList<String>();
static {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
@@ -146,6 +149,16 @@
};
/**
+ * The conf variables that depends on current user
+ */
+ public static final HiveConf.ConfVars[] userVars = {
+ HiveConf.ConfVars.SCRATCHDIR,
+ HiveConf.ConfVars.LOCALSCRATCHDIR,
+ HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR,
+ HiveConf.ConfVars.HIVEHISTORYFILELOC
+ };
+
+ /**
* ConfVars.
*
* These are the default configuration properties for Hive. Each HiveConf
@@ -688,6 +701,24 @@
HIVE_DDL_OUTPUT_FORMAT("hive.ddl.output.format", null),
HIVE_ENTITY_SEPARATOR("hive.entity.separator", "@"),
+ HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS("hive.server2.thrift.min.worker.threads", 5),
+ HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS("hive.server2.thrift.max.worker.threads", 100),
+
+ HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000),
+ HIVE_SERVER2_THRIFT_BIND_HOST("hive.server2.thrift.bind.host", ""),
+
+
+ // HiveServer2 auth configuration
+ HIVE_SERVER2_AUTHENTICATION("hive.server2.authentication", "NONE"),
+ HIVE_SERVER2_KERBEROS_KEYTAB("hive.server2.authentication.kerberos.keytab", ""),
+ HIVE_SERVER2_KERBEROS_PRINCIPAL("hive.server2.authentication.kerberos.principal", ""),
+ HIVE_SERVER2_PLAIN_LDAP_URL("hive.server2.authentication.ldap.url", null),
+ HIVE_SERVER2_PLAIN_LDAP_BASEDN("hive.server2.authentication.ldap.baseDN", null),
+ HIVE_SERVER2_KERBEROS_IMPERSONATION("hive.server2.enable.impersonation", false),
+ HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS("hive.server2.custom.authentication.class", null),
+
+ HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list", null),
+
// If this is set all move tasks at the end of a multi-insert query will only begin once all
// outputs are ready
HIVE_MULTI_INSERT_MOVE_TASKS_SHARE_DEPENDENCIES(
@@ -870,6 +901,13 @@ private static synchronized InputStream getConfVarInputStream() {
return new LoopingByteArrayInputStream(confVarByteArray);
}
+ public void verifyAndSet(String name, String value) throws IllegalArgumentException {
+ if (restrictList.contains(name)) {
+ throw new IllegalArgumentException("Cann't modify " + name + " at runtime");
+ }
+ set(name, value);
+ }
+
public static int getIntVar(Configuration conf, ConfVars var) {
assert (var.valClass == Integer.class);
return conf.getInt(var.varname, var.defaultIntVal);
@@ -1057,8 +1095,18 @@ private void initialize(Class<?> cls) {
if (auxJars == null) {
auxJars = this.get(ConfVars.HIVEAUXJARS.varname);
}
+
+ // setup list of conf vars that are not allowed to change runtime
+ String restrictListStr = this.get(ConfVars.HIVE_CONF_RESTRICTED_LIST.toString());
+ if (restrictListStr != null) {
+ for (String entry : restrictListStr.split(",")) {
+ restrictList.add(entry);
+ }
+ }
+ restrictList.add(ConfVars.HIVE_CONF_RESTRICTED_LIST.toString());
}
+
/**
* Apply system properties to this object if the property name is defined in ConfVars
* and the value is non-null and not an empty string.
View
78 common/src/java/org/apache/hive/common/HiveVersionAnnotation.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
+/**
+ * HiveVersionAnnotation.
+ *
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.PACKAGE)
+@InterfaceStability.Unstable
+public @interface HiveVersionAnnotation {
+
+ /**
+ * Get the Hive version
+ * @return the version string "0.6.3-dev"
+ */
+ String version();
+
+ /**
+ * Get the username that compiled Hive.
+ */
+ String user();
+
+ /**
+ * Get the date when Hive was compiled.
+ * @return the date in unix 'date' format
+ */
+ String date();
+
+ /**
+ * Get the url for the subversion repository.
+ */
+ String url();
+
+ /**
+ * Get the subversion revision.
+ * @return the revision number as a string (eg. "451451")
+ */
+ String revision();
+
+ /**
+ * Get the branch from which this was compiled.
+ * @return The branch name, e.g. "trunk" or "branches/branch-0.20"
+ */
+ String branch();
+
+ /**
+ * Get a checksum of the source files from which
+ * Hive was compiled.
+ * @return a string that uniquely identifies the source
+ **/
+ String srcChecksum();
+
+}
View
810 common/src/java/org/apache/hive/common/util/HiveStringUtils.java
@@ -0,0 +1,810 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common.util;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.net.InetAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.UnknownHostException;
+import java.text.DateFormat;
+import java.text.DecimalFormat;
+import java.text.NumberFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.StringTokenizer;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
+/**
+ * HiveStringUtils
+ * General string utils
+ *
+ * Originally copied from o.a.hadoop.util.StringUtils
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class HiveStringUtils {
+
+ /**
+ * Priority of the StringUtils shutdown hook.
+ */
+ public static final int SHUTDOWN_HOOK_PRIORITY = 0;
+
+ private static final DecimalFormat decimalFormat;
+ static {
+ NumberFormat numberFormat = NumberFormat.getNumberInstance(Locale.ENGLISH);
+ decimalFormat = (DecimalFormat) numberFormat;
+ decimalFormat.applyPattern("#.##");
+ }
+
+ /**
+ * Make a string representation of the exception.
+ * @param e The exception to stringify
+ * @return A string with exception name and call stack.
+ */
+ public static String stringifyException(Throwable e) {
+ StringWriter stm = new StringWriter();
+ PrintWriter wrt = new PrintWriter(stm);
+ e.printStackTrace(wrt);
+ wrt.close();
+ return stm.toString();
+ }
+
+ /**
+   * Given a full hostname, return the word up to the first dot.
+ * @param fullHostname the full hostname
+ * @return the hostname to the first dot
+ */
+ public static String simpleHostname(String fullHostname) {
+ int offset = fullHostname.indexOf('.');
+ if (offset != -1) {
+ return fullHostname.substring(0, offset);
+ }
+ return fullHostname;
+ }
+
+ private static DecimalFormat oneDecimal = new DecimalFormat("0.0");
+
+ /**
+ * Given an integer, return a string that is in an approximate, but human
+ * readable format.
+ * It uses the bases 'k', 'm', and 'g' for 1024, 1024**2, and 1024**3.
+ * @param number the number to format
+ * @return a human readable form of the integer
+ */
+ public static String humanReadableInt(long number) {
+ long absNumber = Math.abs(number);
+ double result = number;
+ String suffix = "";
+ if (absNumber < 1024) {
+ // since no division has occurred, don't format with a decimal point
+ return String.valueOf(number);
+ } else if (absNumber < 1024 * 1024) {
+ result = number / 1024.0;
+ suffix = "k";
+ } else if (absNumber < 1024 * 1024 * 1024) {
+ result = number / (1024.0 * 1024);
+ suffix = "m";
+ } else {
+ result = number / (1024.0 * 1024 * 1024);
+ suffix = "g";
+ }
+ return oneDecimal.format(result) + suffix;
+ }
+
+ /**
+ * Format a percentage for presentation to the user.
+ * @param done the percentage to format (0.0 to 1.0)
+ * @param digits the number of digits past the decimal point
+ * @return a string representation of the percentage
+ */
+ public static String formatPercent(double done, int digits) {
+ DecimalFormat percentFormat = new DecimalFormat("0.00%");
+ double scale = Math.pow(10.0, digits+2);
+ double rounded = Math.floor(done * scale);
+ percentFormat.setDecimalSeparatorAlwaysShown(false);
+ percentFormat.setMinimumFractionDigits(digits);
+ percentFormat.setMaximumFractionDigits(digits);
+ return percentFormat.format(rounded / scale);
+ }
+
+ /**
+ * Given an array of strings, return a comma-separated list of its elements.
+ * @param strs Array of strings
+ * @return Empty string if strs.length is 0, comma separated list of strings
+ * otherwise
+ */
+
+ public static String arrayToString(String[] strs) {
+ if (strs.length == 0) { return ""; }
+ StringBuilder sbuf = new StringBuilder();
+ sbuf.append(strs[0]);
+ for (int idx = 1; idx < strs.length; idx++) {
+ sbuf.append(",");
+ sbuf.append(strs[idx]);
+ }
+ return sbuf.toString();
+ }
+
+ /**
+ * Given an array of bytes it will convert the bytes to a hex string
+ * representation of the bytes
+ * @param bytes
+ * @param start start index, inclusively
+ * @param end end index, exclusively
+ * @return hex string representation of the byte array
+ */
+ public static String byteToHexString(byte[] bytes, int start, int end) {
+ if (bytes == null) {
+ throw new IllegalArgumentException("bytes == null");
+ }
+ StringBuilder s = new StringBuilder();
+ for(int i = start; i < end; i++) {
+ s.append(String.format("%02x", bytes[i]));
+ }
+ return s.toString();
+ }
+
+ /** Same as byteToHexString(bytes, 0, bytes.length). */
+ public static String byteToHexString(byte bytes[]) {
+ return byteToHexString(bytes, 0, bytes.length);
+ }
+
+ /**
+ * Given a hexstring this will return the byte array corresponding to the
+ * string
+ * @param hex the hex String array
+ * @return a byte array that is a hex string representation of the given
+ * string. The size of the byte array is therefore hex.length/2
+ */
+ public static byte[] hexStringToByte(String hex) {
+ byte[] bts = new byte[hex.length() / 2];
+ for (int i = 0; i < bts.length; i++) {
+ bts[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
+ }
+ return bts;
+ }
+ /**
+ *
+ * @param uris
+ */
+ public static String uriToString(URI[] uris){
+ if (uris == null) {
+ return null;
+ }
+ StringBuilder ret = new StringBuilder(uris[0].toString());
+ for(int i = 1; i < uris.length;i++){
+ ret.append(",");
+ ret.append(uris[i].toString());
+ }
+ return ret.toString();
+ }
+
+ /**
+ * @param str
+ * The string array to be parsed into an URI array.
+ * @return <tt>null</tt> if str is <tt>null</tt>, else the URI array
+ * equivalent to str.
+ * @throws IllegalArgumentException
+ * If any string in str violates RFC&nbsp;2396.
+ */
+ public static URI[] stringToURI(String[] str){
+ if (str == null) {
+ return null;
+ }
+ URI[] uris = new URI[str.length];
+ for (int i = 0; i < str.length;i++){
+ try{
+ uris[i] = new URI(str[i]);
+ }catch(URISyntaxException ur){
+ throw new IllegalArgumentException(
+ "Failed to create uri for " + str[i], ur);
+ }
+ }
+ return uris;
+ }
+
+ /**
+ *
+ * @param str
+ */
+ public static Path[] stringToPath(String[] str){
+ if (str == null) {
+ return null;
+ }
+ Path[] p = new Path[str.length];
+ for (int i = 0; i < str.length;i++){
+ p[i] = new Path(str[i]);
+ }
+ return p;
+ }
+ /**
+ *
+ * Given a finish and start time in long milliseconds, returns a
+ * String in the format Xhrs, Ymins, Z sec, for the time difference between two times.
+   * If the finish time comes before the start time then negative values of X, Y and Z will be returned.
+ *
+ * @param finishTime finish time
+ * @param startTime start time
+ */
+ public static String formatTimeDiff(long finishTime, long startTime){
+ long timeDiff = finishTime - startTime;
+ return formatTime(timeDiff);
+ }
+
+ /**
+ *
+ * Given the time in long milliseconds, returns a
+ * String in the format Xhrs, Ymins, Z sec.
+ *
+ * @param timeDiff The time difference to format
+ */
+ public static String formatTime(long timeDiff){
+ StringBuilder buf = new StringBuilder();
+ long hours = timeDiff / (60*60*1000);
+ long rem = (timeDiff % (60*60*1000));
+ long minutes = rem / (60*1000);
+ rem = rem % (60*1000);
+ long seconds = rem / 1000;
+
+ if (hours != 0){
+ buf.append(hours);
+ buf.append("hrs, ");
+ }
+ if (minutes != 0){
+ buf.append(minutes);
+ buf.append("mins, ");
+ }
+    // return "0sec" if no difference
+ buf.append(seconds);
+ buf.append("sec");
+ return buf.toString();
+ }
+ /**
+ * Formats time in ms and appends difference (finishTime - startTime)
+ * as returned by formatTimeDiff().
+ * If finish time is 0, empty string is returned, if start time is 0
+ * then difference is not appended to return value.
+ * @param dateFormat date format to use
+   * @param finishTime finish time
+ * @param startTime start time
+ * @return formatted value.
+ */
+ public static String getFormattedTimeWithDiff(DateFormat dateFormat,
+ long finishTime, long startTime){
+ StringBuilder buf = new StringBuilder();
+ if (0 != finishTime) {
+ buf.append(dateFormat.format(new Date(finishTime)));
+ if (0 != startTime){
+ buf.append(" (" + formatTimeDiff(finishTime , startTime) + ")");
+ }
+ }
+ return buf.toString();
+ }
+
+ /**
+ * Returns an arraylist of strings.
+   * @param str the comma separated string values
+   * @return the arraylist of the comma separated string values
+ */
+ public static String[] getStrings(String str){
+ Collection<String> values = getStringCollection(str);
+ if(values.size() == 0) {
+ return null;
+ }
+ return values.toArray(new String[values.size()]);
+ }
+
+ /**
+ * Returns a collection of strings.
+   * @param str comma separated string values
+ * @return an <code>ArrayList</code> of string values
+ */
+ public static Collection<String> getStringCollection(String str){
+ List<String> values = new ArrayList<String>();
+ if (str == null) {
+ return values;
+ }
+ StringTokenizer tokenizer = new StringTokenizer (str,",");
+ values = new ArrayList<String>();
+ while (tokenizer.hasMoreTokens()) {
+ values.add(tokenizer.nextToken());
+ }
+ return values;
+ }
+
+ /**
+ * Splits a comma separated value <code>String</code>, trimming leading and trailing whitespace on each value.
+   * @param str a comma separated <code>String</code> with values
+ * @return a <code>Collection</code> of <code>String</code> values
+ */
+ public static Collection<String> getTrimmedStringCollection(String str){
+ return new ArrayList<String>(
+ Arrays.asList(getTrimmedStrings(str)));
+ }
+
+ /**
+ * Splits a comma separated value <code>String</code>, trimming leading and trailing whitespace on each value.
+ * @param str a comma separated <String> with values
+ * @return an array of <code>String</code> values
+ */
+ public static String[] getTrimmedStrings(String str){
+ if (null == str || "".equals(str.trim())) {
+ return emptyStringArray;
+ }
+
+ return str.trim().split("\\s*,\\s*");
+ }
+
+ final public static String[] emptyStringArray = {};
+ final public static char COMMA = ',';
+ final public static String COMMA_STR = ",";
+ final public static char ESCAPE_CHAR = '\\';
+
  /**
   * Split a string using the default separator (COMMA), honoring the default
   * escape character (ESCAPE_CHAR). Delegates to split(String, char, char).
   * @param str a string that may have escaped separator
   * @return an array of strings, or null when str is null
   */
  public static String[] split(String str) {
    return split(str, ESCAPE_CHAR, COMMA);
  }
+
+ /**
+ * Split a string using the given separator
+ * @param str a string that may have escaped separator
+ * @param escapeChar a char that be used to escape the separator
+ * @param separator a separator char
+ * @return an array of strings
+ */
+ public static String[] split(
+ String str, char escapeChar, char separator) {
+ if (str==null) {
+ return null;
+ }
+ ArrayList<String> strList = new ArrayList<String>();
+ StringBuilder split = new StringBuilder();
+ int index = 0;
+ while ((index = findNext(str, separator, escapeChar, index, split)) >= 0) {
+ ++index; // move over the separator for next search
+ strList.add(split.toString());
+ split.setLength(0); // reset the buffer
+ }
+ strList.add(split.toString());
+ // remove trailing empty split(s)
+ int last = strList.size(); // last split
+ while (--last>=0 && "".equals(strList.get(last))) {
+ strList.remove(last);
+ }
+ return strList.toArray(new String[strList.size()]);
+ }
+
+ /**
+ * Split a string using the given separator, with no escaping performed.
+ * @param str a string to be split. Note that this may not be null.
+ * @param separator a separator char
+ * @return an array of strings
+ */
+ public static String[] split(
+ String str, char separator) {
+ // String.split returns a single empty result for splitting the empty
+ // string.
+ if ("".equals(str)) {
+ return new String[]{""};
+ }
+ ArrayList<String> strList = new ArrayList<String>();
+ int startIndex = 0;
+ int nextIndex = 0;
+ while ((nextIndex = str.indexOf((int)separator, startIndex)) != -1) {
+ strList.add(str.substring(startIndex, nextIndex));
+ startIndex = nextIndex + 1;
+ }
+ strList.add(str.substring(startIndex));
+ // remove trailing empty split(s)
+ int last = strList.size(); // last split
+ while (--last>=0 && "".equals(strList.get(last))) {
+ strList.remove(last);
+ }
+ return strList.toArray(new String[strList.size()]);
+ }
+
+ /**
+ * Finds the first occurrence of the separator character ignoring the escaped
+ * separators starting from the index. Note the substring between the index
+ * and the position of the separator is passed.
+ * @param str the source string
+ * @param separator the character to find
+ * @param escapeChar character used to escape
+ * @param start from where to search
+ * @param split used to pass back the extracted string
+ */
+ public static int findNext(String str, char separator, char escapeChar,
+ int start, StringBuilder split) {
+ int numPreEscapes = 0;
+ for (int i = start; i < str.length(); i++) {
+ char curChar = str.charAt(i);
+ if (numPreEscapes == 0 && curChar == separator) { // separator
+ return i;
+ } else {
+ split.append(curChar);
+ numPreEscapes = (curChar == escapeChar)
+ ? (++numPreEscapes) % 2
+ : 0;
+ }
+ }
+ return -1;
+ }
+
  /**
   * Escape commas in the string using the default escape char (ESCAPE_CHAR).
   * Delegates to escapeString(String, char, char).
   * @param str a string
   * @return an escaped string, or null when str is null
   */
  public static String escapeString(String str) {
    return escapeString(str, ESCAPE_CHAR, COMMA);
  }
+
  /**
   * Escape <code>charToEscape</code> in the string
   * with the escape char <code>escapeChar</code>
   *
   * @param str string
   * @param escapeChar escape char
   * @param charToEscape the char to be escaped
   * @return an escaped string, or null when str is null
   */
  public static String escapeString(
      String str, char escapeChar, char charToEscape) {
    // Delegate to the array form with a single character to escape.
    return escapeString(str, escapeChar, new char[] {charToEscape});
  }
+
+ // check if the character array has the character
+ private static boolean hasChar(char[] chars, char character) {
+ for (char target : chars) {
+ if (character == target) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * @param charsToEscape array of characters to be escaped
+ */
+ public static String escapeString(String str, char escapeChar,
+ char[] charsToEscape) {
+ if (str == null) {
+ return null;
+ }
+ StringBuilder result = new StringBuilder();
+ for (int i=0; i<str.length(); i++) {
+ char curChar = str.charAt(i);
+ if (curChar == escapeChar || hasChar(charsToEscape, curChar)) {
+ // special char
+ result.append(escapeChar);
+ }
+ result.append(curChar);
+ }
+ return result.toString();
+ }
+
  /**
   * Unescape commas in the string using the default escape char
   * (ESCAPE_CHAR). Delegates to unEscapeString(String, char, char).
   * @param str a string
   * @return an unescaped string, or null when str is null
   */
  public static String unEscapeString(String str) {
    return unEscapeString(str, ESCAPE_CHAR, COMMA);
  }
+
  /**
   * Unescape <code>charToEscape</code> in the string
   * with the escape char <code>escapeChar</code>
   *
   * @param str string
   * @param escapeChar escape char
   * @param charToEscape the escaped char
   * @return an unescaped string, or null when str is null
   */
  public static String unEscapeString(
      String str, char escapeChar, char charToEscape) {
    // Delegate to the array form with a single character to unescape.
    return unEscapeString(str, escapeChar, new char[] {charToEscape});
  }
+
  /**
   * Removes the escape character in front of every escaped occurrence of
   * <code>escapeChar</code> or of any character in
   * <code>charsToEscape</code>, validating that the input is well-formed.
   * @param str string to unescape; may be null
   * @param escapeChar escape char
   * @param charsToEscape array of characters to unescape
   * @return the unescaped string, or null when str is null
   * @throws IllegalArgumentException if a character from charsToEscape
   *         appears unescaped, an escape char precedes a character that is
   *         neither the escape char nor escapable, or the string ends with a
   *         dangling escape char
   */
  public static String unEscapeString(String str, char escapeChar,
      char[] charsToEscape) {
    if (str == null) {
      return null;
    }
    StringBuilder result = new StringBuilder(str.length());
    // True when the previous char was an escape char not yet consumed.
    boolean hasPreEscape = false;
    for (int i=0; i<str.length(); i++) {
      char curChar = str.charAt(i);
      if (hasPreEscape) {
        if (curChar != escapeChar && !hasChar(charsToEscape, curChar)) {
          // no special char
          throw new IllegalArgumentException("Illegal escaped string " + str +
              " unescaped " + escapeChar + " at " + (i-1));
        }
        // otherwise discard the escape char
        result.append(curChar);
        hasPreEscape = false;
      } else {
        if (hasChar(charsToEscape, curChar)) {
          // escapable char must not appear unescaped
          throw new IllegalArgumentException("Illegal escaped string " + str +
              " unescaped " + curChar + " at " + i);
        } else if (curChar == escapeChar) {
          hasPreEscape = true;
        } else {
          result.append(curChar);
        }
      }
    }
    if (hasPreEscape ) {
      throw new IllegalArgumentException("Illegal escaped string " + str +
          ", not expecting " + escapeChar + " in the end." );
    }
    return result.toString();
  }
+
+ /**
+ * Return a message for logging.
+ * @param prefix prefix keyword for the message
+ * @param msg content of the message
+ * @return a message for logging
+ */
+ private static String toStartupShutdownString(String prefix, String [] msg) {
+ StringBuilder b = new StringBuilder(prefix);
+ b.append("\n/************************************************************");
+ for(String s : msg) {
+ b.append("\n" + prefix + s);
+ }
+ b.append("\n************************************************************/");
+ return b.toString();
+ }
+
  /**
   * Print a log message for starting up and shutting down
   * @param clazz the class of the server
   * @param args arguments
   * @param LOG the target log object
   */
  public static void startupShutdownMessage(Class<?> clazz, String[] args,
      final org.apache.commons.logging.Log LOG) {
    final String hostname = getHostname();
    final String classname = clazz.getSimpleName();
    // Log a startup banner with host, args, and build/version metadata.
    LOG.info(
        toStartupShutdownString("STARTUP_MSG: ", new String[] {
            "Starting " + classname,
            " host = " + hostname,
            " args = " + Arrays.asList(args),
            " version = " + HiveVersionInfo.getVersion(),
            " classpath = " + System.getProperty("java.class.path"),
            " build = " + HiveVersionInfo.getUrl() + " -r "
                + HiveVersionInfo.getRevision()
                + "; compiled by '" + HiveVersionInfo.getUser()
                + "' on " + HiveVersionInfo.getDate()}
        )
    );

    // Register a hook so a matching banner is logged on JVM shutdown.
    // NOTE(review): SHUTDOWN_HOOK_PRIORITY is declared elsewhere in this
    // class; confirm its value relative to other registered hooks.
    ShutdownHookManager.addShutdownHook(
        new Runnable() {
          @Override
          public void run() {
            LOG.info(toStartupShutdownString("SHUTDOWN_MSG: ", new String[]{
                "Shutting down " + classname + " at " + hostname}));
          }
        }, SHUTDOWN_HOOK_PRIORITY);

  }
+
+ /**
+ * Return hostname without throwing exception.
+ * @return hostname
+ */
+ public static String getHostname() {
+ try {return "" + InetAddress.getLocalHost();}
+ catch(UnknownHostException uhe) {return "" + uhe;}
+ }
+
+
  /**
   * The traditional binary prefixes, kilo, mega, ..., exa,
   * which can be represented by a 64-bit integer.
   * TraditionalBinaryPrefix symbol are case insensitive.
   */
  public static enum TraditionalBinaryPrefix {
    // Each prefix is 1024x the previous one (left shift by 10 bits).
    KILO(1024),
    MEGA(KILO.value << 10),
    GIGA(MEGA.value << 10),
    TERA(GIGA.value << 10),
    PETA(TERA.value << 10),
    EXA(PETA.value << 10);

    // Multiplier represented by this prefix (e.g. KILO = 1024).
    public final long value;
    // Single-letter symbol: the first letter of the constant name (K, M, ...).
    public final char symbol;

    TraditionalBinaryPrefix(long value) {
      this.value = value;
      this.symbol = toString().charAt(0);
    }

    /**
     * Look up a prefix by its single-letter symbol, case insensitively.
     * @return The TraditionalBinaryPrefix object corresponding to the symbol.
     * @throws IllegalArgumentException for an unrecognized symbol
     */
    public static TraditionalBinaryPrefix valueOf(char symbol) {
      symbol = Character.toUpperCase(symbol);
      for(TraditionalBinaryPrefix prefix : TraditionalBinaryPrefix.values()) {
        if (symbol == prefix.symbol) {
          return prefix;
        }
      }
      throw new IllegalArgumentException("Unknown symbol '" + symbol + "'");
    }

    /**
     * Convert a string to long.
     * The input string is first be trimmed
     * and then it is parsed with traditional binary prefix.
     *
     * For example,
     * "-1230k" will be converted to -1230 * 1024 = -1259520;
     * "891g" will be converted to 891 * 1024^3 = 956703965184;
     *
     * NOTE(review): an empty or all-whitespace input throws
     * StringIndexOutOfBoundsException from charAt below — callers must pass
     * a non-empty string.
     *
     * @param s input string
     * @return a long value represented by the input string.
     * @throws IllegalArgumentException on an unknown prefix or overflow
     */
    public static long string2long(String s) {
      s = s.trim();
      final int lastpos = s.length() - 1;
      final char lastchar = s.charAt(lastpos);
      if (Character.isDigit(lastchar)) {
        // No prefix: plain decimal long.
        return Long.parseLong(s);
      } else {
        long prefix;
        try {
          prefix = TraditionalBinaryPrefix.valueOf(lastchar).value;
        } catch (IllegalArgumentException e) {
          throw new IllegalArgumentException("Invalid size prefix '" + lastchar
              + "' in '" + s
              + "'. Allowed prefixes are k, m, g, t, p, e(case insensitive)");
        }
        long num = Long.parseLong(s.substring(0, lastpos));
        // Guard against overflow before multiplying.
        if (num > (Long.MAX_VALUE/prefix) || num < (Long.MIN_VALUE/prefix)) {
          throw new IllegalArgumentException(s + " does not fit in a Long");
        }
        return num * prefix;
      }
    }
  }
+
  /**
   * Escapes HTML Special characters present in the string.
   * Replaces &lt;, &gt;, &amp; and &quot; with their entity forms.
   * NOTE(review): consecutive spaces alternate between " " and "&nbsp;"
   * (the run flag is reset after each "&nbsp;"), so runs render as
   * space/nbsp pairs — confirm this rendering is the intended behavior.
   * Single quotes are not escaped.
   * @param string input; may be null
   * @return HTML Escaped String representation, or null when input is null
   */
  public static String escapeHTML(String string) {
    if(string == null) {
      return null;
    }
    StringBuilder sb = new StringBuilder();
    boolean lastCharacterWasSpace = false;
    char[] chars = string.toCharArray();
    for(char c : chars) {
      if(c == ' ') {
        if(lastCharacterWasSpace){
          // Second space of a pair becomes a non-breaking space.
          lastCharacterWasSpace = false;
          sb.append("&nbsp;");
        }else {
          lastCharacterWasSpace=true;
          sb.append(" ");
        }
      }else {
        lastCharacterWasSpace = false;
        switch(c) {
          case '<': sb.append("&lt;"); break;
          case '>': sb.append("&gt;"); break;
          case '&': sb.append("&amp;"); break;
          case '"': sb.append("&quot;"); break;
          default : sb.append(c);break;
        }
      }
    }

    return sb.toString();
  }
+
+ /**
+ * Return an abbreviated English-language desc of the byte length
+ */
+ public static String byteDesc(long len) {
+ double val = 0.0;
+ String ending = "";
+ if (len < 1024 * 1024) {
+ val = (1.0 * len) / 1024;
+ ending = " KB";
+ } else if (len < 1024 * 1024 * 1024) {
+ val = (1.0 * len) / (1024 * 1024);
+ ending = " MB";
+ } else if (len < 1024L * 1024 * 1024 * 1024) {
+ val = (1.0 * len) / (1024 * 1024 * 1024);
+ ending = " GB";
+ } else if (len < 1024L * 1024 * 1024 * 1024 * 1024) {
+ val = (1.0 * len) / (1024L * 1024 * 1024 * 1024);
+ ending = " TB";
+ } else {
+ val = (1.0 * len) / (1024L * 1024 * 1024 * 1024 * 1024);
+ ending = " PB";
+ }
+ return limitDecimalTo2(val) + ending;
+ }
+
  /**
   * Formats a double using the shared {@code decimalFormat} instance
   * (declared elsewhere in this class — presumably a two-decimal pattern per
   * this method's name; confirm against the field initializer).
   * Synchronized because java.text.DecimalFormat is not thread-safe.
   */
  public static synchronized String limitDecimalTo2(double d) {
    return decimalFormat.format(d);
  }
+
+ /**
+ * Concatenates strings, using a separator.
+ *
+ * @param separator Separator to join with.
+ * @param strings Strings to join.
+ */
+ public static String join(CharSequence separator, Iterable<?> strings) {
+ Iterator<?> i = strings.iterator();
+ if (!i.hasNext()) {
+ return "";
+ }
+ StringBuilder sb = new StringBuilder(i.next().toString());
+ while (i.hasNext()) {
+ sb.append(separator);
+ sb.append(i.next().toString());
+ }
+ return sb.toString();
+ }
+
+ /**
+ * Convert SOME_STUFF to SomeStuff
+ *
+ * @param s input string
+ * @return camelized string
+ */
+ public static String camelize(String s) {
+ StringBuilder sb = new StringBuilder();
+ String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR, '_');
+
+ for (String word : words) {
+ sb.append(org.apache.commons.lang.StringUtils.capitalize(word));
+ }
+
+ return sb.toString();
+ }
+
+}
View
126 common/src/java/org/apache/hive/common/util/HiveVersionInfo.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common.util;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+import org.apache.hive.common.HiveVersionAnnotation;
+
+/**
+ * HiveVersionInfo.
+ *
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class HiveVersionInfo {
  private static final Log LOG = LogFactory.getLog(HiveVersionInfo.class);

  // Version metadata read from the @HiveVersionAnnotation attached to the
  // org.apache.hive.common package (presumably generated by the build's
  // saveVersion.sh — confirm). `version` is null when the annotation is
  // absent; accessors then report "Unknown".
  private static Package myPackage;
  private static HiveVersionAnnotation version;

  static {
    myPackage = HiveVersionAnnotation.class.getPackage();
    version = myPackage.getAnnotation(HiveVersionAnnotation.class);
  }
+
  /**
   * Get the meta-data for the Hive package.
   * @return the Package object from which the version annotation was read
   *         (the package declaring HiveVersionAnnotation)
   */
  static Package getPackage() {
    return myPackage;
  }
+
+ /**
+ * Get the Hive version.
+ * @return the Hive version string, eg. "0.6.3-dev"
+ */
+ public static String getVersion() {
+ return version != null ? version.version() : "Unknown";
+ }
+
+ /**
+ * Get the subversion revision number for the root directory
+ * @return the revision number, eg. "451451"
+ */
+ public static String getRevision() {
+ return version != null ? version.revision() : "Unknown";
+ }
+
+ /**
+ * Get the branch on which this originated.
+ * @return The branch name, e.g. "trunk" or "branches/branch-0.20"
+ */
+ public static String getBranch() {
+ return version != null ? version.branch() : "Unknown";
+ }
+
+ /**
+ * The date that Hive was compiled.
+ * @return the compilation date in unix date format
+ */
+ public static String getDate() {
+ return version != null ? version.date() : "Unknown";
+ }
+
+ /**
+ * The user that compiled Hive.
+ * @return the username of the user
+ */
+ public static String getUser() {
+ return version != null ? version.user() : "Unknown";
+ }
+
+ /**
+ * Get the subversion URL for the root Hive directory.
+ */
+ public static String getUrl() {
+ return version != null ? version.url() : "Unknown";
+ }
+
+ /**
+ * Get the checksum of the source files from which Hive was