
HIVE-2935 : Implement HiveServer2 Core code changes (4th patch of 4) (Carl Steinbach and others via Ashutosh Chauhan)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1455659 13f79535-47bb-0310-9956-ffa450edef68
1 parent fa8dec3, commit 6b27df548a0bd3f7f444dc79f74aefef3cb5a194, committed by ashutoshc on Mar 12, 2013
Showing with 22,019 additions and 624 deletions.
  1. +32 −9 ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
  2. +2 −3 bin/ext/beeline.sh
  3. +33 −0 bin/ext/hiveserver2.sh
  4. +21 −0 bin/hiveserver2
  5. +38 −3 build-common.xml
  6. +13 −4 build.properties
  7. +134 −115 build.xml
  8. +5 −0 cli/build.xml
  9. +5 −0 common/build.xml
  10. +7 −0 common/src/gen/org/apache/hive/common/package-info.java
  11. +48 −0 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
  12. +78 −0 common/src/java/org/apache/hive/common/HiveVersionAnnotation.java
  13. +810 −0 common/src/java/org/apache/hive/common/util/HiveStringUtils.java
  14. +126 −0 common/src/java/org/apache/hive/common/util/HiveVersionInfo.java
  15. +205 −0 common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
  16. +74 −0 common/src/scripts/saveVersion.sh
  17. +66 −0 common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java
  18. +90 −4 conf/hive-default.xml.template
  19. +6 −3 contrib/build.xml
  20. +25 −0 data/files/types/primitives/090101.txt
  21. +25 −0 data/files/types/primitives/090201.txt
  22. +25 −0 data/files/types/primitives/090301.txt
  23. +25 −0 data/files/types/primitives/090401.txt
  24. +10 −0 data/scripts/q_test_cleanup.sql
  25. +132 −0 data/scripts/q_test_init.sql
  26. +51 −0 eclipse-templates/BeeLine.launchtemplate
  27. +3 −3 eclipse-templates/{HiveBeeLine.launchtemplate → HiveServer2.launchtemplate}
  28. +43 −0 eclipse-templates/TestBeeLineDriver.launchtemplate
  29. +43 −0 eclipse-templates/TestEmbeddedThriftCLIService.launchtemplate
  30. +43 −0 eclipse-templates/TestHiveServer.launchtemplate
  31. +44 −0 eclipse-templates/TestJdbc2.launchtemplate
  32. +43 −0 eclipse-templates/TestRemoteThriftCLIService.launchtemplate
  33. +6 −3 hbase-handler/build.xml
  34. +1 −1 ivy/ivysettings.xml
  35. +4 −2 ivy/libraries.properties
  36. +1,106 −0 jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
  37. +2,442 −0 jdbc/src/java/org/apache/hive/jdbc/HiveCallableStatement.java
  38. +743 −0 jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
  39. +131 −0 jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java
  40. +1,097 −0 jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java
  41. +297 −0 jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
  42. +53 −0 jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java
  43. +1,280 −0 jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
  44. +277 −0 jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
  45. +195 −0 jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java
  46. +566 −0 jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
  47. +182 −0 jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
  48. +59 −0 jdbc/src/java/org/apache/hive/jdbc/JdbcTable.java
  49. +252 −0 jdbc/src/java/org/apache/hive/jdbc/Utils.java
  50. +0 −112 jdbc/src/java/org/apache/hive/jdbc/beeline/HiveBeeline.java
  51. +0 −266 jdbc/src/java/org/apache/hive/jdbc/beeline/OptionsProcessor.java
  52. +1,255 −0 jdbc/src/test/org/apache/hive/jdbc/TestJdbcDriver2.java
  53. +0 −9 metastore/build.xml
  54. +31 −17 ql/build.xml
  55. +2 −0 ql/ivy.xml
  56. +6 −1 ql/src/java/org/apache/hadoop/hive/ql/Driver.java
  57. +2 −3 ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
  58. +24 −0 ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
  59. +18 −10 ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
  60. +8 −3 ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
  61. +0 −21 serde/build.xml
  62. +0 −9 service/build.xml
  63. +997 −0 service/if/TCLIService.thrift
  64. +184 −0 service/src/java/org/apache/hive/service/AbstractService.java
  65. +121 −0 service/src/java/org/apache/hive/service/BreakableService.java
  66. +133 −0 service/src/java/org/apache/hive/service/CompositeService.java
  67. +83 −0 service/src/java/org/apache/hive/service/FilterService.java
  68. +122 −0 service/src/java/org/apache/hive/service/Service.java
  69. +38 −0 service/src/java/org/apache/hive/service/ServiceException.java
  70. +141 −0 service/src/java/org/apache/hive/service/ServiceOperations.java
  71. +46 −0 service/src/java/org/apache/hive/service/ServiceStateChangeListener.java
  72. +31 −0 service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
  73. +64 −0 service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
  74. +48 −0 service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
  75. +129 −0 service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
  76. +77 −0 service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
  77. +70 −0 service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
  78. +37 −0 service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
  79. +141 −0 service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
  80. +189 −0 service/src/java/org/apache/hive/service/auth/PlainSaslServer.java
  81. +78 −0 service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
  82. +65 −0 service/src/java/org/apache/hive/service/auth/TUGIContainingProcessor.java
  83. +328 −0 service/src/java/org/apache/hive/service/cli/CLIService.java
  84. +152 −0 service/src/java/org/apache/hive/service/cli/CLIServiceClient.java
  85. +69 −0 service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
  86. +97 −0 service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java
  87. +168 −0 service/src/java/org/apache/hive/service/cli/ColumnValue.java
  88. +179 −0 service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java
  89. +54 −0 service/src/java/org/apache/hive/service/cli/FetchOrientation.java
  90. +96 −0 service/src/java/org/apache/hive/service/cli/GetInfoType.java
  91. +82 −0 service/src/java/org/apache/hive/service/cli/GetInfoValue.java
  92. +78 −0 service/src/java/org/apache/hive/service/cli/Handle.java
  93. +113 −0 service/src/java/org/apache/hive/service/cli/HandleIdentifier.java
  94. +124 −0 service/src/java/org/apache/hive/service/cli/HiveSQLException.java
  95. +90 −0 service/src/java/org/apache/hive/service/cli/ICLIService.java
  96. +95 −0 service/src/java/org/apache/hive/service/cli/OperationHandle.java
  97. +92 −0 service/src/java/org/apache/hive/service/cli/OperationState.java
  98. +58 −0 service/src/java/org/apache/hive/service/cli/OperationType.java
  99. +47 −0 service/src/java/org/apache/hive/service/cli/PatternOrIdentifier.java
  100. +78 −0 service/src/java/org/apache/hive/service/cli/Row.java
  101. +124 −0 service/src/java/org/apache/hive/service/cli/RowSet.java
  102. +58 −0 service/src/java/org/apache/hive/service/cli/SessionHandle.java
  103. +94 −0 service/src/java/org/apache/hive/service/cli/TableSchema.java
  104. +391 −0 service/src/java/org/apache/hive/service/cli/Type.java
  105. +71 −0 service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
  106. +38 −0 service/src/java/org/apache/hive/service/cli/operation/AddResourceOperation.java
  107. +38 −0 service/src/java/org/apache/hive/service/cli/operation/DeleteResourceOperation.java
  108. +38 −0 service/src/java/org/apache/hive/service/cli/operation/DfsOperation.java
  109. +59 −0 service/src/java/org/apache/hive/service/cli/operation/ExecuteStatementOperation.java
  110. +68 −0 service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
  111. +198 −0 service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
  112. +120 −0 service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
  113. +89 −0 service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
  114. +81 −0 service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
  115. +115 −0 service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
  116. +135 −0 service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
  117. +202 −0 service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
  118. +99 −0 service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
  119. +127 −0 service/src/java/org/apache/hive/service/cli/operation/Operation.java
  120. +172 −0 service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
  121. +256 −0 service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
  122. +38 −0 service/src/java/org/apache/hive/service/cli/operation/SetOperation.java
  123. +167 −0 service/src/java/org/apache/hive/service/cli/session/HiveSession.java
  124. +344 −0 service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
  125. +167 −0 service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
  126. +87 −0 service/src/java/org/apache/hive/service/cli/session/HiveSessionProxy.java
  127. +153 −0 service/src/java/org/apache/hive/service/cli/session/SessionManager.java
  128. +37 −0 service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftCLIService.java
  129. +423 −0 service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
  130. +373 −0 service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java
  131. +94 −0 service/src/java/org/apache/hive/service/server/HiveServer2.java
  132. +116 −0 service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
  133. +60 −0 service/src/test/org/apache/hive/service/cli/TestEmbeddedThriftCLIService.java
  134. +103 −0 service/src/test/org/apache/hive/service/server/TestHiveServer2Concurrency.java
  135. +23 −2 shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
  136. +32 −5 shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
  137. +54 −14 shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
  138. +32 −2 shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
  139. +7 −0 shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
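The jdbc/src/java/org/apache/hive/jdbc classes listed above remove the old org.apache.hive.jdbc.beeline shim and add a full JDBC driver for HiveServer2. A minimal client sketch, assuming the new driver registers the jdbc:hive2:// URL prefix and a HiveServer2 instance is reachable on localhost:10000 (host, port, credentials, and the sample table are illustrative assumptions, not taken from this commit):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveServer2JdbcSketch {
  public static void main(String[] args) throws Exception {
    // Register the driver class added by this patch.
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    // jdbc:hive2://localhost:10000/default is an assumed endpoint for illustration.
    try (Connection conn =
             DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "", "");
         Statement stmt = conn.createStatement();
         // "src" is a hypothetical table used only in this sketch.
         ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM src")) {
      while (rs.next()) {
        System.out.println(rs.getLong(1));
      }
    }
  }
}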
ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
@@ -108,6 +108,8 @@ public boolean accept(File filePath) {
}
private List<String> templatePaths = new ArrayList<String>();
+
+ private String hiveRootDirectory;
private String outputDirectory;
@@ -185,6 +187,18 @@ public String getTemplate() {
return template;
}
+ public void setHiveRootDirectory(File hiveRootDirectory) {
+ try {
+ this.hiveRootDirectory = hiveRootDirectory.getCanonicalPath();
+ } catch (IOException ioe) {
+ throw new BuildException(ioe);
+ }
+ }
+
+ public String getHiveRootDirectory() {
+ return hiveRootDirectory;
+ }
+
public void setTemplatePath(String templatePath) throws Exception {
templatePaths.clear();
for (String relativePath : templatePath.split(",")) {
@@ -302,14 +316,15 @@ public void execute() throws BuildException {
List<File> qFiles = new ArrayList<File>();
HashMap<String, String> qFilesMap = new HashMap<String, String>();
+ File hiveRootDir = null;
+ File queryDir = null;
File outDir = null;
File resultsDir = null;
File logDir = null;
try {
- File inpDir = null;
if (queryDirectory != null) {
- inpDir = new File(queryDirectory);
+ queryDir = new File(queryDirectory);
}
if (queryFile != null && !queryFile.equals("")) {
@@ -318,31 +333,37 @@ public void execute() throws BuildException {
if (includeOnly != null && !includeOnly.contains(qFile)) {
continue;
}
- if (null != inpDir) {
- qFiles.add(new File(inpDir, qFile));
+ if (null != queryDir) {
+ qFiles.add(new File(queryDir, qFile));
} else {
qFiles.add(new File(qFile));
}
}
} else if (queryFileRegex != null && !queryFileRegex.equals("")) {
- qFiles.addAll(Arrays.asList(inpDir.listFiles(
+ qFiles.addAll(Arrays.asList(queryDir.listFiles(
new QFileRegexFilter(queryFileRegex, includeOnly))));
} else if (runDisabled != null && runDisabled.equals("true")) {
- qFiles.addAll(Arrays.asList(inpDir.listFiles(new DisabledQFileFilter(includeOnly))));
+ qFiles.addAll(Arrays.asList(queryDir.listFiles(new DisabledQFileFilter(includeOnly))));
} else {
- qFiles.addAll(Arrays.asList(inpDir.listFiles(new QFileFilter(includeOnly))));
+ qFiles.addAll(Arrays.asList(queryDir.listFiles(new QFileFilter(includeOnly))));
}
if (excludeQueryFile != null && !excludeQueryFile.equals("")) {
// Exclude specified query files, comma separated
for (String qFile : excludeQueryFile.split(",")) {
- if (null != inpDir) {
- qFiles.remove(new File(inpDir, qFile));
+ if (null != queryDir) {
+ qFiles.remove(new File(queryDir, qFile));
} else {
qFiles.remove(new File(qFile));
}
}
}
+
+ hiveRootDir = new File(hiveRootDirectory);
+ if (!hiveRootDir.exists()) {
+ throw new BuildException("Hive Root Directory "
+ + hiveRootDir.getCanonicalPath() + " does not exist");
+ }
Collections.sort(qFiles);
for (File qFile : qFiles) {
@@ -397,6 +418,8 @@ public void execute() throws BuildException {
// For each of the qFiles generate the test
VelocityContext ctx = new VelocityContext();
ctx.put("className", className);
+ ctx.put("hiveRootDir", getEscapedCanonicalPath(hiveRootDir));
+ ctx.put("queryDir", getEscapedCanonicalPath(queryDir));
ctx.put("qfiles", qFiles);
ctx.put("qfilesMap", qFilesMap);
ctx.put("resultsDir", getEscapedCanonicalPath(resultsDir));
bin/ext/beeline.sh
@@ -18,13 +18,12 @@ THISSERVICE=beeline
export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
beeline () {
-
- CLASS=org.apache.hive.jdbc.beeline.HiveBeeline;
+ CLASS=org.apache.hive.beeline.BeeLine;
execHiveCmd $CLASS "$@"
}
beeline_help () {
- CLASS=org.apache.hive.jdbc.beeline.HiveBeeline;
+ CLASS=org.apache.hive.beeline.BeeLine;
execHiveCmd $CLASS "--help"
}
bin/ext/hiveserver2.sh
@@ -0,0 +1,33 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=hiveserver2
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+hiveserver2() {
+ echo "Starting HiveServer2"
+ CLASS=org.apache.hive.service.server.HiveServer2
+ if $cygwin; then
+ HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+ fi
+ JAR=${HIVE_LIB}/hive-service-*.jar
+
+ exec $HADOOP jar $JAR $CLASS "$@"
+}
+
+hiveserver2_help() {
+ hiveserver2 -h
+}
+
bin/hiveserver2
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hive --service hiveserver2 "$@"
build-common.xml
@@ -38,7 +38,9 @@
<property name="build.classes" location="${build.dir}/classes"/>
<property name="build.encoding" value="ISO-8859-1"/>
+ <!-- Thrift codegen properties -->
<property name="thrift.args" value="-I ${thrift.home} --gen java:beans,hashcode --gen cpp --gen php --gen py --gen rb"/>
+ <property name="thrift.gen.dir" value="${basedir}/src/gen/thrift"/>
<property name="hadoop.conf.dir" location="${hadoop.root}/conf"/>
@@ -77,6 +79,7 @@
<pathelement location="" />
<pathelement location="${test.src.data.dir}/conf"/>
<pathelement location="${hive.conf.dir}"/>
+ <pathelement location="${build.dir.hive}/beeline/test/classes"/>
<pathelement location="${build.dir.hive}/cli/test/classes"/>
<pathelement location="${build.dir.hive}/common/test/classes"/>
<pathelement location="${build.dir.hive}/hbase-handler/test/classes"/>
@@ -97,6 +100,7 @@
<!-- test directory may contain hadoop jars used by tests only (e.g. mini cluster) -->
<fileset dir="${hive.root}/build/ivy/lib/test" includes="*.jar" erroronmissingdir="false"
excludes="**/hive_*.jar,**/hive-*.jar"/>
+ <fileset dir="${hive.root}/build/ivy/lib/test" includes="hive-testutils*.jar" erroronmissingdir="false"/>
<!-- we strip out hadoop jars present in places other than the hadoop shimmed dir-->
<fileset dir="${hive.root}/build/ivy/lib/default" includes="*.jar" erroronmissingdir="false"
@@ -181,6 +185,7 @@
<pathelement location="${build.dir.hive}/classes"/>
<fileset dir="${build.dir.hive}" includes="*/*.jar"/>
<fileset dir="${hive.root}/lib" includes="*.jar"/>
+ <fileset dir="${build.ivy.lib.dir}/default" includes="junit*.jar" />
<fileset dir="${build.ivy.lib.dir}/hadoop0.${hadoop.mr.rev}.shim" includes="*.jar" erroronmissingdir="false" />
<fileset dir="${build.ivy.lib.dir}/default" includes="*.jar"
excludes="**/hadoop-*.jar"
@@ -193,6 +198,7 @@
<pathelement location="${build.dir.hive}/serde/classes"/>
<pathelement location="${build.dir.hive}/metastore/classes"/>
<pathelement location="${build.dir.hive}/ql/classes"/>
+ <pathelement location="${build.dir.hive}/beeline/classes"/>
<pathelement location="${build.dir.hive}/cli/classes"/>
<pathelement location="${build.dir.hive}/shims/classes"/>
<pathelement location="${build.dir.hive}/hwi/classes"/>
@@ -226,6 +232,7 @@
<echo message="Project: ${ant.project.name}"/>
<mkdir dir="${test.data.dir}"/>
<mkdir dir="${test.log.dir}/clientpositive"/>
+ <mkdir dir="${test.log.dir}/beelinepositive"/>
<mkdir dir="${test.log.dir}/clientnegative"/>
<mkdir dir="${test.log.dir}/positive"/>
<mkdir dir="${test.log.dir}/negative"/>
@@ -280,7 +287,7 @@
<javac
encoding="${build.encoding}"
srcdir="${test.src.dir}"
- includes="org/apache/hadoop/**/*.java"
+ includes="org/apache/**/hive/**/*.java"
excludes="**/TestSerDe.java"
destdir="${test.build.classes}"
debug="${javac.debug}"
@@ -295,7 +302,7 @@
<javac
encoding="${build.encoding}"
srcdir="${test.build.src}"
- includes="org/apache/hadoop/**/*.java"
+ includes="org/apache/**/hive/**/*.java"
destdir="${test.build.classes}"
debug="${javac.debug}"
optimize="${javac.optimize}"
@@ -352,6 +359,12 @@
</not>
</condition>
+ <condition property="disableserver" value="false">
+ <not>
+ <isset property="disableserver"/>
+ </not>
+ </condition>
+
<condition property="clustermode" value="">
<not>
<isset property="clustermode"/>
@@ -413,15 +426,18 @@
</then>
</if>
<junit showoutput="${test.output}" printsummary="yes" haltonfailure="no"
- fork="yes" maxmemory="512m" dir="${basedir}" timeout="${test.junit.timeout}"
+ fork="yes" maxmemory="${test.junit.maxmemory}" dir="${basedir}" timeout="${test.junit.timeout}"
errorProperty="tests.failed" failureProperty="tests.failed" filtertrace="off">
+ <jvmarg value="-XX:+HeapDumpOnOutOfMemoryError"/>
+ <jvmarg value="-XX:HeapDumpPath=${hive.root}"/>
<env key="LANG" value="${test.lang}"/>
<env key="HIVE_HADOOP_TEST_CLASSPATH" value="${hadoop.testcp}"/>
<env key="HADOOP_HOME" value="${hadoop.root}"/>
<env key="HADOOP_CLASSPATH" path="${test.src.data.dir}/conf:${build.dir.hive}/dist/lib/derby-${derby.version}.jar:${build.dir.hive}/dist/lib/JavaEWAH-${javaewah.version}.jar:${hadoop.root}/modules/*"/> <!-- Modules needed for Hadoop 0.23 -->
<env key="TZ" value="US/Pacific"/>
<sysproperty key="test.output.overwrite" value="${overwrite}"/>
<sysproperty key="test.service.standalone.server" value="${standalone}"/>
+ <sysproperty key="test.service.disable.server" value="${disableserver}"/>
<sysproperty key="log4j.configuration" value="file:///${test.src.data.dir}/conf/hive-log4j.properties"/>
<sysproperty key="derby.stream.error.file" value="${test.build.dir}/derby.log"/>
<sysproperty key="hive.aux.jars.path" value="file:///${test.build.dir}/test-udfs.jar"/>
@@ -440,9 +456,11 @@
<sysproperty key="build.dir.hive" value="${build.dir.hive}"/>
<sysproperty key="build.ivy.lib.dir" value="${build.ivy.lib.dir}"/>
<sysproperty key="derby.version" value="${derby.version}"/>
+ <sysproperty key="hive.root" value="${hive.root}"/>
<sysproperty key="hive.version" value="${version}"/>
<sysproperty key="java.net.preferIPv4Stack" value="${java.net.preferIPv4Stack}"/>
<sysproperty key="hadoop.bin.path" value="${test.hadoop.bin.path}${junit.script.extension}"/>
+ <sysproperty key="test.concurrency.num.threads" value="${test.concurrency.num.threads}"/>
<jvmarg line="${jvm.args}"/>
<classpath refid="test.local.classpath"/>
@@ -486,6 +504,23 @@
</condition>
</target>
+ <target name="thriftif" depends="check-thrift-home">
+ <echo message="Project: ${ant.project.name}"/>
+ <delete dir="${thrift.gen.dir}"/>
+ <mkdir dir="${thrift.gen.dir}"/>
+ <for param="thrift.file">
+ <path>
+ <fileset dir="." includes="if/*.thrift,if/test/*.thrift" />
+ </path>
+ <sequential>
+ <echo message="Generating Thrift code for @{thrift.file}"/>
+ <exec executable="${thrift.home}/bin/thrift" failonerror="true" dir=".">
+ <arg line="${thrift.args} -I ${basedir}/include -I ${basedir}/.. -o ${thrift.gen.dir} @{thrift.file} " />
+ </exec>
+ </sequential>
+ </for>
+ </target>
+
<target name="check-ivy" depends="ivy-init-settings">
<echo message="Project: ${ant.project.name}"/>
<available file="${basedir}/ivy.xml" property="ivy.present"/>
build.properties
@@ -72,8 +72,8 @@ jsp.test.jar=${hadoop.root}/lib/jetty-ext/jsp-api.jar
common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
# module names needed for build process
-iterate.hive.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,hwi,hbase-handler,pdk,builtins
-iterate.hive.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,hwi,hbase-handler,pdk,builtins
+iterate.hive.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,pdk,builtins,testutils
+iterate.hive.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,pdk,builtins,testutils
iterate.hive.tests=ql,contrib,hbase-handler,hwi,jdbc,metastore,odbc,serde,service
iterate.hive.thrift=ql,service,metastore,serde
iterate.hive.protobuf=ql
@@ -92,7 +92,16 @@ test.junit.timeout=43200000
# Use this property to selectively disable tests from the command line:
# ant test -Dtest.junit.exclude="**/TestCliDriver.class"
# ant test -Dtest.junit.exclude="**/Test*CliDriver.class,**/TestPartitions.class"
-test.junit.exclude=
+test.junit.exclude="**/TestBeeLineDriver.class, **/TestHiveServer2Concurrency.class"
+test.continue.on.failure=false
+
+test.submodule.exclude=
+test.junit.maxmemory=512m
+
+test.concurrency.num.threads=1
+#test.beelinepositive.exclude=add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rename.q,exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_overwrite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_column.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q
+
+
#
# Ivy Properties
@@ -108,7 +117,7 @@ ivy.changingPattern=.*SNAPSHOT
ivy.publish.pattern=[artifact]-[revision].[ext]
ivy.artifact.retrieve.pattern=[conf]/[artifact]-[revision](-[classifier]).[ext]
ivysettings.xml=${ivy.conf.dir}/ivysettings.xml
-ivyresolvelog=download-only
+ivyresolvelog=default
ivy.mvn.repo=http://repo2.maven.org/maven2
ivy_repo_url=${ivy.mvn.repo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar
hive.ivy.org=org.apache.hive
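The new test.concurrency.num.threads property defaults to 1 here and is forwarded to the test JVM as a system property by the build-common.xml change above. A hypothetical consumer, sketching how a concurrency test such as TestHiveServer2Concurrency might size its worker pool (the task body is invented for illustration):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ConcurrencyThreadsSketch {
  public static void main(String[] args) throws InterruptedException {
    // Falls back to 1, matching test.concurrency.num.threads=1 above.
    int numThreads = Integer.getInteger("test.concurrency.num.threads", 1);

    ExecutorService pool = Executors.newFixedThreadPool(numThreads);
    for (int i = 0; i < numThreads; i++) {
      final int id = i;
      // Placeholder work; a real test would run concurrent queries against HiveServer2.
      pool.submit(new Runnable() {
        public void run() {
          System.out.println("worker " + id + " running");
        }
      });
    }
    pool.shutdown();
    pool.awaitTermination(1, TimeUnit.MINUTES);
  }
}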