Permalink
Browse files

HIVE-3126 : Generate & build the velocity based Hive tests on windows by fixing the path issues (Kanna Karanam via Ashutosh Chauhan)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1365467 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information...
1 parent d0c18c7 commit 39fbb41e3e96858391646c0e20897e848616e8e2 @ashutoshc ashutoshc committed Jul 25, 2012
@@ -29,6 +29,7 @@
import java.util.List;
import java.util.ArrayList;
import java.util.regex.Pattern;
+import java.util.HashMap;
import org.apache.commons.lang.StringUtils;
import org.apache.tools.ant.AntClassLoader;
@@ -259,6 +260,7 @@ public void execute() throws BuildException {
}
List<File> qFiles = new ArrayList<File>();
+ HashMap<String, String> qFilesMap = new HashMap<String, String>();
File outDir = null;
File resultsDir = null;
File logDir = null;
@@ -281,7 +283,7 @@ public void execute() throws BuildException {
} else if (queryFileRegex != null && !queryFileRegex.equals("")) {
qFiles.addAll(Arrays.asList(inpDir.listFiles(new QFileRegexFilter(queryFileRegex))));
} else if (runDisabled != null && runDisabled.equals("true")) {
- qFiles.addAll(Arrays.asList(inpDir.listFiles(new DisabledQFileFilter())));
+ qFiles.addAll(Arrays.asList(inpDir.listFiles(new DisabledQFileFilter())));
} else {
qFiles.addAll(Arrays.asList(inpDir.listFiles(new QFileFilter())));
}
@@ -298,6 +300,9 @@ public void execute() throws BuildException {
}
Collections.sort(qFiles);
+ for (File qFile : qFiles) {
+ qFilesMap.put(qFile.getName(), getEscapedCanonicalPath(qFile));
+ }
// Make sure the output directory exists, if it doesn't
// then create it.
@@ -348,8 +353,9 @@ public void execute() throws BuildException {
VelocityContext ctx = new VelocityContext();
ctx.put("className", className);
ctx.put("qfiles", qFiles);
- ctx.put("resultsDir", resultsDir);
- ctx.put("logDir", logDir);
+ ctx.put("qfilesMap", qFilesMap);
+ ctx.put("resultsDir", getEscapedCanonicalPath(resultsDir));
+ ctx.put("logDir", getEscapedCanonicalPath(logDir));
ctx.put("clusterMode", clusterMode);
ctx.put("hadoopVersion", hadoopVersion);
@@ -373,4 +379,17 @@ public void execute() throws BuildException {
throw new BuildException("Generation failed", e);
}
}
+
+ private static String getEscapedCanonicalPath(File file) throws IOException {
+ if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
+ // Escape the backward slash in CanonicalPath if the unit test runs on windows
+ // e.g. dir.getCanonicalPath() gets the absolute path of local
+ // directory. When we embed it directly in the generated java class it results
+ // in compiler error in windows. Reason : the canonical path contains backward
+ // slashes "C:\temp\etc\" and it is not a valid string in Java
+ // unless we escape the backward slashes.
+ return file.getCanonicalPath().replace("\\", "\\\\");
+ }
+ return file.getCanonicalPath();
+ }
}
View
@@ -109,6 +109,8 @@
<loadproperties srcfile="${ivy.conf.dir}/libraries.properties"/>
+ <osfamily property="os.family"/>
+
<condition property="offline">
<istrue value="${is-offline}"/>
</condition>
@@ -366,7 +368,7 @@
<!-- target to run the tests -->
<target name="test"
- depends="test-conditions,gen-test,compile-test,test-jar,test-init">
+ depends="test-conditions,gen-test,compile-test,test-jar,test-init">
<echo message="Project: ${ant.project.name}"/>
<property name="hadoop.testcp" refid="test.classpath"/>
<if>
@@ -378,6 +380,18 @@
<property name="hadoop.opts" value="${hadoop.opts.20}" />
</else>
</if>
+ <!-- Set the OS specific settings to run junit tests on unix as well as on windows -->
+ <if>
+ <equals arg1="windows" arg2="${os.family}"/>
+ <then>
+ <property name="junit.script.extension" value=".cmd"/>
+ <property name="junit.file.schema" value=""/>
+ </then>
+ <else>
+ <property name="junit.script.extension" value=""/>
+ <property name="junit.file.schema" value="file://"/>
+ </else>
+ </if>
<if>
<equals arg1="${test.print.classpath}" arg2="true" />
<then>
@@ -390,13 +404,13 @@
<env key="LANG" value="${test.lang}"/>
<env key="HIVE_HADOOP_TEST_CLASSPATH" value="${hadoop.testcp}"/>
<env key="HADOOP_HOME" value="${hadoop.root}"/>
- <env key="HADOOP_CLASSPATH" value="${test.src.data.dir}/conf:${build.dir.hive}/dist/lib/derby-${derby.version}.jar:${build.dir.hive}/dist/lib/JavaEWAH-${javaewah.version}.jar:${hadoop.root}/modules/*"/> <!-- Modules needed for Hadoop 0.23 -->
+ <env key="HADOOP_CLASSPATH" path="${test.src.data.dir}/conf:${build.dir.hive}/dist/lib/derby-${derby.version}.jar:${build.dir.hive}/dist/lib/JavaEWAH-${javaewah.version}.jar:${hadoop.root}/modules/*"/> <!-- Modules needed for Hadoop 0.23 -->
<env key="TZ" value="US/Pacific"/>
<sysproperty key="test.output.overwrite" value="${overwrite}"/>
<sysproperty key="test.service.standalone.server" value="${standalone}"/>
- <sysproperty key="log4j.configuration" value="file://${test.src.data.dir}/conf/hive-log4j.properties"/>
+ <sysproperty key="log4j.configuration" value="${junit.file.schema}${test.src.data.dir}/conf/hive-log4j.properties"/>
<sysproperty key="derby.stream.error.file" value="${test.build.dir}/derby.log"/>
- <sysproperty key="hive.aux.jars.path" value="file://${test.build.dir}/test-udfs.jar"/>
+ <sysproperty key="hive.aux.jars.path" value="${junit.file.schema}${test.build.dir}/test-udfs.jar"/>
<sysproperty key="ql.test.query.clientpositive.dir" value="${ql.test.query.clientpositive.dir}"/>
<sysproperty key="ql.test.results.clientpositive.dir" value="${ql.test.results.clientpositive.dir}"/>
<sysproperty key="test.log.dir" value="${test.log.dir}"/>
@@ -413,7 +427,7 @@
<sysproperty key="build.ivy.lib.dir" value="${build.ivy.lib.dir}"/>
<sysproperty key="derby.version" value="${derby.version}"/>
<sysproperty key="hive.version" value="${version}"/>
- <sysproperty key="hadoop.bin.path" value="${test.hadoop.bin.path}"/>
+ <sysproperty key="hadoop.bin.path" value="${test.hadoop.bin.path}${junit.script.extension}"/>
<classpath refid="test.local.classpath"/>
<formatter type="${test.junit.output.format}" usefile="${test.junit.output.usefile}" />
View
@@ -532,9 +532,6 @@
<fileset dir="${target.bin.dir}"/>
</chmod>
- <!-- create symlinks for libthrift.jar, libfb303.jar, etc. for backward compatibility -->
- <symlink overwrite="true" link="${target.lib.dir}/libthrift.jar" resource="libthrift-${libthrift.version}.jar"/>
- <symlink overwrite="true" link="${target.lib.dir}/libfb303.jar" resource="libfb303-${libfb303.version}.jar"/>
<symlink overwrite="true" link="${target.lib.dir}/hive_contrib.jar" resource="hive-contrib-${version}.jar"/>
<!-- special case because builtins compilation depends on packaging
up everything else first -->
@@ -40,6 +40,7 @@
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.Shell;
/**
* Hive Configuration.
@@ -165,6 +166,7 @@
SCRIPTWRAPPER("hive.exec.script.wrapper", null),
PLAN("hive.exec.plan", ""),
SCRATCHDIR("hive.exec.scratchdir", "/tmp/hive-" + System.getProperty("user.name")),
+ LOCALSCRATCHDIR("hive.exec.local.scratchdir", "/tmp/" + System.getProperty("user.name")),
SUBMITVIACHILD("hive.exec.submitviachild", false),
SCRIPTERRORLIMIT("hive.exec.script.maxerrsize", 100000),
ALLOWPARTIALCONSUMP("hive.exec.script.allow.partial.consumption", false),
@@ -692,8 +694,10 @@ private static String findHadoopBinary() {
val = System.getenv("HADOOP_PREFIX");
}
// and if all else fails we can at least try /usr/bin/hadoop
- return (val == null ? File.separator + "usr" : val)
+ val = (val == null ? File.separator + "usr" : val)
+ File.separator + "bin" + File.separator + "hadoop";
+ // Launch hadoop command file on windows.
+ return val + (Shell.WINDOWS ? ".cmd" : "");
}
enum VarType {
@@ -69,6 +69,12 @@
</property>
<property>
+ <name>hive.exec.local.scratchdir</name>
+ <value>/tmp/${user.name}</value>
+ <description>Local scratch space for Hive jobs</description>
+</property>
+
+<property>
<name>hive.test.mode</name>
<value>false</value>
<description>whether hive is running in test mode. If yes, it turns on sampling and prefixes the output tablename</description>
@@ -24,6 +24,8 @@
import junit.framework.TestCase;
+import org.apache.hadoop.util.Shell;
+
/**
* TestGenericMR.
*
@@ -61,8 +63,7 @@ public void testIdentityMap() throws Exception {
final StringWriter out = new StringWriter();
new GenericMR().map(new StringReader(in), out, identityMapper());
-
- assertEquals(in + "\n", out.toString());
+ assertEquals(in + "\n", getOsSpecificOutput(out.toString()));
}
public void testKVSplitMap() throws Exception {
@@ -79,7 +80,7 @@ public void map(String[] record, Output output) throws Exception {
}
});
- assertEquals(expected, out.toString());
+ assertEquals(expected, getOsSpecificOutput(out.toString()));
}
public void testIdentityReduce() throws Exception {
@@ -88,7 +89,7 @@ public void testIdentityReduce() throws Exception {
new GenericMR().reduce(new StringReader(in), out, identityReducer());
- assertEquals(in + "\n", out.toString());
+ assertEquals(in + "\n", getOsSpecificOutput(out.toString()));
}
public void testWordCountReduce() throws Exception {
@@ -111,7 +112,7 @@ public void reduce(String key, Iterator<String[]> records, Output output)
final String expected = "hello\t3\nokay\t12\n";
- assertEquals(expected, out.toString());
+ assertEquals(expected, getOsSpecificOutput(out.toString()));
}
private Mapper identityMapper() {
@@ -134,4 +135,9 @@ public void reduce(String key, Iterator<String[]> records, Output output)
}
};
}
+
+ private static String getOsSpecificOutput(String outStr){
+ assert outStr != null;
+ return Shell.WINDOWS ? outStr.replaceAll("\\r", "") : outStr;
+ }
}
View
@@ -47,6 +47,12 @@
</property>
<property>
+ <name>hive.exec.local.scratchdir</name>
+ <value>${build.dir}/scratchdir/local/</value>
+ <description>Local scratch space for Hive jobs</description>
+</property>
+
+<property>
<name>javax.jdo.option.ConnectionURL</name>
<!-- note: variable substituion not working here because it's loaded by jdo, not Hive -->
<value>jdbc:derby:;databaseName=../build/test/junit_metastore_db;create=true</value>
@@ -56,13 +56,10 @@ public class $className extends TestCase {
if ("$clusterMode".equals("miniMR")) {
miniMR = true;
}
-
- qt = new HBaseQTestUtil(
- "$resultsDir.getCanonicalPath()",
- "$logDir.getCanonicalPath()", miniMR, setup);
+ qt = new HBaseQTestUtil("$resultsDir", "$logDir", miniMR, setup);
#foreach ($qf in $qfiles)
- qt.addFile("$qf.getCanonicalPath()");
+ qt.addFile("$qfilesMap.get($qf.getName())");
#end
} catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
@@ -40,12 +40,10 @@ public class $className extends TestCase {
miniMR = true;
}
- qt = new HBaseQTestUtil(
- "$resultsDir.getCanonicalPath()",
- "$logDir.getCanonicalPath()", miniMR, setup);
+ qt = new HBaseQTestUtil("$resultsDir", "$logDir", miniMR, setup);
#foreach ($qf in $qfiles)
- qt.addFile("$qf.getCanonicalPath()");
+ qt.addFile("$qfilesMap.get($qf.getName())");
#end
} catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
View
@@ -29,6 +29,21 @@
<property name="make.cmd" value="make"/>
<import file="../build-common.xml"/>
+ <!--Skip the Make file execution on windows-->
+ <condition property="execute.makefile">
+ <not>
+ <equals arg1="windows" arg2="${os.family}"/>
+ </not>
+ </condition>
+
+ <!-- Only run tests if thrift.home is defined and not on windows-->
+ <condition property="execute.tests">
+ <and>
+ <istrue value="${execute.makefile}"/>
+ <istrue value="${thrift.home.defined}"/>
+ </and>
+ </condition>
+
<target name="set-test-classpath">
<path id="test.classpath">
<pathelement location="${test.build.classes}" />
@@ -49,7 +64,7 @@
</condition>
</target>
- <target name="compile-cpp" depends="init,check-word-size">
+ <target name="compile-cpp" depends="init,check-word-size" if="execute.makefile">
<echo message="Project: ${ant.project.name}"/>
<exec dir="." executable="${make.cmd}" failonerror="true">
<env key="WORD_SIZE" value="${word.size}"/>
@@ -63,7 +78,7 @@
<copy file="${basedir}/src/cpp/hiveconstants.h" todir="${build.dir.hive}/odbc/include"/>
</target>
- <target name="clean">
+ <target name="clean" if="execute.makefile">
<echo message="Project: ${ant.project.name}"/>
<delete dir="${build.dir.hive}/odbc/include"/>
<exec dir="." executable="${make.cmd}" failonerror="true">
@@ -73,7 +88,7 @@
</exec>
</target>
- <target name="install" depends="check-word-size">
+ <target name="install" depends="check-word-size" if="execute.makefile">
<echo message="Project: ${ant.project.name}"/>
<exec dir="." executable="${make.cmd}" failonerror="true">
<arg line="install"/>
@@ -84,7 +99,7 @@
</exec>
</target>
- <target name="uninstall">
+ <target name="uninstall" if="execute.makefile">
<echo message="Project: ${ant.project.name}"/>
<exec dir="." executable="${make.cmd}" failonerror="true">
<arg line="uninstall"/>
@@ -94,7 +109,7 @@
</target>
<!-- Only run tests if thrift.home is defined so that we don't break other tests -->
- <target name="test" depends="check-word-size,check-thrift-home,set-test-classpath" if="thrift.home.defined">
+ <target name="test" depends="check-word-size,check-thrift-home,set-test-classpath" if="execute.tests">
<echo message="Project: ${ant.project.name}"/>
<exec dir="." executable="${make.cmd}" failonerror="true">
<arg line="test"/>
@@ -104,7 +119,4 @@
<env key="BASE_DIR" value="${basedir}"/>
</exec>
</target>
-
-
-
</project>
@@ -99,16 +99,13 @@ public Context(Configuration conf, String executionId) {
this.conf = conf;
this.executionId = executionId;
- // non-local tmp location is configurable. however it is the same across
+ // local & non-local tmp location is configurable. however it is the same across
// all external file systems
nonLocalScratchPath =
new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR),
executionId);
-
- // local tmp location is not configurable for now
- localScratchDir = System.getProperty("java.io.tmpdir")
- + Path.SEPARATOR + System.getProperty("user.name") + Path.SEPARATOR
- + executionId;
+ localScratchDir = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.LOCALSCRATCHDIR),
+ executionId).toUri().getPath();
}
/**
Oops, something went wrong.

0 comments on commit 39fbb41

Please sign in to comment.