Skip to content

Commit

Permalink
HIVE-3126 : Generate & build the Velocity-based Hive tests on Windows…
Browse files Browse the repository at this point in the history
… by fixing the path issues (Kanna Karanam via Ashutosh Chauhan)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1365467 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information
ashutoshc committed Jul 25, 2012
1 parent d0c18c7 commit 39fbb41
Show file tree
Hide file tree
Showing 20 changed files with 526 additions and 170 deletions.
25 changes: 22 additions & 3 deletions ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;

import org.apache.commons.lang.StringUtils;
import org.apache.tools.ant.AntClassLoader;
Expand Down Expand Up @@ -259,6 +260,7 @@ public void execute() throws BuildException {
}

List<File> qFiles = new ArrayList<File>();
HashMap<String, String> qFilesMap = new HashMap<String, String>();
File outDir = null;
File resultsDir = null;
File logDir = null;
Expand All @@ -281,7 +283,7 @@ public void execute() throws BuildException {
} else if (queryFileRegex != null && !queryFileRegex.equals("")) {
qFiles.addAll(Arrays.asList(inpDir.listFiles(new QFileRegexFilter(queryFileRegex))));
} else if (runDisabled != null && runDisabled.equals("true")) {
qFiles.addAll(Arrays.asList(inpDir.listFiles(new DisabledQFileFilter())));
qFiles.addAll(Arrays.asList(inpDir.listFiles(new DisabledQFileFilter())));
} else {
qFiles.addAll(Arrays.asList(inpDir.listFiles(new QFileFilter())));
}
Expand All @@ -298,6 +300,9 @@ public void execute() throws BuildException {
}

Collections.sort(qFiles);
for (File qFile : qFiles) {
qFilesMap.put(qFile.getName(), getEscapedCanonicalPath(qFile));
}

// Make sure the output directory exists, if it doesn't
// then create it.
Expand Down Expand Up @@ -348,8 +353,9 @@ public void execute() throws BuildException {
VelocityContext ctx = new VelocityContext();
ctx.put("className", className);
ctx.put("qfiles", qFiles);
ctx.put("resultsDir", resultsDir);
ctx.put("logDir", logDir);
ctx.put("qfilesMap", qFilesMap);
ctx.put("resultsDir", getEscapedCanonicalPath(resultsDir));
ctx.put("logDir", getEscapedCanonicalPath(logDir));
ctx.put("clusterMode", clusterMode);
ctx.put("hadoopVersion", hadoopVersion);

Expand All @@ -373,4 +379,17 @@ public void execute() throws BuildException {
throw new BuildException("Generation failed", e);
}
}

/**
 * Returns the canonical path of {@code file}, escaping backslashes when
 * running on Windows.
 *
 * <p>On Windows the canonical path of a local directory contains backslashes
 * (e.g. {@code C:\temp\etc\}). When that path is embedded directly in a
 * generated Java source file, the unescaped backslashes make the string
 * literal invalid and break compilation, so each backslash is doubled.
 *
 * @param file the file whose canonical path is wanted
 * @return the canonical path, with backslashes escaped on Windows
 * @throws IOException if the canonical path cannot be resolved
 */
private static String getEscapedCanonicalPath(File file) throws IOException {
  // Use Locale.ROOT so OS detection is not affected by locale-sensitive
  // case mapping (e.g. the Turkish dotted/dotless 'i' rules).
  if (System.getProperty("os.name").toLowerCase(Locale.ROOT).startsWith("win")) {
    return file.getCanonicalPath().replace("\\", "\\\\");
  }
  return file.getCanonicalPath();
}
}
24 changes: 19 additions & 5 deletions build-common.xml
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,8 @@

<loadproperties srcfile="${ivy.conf.dir}/libraries.properties"/>

<osfamily property="os.family"/>

<condition property="offline">
<istrue value="${is-offline}"/>
</condition>
Expand Down Expand Up @@ -366,7 +368,7 @@

<!-- target to run the tests -->
<target name="test"
depends="test-conditions,gen-test,compile-test,test-jar,test-init">
depends="test-conditions,gen-test,compile-test,test-jar,test-init">
<echo message="Project: ${ant.project.name}"/>
<property name="hadoop.testcp" refid="test.classpath"/>
<if>
Expand All @@ -378,6 +380,18 @@
<property name="hadoop.opts" value="${hadoop.opts.20}" />
</else>
</if>
<!-- Set the OS-specific settings to run JUnit tests on Unix as well as on Windows -->
<if>
<equals arg1="windows" arg2="${os.family}"/>
<then>
<property name="junit.script.extension" value=".cmd"/>
<property name="junit.file.schema" value=""/>
</then>
<else>
<property name="junit.script.extension" value=""/>
<property name="junit.file.schema" value="file://"/>
</else>
</if>
<if>
<equals arg1="${test.print.classpath}" arg2="true" />
<then>
Expand All @@ -390,13 +404,13 @@
<env key="LANG" value="${test.lang}"/>
<env key="HIVE_HADOOP_TEST_CLASSPATH" value="${hadoop.testcp}"/>
<env key="HADOOP_HOME" value="${hadoop.root}"/>
<env key="HADOOP_CLASSPATH" value="${test.src.data.dir}/conf:${build.dir.hive}/dist/lib/derby-${derby.version}.jar:${build.dir.hive}/dist/lib/JavaEWAH-${javaewah.version}.jar:${hadoop.root}/modules/*"/> <!-- Modules needed for Hadoop 0.23 -->
<env key="HADOOP_CLASSPATH" path="${test.src.data.dir}/conf:${build.dir.hive}/dist/lib/derby-${derby.version}.jar:${build.dir.hive}/dist/lib/JavaEWAH-${javaewah.version}.jar:${hadoop.root}/modules/*"/> <!-- Modules needed for Hadoop 0.23 -->
<env key="TZ" value="US/Pacific"/>
<sysproperty key="test.output.overwrite" value="${overwrite}"/>
<sysproperty key="test.service.standalone.server" value="${standalone}"/>
<sysproperty key="log4j.configuration" value="file://${test.src.data.dir}/conf/hive-log4j.properties"/>
<sysproperty key="log4j.configuration" value="${junit.file.schema}${test.src.data.dir}/conf/hive-log4j.properties"/>
<sysproperty key="derby.stream.error.file" value="${test.build.dir}/derby.log"/>
<sysproperty key="hive.aux.jars.path" value="file://${test.build.dir}/test-udfs.jar"/>
<sysproperty key="hive.aux.jars.path" value="${junit.file.schema}${test.build.dir}/test-udfs.jar"/>
<sysproperty key="ql.test.query.clientpositive.dir" value="${ql.test.query.clientpositive.dir}"/>
<sysproperty key="ql.test.results.clientpositive.dir" value="${ql.test.results.clientpositive.dir}"/>
<sysproperty key="test.log.dir" value="${test.log.dir}"/>
Expand All @@ -413,7 +427,7 @@
<sysproperty key="build.ivy.lib.dir" value="${build.ivy.lib.dir}"/>
<sysproperty key="derby.version" value="${derby.version}"/>
<sysproperty key="hive.version" value="${version}"/>
<sysproperty key="hadoop.bin.path" value="${test.hadoop.bin.path}"/>
<sysproperty key="hadoop.bin.path" value="${test.hadoop.bin.path}${junit.script.extension}"/>

<classpath refid="test.local.classpath"/>
<formatter type="${test.junit.output.format}" usefile="${test.junit.output.usefile}" />
Expand Down
3 changes: 0 additions & 3 deletions build.xml
Original file line number Diff line number Diff line change
Expand Up @@ -532,9 +532,6 @@
<fileset dir="${target.bin.dir}"/>
</chmod>

<!-- create symlinks for libthrift.jar, libfb303.jar, etc. for backward compatibility -->
<symlink overwrite="true" link="${target.lib.dir}/libthrift.jar" resource="libthrift-${libthrift.version}.jar"/>
<symlink overwrite="true" link="${target.lib.dir}/libfb303.jar" resource="libfb303-${libfb303.version}.jar"/>
<symlink overwrite="true" link="${target.lib.dir}/hive_contrib.jar" resource="hive-contrib-${version}.jar"/>
<!-- special case because builtins compilation depends on packaging
up everything else first -->
Expand Down
6 changes: 5 additions & 1 deletion common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;

/**
* Hive Configuration.
Expand Down Expand Up @@ -165,6 +166,7 @@ public static enum ConfVars {
SCRIPTWRAPPER("hive.exec.script.wrapper", null),
PLAN("hive.exec.plan", ""),
SCRATCHDIR("hive.exec.scratchdir", "/tmp/hive-" + System.getProperty("user.name")),
LOCALSCRATCHDIR("hive.exec.local.scratchdir", "/tmp/" + System.getProperty("user.name")),
SUBMITVIACHILD("hive.exec.submitviachild", false),
SCRIPTERRORLIMIT("hive.exec.script.maxerrsize", 100000),
ALLOWPARTIALCONSUMP("hive.exec.script.allow.partial.consumption", false),
Expand Down Expand Up @@ -692,8 +694,10 @@ private static String findHadoopBinary() {
val = System.getenv("HADOOP_PREFIX");
}
// and if all else fails we can at least try /usr/bin/hadoop
return (val == null ? File.separator + "usr" : val)
val = (val == null ? File.separator + "usr" : val)
+ File.separator + "bin" + File.separator + "hadoop";
// On Windows, launch the "hadoop.cmd" command file instead.
return val + (Shell.WINDOWS ? ".cmd" : "");
}

enum VarType {
Expand Down
6 changes: 6 additions & 0 deletions conf/hive-default.xml.template
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,12 @@
<description>Scratch space for Hive jobs</description>
</property>

<property>
<name>hive.exec.local.scratchdir</name>
<value>/tmp/${user.name}</value>
<description>Local scratch space for Hive jobs</description>
</property>

<property>
<name>hive.test.mode</name>
<value>false</value>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@

import junit.framework.TestCase;

import org.apache.hadoop.util.Shell;

/**
* TestGenericMR.
*
Expand Down Expand Up @@ -61,8 +63,7 @@ public void testIdentityMap() throws Exception {
final StringWriter out = new StringWriter();

new GenericMR().map(new StringReader(in), out, identityMapper());

assertEquals(in + "\n", out.toString());
assertEquals(in + "\n", getOsSpecificOutput(out.toString()));
}

public void testKVSplitMap() throws Exception {
Expand All @@ -79,7 +80,7 @@ public void map(String[] record, Output output) throws Exception {
}
});

assertEquals(expected, out.toString());
assertEquals(expected, getOsSpecificOutput(out.toString()));
}

public void testIdentityReduce() throws Exception {
Expand All @@ -88,7 +89,7 @@ public void testIdentityReduce() throws Exception {

new GenericMR().reduce(new StringReader(in), out, identityReducer());

assertEquals(in + "\n", out.toString());
assertEquals(in + "\n", getOsSpecificOutput(out.toString()));
}

public void testWordCountReduce() throws Exception {
Expand All @@ -111,7 +112,7 @@ public void reduce(String key, Iterator<String[]> records, Output output)

final String expected = "hello\t3\nokay\t12\n";

assertEquals(expected, out.toString());
assertEquals(expected, getOsSpecificOutput(out.toString()));
}

private Mapper identityMapper() {
Expand All @@ -134,4 +135,9 @@ public void reduce(String key, Iterator<String[]> records, Output output)
}
};
}

/**
 * Normalizes captured test output so the same assertions pass on every OS.
 *
 * <p>On Windows the captured output may use CRLF line endings while the
 * expected strings in these tests use "\n" only, so carriage returns are
 * stripped there; on other platforms the string is returned unchanged.
 *
 * @param outStr raw output captured from the map/reduce run; must not be null
 * @return outStr with carriage returns removed on Windows, unchanged otherwise
 */
private static String getOsSpecificOutput(String outStr) {
  assert outStr != null;
  // String.replace is sufficient for a fixed literal; replaceAll would
  // needlessly compile "\\r" as a regular expression.
  return Shell.WINDOWS ? outStr.replace("\r", "") : outStr;
}
}
6 changes: 6 additions & 0 deletions data/conf/hive-site.xml
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,12 @@
<description>Scratch space for Hive jobs</description>
</property>

<property>
<name>hive.exec.local.scratchdir</name>
<value>${build.dir}/scratchdir/local/</value>
<description>Local scratch space for Hive jobs</description>
</property>

<property>
<name>javax.jdo.option.ConnectionURL</name>
<!-- note: variable substituion not working here because it's loaded by jdo, not Hive -->
Expand Down
7 changes: 2 additions & 5 deletions hbase-handler/src/test/templates/TestHBaseCliDriver.vm
Original file line number Diff line number Diff line change
Expand Up @@ -56,13 +56,10 @@ public class $className extends TestCase {
if ("$clusterMode".equals("miniMR")) {
miniMR = true;
}

qt = new HBaseQTestUtil(
"$resultsDir.getCanonicalPath()",
"$logDir.getCanonicalPath()", miniMR, setup);
qt = new HBaseQTestUtil("$resultsDir", "$logDir", miniMR, setup);

#foreach ($qf in $qfiles)
qt.addFile("$qf.getCanonicalPath()");
qt.addFile("$qfilesMap.get($qf.getName())");
#end
} catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,12 +40,10 @@ public class $className extends TestCase {
miniMR = true;
}

qt = new HBaseQTestUtil(
"$resultsDir.getCanonicalPath()",
"$logDir.getCanonicalPath()", miniMR, setup);
qt = new HBaseQTestUtil("$resultsDir", "$logDir", miniMR, setup);

#foreach ($qf in $qfiles)
qt.addFile("$qf.getCanonicalPath()");
qt.addFile("$qfilesMap.get($qf.getName())");
#end
} catch (Exception e) {
System.out.println("Exception: " + e.getMessage());
Expand Down
28 changes: 20 additions & 8 deletions odbc/build.xml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,21 @@
<property name="make.cmd" value="make"/>
<import file="../build-common.xml"/>

<!-- Skip the Makefile execution on Windows -->
<condition property="execute.makefile">
<not>
<equals arg1="windows" arg2="${os.family}"/>
</not>
</condition>

<!-- Only run tests if thrift.home is defined and not on Windows -->
<condition property="execute.tests">
<and>
<istrue value="${execute.makefile}"/>
<istrue value="${thrift.home.defined}"/>
</and>
</condition>

<target name="set-test-classpath">
<path id="test.classpath">
<pathelement location="${test.build.classes}" />
Expand All @@ -49,7 +64,7 @@
</condition>
</target>

<target name="compile-cpp" depends="init,check-word-size">
<target name="compile-cpp" depends="init,check-word-size" if="execute.makefile">
<echo message="Project: ${ant.project.name}"/>
<exec dir="." executable="${make.cmd}" failonerror="true">
<env key="WORD_SIZE" value="${word.size}"/>
Expand All @@ -63,7 +78,7 @@
<copy file="${basedir}/src/cpp/hiveconstants.h" todir="${build.dir.hive}/odbc/include"/>
</target>

<target name="clean">
<target name="clean" if="execute.makefile">
<echo message="Project: ${ant.project.name}"/>
<delete dir="${build.dir.hive}/odbc/include"/>
<exec dir="." executable="${make.cmd}" failonerror="true">
Expand All @@ -73,7 +88,7 @@
</exec>
</target>

<target name="install" depends="check-word-size">
<target name="install" depends="check-word-size" if="execute.makefile">
<echo message="Project: ${ant.project.name}"/>
<exec dir="." executable="${make.cmd}" failonerror="true">
<arg line="install"/>
Expand All @@ -84,7 +99,7 @@
</exec>
</target>

<target name="uninstall">
<target name="uninstall" if="execute.makefile">
<echo message="Project: ${ant.project.name}"/>
<exec dir="." executable="${make.cmd}" failonerror="true">
<arg line="uninstall"/>
Expand All @@ -94,7 +109,7 @@
</target>

<!-- Only run tests if thrift.home is defined so that we don't break other tests -->
<target name="test" depends="check-word-size,check-thrift-home,set-test-classpath" if="thrift.home.defined">
<target name="test" depends="check-word-size,check-thrift-home,set-test-classpath" if="execute.tests">
<echo message="Project: ${ant.project.name}"/>
<exec dir="." executable="${make.cmd}" failonerror="true">
<arg line="test"/>
Expand All @@ -104,7 +119,4 @@
<env key="BASE_DIR" value="${basedir}"/>
</exec>
</target>



</project>
9 changes: 3 additions & 6 deletions ql/src/java/org/apache/hadoop/hive/ql/Context.java
Original file line number Diff line number Diff line change
Expand Up @@ -99,16 +99,13 @@ public Context(Configuration conf, String executionId) {
this.conf = conf;
this.executionId = executionId;

// non-local tmp location is configurable. however it is the same across
// local & non-local tmp location is configurable. however it is the same across
// all external file systems
nonLocalScratchPath =
new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR),
executionId);

// local tmp location is not configurable for now
localScratchDir = System.getProperty("java.io.tmpdir")
+ Path.SEPARATOR + System.getProperty("user.name") + Path.SEPARATOR
+ executionId;
localScratchDir = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.LOCALSCRATCHDIR),
executionId).toUri().getPath();
}

/**
Expand Down
Loading

0 comments on commit 39fbb41

Please sign in to comment.