Skip to content
This repository has been archived by the owner on Jul 15, 2019. It is now read-only.

Commit

Permalink
HIVE-204. Provide option to run tests with MiniMRCluster.
Browse files Browse the repository at this point in the history
(Namit Jain via athusoo)



git-svn-id: https://svn.apache.org/repos/asf/hadoop/hive/trunk@779275 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information
Ashish Thusoo committed May 27, 2009
1 parent 6286673 commit fdf7a47
Show file tree
Hide file tree
Showing 8 changed files with 153 additions and 9 deletions.
3 changes: 3 additions & 0 deletions CHANGES.txt
Expand Up @@ -47,6 +47,9 @@ Trunk - Unreleased
HIVE-502. Optimize RCFile's performance by removing all synchronized
modifiers. (Yongqiang He via zshao)

HIVE-204. Provide option to run tests with MiniMRCluster.
(Namit Jain via athusoo)

OPTIMIZATIONS

HIVE-279. Predicate Pushdown support (Prasad Chakka via athusoo).
Expand Down
25 changes: 22 additions & 3 deletions ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
Expand Up @@ -56,7 +56,7 @@ public boolean accept(File fpath) {
protected String queryDirectory;

protected String queryFile;

protected String resultsDirectory;

protected String logDirectory;
Expand All @@ -67,6 +67,16 @@ public boolean accept(File fpath) {

protected String logFile;

protected String clusterMode;

/**
 * Sets the cluster mode used when generating tests (e.g. "miniMR").
 * The value is later exposed to the Velocity template under the key
 * "clusterMode" during execute().
 *
 * @param clusterMode the cluster mode string; may be null or empty
 */
public void setClusterMode(String clusterMode) {
this.clusterMode = clusterMode;
}

/**
 * Returns the cluster mode previously set via {@link #setClusterMode(String)}.
 *
 * @return the cluster mode, or null if it was never set
 */
public String getClusterMode() {
return clusterMode;
}

/**
 * Sets the path of the log file used by this task.
 *
 * @param logFile path to the log file
 */
public void setLogFile(String logFile) {
this.logFile = logFile;
}
Expand Down Expand Up @@ -198,8 +208,13 @@ public void execute() throws BuildException {
}

if (queryFile != null && !queryFile.equals("")) {
qFiles = new File[1];
qFiles[0] = inpDir != null ? new File(inpDir, queryFile) : new File(queryFile);
// The user may have passed a list of files - comma separated
String[] queryFiles = queryFile.split(",");
qFiles = new File[queryFiles.length];

for (int i = 0; i < queryFiles.length; i++) {
qFiles[i] = inpDir != null ? new File(inpDir, queryFiles[i]) : new File(queryFiles[i]);
}
}
else {
qFiles = inpDir.listFiles(new QFileFilter());
Expand Down Expand Up @@ -245,12 +260,16 @@ public void execute() throws BuildException {
ve.init();
Template t = ve.getTemplate(template);

if (clusterMode == null)
clusterMode = new String("");

// For each of the qFiles generate the test
VelocityContext ctx = new VelocityContext();
ctx.put("className", className);
ctx.put("qfiles", qFiles);
ctx.put("resultsDir", resultsDir);
ctx.put("logDir", logDir);
ctx.put("clusterMode", clusterMode);

File outFile = new File(outDir, className + ".java");
FileWriter writer = new FileWriter(outFile);
Expand Down
24 changes: 23 additions & 1 deletion build-common.xml
Expand Up @@ -57,6 +57,7 @@
<property name="test.timeout" value="5400000"/>
<property name="test.junit.output.format" value="xml"/>
<property name="test.junit.output.usefile" value="true"/>
<property name="minimr.query.files" value="join1.q,groupby1.q"/>
<property name="test.silent" value="true"/>

<path id="test.classpath">
Expand Down Expand Up @@ -205,21 +206,41 @@
</target>

<target name="test-conditions">

<condition property="qfile" value="${minimr.query.files}">
<and>
<not>
<isset property="qfile"/>
</not>

<equals arg1="${clustermode}" arg2="miniMR" />
</and>
</condition>

<condition property="qfile" value="">
<not>
<isset property="qfile"/>
</not>
</condition>

<condition property="overwrite" value="false">
<not>
<isset property="overwrite"/>
</not>
</condition>

<condition property="standalone" value="false">
<not>
<isset property="standalone"/>
</not>
</condition>

<condition property="clustermode" value="">
<not>
<isset property="clustermode"/>
</not>
</condition>

</target>

<!-- target to deploy anttasks -->
Expand Down Expand Up @@ -253,10 +274,11 @@
<sysproperty key="test.service.standalone.server" value="${standalone}"/>
<sysproperty key="log4j.configuration" value="file://${test.data.dir}/conf/hive-log4j.properties"/>
<sysproperty key="derby.stream.error.file" value="${test.build.dir}/derby.log"/>
<sysproperty key="hive.aux.jars.path" value="${test.build.dir}/test-udfs.jar,${test.src.data.dir}/files/TestSerDe.jar"/>
<sysproperty key="hive.aux.jars.path" value="file://${test.build.dir}/test-udfs.jar,file://${test.src.data.dir}/files/TestSerDe.jar"/>
<sysproperty key="ql.test.query.clientpositive.dir" value="${ql.test.query.clientpositive.dir}"/>
<sysproperty key="ql.test.results.clientpositive.dir" value="${ql.test.results.clientpositive.dir}"/>
<sysproperty key="test.log.dir" value="${test.log.dir}"/>
<sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
<sysproperty key="test.silent" value="${test.silent}"/>

<classpath refid="${test.classpath.id}"/>
Expand Down
7 changes: 7 additions & 0 deletions build.properties
Expand Up @@ -12,3 +12,10 @@ build.dir.hadoop=${build.dir.hive}/hadoopcore
hadoop.root.default=${build.dir.hadoop}/hadoop-${hadoop.version}
hadoop.root=${hadoop.root.default}
hadoop.jar=${hadoop.root}/hadoop-${hadoop.version}-core.jar
hadoop.test.jar=${hadoop.root}/hadoop-${hadoop.version}-test.jar
jetty.test.jar=${hadoop.root}/lib/jetty-5.1.4.jar
servlet.test.jar=${hadoop.root}/lib/servlet-api.jar
jasper.test.jar=${hadoop.root}/lib/jetty-ext/jasper-runtime.jar
jasperc.test.jar=${hadoop.root}/lib/jetty-ext/jasper-compiler.jar
jsp.test.jar=${hadoop.root}/lib/jetty-ext/jsp-api.jar
common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
8 changes: 8 additions & 0 deletions ql/build.xml
Expand Up @@ -38,6 +38,13 @@
<pathelement location="${test.data.dir}/conf"/>
<pathelement location="${hive.conf.dir}"/>
<pathelement location="${hive.root}/cli/lib/jline-0.9.94.jar"/>
<pathelement location="${hadoop.test.jar}"/>
<pathelement location="${jetty.test.jar}"/>
<pathelement location="${servlet.test.jar}"/>
<pathelement location="${jasper.test.jar}"/>
<pathelement location="${jasperc.test.jar}"/>
<pathelement location="${jsp.test.jar}"/>
<pathelement location="${common.jar}"/>
<fileset dir="${hive.root}" includes="testlibs/*.jar"/>
<fileset dir="${hadoop.root}/lib" includes="*.jar"/>
<path refid="classpath"/>
Expand Down Expand Up @@ -68,6 +75,7 @@
templatePath="${ql.test.template.dir}" template="TestCliDriver.vm"
queryDirectory="${ql.test.query.clientpositive.dir}"
queryFile="${qfile}"
clusterMode="${clustermode}"
resultsDirectory="${ql.test.results.clientpositive.dir}" className="TestCliDriver"
logFile="${test.log.dir}/testclidrivergen.log"
logDirectory="${test.log.dir}/clientpositive"/>
Expand Down
10 changes: 8 additions & 2 deletions ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
Expand Up @@ -273,13 +273,19 @@ protected void setNumberOfReducers() throws IOException {
* @param newPaths
* Array of classpath elements
*/
private static void addToClassPath(String[] newPaths) throws Exception {
private static void addToClassPath(String[] newPaths, boolean local) throws Exception {
Thread curThread = Thread.currentThread();
URLClassLoader loader = (URLClassLoader) curThread.getContextClassLoader();
List<URL> curPath = Arrays.asList(loader.getURLs());
ArrayList<URL> newPath = new ArrayList<URL>();

for (String onestr : newPaths) {
// special processing for hadoop-17. file:// needs to be removed
if (local) {
if (StringUtils.indexOf(onestr, "file://") == 0)
onestr = StringUtils.substring(onestr, 7);
}

URL oneurl = (new File(onestr)).toURL();
if (!curPath.contains(oneurl)) {
newPath.add(oneurl);
Expand Down Expand Up @@ -541,7 +547,7 @@ public static void main(String[] args) throws IOException, HiveException {
String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
if (StringUtils.isNotBlank(auxJars)) {
try {
addToClassPath(StringUtils.split(auxJars, ","));
addToClassPath(StringUtils.split(auxJars, ","), true);
} catch (Exception e) {
throw new HiveException(e.getMessage(), e);
}
Expand Down
66 changes: 64 additions & 2 deletions ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
Expand Up @@ -34,10 +34,13 @@
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.lang.reflect.Method;
import java.lang.reflect.Constructor;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.cli.CliDriver;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
Expand All @@ -59,6 +62,7 @@
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.MiniMRCluster;

import com.facebook.thrift.protocol.TBinaryProtocol;

Expand All @@ -81,6 +85,10 @@ public class QTestUtil {
private FileSystem fs;
private boolean overWrite;
private CliDriver cliDriver;
private MiniMRCluster mr = null;
private Object dfs = null;
private boolean miniMr = false;
private Class<?> dfsClass = null;

public boolean deleteDirectory(File path) {
if (path.exists()) {
Expand Down Expand Up @@ -155,9 +163,50 @@ public void normalizeNames(File path) throws Exception {
}

/**
 * Creates a QTestUtil running against the local (non-miniMR) setup.
 * Delegates to the three-argument constructor with miniMr = false.
 *
 * @param outDir directory of expected query results
 * @param logDir directory where test logs are written
 * @throws Exception if test environment initialization fails
 */
public QTestUtil(String outDir, String logDir) throws Exception {
this(outDir, logDir, false);
}

public QTestUtil(String outDir, String logDir, boolean miniMr) throws Exception {
this.outDir = outDir;
this.logDir = logDir;
conf = new HiveConf(Driver.class);
this.miniMr = miniMr;
qMap = new TreeMap<String, String>();

if (miniMr) {
dfsClass = null;

// The path for MiniDFSCluster has changed, so look in both 17 and 19
// In hadoop 17, the path is org.apache.hadoop.dfs.MiniDFSCluster, whereas
// it is org.apache.hadoop.hdfs.MiniDFSCluster in hadoop 19. Due to this anomaly,
// use reflection to invoke the methods.
try {
dfsClass = Class.forName("org.apache.hadoop.dfs.MiniDFSCluster");
} catch (ClassNotFoundException e) {
dfsClass = null;
}

if (dfsClass == null) {
dfsClass = Class.forName("org.apache.hadoop.hdfs.MiniDFSCluster");
}

Constructor<?> dfsCons =
dfsClass.getDeclaredConstructor(new Class<?>[] {Configuration.class, Integer.TYPE,
Boolean.TYPE, (new String[] {}).getClass()});

dfs = dfsCons.newInstance(conf, 4, true, null);
Method m = dfsClass.getDeclaredMethod("getFileSystem", new Class[]{});
FileSystem fs = (FileSystem)m.invoke(dfs, new Object[] {});

mr = new MiniMRCluster(4, fs.getUri().toString(), 1);

// hive.metastore.warehouse.dir needs to be set relative to the jobtracker
String fsName = conf.get("fs.default.name");
assert fsName != null;
conf.set("hive.metastore.warehouse.dir", fsName.concat("/build/ql/test/data/warehouse/"));

conf.set("mapred.job.tracker", "localhost:" + mr.getJobTrackerPort());
}

// System.out.println(conf.toString());
testFiles = conf.get("test.data.files").replace('\\', '/').replace("c:", "");
Expand All @@ -168,12 +217,25 @@ public QTestUtil(String outDir, String logDir) throws Exception {
overWrite = true;
}

qMap = new TreeMap<String, String>();
srcTables = new LinkedList<String>();
init();
}

/**
 * Shuts down the test environment: cleans up test state and, when running
 * in miniMR mode, stops the MiniDFSCluster (invoked via reflection) and
 * the MiniMRCluster that were started by the constructor.
 *
 * @throws Exception if cleanup or cluster shutdown fails
 */
public void shutdown() throws Exception {
cleanUp();

// The DFS cluster class is only known via reflection (its package moved
// between hadoop 17 and 19), so its shutdown() must be invoked reflectively.
// Fields are nulled afterwards so a repeated shutdown() is a no-op.
if (dfs != null) {
Method m = dfsClass.getDeclaredMethod("shutdown", new Class[]{});
m.invoke(dfs, new Object[]{});
dfs = null;
dfsClass = null;
}

if (mr != null) {
mr.shutdown();
mr = null;
}
}

public void addFile(String qFile) throws Exception {

Expand Down Expand Up @@ -367,7 +429,7 @@ public void cliInit(String tname) throws Exception {
}

public void cliInit(String tname, boolean recreate) throws Exception {
if(recreate) {
if (miniMr || recreate) {
cleanUp();
createSources();
}
Expand Down
19 changes: 18 additions & 1 deletion ql/src/test/templates/TestCliDriver.vm
Expand Up @@ -32,7 +32,11 @@ public class $className extends TestCase {
@Override
protected void setUp() {
try {
qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()");
boolean miniMR = false;
if ("$clusterMode".equals("miniMR"))
miniMR = true;

qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()", miniMR);

#foreach ($qf in $qfiles)
qt.addFile("$qf.getCanonicalPath()");
Expand All @@ -46,6 +50,19 @@ public class $className extends TestCase {
}
}

@Override
protected void tearDown() {
try {
// Stop the (possibly miniMR) test environment created in setUp().
qt.shutdown();
}
catch (Exception e) {
// Mirrors setUp()'s error handling: report the exception and fail the test.
System.out.println("Exception: " + e.getMessage());
e.printStackTrace();
System.out.flush();
fail("Unexpected exception in tearDown");
}
}

public static Test suite() {
TestSuite suite = new TestSuite();
#foreach ($qf in $qfiles)
Expand Down

0 comments on commit fdf7a47

Please sign in to comment.