diff --git a/CHANGES.txt b/CHANGES.txt
index be827a57..f1c90ce9 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -47,6 +47,9 @@ Trunk - Unreleased
HIVE-502. Optimize RCFile's performance by removeing all synchronized
modifiers. (Yongqiang He via zshao)
+ HIVE-204. Provide option to run tests with MiniMRCluster.
+ (Namit Jain via athusoo)
+
OPTIMIZATIONS
HIVE-279. Predicate Pushdown support (Prasad Chakka via athusoo).
diff --git a/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java b/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
index 7ae9e516..66ac8a59 100644
--- a/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
+++ b/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
@@ -56,7 +56,7 @@ public boolean accept(File fpath) {
protected String queryDirectory;
protected String queryFile;
-
+
protected String resultsDirectory;
protected String logDirectory;
@@ -67,6 +67,16 @@ public boolean accept(File fpath) {
protected String logFile;
+ protected String clusterMode;
+
+ public void setClusterMode(String clusterMode) {
+ this.clusterMode = clusterMode;
+ }
+
+ public String getClusterMode() {
+ return clusterMode;
+ }
+
public void setLogFile(String logFile) {
this.logFile = logFile;
}
@@ -198,8 +208,13 @@ public void execute() throws BuildException {
}
if (queryFile != null && !queryFile.equals("")) {
- qFiles = new File[1];
- qFiles[0] = inpDir != null ? new File(inpDir, queryFile) : new File(queryFile);
+ // The user may have passed a list of files - comma separated
+ String[] queryFiles = queryFile.split(",");
+ qFiles = new File[queryFiles.length];
+
+ for (int i = 0; i < queryFiles.length; i++) {
+ qFiles[i] = inpDir != null ? new File(inpDir, queryFiles[i]) : new File(queryFiles[i]);
+ }
}
else {
qFiles = inpDir.listFiles(new QFileFilter());
@@ -245,12 +260,16 @@ public void execute() throws BuildException {
ve.init();
Template t = ve.getTemplate(template);
+ if (clusterMode == null)
+ clusterMode = new String("");
+
// For each of the qFiles generate the test
VelocityContext ctx = new VelocityContext();
ctx.put("className", className);
ctx.put("qfiles", qFiles);
ctx.put("resultsDir", resultsDir);
ctx.put("logDir", logDir);
+ ctx.put("clusterMode", clusterMode);
File outFile = new File(outDir, className + ".java");
FileWriter writer = new FileWriter(outFile);
diff --git a/build-common.xml b/build-common.xml
index 8b31609d..a31daa84 100644
--- a/build-common.xml
+++ b/build-common.xml
@@ -57,6 +57,7 @@
+
@@ -205,21 +206,41 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -253,10 +274,11 @@
-
+
+
diff --git a/build.properties b/build.properties
index a09886da..3f87a4be 100644
--- a/build.properties
+++ b/build.properties
@@ -12,3 +12,10 @@ build.dir.hadoop=${build.dir.hive}/hadoopcore
hadoop.root.default=${build.dir.hadoop}/hadoop-${hadoop.version}
hadoop.root=${hadoop.root.default}
hadoop.jar=${hadoop.root}/hadoop-${hadoop.version}-core.jar
+hadoop.test.jar=${hadoop.root}/hadoop-${hadoop.version}-test.jar
+jetty.test.jar=${hadoop.root}/lib/jetty-5.1.4.jar
+servlet.test.jar=${hadoop.root}/lib/servlet-api.jar
+jasper.test.jar=${hadoop.root}/lib/jetty-ext/jasper-runtime.jar
+jasperc.test.jar=${hadoop.root}/lib/jetty-ext/jasper-compiler.jar
+jsp.test.jar=${hadoop.root}/lib/jetty-ext/jsp-api.jar
+common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
diff --git a/ql/build.xml b/ql/build.xml
index 9e5a0e02..184fe8ee 100644
--- a/ql/build.xml
+++ b/ql/build.xml
@@ -38,6 +38,13 @@
+
+
+
+
+
+
+
@@ -68,6 +75,7 @@
templatePath="${ql.test.template.dir}" template="TestCliDriver.vm"
queryDirectory="${ql.test.query.clientpositive.dir}"
queryFile="${qfile}"
+ clusterMode="${clustermode}"
resultsDirectory="${ql.test.results.clientpositive.dir}" className="TestCliDriver"
logFile="${test.log.dir}/testclidrivergen.log"
logDirectory="${test.log.dir}/clientpositive"/>
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
index 4f0d8d9a..68de87e4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
@@ -273,13 +273,19 @@ protected void setNumberOfReducers() throws IOException {
* @param newPaths
* Array of classpath elements
*/
- private static void addToClassPath(String[] newPaths) throws Exception {
+ private static void addToClassPath(String[] newPaths, boolean local) throws Exception {
Thread curThread = Thread.currentThread();
URLClassLoader loader = (URLClassLoader) curThread.getContextClassLoader();
List curPath = Arrays.asList(loader.getURLs());
ArrayList newPath = new ArrayList();
for (String onestr : newPaths) {
+ // special processing for hadoop-17. file:// needs to be removed
+ if (local) {
+ if (StringUtils.indexOf(onestr, "file://") == 0)
+ onestr = StringUtils.substring(onestr, 7);
+ }
+
URL oneurl = (new File(onestr)).toURL();
if (!curPath.contains(oneurl)) {
newPath.add(oneurl);
@@ -541,7 +547,7 @@ public static void main(String[] args) throws IOException, HiveException {
String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
if (StringUtils.isNotBlank(auxJars)) {
try {
- addToClassPath(StringUtils.split(auxJars, ","));
+ addToClassPath(StringUtils.split(auxJars, ","), true);
} catch (Exception e) {
throw new HiveException(e.getMessage(), e);
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
index 01fc7f20..625e69ae 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -34,10 +34,13 @@
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import java.lang.reflect.Method;
+import java.lang.reflect.Constructor;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.cli.CliDriver;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -59,6 +62,7 @@
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.mapred.MiniMRCluster;
import com.facebook.thrift.protocol.TBinaryProtocol;
@@ -81,6 +85,10 @@ public class QTestUtil {
private FileSystem fs;
private boolean overWrite;
private CliDriver cliDriver;
+ private MiniMRCluster mr = null;
+ private Object dfs = null;
+ private boolean miniMr = false;
+ private Class> dfsClass = null;
public boolean deleteDirectory(File path) {
if (path.exists()) {
@@ -155,9 +163,50 @@ public void normalizeNames(File path) throws Exception {
}
public QTestUtil(String outDir, String logDir) throws Exception {
+ this(outDir, logDir, false);
+ }
+
+ public QTestUtil(String outDir, String logDir, boolean miniMr) throws Exception {
this.outDir = outDir;
this.logDir = logDir;
conf = new HiveConf(Driver.class);
+ this.miniMr = miniMr;
+ qMap = new TreeMap();
+
+ if (miniMr) {
+ dfsClass = null;
+
+ // The path for MiniDFSCluster has changed, so look in both 17 and 19
+ // In hadoop 17, the path is org.apache.hadoop.dfs.MiniDFSCluster, whereas
+ // it is org.apache.hadoop.hdfs.MiniDFSCluster in hadoop 19. Due to this anomaly,
+ // use reflection to invoke the methods.
+ try {
+ dfsClass = Class.forName("org.apache.hadoop.dfs.MiniDFSCluster");
+ } catch (ClassNotFoundException e) {
+ dfsClass = null;
+ }
+
+ if (dfsClass == null) {
+ dfsClass = Class.forName("org.apache.hadoop.hdfs.MiniDFSCluster");
+ }
+
+ Constructor> dfsCons =
+ dfsClass.getDeclaredConstructor(new Class>[] {Configuration.class, Integer.TYPE,
+ Boolean.TYPE, (new String[] {}).getClass()});
+
+ dfs = dfsCons.newInstance(conf, 4, true, null);
+ Method m = dfsClass.getDeclaredMethod("getFileSystem", new Class[]{});
+ FileSystem fs = (FileSystem)m.invoke(dfs, new Object[] {});
+
+ mr = new MiniMRCluster(4, fs.getUri().toString(), 1);
+
+ // hive.metastore.warehouse.dir needs to be set relative to the jobtracker
+ String fsName = conf.get("fs.default.name");
+ assert fsName != null;
+ conf.set("hive.metastore.warehouse.dir", fsName.concat("/build/ql/test/data/warehouse/"));
+
+ conf.set("mapred.job.tracker", "localhost:" + mr.getJobTrackerPort());
+ }
// System.out.println(conf.toString());
testFiles = conf.get("test.data.files").replace('\\', '/').replace("c:", "");
@@ -168,12 +217,25 @@ public QTestUtil(String outDir, String logDir) throws Exception {
overWrite = true;
}
- qMap = new TreeMap();
srcTables = new LinkedList();
init();
}
+ public void shutdown() throws Exception {
+ cleanUp();
+ if (dfs != null) {
+ Method m = dfsClass.getDeclaredMethod("shutdown", new Class[]{});
+ m.invoke(dfs, new Object[]{});
+ dfs = null;
+ dfsClass = null;
+ }
+
+ if (mr != null) {
+ mr.shutdown();
+ mr = null;
+ }
+ }
public void addFile(String qFile) throws Exception {
@@ -367,7 +429,7 @@ public void cliInit(String tname) throws Exception {
}
public void cliInit(String tname, boolean recreate) throws Exception {
- if(recreate) {
+ if (miniMr || recreate) {
cleanUp();
createSources();
}
diff --git a/ql/src/test/templates/TestCliDriver.vm b/ql/src/test/templates/TestCliDriver.vm
index c7761211..9d065f91 100644
--- a/ql/src/test/templates/TestCliDriver.vm
+++ b/ql/src/test/templates/TestCliDriver.vm
@@ -32,7 +32,11 @@ public class $className extends TestCase {
@Override
protected void setUp() {
try {
- qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()");
+ boolean miniMR = false;
+ if ("$clusterMode".equals("miniMR"))
+ miniMR = true;
+
+ qt = new QTestUtil("$resultsDir.getCanonicalPath()", "$logDir.getCanonicalPath()", miniMR);
#foreach ($qf in $qfiles)
qt.addFile("$qf.getCanonicalPath()");
@@ -46,6 +50,19 @@ public class $className extends TestCase {
}
}
+ @Override
+ protected void tearDown() {
+ try {
+ qt.shutdown();
+ }
+ catch (Exception e) {
+ System.out.println("Exception: " + e.getMessage());
+ e.printStackTrace();
+ System.out.flush();
+ fail("Unexpected exception in tearDown");
+ }
+ }
+
public static Test suite() {
TestSuite suite = new TestSuite();
#foreach ($qf in $qfiles)