HIVE-10306 : We need to print tez summary when hive.server2.logging.level >= PERFORMANCE. (Hari Sankar Sivarama Subramaniyan via Thejas Nair)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1674388 13f79535-47bb-0310-9956-ffa450edef68
Thejas Nair committed Apr 17, 2015
1 parent e9b8d99 commit 19f0389
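
For orientation, a minimal client-side sketch of the behavior named in the commit message: with the HiveServer2 logging level at PERFORMANCE or above, the Tez summary should appear in the operation log that the JDBC driver exposes. This is not part of the diff below; the property key hive.server2.logging.level is copied verbatim from the commit title (the exact ConfVars name may differ in this branch), HiveStatement.getQueryLog() is assumed to be available as in contemporaneous Hive JDBC, and the connection details and table name are placeholders.

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.List;

import org.apache.hive.jdbc.HiveStatement;

public class TezSummaryLogSketch {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");

    // Hive config overrides can be passed after '?' in the JDBC URL; the key
    // below is taken from the commit title, not from a ConfVars constant.
    String url = "jdbc:hive2://localhost:10000/default"
        + "?hive.server2.logging.level=PERFORMANCE";
    Connection conn = DriverManager.getConnection(url, "hive", "");
    HiveStatement stmt = (HiveStatement) conn.createStatement();
    stmt.execute("select count(*) from src");   // placeholder query/table

    // At PERFORMANCE level the operation log should now include the Tez
    // summary for the query, alongside the usual progress output.
    List<String> operationLog = stmt.getQueryLog();
    for (String line : operationLog) {
      System.out.println(line);
    }

    stmt.close();
    conn.close();
  }
}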
Showing 12 changed files with 373 additions and 190 deletions.
@@ -35,6 +35,7 @@
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairSchedulerConfiguration;
import org.apache.hive.jdbc.miniHS2.MiniHS2;
import org.apache.hive.jdbc.miniHS2.MiniHS2.MiniClusterType;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -76,7 +77,7 @@ public static void beforeTest() throws Exception {
@Before
public void setUp() throws Exception {
DriverManager.setLoginTimeout(0);
miniHS2 = new MiniHS2(conf, true);
miniHS2 = new MiniHS2(conf, MiniClusterType.MR);
miniHS2.setConfProperty(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "false");
miniHS2.setConfProperty(HiveConf.ConfVars.HIVE_SERVER2_MAP_FAIR_SCHEDULER_QUEUE.varname,
"true");
67 changes: 67 additions & 0 deletions itests/hive-unit/pom.xml
@@ -254,6 +254,73 @@
<artifactId>jersey-servlet</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-archives</artifactId>
<version>${hadoop-23.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop-23.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-hs</artifactId>
<version>${hadoop-23.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>${commons-logging.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-server-tests</artifactId>
<version>${hadoop-23.version}</version>
<scope>test</scope>
<classifier>tests</classifier>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-client</artifactId>
<version>${hadoop-23.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.tez</groupId>
<artifactId>tez-tests</artifactId>
<version>${tez.version}</version>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.apache.tez</groupId>
<artifactId>tez-api</artifactId>
<version>${tez.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.tez</groupId>
<artifactId>tez-runtime-library</artifactId>
<version>${tez.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.tez</groupId>
<artifactId>tez-mapreduce</artifactId>
<version>${tez.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.tez</groupId>
<artifactId>tez-dag</artifactId>
<version>${tez.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</profile>
</profiles>
@@ -32,6 +32,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.ql.WindowsPathUtil;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
import org.apache.hadoop.hive.shims.HadoopShims.MiniMrShim;
@@ -59,15 +60,20 @@ public class MiniHS2 extends AbstractHiveService {
private MiniMrShim mr;
private MiniDFSShim dfs;
private FileSystem localFS;
private boolean useMiniMR = false;
private boolean useMiniKdc = false;
private final String serverPrincipal;
private final String serverKeytab;
private final boolean isMetastoreRemote;
private MiniClusterType miniClusterType = MiniClusterType.DFS_ONLY;

public enum MiniClusterType {
MR,
TEZ,
DFS_ONLY;
}

public static class Builder {
private HiveConf hiveConf = new HiveConf();
private boolean useMiniMR = false;
private MiniClusterType miniClusterType = MiniClusterType.DFS_ONLY;
private boolean useMiniKdc = false;
private String serverPrincipal;
private String serverKeytab;
@@ -78,7 +84,7 @@ public Builder() {
}

public Builder withMiniMR() {
this.useMiniMR = true;
this.miniClusterType = MiniClusterType.MR;
return this;
}

@@ -110,15 +116,15 @@ public Builder withHTTPTransport(){


public MiniHS2 build() throws Exception {
if (useMiniMR && useMiniKdc) {
if (miniClusterType == MiniClusterType.MR && useMiniKdc) {
throw new IOException("Can't create secure miniMr ... yet");
}
if (isHTTPTransMode) {
hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_HTTP_MODE);
} else {
hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HS2_BINARY_MODE);
}
return new MiniHS2(hiveConf, useMiniMR, useMiniKdc, serverPrincipal, serverKeytab,
return new MiniHS2(hiveConf, miniClusterType, useMiniKdc, serverPrincipal, serverKeytab,
isMetastoreRemote);
}
}
@@ -143,38 +149,51 @@ public FileSystem getLocalFS() {
return localFS;
}

public boolean isUseMiniMR() {
return useMiniMR;
public MiniClusterType getMiniClusterType() {
return miniClusterType;
}

public void setUseMiniMR(boolean useMiniMR) {
this.useMiniMR = useMiniMR;
public void setMiniClusterType(MiniClusterType miniClusterType) {
this.miniClusterType = miniClusterType;
}

public boolean isUseMiniKdc() {
return useMiniKdc;
}

private MiniHS2(HiveConf hiveConf, boolean useMiniMR, boolean useMiniKdc,
private MiniHS2(HiveConf hiveConf, MiniClusterType miniClusterType, boolean useMiniKdc,
String serverPrincipal, String serverKeytab, boolean isMetastoreRemote) throws Exception {
super(hiveConf, "localhost", MetaStoreUtils.findFreePort(), MetaStoreUtils.findFreePort());
this.useMiniMR = useMiniMR;
this.miniClusterType = miniClusterType;
this.useMiniKdc = useMiniKdc;
this.serverPrincipal = serverPrincipal;
this.serverKeytab = serverKeytab;
this.isMetastoreRemote = isMetastoreRemote;
baseDir = Files.createTempDir();
localFS = FileSystem.getLocal(hiveConf);
FileSystem fs;
if (useMiniMR) {

if (miniClusterType != MiniClusterType.DFS_ONLY) {
// Initialize dfs
dfs = ShimLoader.getHadoopShims().getMiniDfs(hiveConf, 4, true, null);
fs = dfs.getFileSystem();
mr = ShimLoader.getHadoopShims().getMiniMrCluster(hiveConf, 4,
fs.getUri().toString(), 1);
String uriString = WindowsPathUtil.getHdfsUriString(fs.getUri().toString());

// Initialize the execution engine based on cluster type
switch (miniClusterType) {
case TEZ:
mr = ShimLoader.getHadoopShims().getMiniTezCluster(hiveConf, 4, uriString, 1);
break;
case MR:
mr = ShimLoader.getHadoopShims().getMiniMrCluster(hiveConf, 4, uriString, 1);
break;
default:
throw new IllegalArgumentException("Unsupported cluster type " + mr);
}
// store the config in system properties
mr.setupConfiguration(getHiveConf());
baseDfsDir = new Path(new Path(fs.getUri()), "/base");
} else {
// This is DFS only mode, just initialize the dfs root directory.
fs = FileSystem.getLocal(hiveConf);
baseDfsDir = new Path("file://"+ baseDir.toURI().getPath());
}
@@ -213,11 +232,11 @@ private MiniHS2(HiveConf hiveConf, boolean useMiniMR, boolean useMiniKdc,
}

public MiniHS2(HiveConf hiveConf) throws Exception {
this(hiveConf, false);
this(hiveConf, MiniClusterType.DFS_ONLY);
}

public MiniHS2(HiveConf hiveConf, boolean useMiniMR) throws Exception {
this(hiveConf, useMiniMR, false, null, null, false);
public MiniHS2(HiveConf hiveConf, MiniClusterType clusterType) throws Exception {
this(hiveConf, clusterType, false, null, null, false);
}

public void start(Map<String, String> confOverlay) throws Exception {
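
Taken together, the MiniHS2 changes above replace the boolean useMiniMR flag with the MiniClusterType enum, so a test can now ask for a Tez-backed mini cluster directly. A rough usage sketch follows, using only the two-argument constructor, setConfProperty, and start(confOverlay) calls that appear in this diff; the stop() call and the raw property key (the same PERFORMANCE level as in the sketch above) are assumptions.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.jdbc.miniHS2.MiniHS2;
import org.apache.hive.jdbc.miniHS2.MiniHS2.MiniClusterType;

public class MiniTezHS2Sketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();

    // New two-argument constructor: MiniClusterType replaces the old boolean
    // useMiniMR flag; TEZ brings up a mini DFS plus a mini Tez cluster.
    MiniHS2 miniHS2 = new MiniHS2(conf, MiniClusterType.TEZ);

    // Property key copied from the commit title; the matching ConfVars
    // constant is not shown in this diff, so the raw key is an assumption.
    miniHS2.setConfProperty("hive.server2.logging.level", "PERFORMANCE");

    Map<String, String> confOverlay = new HashMap<String, String>();
    miniHS2.start(confOverlay);
    try {
      // ... connect over JDBC, run a query, and check the operation log
      // for the Tez summary ...
    } finally {
      miniHS2.stop();   // assumed shutdown call, mirroring other MiniHS2 tests
    }
  }
}

Existing callers are unaffected: Builder.withMiniMR() now simply sets MiniClusterType.MR internally, and the single-argument MiniHS2(HiveConf) constructor defaults to DFS_ONLY.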
@@ -35,6 +35,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.jdbc.miniHS2.MiniHS2;
import org.apache.hive.jdbc.miniHS2.MiniHS2.MiniClusterType;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.session.HiveSessionHook;
import org.apache.hive.service.cli.session.HiveSessionHookContext;
@@ -82,7 +83,7 @@ public static void beforeTest() throws Exception {
dataFilePath = new Path(dataFileDir, "kv1.txt");
DriverManager.setLoginTimeout(0);
conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
miniHS2 = new MiniHS2(conf, true);
miniHS2 = new MiniHS2(conf, MiniClusterType.MR);
Map<String, String> overlayProps = new HashMap<String, String>();
overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
LocalClusterSparkSessionHook.class.getName());
@@ -37,6 +37,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.jdbc.miniHS2.MiniHS2;
import org.apache.hive.jdbc.miniHS2.MiniHS2.MiniClusterType;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.session.HiveSessionHook;
import org.apache.hive.service.cli.session.HiveSessionHookContext;
@@ -73,7 +74,7 @@ public static void beforeTest() throws Exception {
dataFilePath = new Path(dataFileDir, "kv1.txt");
DriverManager.setLoginTimeout(0);
conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
miniHS2 = new MiniHS2(conf, true);
miniHS2 = new MiniHS2(conf, MiniClusterType.MR);
Map<String, String> overlayProps = new HashMap<String, String>();
overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
MiniMrTestSessionHook.class.getName());
@@ -38,6 +38,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.jdbc.miniHS2.MiniHS2;
import org.apache.hive.jdbc.miniHS2.MiniHS2.MiniClusterType;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.session.HiveSessionHook;
import org.apache.hive.service.cli.session.HiveSessionHookContext;
@@ -91,7 +92,7 @@ public static void beforeTest() throws Exception {
dataFilePath = new Path(dataFileDir, "kv1.txt");
DriverManager.setLoginTimeout(0);
conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
miniHS2 = new MiniHS2(conf, true);
miniHS2 = new MiniHS2(conf, MiniClusterType.MR);
Map<String, String> overlayProps = new HashMap<String, String>();
overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
LocalClusterSparkSessionHook.class.getName());
(The remaining changed files were not loaded in this view.)
