Skip to content

Commit

Permalink
Merge a3e821b into e509783
Browse files Browse the repository at this point in the history
  • Loading branch information
nvitucci committed Feb 17, 2021
2 parents e509783 + a3e821b commit 0f3612e
Show file tree
Hide file tree
Showing 10 changed files with 108 additions and 30 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,7 @@
## [5.0.0] - TBD
### Changed
- Hive version updated to `3.1.2` (was `2.3.7`) and Hadoop version updated to `3.1.0` (was `2.7.2`).

## [4.0.0] - 2021-02-09
### Fixed
- Intermediate temporary folders now cleaned up as part of test lifecycle.
Expand Down
27 changes: 24 additions & 3 deletions pom.xml
Expand Up @@ -9,7 +9,7 @@
</parent>

<artifactId>beeju</artifactId>
<version>4.0.1-SNAPSHOT</version>
<version>5.0.0-SNAPSHOT</version>

<scm>
<connection>scm:git:https://${GIT_USERNAME}:${GIT_PASSWORD}@github.com/HotelsDotCom/beeju.git</connection>
Expand All @@ -19,8 +19,9 @@
</scm>

<properties>
<hadoop.version>2.7.2</hadoop.version>
<hive.version>2.3.8</hive.version>
<hadoop.version>3.1.0</hadoop.version>
<hive.version>3.1.2</hive.version>
<tez.version>0.9.1</tez.version>
<jdk.version>1.8</jdk.version>
<junit.jupiter.version>5.7.0</junit.jupiter.version>
<junit.platform.version>1.3.2</junit.platform.version>
Expand Down Expand Up @@ -101,6 +102,16 @@
<artifactId>hive-jdbc</artifactId>
<version>${hive.version}</version>
</dependency>
<dependency>
<groupId>org.apache.tez</groupId>
<artifactId>tez-common</artifactId>
<version>${tez.version}</version>
</dependency>
<dependency>
<groupId>org.apache.tez</groupId>
<artifactId>tez-dag</artifactId>
<version>${tez.version}</version>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest</artifactId>
Expand Down Expand Up @@ -182,6 +193,16 @@
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<configuration>
<excludes>
<!-- If this class is not excluded, Jacoco throws a MethodTooLargeException when instrumenting it -->
<exclude>org/apache/hadoop/hive/ql/parse/HiveParser</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>

Expand Down
3 changes: 3 additions & 0 deletions src/main/java/com/hotels/beeju/HiveMetaStoreJUnitRule.java
Expand Up @@ -15,6 +15,8 @@
*/
package com.hotels.beeju;

import static org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars.CONNECT_URL_KEY;

import java.util.Map;
import java.util.concurrent.ExecutionException;

Expand Down Expand Up @@ -80,6 +82,7 @@ public HiveMetaStoreJUnitRule(

@Override
public void starting(Description description) {
System.clearProperty(CONNECT_URL_KEY.getVarname());
super.starting(description);
try {
hiveMetaStoreCore.initialise();
Expand Down
3 changes: 3 additions & 0 deletions src/main/java/com/hotels/beeju/HiveServer2JUnitRule.java
Expand Up @@ -15,6 +15,8 @@
*/
package com.hotels.beeju;

import static org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars.CONNECT_URL_KEY;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Map;
Expand Down Expand Up @@ -67,6 +69,7 @@ public HiveServer2JUnitRule(String databaseName, Map<String, String> configurati

@Override
public void starting(Description description) {
System.clearProperty(CONNECT_URL_KEY.getVarname());
try {
hiveServer2Core.startServerSocket();
} catch (IOException e) {
Expand Down
Expand Up @@ -15,6 +15,8 @@
*/
package com.hotels.beeju;

import static org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars.CONNECT_URL_KEY;

import java.util.Map;

import org.junit.runner.Description;
Expand Down Expand Up @@ -76,6 +78,7 @@ public ThriftHiveMetaStoreJUnitRule(

@Override
public void starting(Description description) {
System.clearProperty(CONNECT_URL_KEY.getVarname());
try {
thriftHiveMetaStoreCore.initialise();
} catch (Exception e) {
Expand Down
68 changes: 53 additions & 15 deletions src/main/java/com/hotels/beeju/core/BeejuCore.java
Expand Up @@ -20,6 +20,7 @@
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.ServerSocket;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
Expand All @@ -28,6 +29,7 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

import org.apache.commons.io.FileUtils;
import org.apache.derby.jdbc.EmbeddedDriver;
Expand All @@ -37,6 +39,7 @@
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down Expand Up @@ -91,22 +94,56 @@ public BeejuCore(String databaseName, Map<String, String> preConfiguration, Map<
configure(preConfiguration);

configureFolders();

configureMetastore();

configureMisc();

configure(postConfiguration);
}

// Applies miscellaneous HiveServer2/metastore settings needed to run Hive embedded in tests
// (Web UI port, stats gathering, operation logging, notification events, materialized views).
// NOTE(review): this text comes from a diff with +/- markers stripped; some adjacent
// duplicate lines below look like removed/added pairs — confirm against the repository.
private void configureMisc() {
int webUIPort = getWebUIPort();

// override default port as some of our test environments claim it is in use.
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_PORT, 0);
// NOTE(review): the line above (hard-coded 0) appears to be the removed diff line and the
// line below (free port from getWebUIPort()) the added one — presumably only one survives.
conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_PORT, webUIPort);

// Avoid automatic statistics gathering; not needed for test workloads.
conf.setBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER, false);

// Disable to get rid of clean up exception when stopping the Session.
conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED, false);

// Used to prevent "Not authorized to make the get_current_notificationEventId call" errors
setMetastoreAndSystemProperty(MetastoreConf.ConfVars.EVENT_DB_NOTIFICATION_API_AUTH, "false");

// Used to prevent "Error polling for notification events" error
// (HIVE_NOTFICATION_EVENT_POLL_INTERVAL is the constant's actual — misspelled — name in Hive.)
conf.setTimeVar(HiveConf.ConfVars.HIVE_NOTFICATION_EVENT_POLL_INTERVAL, 0, TimeUnit.MILLISECONDS);

// Has to be added to exclude failures related to the HiveMaterializedViewsRegistry
// Set both on the conf and as a system property so code reading either sees "DUMMY".
conf.set(HiveConf.ConfVars.HIVE_SERVER2_MATERIALIZED_VIEWS_REGISTRY_IMPL.varname, "DUMMY");
System.setProperty(HiveConf.ConfVars.HIVE_SERVER2_MATERIALIZED_VIEWS_REGISTRY_IMPL.varname, "DUMMY");
}

/**
 * Sets a metastore configuration value in four places so that every lookup path agrees:
 * on the {@code conf} object under both the Metastore key name and its legacy Hive alias,
 * and as JVM system properties under both names (MetastoreConf consults system properties).
 *
 * @param key metastore configuration variable to set (provides both key names)
 * @param value value to associate with both names
 */
private void setMetastoreAndSystemProperty(MetastoreConf.ConfVars key, String value) {
conf.set(key.getVarname(), value);
conf.set(key.getHiveName(), value);

System.setProperty(key.getVarname(), value);
System.setProperty(key.getHiveName(), value);
}

/**
 * Finds a free local port for the HiveServer2 Web UI by opening and immediately closing an
 * ephemeral {@link ServerSocket}. Note the returned port is only probably free — another
 * process could claim it between the probe and HiveServer2 binding it.
 *
 * @return a free port number, or {@code 0} (which disables the Web UI) if none could be probed
 */
private int getWebUIPort() {
// Try to find a free port, if impossible return the default port 0 which disables the WebUI altogether
int defaultPort = 0;

try (ServerSocket socket = new ServerSocket(0)) {
return socket.getLocalPort();
} catch (IOException e) {
log.info(
"No free port available for the Web UI. Setting the port to " + defaultPort + ", which disables the WebUI.",
e);
return defaultPort;
}
}

private void configureFolders() {
Expand All @@ -115,28 +152,30 @@ private void configureFolders() {
createAndSetFolderProperty(HiveConf.ConfVars.SCRATCHDIR, "scratchdir");
createAndSetFolderProperty(HiveConf.ConfVars.LOCALSCRATCHDIR, "localscratchdir");
createAndSetFolderProperty(HiveConf.ConfVars.HIVEHISTORYFILELOC, "hive-history");

createDerbyPaths();
createWarehousePath();
} catch (IOException e) {
throw new UncheckedIOException("Error creating temporary folders", e);
}
}

// Configures an in-memory Derby database as the Hive metastore backing store: unique JDBC URL
// per instance (UUID) so parallel tests don't collide, embedded driver, fixed credentials,
// no connection pooling, and schema auto-creation without verification.
// NOTE(review): diff residue — this span interleaves removed HiveConf.setVar lines with the
// added setMetastoreAndSystemProperty lines; confirm which set is current in the repository.
private void configureMetastore() {
driverClassName = EmbeddedDriver.class.getName();
// Disable the HCatalog client cache so each test gets a fresh client.
conf.setBoolean("hcatalog.hive.client.cache.disabled", true);
connectionURL = "jdbc:derby:memory:" + UUID.randomUUID() + ";create=true";
// NOTE(review): the four conf.setVar lines below look like the removed (pre-change) diff
// lines; the setMetastoreAndSystemProperty calls after the blank line are the added ones.
conf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, connectionURL);
conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_DRIVER, driverClassName);
conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_USER_NAME, METASTORE_DB_USER);
conf.setVar(HiveConf.ConfVars.METASTOREPWD, METASTORE_DB_PASSWORD);

setMetastoreAndSystemProperty(MetastoreConf.ConfVars.CONNECT_URL_KEY, connectionURL);
setMetastoreAndSystemProperty(MetastoreConf.ConfVars.CONNECTION_DRIVER, driverClassName);
setMetastoreAndSystemProperty(MetastoreConf.ConfVars.CONNECTION_USER_NAME, METASTORE_DB_USER);
setMetastoreAndSystemProperty(MetastoreConf.ConfVars.PWD, METASTORE_DB_PASSWORD);

// In-memory Derby cannot be pooled across tests; force direct connections.
conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_POOLING_TYPE, "NONE");
conf.setBoolVar(HiveConf.ConfVars.HMSHANDLERFORCERELOADCONF, true);

// Hive 2.x compatibility
// NOTE(review): same old/new residue pattern — conf.setBoolean lines vs the
// setMetastoreAndSystemProperty equivalents below them.
conf.setBoolean("datanucleus.schema.autoCreateAll", true);
conf.setBoolean("hive.metastore.schema.verification", false);
setMetastoreAndSystemProperty(MetastoreConf.ConfVars.AUTO_CREATE_ALL, "true");
setMetastoreAndSystemProperty(MetastoreConf.ConfVars.SCHEMA_VERIFICATION, "false");
}

private void createAndSetFolderProperty(HiveConf.ConfVars var, String childFolderName) throws IOException {
Expand Down Expand Up @@ -215,7 +254,7 @@ public void createDatabase(String databaseName) throws TException {

/**
* @return a copy of the {@link HiveConf} used to create the Hive Metastore database. This {@link HiveConf} should be
* used by tests wishing to connect to the database.
* used by tests wishing to connect to the database.
*/
public HiveConf conf() {
return new HiveConf(conf);
Expand Down Expand Up @@ -265,5 +304,4 @@ public HiveMetaStoreClient newClient() {
throw new RuntimeException("Unable to create HiveMetaStoreClient", e);
}
}

}
19 changes: 8 additions & 11 deletions src/main/java/com/hotels/beeju/core/ThriftHiveMetaStoreCore.java
Expand Up @@ -27,8 +27,8 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStore;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge23;
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge23;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -57,15 +57,12 @@ public void initialise() throws Exception {
}
beejuCore.setHiveVar(HiveConf.ConfVars.METASTOREURIS, getThriftConnectionUri());
final HiveConf hiveConf = new HiveConf(beejuCore.conf(), HiveMetaStoreClient.class);
thriftServer.execute(new Runnable() {
@Override
public void run() {
try {
HadoopThriftAuthBridge bridge = new HadoopThriftAuthBridge23();
HiveMetaStore.startMetaStore(thriftPort, bridge, hiveConf, startLock, startCondition, startedServing);
} catch (Throwable e) {
LOG.error("Unable to start a Thrift server for Hive Metastore", e);
}
thriftServer.execute(() -> {
try {
HadoopThriftAuthBridge bridge = HadoopThriftAuthBridge23.getBridge();
HiveMetaStore.startMetaStore(thriftPort, bridge, hiveConf, startLock, startCondition, startedServing);
} catch (Throwable e) {
LOG.error("Unable to start a Thrift server for Hive Metastore", e);
}
});
int i = 0;
Expand Down
Expand Up @@ -15,6 +15,8 @@
*/
package com.hotels.beeju.extensions;

import static org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars.CONNECT_URL_KEY;

import java.util.Map;

import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
Expand Down Expand Up @@ -61,6 +63,7 @@ public HiveMetaStoreJUnitExtension(String databaseName, Map<String, String> conf

/**
 * JUnit 5 lifecycle hook run before each test: clears any stale metastore connection-URL
 * system property left by a previous test (so this instance's fresh Derby URL is used),
 * runs the superclass setup, then initialises the metastore core.
 *
 * @param context the current extension context supplied by JUnit
 * @throws Exception if superclass setup or metastore initialisation fails
 */
@Override
public void beforeEach(ExtensionContext context) throws Exception {
System.clearProperty(CONNECT_URL_KEY.getVarname());
super.beforeEach(context);
hiveMetaStoreCore.initialise();
}
Expand Down
Expand Up @@ -15,6 +15,8 @@
*/
package com.hotels.beeju.extensions;

import static org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars.CONNECT_URL_KEY;

import java.util.Map;

import org.apache.hive.jdbc.HiveDriver;
Expand Down Expand Up @@ -55,6 +57,7 @@ public HiveServer2JUnitExtension(String databaseName, Map<String, String> config

@Override
public void beforeEach(ExtensionContext context) throws Exception {
System.clearProperty(CONNECT_URL_KEY.getVarname());
hiveServer2Core.startServerSocket();
super.beforeEach(context);
hiveServer2Core.initialise();
Expand Down
Expand Up @@ -15,6 +15,8 @@
*/
package com.hotels.beeju.extensions;

import static org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars.CONNECT_URL_KEY;

import java.util.Map;

import org.junit.jupiter.api.extension.ExtensionContext;
Expand Down Expand Up @@ -60,7 +62,8 @@ public ThriftHiveMetaStoreJUnitExtension(String databaseName, Map<String, String
}

@Override
public void beforeEach(ExtensionContext context) throws Exception{
public void beforeEach(ExtensionContext context) throws Exception {
System.clearProperty(CONNECT_URL_KEY.getVarname());
thriftHiveMetaStoreCore.initialise();
super.beforeEach(context);
}
Expand Down

0 comments on commit 0f3612e

Please sign in to comment.