DRILL-6540: Upgrade to HADOOP-3.0.3 libraries
- accommodate Apache and MapR profiles with Hadoop 3.0 libraries
- update HBase version
- fix jdbc-all woodstox dependency
- unban Apache commons-logging dependency
vdiravka committed Apr 5, 2019
1 parent e477480 commit ff49f90
Showing 11 changed files with 223 additions and 115 deletions.
6 changes: 6 additions & 0 deletions drill-yarn/pom.xml
@@ -83,6 +83,12 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-client</artifactId>
<scope>compile</scope>
+<exclusions>
+<exclusion>
+<artifactId>slf4j-log4j12</artifactId>
+<groupId>org.slf4j</groupId>
+</exclusion>
+</exclusions>
</dependency>

<!-- For ZK monitoring -->
71 changes: 71 additions & 0 deletions exec/java-exec/pom.xml
@@ -387,6 +387,26 @@
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
</exclusion>
+<exclusion>
+<groupId>org.eclipse.jetty</groupId>
+<artifactId>jetty-server</artifactId>
+</exclusion>
+<exclusion>
+<groupId>org.eclipse.jetty</groupId>
+<artifactId>jetty-servlet</artifactId>
+</exclusion>
+<exclusion>
+<groupId>org.eclipse.jetty</groupId>
+<artifactId>jetty-servlets</artifactId>
+</exclusion>
+<exclusion>
+<groupId>org.eclipse.jetty</groupId>
+<artifactId>jetty-security</artifactId>
+</exclusion>
+<exclusion>
+<groupId>org.eclipse.jetty</groupId>
+<artifactId>jetty-util</artifactId>
+</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -425,6 +445,57 @@
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</exclusion>
+<!---->
+<!--<exclusion>-->
+<!--<groupId>com.sun.jersey</groupId>-->
+<!--<artifactId>jersey-core</artifactId>-->
+<!--</exclusion>-->
+<!--<exclusion>-->
+<!--<groupId>com.sun.jersey</groupId>-->
+<!--<artifactId>jersey-server</artifactId>-->
+<!--</exclusion>-->
+<!--<exclusion>-->
+<!--<groupId>com.sun.jersey</groupId>-->
+<!--<artifactId>jersey-json</artifactId>-->
+<!--</exclusion>-->
+<!---->
</exclusions>
</dependency>
+<dependency>
+<groupId>org.apache.hadoop</groupId>
+<artifactId>hadoop-hdfs</artifactId>
+<scope>test</scope>
+<exclusions>
+<exclusion>
+<groupId>io.netty</groupId>
+<artifactId>netty</artifactId>
+</exclusion>
+<exclusion>
+<groupId>io.netty</groupId>
+<artifactId>netty-all</artifactId>
+</exclusion>
+<exclusion>
+<groupId>commons-codec</groupId>
+<artifactId>commons-codec</artifactId>
+</exclusion>
+<!---->
+<exclusion>
+<groupId>com.sun.jersey</groupId>
+<artifactId>jersey-core</artifactId>
+</exclusion>
+<exclusion>
+<groupId>com.sun.jersey</groupId>
+<artifactId>jersey-server</artifactId>
+</exclusion>
+<exclusion>
+<groupId>com.sun.jersey</groupId>
+<artifactId>jersey-json</artifactId>
+</exclusion>
+<!---->
+<exclusion>
+<groupId>log4j</groupId>
+<artifactId>log4j</artifactId>
+</exclusion>
+</exclusions>
+</dependency>
<dependency>
FileMetadataManager.java
@@ -21,7 +21,6 @@
import java.util.List;
import java.util.Map;

-import org.apache.directory.api.util.Strings;
import org.apache.drill.common.map.CaseInsensitiveMap;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.physical.impl.scan.project.ColumnProjection;
@@ -37,6 +36,7 @@
import org.apache.drill.exec.server.options.OptionSet;
import org.apache.drill.exec.store.ColumnExplorer.ImplicitFileColumns;
import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.shaded.guava.com.google.common.base.Strings;
import org.apache.hadoop.fs.Path;

import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
@@ -58,7 +58,7 @@
* On each file (on each reader), the columns are "resolved." Here, that means
* that the columns are filled in with actual values based on the present file.
* <p>
-* This is the successor to {@link ColumnExplorer}.
+* This is the successor to {@link org.apache.drill.exec.store.ColumnExplorer}.
*/

public class FileMetadataManager implements MetadataManager, ReaderProjectionResolver, VectorSource {
@@ -167,8 +167,6 @@ public void useLegacyExpansionLocation(boolean flag) {
* one file, rather than a directory
* @param files the set of files to scan. Used to compute the maximum partition
* depth across all readers in this fragment
-*
-* @return this builder
*/

public FileMetadataManager(OptionSet optionManager,
@@ -178,7 +176,7 @@ public FileMetadataManager(OptionSet optionManager,
partitionDesignator = optionManager.getString(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL);
for (ImplicitFileColumns e : ImplicitFileColumns.values()) {
String colName = optionManager.getString(e.optionName());
-if (! Strings.isEmpty(colName)) {
+if (!Strings.isNullOrEmpty(colName)) {
FileMetadataColumnDefn defn = new FileMetadataColumnDefn(colName, e);
implicitColDefns.add(defn);
fileMetadataColIndex.put(defn.colName, defn);
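
The Strings swap above replaces org.apache.directory.api.util.Strings with Drill's shaded Guava, presumably to stop leaning on a library that is only present transitively. A minimal sketch (illustration only, not part of the commit) showing the two checks agree on the cases this code hits:

import org.apache.drill.shaded.guava.com.google.common.base.Strings;

public class StringsSwapSketch {
  public static void main(String[] args) {
    // Guava's Strings.isNullOrEmpty covers null and "" exactly like the
    // removed org.apache.directory.api.util.Strings.isEmpty, so the swap
    // is behavior-preserving for the option-value check above.
    System.out.println(Strings.isNullOrEmpty(null));  // true
    System.out.println(Strings.isNullOrEmpty(""));    // true
    System.out.println(Strings.isNullOrEmpty("dir")); // false
  }
}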
LocalSyncableFileSystem.java
@@ -65,7 +65,7 @@ public FSDataInputStream open(Path path, int i) throws IOException {

@Override
public FSDataOutputStream create(Path path, FsPermission fsPermission, boolean b, int i, short i2, long l, Progressable progressable) throws IOException {
-return new FSDataOutputStream(new LocalSyncableOutputStream(path));
+return new FSDataOutputStream(new LocalSyncableOutputStream(path), new Statistics(path.toUri().getScheme()));
}

@Override
@@ -141,7 +141,7 @@ public LocalSyncableOutputStream(Path path) throws FileNotFoundException {
output = new BufferedOutputStream(fos, 64*1024);
}

-@Override
+// TODO: remove it after upgrade MapR profile onto hadoop.version 3.1
public void sync() throws IOException {
output.flush();
fos.getFD().sync();
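
Hadoop 3 removes the long-deprecated Syncable.sync(), which is why the @Override above is dropped and later hunks switch callers from sync() to hsync(). A minimal sketch of the replacement contract (path and class names are assumed stand-ins):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HsyncSketch {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    try (FSDataOutputStream out = fs.create(new Path("/tmp/hsync-sketch"))) {
      out.write("hello world".getBytes());
      out.hflush(); // flush so new readers can see the data
      out.hsync();  // flush to durable storage, akin to POSIX fsync
    }
  }
}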
TestImpersonationMetadata.java
@@ -268,17 +268,20 @@ private static void testCreateViewTestHelper(String user, String viewSchema,
@Test
public void testCreateViewInWSWithNoPermissionsForQueryUser() throws Exception {
// Workspace dir owned by "processUser", workspace group is "group0" and "user2" is not part of "group0"
-final String viewSchema = MINI_DFS_STORAGE_PLUGIN_NAME + ".drill_test_grp_0_755";
+final String tableWS = "drill_test_grp_0_755";
+final String viewSchema = MINI_DFS_STORAGE_PLUGIN_NAME + "." + tableWS;
final String viewName = "view1";

updateClient(user2);

test("USE " + viewSchema);

-final String query = "CREATE VIEW " + viewName + " AS SELECT " +
-"c_custkey, c_nationkey FROM cp.`tpch/customer.parquet` ORDER BY c_custkey;";
-final String expErrorMsg = "PERMISSION ERROR: Permission denied: user=drillTestUser2, access=WRITE, inode=\"/drill_test_grp_0_755";
-errorMsgTestHelper(query, expErrorMsg);
+String expErrorMsg = "PERMISSION ERROR: Permission denied: user=drillTestUser2, access=WRITE, inode=\"/" + tableWS;
+thrown.expect(UserRemoteException.class);
+thrown.expectMessage(containsString(expErrorMsg));
+
+test("CREATE VIEW %s AS" +
+" SELECT c_custkey, c_nationkey FROM cp.`tpch/customer.parquet` ORDER BY c_custkey", viewName);

// SHOW TABLES is expected to return no records as view creation fails above.
testBuilder()
@@ -348,7 +351,7 @@ public void testCreateTableInWSWithNoPermissionsForQueryUser() throws Exception

thrown.expect(UserRemoteException.class);
thrown.expectMessage(containsString("Permission denied: user=drillTestUser2, " +
-"access=WRITE, inode=\"/drill_test_grp_0_755"));
+"access=WRITE, inode=\"/" + tableWS));

test("CREATE TABLE %s AS SELECT c_custkey, c_nationkey " +
"FROM cp.`tpch/customer.parquet` ORDER BY c_custkey", tableName);
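
The test above moves from the errorMsgTestHelper utility to JUnit 4's ExpectedException rule. A self-contained sketch of that pattern (class name, exception type, and message are assumed stand-ins, not the test's real values):

import static org.hamcrest.CoreMatchers.containsString;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class ExpectedExceptionSketch {
  @Rule
  public ExpectedException thrown = ExpectedException.none();

  @Test
  public void failsWithPermissionError() {
    // Register the expected type and message fragment before the action.
    thrown.expect(IllegalStateException.class);
    thrown.expectMessage(containsString("access=WRITE"));
    // Stand-in for the failing CREATE VIEW / CREATE TABLE call.
    throw new IllegalStateException("Permission denied: access=WRITE");
  }
}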
(file name not captured)
@@ -20,7 +20,6 @@
import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
import org.apache.calcite.rel.core.JoinRelType;
-import org.apache.directory.api.util.Strings;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.expression.LogicalExpression;
@@ -328,7 +327,7 @@ public void testProjectVariableWidthImpl(boolean transfer, int columnCount, Stri
expr[i * 2] = "lower(" + baselineColumns[i] + ")";
expr[i * 2 + 1] = baselineColumns[i];
}
-baselineValues[i] = (transfer ? testString : Strings.lowerCase(testString));
+baselineValues[i] = (transfer ? testString : testString.toLowerCase());
}
jsonRow.append("}");
StringBuilder batchString = new StringBuilder("[");
@@ -385,7 +384,7 @@ public void testProjectVariableWidthMixed() throws Exception {
expr[i * 2] = "lower(" + baselineColumns[i] + ")";
expr[i * 2 + 1] = baselineColumns[i];

-baselineValues[i] = Strings.lowerCase(testString);
+baselineValues[i] = testString.toLowerCase();
}


(file name not captured)
@@ -43,7 +43,7 @@ public static void main(String[] args) throws IOException {
FSDataOutputStream out = fs.create(path);
byte[] s = "hello world".getBytes();
out.write(s);
-out.sync();
+out.hsync();
FSDataInputStream in = fs.open(path);
byte[] bytes = new byte[s.length];
in.read(bytes);
@@ -60,7 +60,7 @@ public static void main(String[] args) throws IOException {
bytes = new byte[256*1024];
Stopwatch watch = Stopwatch.createStarted();
out.write(bytes);
-out.sync();
+out.hsync();
long t = watch.elapsed(TimeUnit.MILLISECONDS);
logger.info(String.format("Elapsed: %d. Rate %d.\n", t, (long) ((long) bytes.length * 1000L / t)));
}
5 changes: 4 additions & 1 deletion exec/jdbc-all/pom.xml
@@ -249,6 +249,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
+<version>3.0.0-M3</version>
<executions>
<execution>
<goals>
@@ -341,6 +342,7 @@
<exclude>commons-beanutils:commons-beanutils-core:jar:*</exclude>
<exclude>commons-beanutils:commons-beanutils:jar:*</exclude>
<exclude>io.netty:netty-tcnative:jar:*</exclude>
+<exclude>com.fasterxml.woodstox:woodstox-core:jar:*</exclude>
</excludes>
</artifactSet>
<relocations>
@@ -403,6 +405,7 @@
<relocation><pattern>org.apache.xpath.</pattern><shadedPattern>oadd.org.apache.xpath.</shadedPattern></relocation>
<relocation><pattern>org.apache.zookeeper.</pattern><shadedPattern>oadd.org.apache.zookeeper.</shadedPattern></relocation>
<relocation><pattern>org.apache.hadoop.</pattern><shadedPattern>oadd.org.apache.hadoop.</shadedPattern></relocation>
+<relocation><pattern>com.fasterxml.woodstox.</pattern><shadedPattern>oadd.com.fasterxml.woodstox.</shadedPattern></relocation>
</relocations>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
@@ -519,7 +522,7 @@
This is likely due to you adding new dependencies to a java-exec and not updating the excludes in this module. This is important as it minimizes the size of the dependency of Drill application users.

</message>
-<maxsize>41000000</maxsize>
+<maxsize>42600000</maxsize>
<minsize>15000000</minsize>
<files>
<file>${project.build.directory}/drill-jdbc-all-${project.version}.jar</file>
DrillbitClassLoader.java
@@ -26,16 +26,16 @@

public class DrillbitClassLoader extends URLClassLoader {

-public DrillbitClassLoader() {
+DrillbitClassLoader() {
super(URLS);
}

private static final URL[] URLS;

static {
-ArrayList<URL> urlList = new ArrayList<URL>();
+ArrayList<URL> urlList = new ArrayList<>();
final String classPath = System.getProperty("app.class.path");
-final String[] st = fracture(classPath, File.pathSeparator);
+final String[] st = fracture(classPath);
final int l = st.length;
for (int i = 0; i < l; i++) {
try {
@@ -49,10 +49,7 @@ public DrillbitClassLoader() {
}
urlList.toArray(new URL[urlList.size()]);

-List<URL> urls = new ArrayList<>();
-for (URL url : urlList) {
-urls.add(url);
-}
+List<URL> urls = new ArrayList<>(urlList);
URLS = urls.toArray(new URL[urls.size()]);
}

@@ -61,21 +58,21 @@ public DrillbitClassLoader() {
*
* Taken from Apache Harmony
*/
-private static String[] fracture(String str, String sep) {
+private static String[] fracture(String str) {
if (str.length() == 0) {
return new String[0];
}
-ArrayList<String> res = new ArrayList<String>();
+ArrayList<String> res = new ArrayList<>();
int in = 0;
int curPos = 0;
-int i = str.indexOf(sep);
-int len = sep.length();
+int i = str.indexOf(File.pathSeparator);
+int len = File.pathSeparator.length();
while (i != -1) {
String s = str.substring(curPos, i);
res.add(s);
in++;
curPos = i + len;
-i = str.indexOf(sep, curPos);
+i = str.indexOf(File.pathSeparator, curPos);
}

len = str.length();
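
fracture() now hardcodes File.pathSeparator rather than taking the separator as a parameter. For illustration only (not part of the commit), the same tokenization of a non-empty classpath can be had from String.split, keeping trailing empty segments the way fracture() does:

import java.io.File;
import java.util.regex.Pattern;

public class FractureSketch {
  public static void main(String[] args) {
    String classPath = "a.jar" + File.pathSeparator + "b.jar" + File.pathSeparator;
    // Quoting the separator is safe on any platform; limit -1 keeps the
    // trailing empty segment, matching fracture()'s behavior.
    String[] parts = classPath.split(Pattern.quote(File.pathSeparator), -1);
    System.out.println(parts.length); // 3: "a.jar", "b.jar", ""
  }
}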
ITTestShadedJar.java
@@ -98,6 +98,7 @@ protected void failed(Throwable e, Description description) {
super.failed(e, description);
done();
runMethod("failed", description);
+logger.error("Check whether this test was running within 'integration-test' Maven phase");
}

private void done() {
@@ -216,8 +217,8 @@

private static void runWithLoader(String name, ClassLoader loader) throws Exception {
Class<?> clazz = loader.loadClass(ITTestShadedJar.class.getName() + "$" + name);
-Object o = clazz.getDeclaredConstructors()[0].newInstance(loader);
-clazz.getMethod("go").invoke(o);
+Object instance = clazz.getDeclaredConstructors()[0].newInstance(loader);
+clazz.getMethod("go").invoke(instance);
}

public abstract static class AbstractLoaderThread extends Thread {
@@ -264,7 +265,7 @@ protected void internalRun() throws Exception {
// loader.loadClass("org.apache.drill.exec.exception.SchemaChangeException");

// execute a single query to make sure the drillbit is fully up
clazz.getMethod("testNoResult", String.class, new Object[] {}.getClass())
clazz.getMethod("testNoResult", String.class, Object[].class)
.invoke(null, "select * from (VALUES 1)", new Object[] {});

SEM.release();
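
The reflection cleanups above are behavior-preserving; a quick check (hypothetical, not in the commit) that the array-class literal and getClass() on an empty array name the same Class object:

public class ArrayClassSketch {
  public static void main(String[] args) {
    // Both expressions denote java.lang.Object[], so getMethod resolves
    // the same testNoResult(String, Object[]) method with either form.
    System.out.println(new Object[] {}.getClass() == Object[].class); // true
  }
}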