Skip to content

Commit

Permalink
DRILL-5751: Fix unit tests to use local file system even if it is not set by default
Browse files Browse the repository at this point in the history

DRILL-5751: Changes after code review.

close #927
  • Loading branch information
arina-ielchiieva authored and Aman Sinha committed Sep 2, 2017
1 parent d1a6134 commit 75c3513
Show file tree
Hide file tree
Showing 19 changed files with 206 additions and 223 deletions.
Expand Up @@ -255,8 +255,8 @@ String getBinaryName() {
* @throws IOException in case of binary or source absence or problems during copying jars * @throws IOException in case of binary or source absence or problems during copying jars
*/ */
void initRemoteBackup() throws IOException { void initRemoteBackup() throws IOException {
fs.getFileStatus(stagingBinary); checkPathExistence(stagingBinary);
fs.getFileStatus(stagingSource); checkPathExistence(stagingSource);
fs.mkdirs(remoteTmpDir); fs.mkdirs(remoteTmpDir);
FileUtil.copy(fs, stagingBinary, fs, tmpRemoteBinary, false, true, fs.getConf()); FileUtil.copy(fs, stagingBinary, fs, tmpRemoteBinary, false, true, fs.getConf());
FileUtil.copy(fs, stagingSource, fs, tmpRemoteSource, false, true, fs.getConf()); FileUtil.copy(fs, stagingSource, fs, tmpRemoteSource, false, true, fs.getConf());
Expand Down Expand Up @@ -315,6 +315,19 @@ void cleanUp() {
deleteQuietly(remoteTmpDir, true); deleteQuietly(remoteTmpDir, true);
} }


/**
 * Verifies that the given path is present on the file system stored in {@code fs}.
 *
 * @param path path whose existence is verified
 * @throws IOException if the path is absent, or if the existence check itself fails
 */
private void checkPathExistence(Path path) throws IOException {
  if (fs.exists(path)) {
    return;
  }
  throw new IOException(String.format("File %s does not exist on file system %s",
      path.toUri().getPath(), fs.getUri()));
}

/** /**
* Deletes quietly file or directory, in case of errors, logs warning and proceeds. * Deletes quietly file or directory, in case of errors, logs warning and proceeds.
* *
Expand Down
Expand Up @@ -59,7 +59,6 @@
import org.apache.drill.exec.store.StoragePluginRegistry; import org.apache.drill.exec.store.StoragePluginRegistry;
import org.apache.drill.exec.util.TestUtilities; import org.apache.drill.exec.util.TestUtilities;
import org.apache.drill.exec.util.VectorUtil; import org.apache.drill.exec.util.VectorUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
Expand All @@ -83,7 +82,8 @@
public class BaseTestQuery extends ExecTest { public class BaseTestQuery extends ExecTest {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BaseTestQuery.class); private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BaseTestQuery.class);


public static final String TEMP_SCHEMA = "dfs_test.tmp"; public static final String TEST_SCHEMA = "dfs_test";
public static final String TEMP_SCHEMA = TEST_SCHEMA + ".tmp";


private static final int MAX_WIDTH_PER_NODE = 2; private static final int MAX_WIDTH_PER_NODE = 2;


Expand Down Expand Up @@ -138,9 +138,7 @@ public static void setupDefaultTestCluster() throws Exception {
// turns on the verbose errors in tests // turns on the verbose errors in tests
// sever side stacktraces are added to the message before sending back to the client // sever side stacktraces are added to the message before sending back to the client
test("ALTER SESSION SET `exec.errors.verbose` = true"); test("ALTER SESSION SET `exec.errors.verbose` = true");
Configuration conf = new Configuration(); fs = getLocalFileSystem();
conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);
fs = FileSystem.get(conf);
} }


protected static void updateTestCluster(int newDrillbitCount, DrillConfig newConfig) { protected static void updateTestCluster(int newDrillbitCount, DrillConfig newConfig) {
Expand Down
Expand Up @@ -23,6 +23,7 @@
import org.apache.drill.common.config.DrillConfig; import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.exceptions.UserRemoteException; import org.apache.drill.common.exceptions.UserRemoteException;
import org.apache.drill.common.util.TestTools; import org.apache.drill.common.util.TestTools;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.exception.VersionMismatchException; import org.apache.drill.exec.exception.VersionMismatchException;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry; import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
import org.apache.drill.exec.expr.fn.registry.LocalFunctionRegistry; import org.apache.drill.exec.expr.fn.registry.LocalFunctionRegistry;
Expand All @@ -35,6 +36,7 @@
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.junit.Before; import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule; import org.junit.Rule;
import org.junit.Test; import org.junit.Test;
import org.junit.rules.TemporaryFolder; import org.junit.rules.TemporaryFolder;
Expand All @@ -43,7 +45,6 @@
import org.mockito.runners.MockitoJUnitRunner; import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer; import org.mockito.stubbing.Answer;


import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.Properties; import java.util.Properties;
Expand All @@ -67,18 +68,26 @@
@RunWith(MockitoJUnitRunner.class) @RunWith(MockitoJUnitRunner.class)
public class TestDynamicUDFSupport extends BaseTestQuery { public class TestDynamicUDFSupport extends BaseTestQuery {


private static final File jars = new File(TestTools.getWorkingPath() + "/src/test/resources/jars"); private static final Path jars = new Path(TestTools.getWorkingPath(), "src/test/resources/jars");
private static final String default_binary_name = "DrillUDF-1.0.jar"; private static final String default_binary_name = "DrillUDF-1.0.jar";
private static final String default_source_name = JarUtil.getSourceName(default_binary_name); private static final String default_source_name = JarUtil.getSourceName(default_binary_name);


@Rule @Rule
public final TemporaryFolder base = new TemporaryFolder(); public final TemporaryFolder base = new TemporaryFolder();


private static FileSystem localFileSystem;

/**
 * Obtains a local file system instance once for the whole test class; individual
 * tests use it to verify that UDF jars appear in (or disappear from) the local
 * udf directory.
 *
 * @throws IOException if the local file system cannot be obtained
 */
@BeforeClass
public static void init() throws IOException {
localFileSystem = getLocalFileSystem();
}

@Before @Before
public void setup() { public void setup() {
Properties overrideProps = new Properties(); Properties overrideProps = new Properties();
overrideProps.setProperty("drill.exec.udf.directory.root", base.getRoot().getPath()); overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_ROOT, base.getRoot().getPath());
overrideProps.setProperty("drill.tmp-dir", base.getRoot().getPath()); overrideProps.setProperty(ExecConstants.DRILL_TMP_DIR, base.getRoot().getPath());
overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_FS, FileSystem.DEFAULT_FS);
updateTestCluster(1, DrillConfig.create(overrideProps)); updateTestCluster(1, DrillConfig.create(overrideProps));
} }


Expand Down Expand Up @@ -120,8 +129,10 @@ public void testDisableDynamicSupport() throws Exception {
@Test @Test
public void testAbsentBinaryInStaging() throws Exception { public void testAbsentBinaryInStaging() throws Exception {
Path staging = getDrillbitContext().getRemoteFunctionRegistry().getStagingArea(); Path staging = getDrillbitContext().getRemoteFunctionRegistry().getStagingArea();
FileSystem fs = getDrillbitContext().getRemoteFunctionRegistry().getFs();


String summary = String.format("File %s does not exist", new Path(staging, default_binary_name).toUri().getPath()); String summary = String.format("File %s does not exist on file system %s",
new Path(staging, default_binary_name).toUri().getPath(), fs.getUri());


testBuilder() testBuilder()
.sqlQuery("create function using jar '%s'", default_binary_name) .sqlQuery("create function using jar '%s'", default_binary_name)
Expand All @@ -134,10 +145,11 @@ public void testAbsentBinaryInStaging() throws Exception {
@Test @Test
public void testAbsentSourceInStaging() throws Exception { public void testAbsentSourceInStaging() throws Exception {
Path staging = getDrillbitContext().getRemoteFunctionRegistry().getStagingArea(); Path staging = getDrillbitContext().getRemoteFunctionRegistry().getStagingArea();
copyJar(getDrillbitContext().getRemoteFunctionRegistry().getFs(), new Path(jars.toURI()), FileSystem fs = getDrillbitContext().getRemoteFunctionRegistry().getFs();
staging, default_binary_name); copyJar(fs, jars, staging, default_binary_name);


String summary = String.format("File %s does not exist", new Path(staging, default_source_name).toUri().getPath()); String summary = String.format("File %s does not exist on file system %s",
new Path(staging, default_source_name).toUri().getPath(), fs.getUri());


testBuilder() testBuilder()
.sqlQuery("create function using jar '%s'", default_binary_name) .sqlQuery("create function using jar '%s'", default_binary_name)
Expand Down Expand Up @@ -432,10 +444,11 @@ public void testLazyInit() throws Exception {


Path localUdfDirPath = Deencapsulation.getField( Path localUdfDirPath = Deencapsulation.getField(
getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir"); getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir");
File localUdfDir = new File(localUdfDirPath.toUri().getPath());


assertTrue("Binary should exist in local udf directory", new File(localUdfDir, default_binary_name).exists()); assertTrue("Binary should exist in local udf directory",
assertTrue("Source should exist in local udf directory", new File(localUdfDir, default_source_name).exists()); localFileSystem.exists(new Path(localUdfDirPath, default_binary_name)));
assertTrue("Source should exist in local udf directory",
localFileSystem.exists(new Path(localUdfDirPath, default_source_name)));
} }


@Test @Test
Expand Down Expand Up @@ -498,10 +511,11 @@ public void testDropFunction() throws Exception {


Path localUdfDirPath = Deencapsulation.getField( Path localUdfDirPath = Deencapsulation.getField(
getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir"); getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir");
File localUdfDir = new File(localUdfDirPath.toUri().getPath());


assertTrue("Binary should exist in local udf directory", new File(localUdfDir, default_binary_name).exists()); assertTrue("Binary should exist in local udf directory",
assertTrue("Source should exist in local udf directory", new File(localUdfDir, default_source_name).exists()); localFileSystem.exists(new Path(localUdfDirPath, default_binary_name)));
assertTrue("Source should exist in local udf directory",
localFileSystem.exists(new Path(localUdfDirPath, default_source_name)));


String summary = "The following UDFs in jar %s have been unregistered:\n" + String summary = "The following UDFs in jar %s have been unregistered:\n" +
"[custom_lower(VARCHAR-REQUIRED)]"; "[custom_lower(VARCHAR-REQUIRED)]";
Expand Down Expand Up @@ -530,9 +544,9 @@ public void testDropFunction() throws Exception {
fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_source_name))); fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_source_name)));


assertFalse("Binary should not be present in local udf directory", assertFalse("Binary should not be present in local udf directory",
new File(localUdfDir, default_binary_name).exists()); localFileSystem.exists(new Path(localUdfDirPath, default_binary_name)));
assertFalse("Source should not be present in local udf directory", assertFalse("Source should not be present in local udf directory",
new File(localUdfDir, default_source_name).exists()); localFileSystem.exists(new Path(localUdfDirPath, default_source_name)));
} }


@Test @Test
Expand All @@ -549,7 +563,7 @@ public void testReRegisterTheSameJarWithDifferentContent() throws Exception {


Thread.sleep(1000); Thread.sleep(1000);


Path src = new Path(jars.toURI().getPath(), "v2"); Path src = new Path(jars, "v2");
copyJarsToStagingArea(src, default_binary_name, default_source_name); copyJarsToStagingArea(src, default_binary_name, default_source_name);
test("create function using jar '%s'", default_binary_name); test("create function using jar '%s'", default_binary_name);
testBuilder() testBuilder()
Expand Down Expand Up @@ -887,11 +901,11 @@ public Boolean answer(InvocationOnMock invocation) throws Throwable {
} }


private void copyDefaultJarsToStagingArea() throws IOException { private void copyDefaultJarsToStagingArea() throws IOException {
copyJarsToStagingArea(new Path(jars.toURI()), default_binary_name, default_source_name); copyJarsToStagingArea(jars, default_binary_name, default_source_name);
} }


private void copyJarsToStagingArea(String binaryName, String sourceName) throws IOException { private void copyJarsToStagingArea(String binaryName, String sourceName) throws IOException {
copyJarsToStagingArea(new Path(jars.toURI()), binaryName, sourceName); copyJarsToStagingArea(jars, binaryName, sourceName);
} }


private void copyJarsToStagingArea(Path src, String binaryName, String sourceName) throws IOException { private void copyJarsToStagingArea(Path src, String binaryName, String sourceName) throws IOException {
Expand Down
14 changes: 14 additions & 0 deletions exec/java-exec/src/test/java/org/apache/drill/exec/ExecTest.java
Expand Up @@ -39,10 +39,13 @@
import org.apache.drill.exec.store.sys.store.provider.LocalPersistentStoreProvider; import org.apache.drill.exec.store.sys.store.provider.LocalPersistentStoreProvider;
import org.apache.drill.exec.util.GuavaPatcher; import org.apache.drill.exec.util.GuavaPatcher;
import org.apache.drill.test.DrillTest; import org.apache.drill.test.DrillTest;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.junit.After; import org.junit.After;
import org.junit.BeforeClass; import org.junit.BeforeClass;


import java.io.File; import java.io.File;
import java.io.IOException;




public class ExecTest extends DrillTest { public class ExecTest extends DrillTest {
Expand All @@ -69,6 +72,17 @@ public static void setupOptionManager() throws Exception{
optionManager.init(); optionManager.init();
} }


/**
 * Builds a file system instance that is forced to be the local one, regardless of
 * any default file system configured in the surrounding environment.
 *
 * @return local file system
 * @throws IOException if the file system instance cannot be obtained
 */
public static FileSystem getLocalFileSystem() throws IOException {
  Configuration conf = new Configuration();
  // Override the default FS key so tests never pick up an externally configured
  // (e.g. HDFS) default file system.
  conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);
  return FileSystem.get(conf);
}

/** /**
* Create a temp directory to store the given <i>dirName</i>. * Create a temp directory to store the given <i>dirName</i>.
* Directory will be deleted on exit. * Directory will be deleted on exit.
Expand Down
@@ -1,4 +1,4 @@
/** /*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information
Expand All @@ -19,7 +19,6 @@
package org.apache.drill.exec; package org.apache.drill.exec;


import org.apache.drill.BaseTestQuery; import org.apache.drill.BaseTestQuery;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.junit.BeforeClass; import org.junit.BeforeClass;
Expand All @@ -31,10 +30,7 @@ public class TestRepeatedReaders extends BaseTestQuery {


@BeforeClass @BeforeClass
public static void initFs() throws Exception { public static void initFs() throws Exception {
Configuration conf = new Configuration(); fs = getLocalFileSystem();
conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);

fs = FileSystem.get(conf);
} }


private static void deleteTableIfExists(String tableName) { private static void deleteTableIfExists(String tableName) {
Expand Down
@@ -1,4 +1,4 @@
/** /*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information
Expand All @@ -22,9 +22,6 @@


import com.google.common.io.Files; import com.google.common.io.Files;
import org.apache.drill.common.config.DrillConfig; import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.expression.ExpressionPosition;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.scanner.ClassPathScanner;
import org.apache.drill.common.types.TypeProtos; import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.Types; import org.apache.drill.common.types.Types;
import org.apache.drill.common.util.TestTools; import org.apache.drill.common.util.TestTools;
Expand All @@ -42,7 +39,6 @@
import org.apache.drill.exec.vector.IntVector; import org.apache.drill.exec.vector.IntVector;
import org.apache.drill.exec.vector.ValueVector; import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.exec.vector.VarBinaryVector; import org.apache.drill.exec.vector.VarBinaryVector;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
Expand Down Expand Up @@ -97,12 +93,9 @@ public void test() throws Exception {
VectorAccessibleSerializable wrap = new VectorAccessibleSerializable( VectorAccessibleSerializable wrap = new VectorAccessibleSerializable(
batch, context.getAllocator()); batch, context.getAllocator());


Configuration conf = new Configuration();
conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);

final VectorAccessibleSerializable newWrap = new VectorAccessibleSerializable( final VectorAccessibleSerializable newWrap = new VectorAccessibleSerializable(
context.getAllocator()); context.getAllocator());
try (final FileSystem fs = FileSystem.get(conf)) { try (final FileSystem fs = getLocalFileSystem()) {
final File tempDir = Files.createTempDir(); final File tempDir = Files.createTempDir();
tempDir.deleteOnExit(); tempDir.deleteOnExit();
final Path path = new Path(tempDir.getAbsolutePath(), "drillSerializable"); final Path path = new Path(tempDir.getAbsolutePath(), "drillSerializable");
Expand Down
Expand Up @@ -119,10 +119,7 @@ public static Collection<Object[]> data() {


@BeforeClass @BeforeClass
public static void initFs() throws Exception { public static void initFs() throws Exception {
Configuration conf = new Configuration(); fs = getLocalFileSystem();
conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);

fs = FileSystem.get(conf);
test(String.format("alter session set `%s` = true", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY)); test(String.format("alter session set `%s` = true", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
} }


Expand Down Expand Up @@ -731,6 +728,7 @@ public void runTestAndValidate(String selection, String validationSelection, Str
.go(); .go();


Configuration hadoopConf = new Configuration(); Configuration hadoopConf = new Configuration();
hadoopConf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);
Path output = new Path(getDfsTestTmpSchemaLocation(), outputFile); Path output = new Path(getDfsTestTmpSchemaLocation(), outputFile);
FileSystem fs = output.getFileSystem(hadoopConf); FileSystem fs = output.getFileSystem(hadoopConf);
for (FileStatus file : fs.listStatus(output)) { for (FileStatus file : fs.listStatus(output)) {
Expand Down
@@ -1,4 +1,4 @@
/** /*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information
Expand All @@ -19,7 +19,6 @@


import org.apache.drill.BaseTestQuery; import org.apache.drill.BaseTestQuery;
import org.apache.drill.exec.ExecConstants; import org.apache.drill.exec.ExecConstants;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.junit.Assert; import org.junit.Assert;
Expand All @@ -32,11 +31,7 @@ public class TestParquetWriterEmptyFiles extends BaseTestQuery {


@BeforeClass @BeforeClass
public static void initFs() throws Exception { public static void initFs() throws Exception {
Configuration conf = new Configuration(); fs = getLocalFileSystem();
conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);

fs = FileSystem.get(conf);

updateTestCluster(3, null); updateTestCluster(3, null);
} }


Expand Down
@@ -1,4 +1,4 @@
/** /*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information
Expand Down Expand Up @@ -30,7 +30,6 @@
import org.apache.drill.exec.rpc.user.QueryDataBatch; import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.vector.BigIntVector; import org.apache.drill.exec.vector.BigIntVector;
import org.apache.drill.exec.vector.VarCharVector; import org.apache.drill.exec.vector.VarCharVector;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
Expand All @@ -48,10 +47,7 @@ public class TestWriter extends BaseTestQuery {


@BeforeClass @BeforeClass
public static void initFs() throws Exception { public static void initFs() throws Exception {
Configuration conf = new Configuration(); fs = getLocalFileSystem();
conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);

fs = FileSystem.get(conf);
} }


@Test @Test
Expand Down

0 comments on commit 75c3513

Please sign in to comment.