From e6fb91963278e120dcfdb1ebf5495ae2726df789 Mon Sep 17 00:00:00 2001 From: Steve Loughran Date: Thu, 15 Sep 2016 09:37:05 +0100 Subject: [PATCH 1/7] HADOOP-13614: cut some needless, slower S3A tests --- .../java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java | 8 +------- .../hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java | 7 ------- 2 files changed, 1 insertion(+), 14 deletions(-) diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java index 454327893f5f9..1fa65237e5227 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java @@ -48,15 +48,9 @@ protected Configuration createConfiguration() { } private static final int[] SIZES = { - 0, 1, 2, 3, 4, 5, 254, 255, 256, 257, 2 ^ 10 - 3, 2 ^ 11 - 2, 2 ^ 12 - 1 + 0, 1, 2, 3, 4, 5, 254, 255, 256, 257, 2 ^ 12 - 1 }; - @Override - public void teardown() throws Exception { - super.teardown(); - IOUtils.closeStream(getFileSystem()); - } - @Test public void testEncryption() throws Throwable { for (int size: SIZES) { diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java index 4e1a734e564b9..93eee066fc9c1 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java @@ -114,11 +114,4 @@ public Boolean call() throws IOException { ContractTestUtils.assertDeleted(fs, finalDir, true, false); } - @Test - public void testOpenCreate() throws IOException { - final Path scaleTestDir = getTestPath(); - final Path srcDir = new Path(scaleTestDir, "opencreate"); - 
ContractTestUtils.createAndVerifyFile(fs, srcDir, 1024); - ContractTestUtils.createAndVerifyFile(fs, srcDir, 50 * 1024); - } } From 4b64d18f8baaaa5b994ecf5c70cc430f543617d1 Mon Sep 17 00:00:00 2001 From: Steve Loughran Date: Fri, 16 Sep 2016 14:40:58 +0100 Subject: [PATCH 2/7] HADOOP-13614; fix tests to compile, review all tests, clean slightly and move timing of upload/download times and bandwidth to contract tests --- .../hadoop/fs/contract/ContractTestUtils.java | 6 ++++ .../fs/s3a/ITestS3ABlockingThreadPool.java | 28 +++++-------------- .../hadoop/fs/s3a/ITestS3ABlocksize.java | 3 -- .../hadoop/fs/s3a/ITestS3AConfiguration.java | 6 ++-- ...TestS3AEncryptionAlgorithmPropagation.java | 6 ---- .../scale/ITestS3ADeleteFilesOneByOne.java | 5 ---- 6 files changed, 16 insertions(+), 38 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java index 73c8f1ce0957e..f6b6389a102ad 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java @@ -834,6 +834,7 @@ public static void verifyReceivedData(FileSystem fs, Path path, long totalBytesRead = 0; int nextExpectedNumber = 0; + NanoTimer timer = new NanoTimer(); try (InputStream inputStream = fs.open(path)) { while (true) { final int bytesRead = inputStream.read(testBuffer); @@ -862,6 +863,8 @@ public static void verifyReceivedData(FileSystem fs, Path path, " bytes but only received " + totalBytesRead); } } + timer.end("Time to read %d bytes", expectedSize); + bandwidth(timer, expectedSize); } /** @@ -925,9 +928,12 @@ public static void createAndVerifyFile(FileSystem fs, Path parent, final long fi final Path objectPath = new Path(parent, objectName); // Write test file in a specific 
pattern + NanoTimer timer = new NanoTimer(); assertEquals(fileSize, generateTestFile(fs, objectPath, fileSize, testBufferSize, modulus)); assertPathExists(fs, "not created successful", objectPath); + timer.end("Time to write %d bytes", fileSize); + bandwidth(timer, fileSize); // Now read the same file back and verify its content try { diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java index 991135e9d7d89..7199c20b0afde 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java @@ -34,32 +34,18 @@ * 4th part should not trigger an exception as it would with a * non-blocking threadpool. */ -public class ITestS3ABlockingThreadPool { - - private Configuration conf; - private S3AFileSystem fs; +public class ITestS3ABlockingThreadPool extends AbstractS3ATestBase { @Rule public Timeout testTimeout = new Timeout(30 * 60 * 1000); - protected Path getTestPath() { - return new Path("/tests3a"); - } - - @Before - public void setUp() throws Exception { - conf = new Configuration(); + @Override + protected Configuration createConfiguration() { + Configuration conf = new Configuration(); conf.setLong(Constants.MIN_MULTIPART_THRESHOLD, 5 * 1024 * 1024); - conf.setLong(Constants.MULTIPART_SIZE, 5 * 1024 * 1024); - conf.setInt(Constants.MAX_THREADS, 2); - conf.setInt(Constants.MAX_TOTAL_TASKS, 1); - } - - @After - public void tearDown() throws Exception { - if (fs != null) { - fs.delete(getTestPath(), true); - } + conf.setInt(Constants.MULTIPART_SIZE, 5 * 1024 * 1024); + conf.setBoolean(Constants.FAST_UPLOAD, true); + return conf; } @Test diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java index 9a6dae75bfdf4..fcd6107135bb2 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java @@ -48,9 +48,6 @@ protected AbstractFSContract createContract(Configuration conf) { return new S3AContract(conf); } - @Rule - public Timeout testTimeout = new Timeout(30 * 60 * 1000); - @Test @SuppressWarnings("deprecation") public void testBlockSize() throws Exception { diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java index 30d4bf66baf53..773119b9ee423 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java @@ -76,13 +76,13 @@ public class ITestS3AConfiguration { /** * Test if custom endpoint is picked up. - *

+ *

* The test expects TEST_ENDPOINT to be defined in the Configuration * describing the endpoint of the bucket to which TEST_FS_S3A_NAME points - * (f.i. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland). + * (i.e. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland). * Evidently, the bucket has to be hosted in the region denoted by the * endpoint for the test to succeed. - *

+ *

* More info and the list of endpoint identifiers: * http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region * diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java index 81578c202e891..c8394ba65b518 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java @@ -43,12 +43,6 @@ protected Configuration createConfiguration() { return conf; } - @Override - public void teardown() throws Exception { - super.teardown(); - IOUtils.closeStream(getFileSystem()); - } - @Test public void testEncrypt0() throws Throwable { writeThenReadFileToFailure(0); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java index a375664a50ac4..3e2a5b8574b25 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java @@ -36,9 +36,4 @@ protected Configuration createConfiguration() { return configuration; } - @Override - @Test - public void testOpenCreate() throws IOException { - - } } From db246a8ab26eb73380f8523607971b9809702e9d Mon Sep 17 00:00:00 2001 From: Steve Loughran Date: Fri, 30 Sep 2016 18:28:14 +0100 Subject: [PATCH 3/7] HADOOP-13614: patch 002. -where possible give all the s3a tests the superclass of {{AbstractS3ATestBase}}, clean up children. 
-ITestS3AFileSystemContract has path logic reworked *so test can run in parallel* -fixed up yarn test to work in parallel too, along with BlockingThreadPool and FastOutputStream -on the files changed, moved the copyright comment to being a simple comment rather than a javadoc one. This conflicts a bit with HADOOP-13560; this patch should go in first and then I'll address the diffs --- .../TestFSMainOperationsLocalFileSystem.java | 4 +-- hadoop-tools/hadoop-aws/pom.xml | 7 ---- .../hadoop/fs/contract/s3a/S3AContract.java | 6 ++-- .../hadoop/fs/s3a/AbstractS3ATestBase.java | 3 -- .../fs/s3a/ITestS3ABlockingThreadPool.java | 18 ++-------- .../hadoop/fs/s3a/ITestS3ABlocksize.java | 4 +-- .../hadoop/fs/s3a/ITestS3AEncryption.java | 1 - ...TestS3AEncryptionAlgorithmPropagation.java | 1 - .../fs/s3a/ITestS3AFailureHandling.java | 9 ++--- .../fs/s3a/ITestS3AFileOperationCost.java | 16 +-------- .../fs/s3a/ITestS3AFileSystemContract.java | 33 +++++++++++++++++-- .../fs/s3a/ITestS3ATemporaryCredentials.java | 14 ++------ .../hadoop/fs/s3a/S3ATestConstants.java | 8 +++++ .../apache/hadoop/fs/s3a/S3ATestUtils.java | 14 ++++++++ .../fs/s3a/scale/ITestS3ADeleteManyFiles.java | 2 +- .../scale/ITestS3ADirectoryPerformance.java | 8 ++--- .../scale/ITestS3AInputStreamPerformance.java | 2 +- .../hadoop/fs/s3a/scale/S3AScaleTestBase.java | 27 +++++++-------- .../apache/hadoop/fs/s3a/yarn/ITestS3A.java | 4 +-- .../fs/s3a/yarn/ITestS3AMiniYarnCluster.java | 32 +++++++++--------- 20 files changed, 102 insertions(+), 111 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java index 6081f38570699..12687fd8b9289 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -46,7 +46,7 @@ public void setUp() throws Exception { fcTarget = FileSystem.getLocal(conf); super.setUp(); } - + @Override @After public void tearDown() throws Exception { diff --git a/hadoop-tools/hadoop-aws/pom.xml b/hadoop-tools/hadoop-aws/pom.xml index 28825d940b2a2..1c70638dea2d4 100644 --- a/hadoop-tools/hadoop-aws/pom.xml +++ b/hadoop-tools/hadoop-aws/pom.xml @@ -181,9 +181,6 @@ **/ITestJets3tNativeS3FileSystemContract.java - **/ITestS3ABlockingThreadPool.java - **/ITestS3AFileSystemContract.java - **/ITestS3AMiniYarnCluster.java **/ITest*Root*.java **/ITestS3AFileContextStatistics.java **/ITestS3AHuge*.java @@ -211,10 +208,6 @@ **/ITestJets3tNativeS3FileSystemContract.java - **/ITestS3ABlockingThreadPool.java - **/ITestS3AFastOutputStream.java - **/ITestS3AFileSystemContract.java - **/ITestS3AMiniYarnCluster.java **/ITest*Root*.java **/ITestS3AFileContextStatistics.java **/ITestS3AHuge*.java diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/S3AContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/S3AContract.java index e9024b5e82294..3510a6486a260 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/S3AContract.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/S3AContract.java @@ -21,6 +21,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.contract.AbstractBondedFSContract; +import org.apache.hadoop.fs.s3a.S3ATestUtils; /** * The contract of S3A: only enabled if the test bucket is provided. 
@@ -29,7 +30,6 @@ public class S3AContract extends AbstractBondedFSContract { public static final String CONTRACT_XML = "contract/s3a.xml"; - public S3AContract(Configuration conf) { super(conf); //insert the base features @@ -43,8 +43,6 @@ public String getScheme() { @Override public Path getTestPath() { - String testUniqueForkId = System.getProperty("test.unique.fork.id"); - return testUniqueForkId == null ? super.getTestPath() : - new Path("/" + testUniqueForkId, "test"); + return S3ATestUtils.createTestPath(super.getTestPath()); } } diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java index e049fd16aa328..358426d95636d 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java @@ -52,9 +52,6 @@ public void teardown() throws Exception { IOUtils.closeStream(getFileSystem()); } - @Rule - public TestName methodName = new TestName(); - @Before public void nameThread() { Thread.currentThread().setName("JUnit-" + methodName.getMethodName()); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java index 7199c20b0afde..b4b6ce589eaa4 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information @@ -19,13 +19,8 @@ package org.apache.hadoop.fs.s3a; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.contract.ContractTestUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.Timeout; /** * Demonstrate that the threadpool blocks additional client requests if @@ -48,21 +43,14 @@ protected Configuration createConfiguration() { return conf; } - @Test - public void testRegularMultiPartUpload() throws Exception { - fs = S3ATestUtils.createTestFileSystem(conf); - ContractTestUtils.createAndVerifyFile(fs, getTestPath(), 16 * 1024 * - 1024); - } - @Test public void testFastMultiPartUpload() throws Exception { conf.setBoolean(Constants.FAST_UPLOAD, true); conf.set(Constants.FAST_UPLOAD_BUFFER, Constants.FAST_UPLOAD_BYTEBUFFER); fs = S3ATestUtils.createTestFileSystem(conf); - ContractTestUtils.createAndVerifyFile(fs, getTestPath(), 16 * 1024 * - 1024); + ContractTestUtils.createAndVerifyFile(fs, + path("testFastMultiPartUpload"), 16 * 1024 * 1024); } } diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java index fcd6107135bb2..e8a4e40b18211 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information @@ -25,9 +25,7 @@ import org.apache.hadoop.fs.contract.AbstractFSContract; import org.apache.hadoop.fs.contract.AbstractFSContractTestBase; import org.apache.hadoop.fs.contract.s3a.S3AContract; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java index 1fa65237e5227..8432789979e72 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java @@ -22,7 +22,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.contract.ContractTestUtils; -import org.apache.hadoop.io.IOUtils; import org.junit.Test; import java.io.IOException; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java index c8394ba65b518..96deb2558642e 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java @@ -20,7 +20,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.IOUtils; import org.junit.Test; import java.io.IOException; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java index e284ea7226da2..8116c2c80863d 100644 --- 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java @@ -18,7 +18,6 @@ package org.apache.hadoop.fs.s3a; -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -36,20 +35,16 @@ import java.util.concurrent.Callable; import static org.apache.hadoop.fs.contract.ContractTestUtils.*; +import static org.apache.hadoop.fs.s3a.S3ATestUtils.*; /** * Test S3A Failure translation, including a functional test * generating errors during stream IO. */ -public class ITestS3AFailureHandling extends AbstractFSContractTestBase { +public class ITestS3AFailureHandling extends AbstractS3ATestBase { private static final Logger LOG = LoggerFactory.getLogger(ITestS3AFailureHandling.class); - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new S3AContract(conf); - } - @Test public void testReadFileChanged() throws Throwable { describe("overwrite a file with a shorter one during a read, seek"); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java index f19ea954bc54c..8616e263ce037 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java @@ -18,13 +18,9 @@ package org.apache.hadoop.fs.s3a; -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.contract.AbstractFSContract; -import org.apache.hadoop.fs.contract.AbstractFSContractTestBase; import org.apache.hadoop.fs.contract.ContractTestUtils; -import 
org.apache.hadoop.fs.contract.s3a.S3AContract; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,7 +39,7 @@ * Use metrics to assert about the cost of file status queries. * {@link S3AFileSystem#getFileStatus(Path)}. */ -public class ITestS3AFileOperationCost extends AbstractFSContractTestBase { +public class ITestS3AFileOperationCost extends AbstractS3ATestBase { private MetricDiff metadataRequests; private MetricDiff listRequests; @@ -51,16 +47,6 @@ public class ITestS3AFileOperationCost extends AbstractFSContractTestBase { private static final Logger LOG = LoggerFactory.getLogger(ITestS3AFileOperationCost.class); - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new S3AContract(conf); - } - - @Override - public S3AFileSystem getFileSystem() { - return (S3AFileSystem) super.getFileSystem(); - } - @Override public void setup() throws Exception { super.setup(); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java index 858ac22c7f216..619d09cccc64a 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information @@ -18,6 +18,9 @@ package org.apache.hadoop.fs.s3a; +import org.junit.Before; +import org.junit.Rule; +import org.junit.rules.TestName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; @@ -38,18 +41,44 @@ public class ITestS3AFileSystemContract extends FileSystemContractBaseTest { protected static final Logger LOG = LoggerFactory.getLogger(ITestS3AFileSystemContract.class); + private Path basePath; + + @Rule + public TestName methodName = new TestName(); + + @Before + public void nameThread() { + Thread.currentThread().setName("JUnit-" + methodName.getMethodName()); + } + @Override public void setUp() throws Exception { Configuration conf = new Configuration(); fs = S3ATestUtils.createTestFileSystem(conf); + basePath = fs.makeQualified( + S3ATestUtils.createTestPath(new Path("/s3afilesystemcontract"))); super.setUp(); } + /** + * This path explicitly places all absolute paths under the per-test suite + * path directory; this allows the test to run in parallel. + * @param pathString path string as input + * @return a qualified path string. 
+ */ + protected Path path(String pathString) { + if (pathString.startsWith("/")) { + return new Path(basePath, pathString).makeQualified(fs); + } else { + return super.path(pathString); + } + } + @Override protected void tearDown() throws Exception { if (fs != null) { - fs.delete(path("test"), true); + fs.delete(basePath, true); } super.tearDown(); } diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java index 360a151406a32..84aad3c9b7ffe 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -28,9 +28,6 @@ import com.amazonaws.services.securitytoken.model.GetSessionTokenResult; import com.amazonaws.services.securitytoken.model.Credentials; -import org.apache.hadoop.fs.contract.AbstractFSContract; -import org.apache.hadoop.fs.contract.AbstractFSContractTestBase; -import org.apache.hadoop.fs.contract.s3a.S3AContract; import org.apache.hadoop.fs.s3native.S3xLoginHelper; import org.apache.hadoop.conf.Configuration; @@ -48,9 +45,7 @@ * should only be used against transient filesystems where you don't care about * the data. 
*/ -public class ITestS3ATemporaryCredentials extends AbstractFSContractTestBase { - public static final String TEST_STS_ENABLED = "test.fs.s3a.sts.enabled"; - public static final String TEST_STS_ENDPOINT = "test.fs.s3a.sts.endpoint"; +public class ITestS3ATemporaryCredentials extends AbstractS3ATestBase { private static final Logger LOG = LoggerFactory.getLogger(ITestS3ATemporaryCredentials.class); @@ -60,11 +55,6 @@ public class ITestS3ATemporaryCredentials extends AbstractFSContractTestBase { private static final long TEST_FILE_SIZE = 1024; - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new S3AContract(conf); - } - /** * Test use of STS for requesting temporary credentials. * diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java index 6894bb07075de..e62c47bdb3ef4 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java @@ -144,4 +144,12 @@ public interface S3ATestConstants { */ boolean DEFAULT_SCALE_TESTS_ENABLED = false; + String KEY_ENCRYPTION_TESTS = TEST_FS_S3A + "encryption.enabled"; + + /** + * Fork ID passed down from maven if the test is running in parallel. 
+ */ + String TEST_UNIQUE_FORK_ID = "test.unique.fork.id"; + String TEST_STS_ENABLED = "test.fs.s3a.sts.enabled"; + String TEST_STS_ENDPOINT = "test.fs.s3a.sts.endpoint"; } diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java index 19dccac15b3a2..5d52b3722ef2a 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java @@ -21,6 +21,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileContext; +import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.s3a.scale.S3AScaleTestBase; import org.junit.Assert; import org.junit.internal.AssumptionViolatedException; @@ -301,6 +302,19 @@ public static void skipIfEncryptionTestsDisabled( } } + /** + * Create a test path, using the value of + * {@link S3ATestConstants#TEST_UNIQUE_FORK_ID} if it is set. + * @param defVal default value + * @return a path + */ + public static Path createTestPath(Path defVal) { + String testUniqueForkId = System.getProperty( + S3ATestConstants.TEST_UNIQUE_FORK_ID); + return testUniqueForkId == null ? defVal : + new Path("/" + testUniqueForkId, "test"); + } + /** * Reset all metrics in a list. 
* @param metrics metrics to reset diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java index 93eee066fc9c1..fbbc58f8aa68e 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java @@ -50,7 +50,7 @@ public class ITestS3ADeleteManyFiles extends S3AScaleTestBase { */ @Test public void testBulkRenameAndDelete() throws Throwable { - final Path scaleTestDir = getTestPath(); + final Path scaleTestDir = path("testBulkRenameAndDelete"); final Path srcDir = new Path(scaleTestDir, "src"); final Path finalDir = new Path(scaleTestDir, "final"); final long count = getOperationCount(); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java index b5f4eb3cc8300..1d0f9e1c3a7d6 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java @@ -40,7 +40,7 @@ public class ITestS3ADirectoryPerformance extends S3AScaleTestBase { @Test public void testListOperations() throws Throwable { describe("Test recursive list operations"); - final Path scaleTestDir = getTestPath(); + final Path scaleTestDir = path("testListOperations"); final Path listDir = new Path(scaleTestDir, "lists"); // scale factor. 
@@ -137,7 +137,7 @@ public void testListOperations() throws Throwable { @Test public void testTimeToStatEmptyDirectory() throws Throwable { describe("Time to stat an empty directory"); - Path path = new Path(getTestPath(), "empty"); + Path path = path("empty"); fs.mkdirs(path); timeToStatPath(path); } @@ -145,7 +145,7 @@ public void testTimeToStatEmptyDirectory() throws Throwable { @Test public void testTimeToStatNonEmptyDirectory() throws Throwable { describe("Time to stat a non-empty directory"); - Path path = new Path(getTestPath(), "dir"); + Path path = path("dir"); fs.mkdirs(path); touch(fs, new Path(path, "file")); timeToStatPath(path); @@ -154,7 +154,7 @@ public void testTimeToStatNonEmptyDirectory() throws Throwable { @Test public void testTimeToStatFile() throws Throwable { describe("Time to stat a simple file"); - Path path = new Path(getTestPath(), "file"); + Path path = path("file"); touch(fs, path); timeToStatPath(path); } diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java index e2163c5da94c2..090acc6c3482d 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java @@ -436,7 +436,7 @@ public void testRandomReadOverBuffer() throws Throwable { describe("read over a buffer, making sure that the requests" + " spans readahead ranges"); int datasetLen = _32K; - Path dataFile = new Path(getTestPath(), "testReadOverBuffer.bin"); + Path dataFile = path("testReadOverBuffer.bin"); byte[] sourceData = dataset(datasetLen, 0, 64); // relies on the field 'fs' referring to the R/W FS writeDataset(fs, dataFile, sourceData, datasetLen, _16K, true); diff --git 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java index af6d468fb64b2..7a929f3d2a0cd 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java @@ -22,6 +22,7 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.contract.ContractTestUtils; +import org.apache.hadoop.fs.s3a.AbstractS3ATestBase; import org.apache.hadoop.fs.s3a.S3AFileSystem; import org.apache.hadoop.fs.s3a.S3AInputStream; import org.apache.hadoop.fs.s3a.S3AInstrumentation; @@ -35,6 +36,7 @@ import org.junit.Rule; import org.junit.rules.TestName; import org.junit.rules.Timeout; +import org.junit.Assert; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,19 +48,11 @@ * Base class for scale tests; here is where the common scale configuration * keys are defined. */ -public class S3AScaleTestBase extends Assert implements S3ATestConstants { - - @Rule - public final TestName methodName = new TestName(); +public class S3AScaleTestBase extends AbstractS3ATestBase { @Rule public Timeout testTimeout = createTestTimeout(); - @Before - public void nameThread() { - Thread.currentThread().setName("JUnit"); - } - public static final int _1KB = 1024; public static final int _1MB = _1KB * _1KB; @@ -79,6 +73,7 @@ public void nameThread() { protected Configuration createConfiguration() { return new Configuration(); } + private Path testPath; /** * Get the configuration used to set up the FS. 
@@ -114,18 +109,18 @@ private synchronized Configuration demandCreateConfiguration() { if (conf == null) { conf = createConfiguration(); } - return conf; + return getConfiguration(); } - @After - public void tearDown() throws Exception { - ContractTestUtils.rm(fs, getTestPath(), true, true); + @Override + public void setup() throws Exception { + super.setup(); + testPath = path("/tests3a"); + fs = getFileSystem(); } protected Path getTestPath() { - String testUniqueForkId = System.getProperty("test.unique.fork.id"); - return testUniqueForkId == null ? new Path("/tests3a") : - new Path("/" + testUniqueForkId, "tests3a"); + return testPath; } protected long getOperationCount() { diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3A.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3A.java index ca57da65f3cc9..7d2c1dc302377 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3A.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3A.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information @@ -59,7 +59,7 @@ public void tearDown() throws Exception { } protected Path getTestPath() { - return new Path("/tests3afc"); + return S3ATestUtils.createTestPath(new Path("/tests3afc")); } @Test diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java index 772d8c7fd0bee..ac8caf8e79ab8 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF @@ -28,6 +28,7 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.s3a.AbstractS3ATestBase; import org.apache.hadoop.fs.s3a.S3AFileSystem; import org.apache.hadoop.fs.s3a.S3ATestUtils; import org.apache.hadoop.io.IOUtils; @@ -39,26 +40,26 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.MiniYARNCluster; -import org.junit.After; -import static org.junit.Assert.assertTrue; -import org.junit.Before; import org.junit.Test; -import static org.junit.Assert.assertEquals; /** * Tests that S3A is usable through a YARN application. 
*/ -public class ITestS3AMiniYarnCluster { +public class ITestS3AMiniYarnCluster extends AbstractS3ATestBase { private final Configuration conf = new YarnConfiguration(); private S3AFileSystem fs; private MiniYARNCluster yarnCluster; - private final String rootPath = "/tests/MiniClusterWordCount/"; + private Path rootPath; - @Before - public void beforeTest() throws IOException { + @Override + public void setup() throws Exception { + super.setup(); fs = S3ATestUtils.createTestFileSystem(conf); - fs.mkdirs(new Path(rootPath + "input/")); + rootPath = path("MiniClusterWordCount"); + Path workingDir = path("working"); + fs.setWorkingDirectory(workingDir); + fs.mkdirs(new Path(rootPath, "input/")); yarnCluster = new MiniYARNCluster("MiniClusterWordCount", // testName 1, // number of node managers @@ -68,17 +69,18 @@ public void beforeTest() throws IOException { yarnCluster.start(); } - @After - public void afterTest() throws IOException { - fs.delete(new Path(rootPath), true); + @Override + public void teardown() throws Exception { + fs.delete(rootPath, true); yarnCluster.stop(); + super.teardown(); } @Test public void testWithMiniCluster() throws Exception { - Path input = new Path(rootPath + "input/in.txt"); + Path input = new Path(rootPath, "input/in.txt"); input = input.makeQualified(fs.getUri(), fs.getWorkingDirectory()); - Path output = new Path(rootPath + "output/"); + Path output = new Path(rootPath, "output/"); output = output.makeQualified(fs.getUri(), fs.getWorkingDirectory()); writeStringToFile(input, "first line\nsecond line\nthird line"); From 6c50eb316c93ccab27876f2065d50ea4bb1064a5 Mon Sep 17 00:00:00 2001 From: Steve Loughran Date: Tue, 4 Oct 2016 16:19:06 +0100 Subject: [PATCH 4/7] HADOOP-13614 patch 004: tuning timeouts with 10 min for default tests, 30 min for the long tests --- .../hadoop/fs/contract/s3a/ITestS3AContractDistCp.java | 6 ++++++ .../org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java | 7 +++++-- 
.../org/apache/hadoop/fs/s3a/S3ATestConstants.java | 10 ++++++++++ 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java index 9e14ed2d2cee9..2e92c7f276c99 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java @@ -19,6 +19,7 @@ package org.apache.hadoop.fs.contract.s3a; import static org.apache.hadoop.fs.s3a.Constants.*; +import static org.apache.hadoop.fs.s3a.S3ATestConstants.SCALE_TEST_TIMEOUT; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.tools.contract.AbstractContractDistCpTest; @@ -32,6 +33,11 @@ public class ITestS3AContractDistCp extends AbstractContractDistCpTest { private static final long MULTIPART_SETTING = MULTIPART_MIN_SIZE; + @Override + protected int getTestTimeoutMillis() { + return SCALE_TEST_TIMEOUT; + } + @Override protected Configuration createConfiguration() { Configuration newConf = super.createConfiguration(); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java index 358426d95636d..c793982417bdc 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java @@ -26,8 +26,6 @@ import org.apache.hadoop.fs.contract.s3a.S3AContract; import org.apache.hadoop.io.IOUtils; import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; import java.io.IOException; @@ -57,6 +55,11 @@ public void nameThread() { Thread.currentThread().setName("JUnit-" + 
methodName.getMethodName()); } + @Override + protected int getTestTimeoutMillis() { + return S3A_TEST_TIMEOUT; + } + protected Configuration getConfiguration() { return getContract().getConf(); } diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java index e62c47bdb3ef4..daaf948f9ada3 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java @@ -152,4 +152,14 @@ public interface S3ATestConstants { String TEST_UNIQUE_FORK_ID = "test.unique.fork.id"; String TEST_STS_ENABLED = "test.fs.s3a.sts.enabled"; String TEST_STS_ENDPOINT = "test.fs.s3a.sts.endpoint"; + + /** + * Timeout in Milliseconds for standard tests: {@value}. + */ + int S3A_TEST_TIMEOUT = 10 * 60 * 1000; + + /** + * Timeout in Milliseconds for Scale Tests: {@value}. + */ + int SCALE_TEST_TIMEOUT = 30 * 60 * 1000; } From fbbea164f8edb1c9d2a14c1544e0f32b2a92a95e Mon Sep 17 00:00:00 2001 From: Steve Loughran Date: Thu, 6 Oct 2016 13:43:16 +0100 Subject: [PATCH 5/7] HADOOP-13614 checkstyle/javac cleanup --- .../org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java | 2 +- .../hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java index 619d09cccc64a..0eb601bdb4461 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java @@ -69,7 +69,7 @@ public void setUp() throws Exception { */ protected Path path(String pathString) { if 
(pathString.startsWith("/")) { - return new Path(basePath, pathString).makeQualified(fs); + return fs.makeQualified(new Path(basePath, pathString)); } else { return super.path(pathString); } diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java index 3e2a5b8574b25..b054f16300511 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java @@ -20,9 +20,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.s3a.Constants; -import org.junit.Test; - -import java.io.IOException; /** * Tests file deletion with multi-delete disabled. From 20c1669f247dc2e5b6657763df7000ee8cbbace3 Mon Sep 17 00:00:00 2001 From: Steve Loughran Date: Wed, 19 Oct 2016 14:06:35 +0100 Subject: [PATCH 6/7] HADOOP-13614 apply to branch-2; test and verify that all appears well. Tune more tests, including ITestS3AMiniYarnCluster. A big change here is fixing up the scale tests to work as subclasses of the S3AScaleTestBase indirect subclasses of AbstractFSContractTestBase, because that sets up the test timeout rule. Rather than have a field of the same name and hope that its timeout gets picked up, I've tuned how timeouts get set up, so the subclasses do it. All well and good, except those subclasses are being called during the initialization of the base class, that is: before the subclasses are full inited. I don't ever like doing that, though it is working here. 
--- .../contract/s3a/ITestS3AContractDistCp.java | 4 +- .../hadoop/fs/s3a/AbstractS3ATestBase.java | 16 +++ .../fs/s3a/ITestS3ABlockingThreadPool.java | 56 -------- .../hadoop/fs/s3a/ITestS3ABlocksize.java | 10 +- .../hadoop/fs/s3a/ITestS3AConfiguration.java | 17 ++- .../fs/s3a/ITestS3AFileOperationCost.java | 3 +- .../hadoop/fs/s3a/S3ATestConstants.java | 19 +-- .../apache/hadoop/fs/s3a/S3ATestUtils.java | 2 +- .../s3a/scale/AbstractSTestS3AHugeFiles.java | 25 ++-- .../scale/ITestS3ADeleteFilesOneByOne.java | 4 +- .../fs/s3a/scale/ITestS3ADeleteManyFiles.java | 4 +- .../scale/ITestS3ADirectoryPerformance.java | 8 +- .../scale/ITestS3AHugeFilesClassicOutput.java | 4 +- .../scale/ITestS3AInputStreamPerformance.java | 1 + .../hadoop/fs/s3a/scale/S3AScaleTestBase.java | 128 +++++++++--------- .../fs/s3a/yarn/ITestS3AMiniYarnCluster.java | 18 +-- 16 files changed, 144 insertions(+), 175 deletions(-) delete mode 100644 hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java index 2e92c7f276c99..50ce0c2a98f40 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java @@ -19,7 +19,7 @@ package org.apache.hadoop.fs.contract.s3a; import static org.apache.hadoop.fs.s3a.Constants.*; -import static org.apache.hadoop.fs.s3a.S3ATestConstants.SCALE_TEST_TIMEOUT; +import static org.apache.hadoop.fs.s3a.S3ATestConstants.SCALE_TEST_TIMEOUT_MILLIS; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.tools.contract.AbstractContractDistCpTest; @@ -35,7 +35,7 @@ public class ITestS3AContractDistCp extends AbstractContractDistCpTest { @Override protected int 
getTestTimeoutMillis() { - return SCALE_TEST_TIMEOUT; + return SCALE_TEST_TIMEOUT_MILLIS; } @Override diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java index c793982417bdc..c19b72cde9fcc 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java @@ -26,6 +26,8 @@ import org.apache.hadoop.fs.contract.s3a.S3AContract; import org.apache.hadoop.io.IOUtils; import org.junit.Before; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; @@ -38,6 +40,9 @@ public abstract class AbstractS3ATestBase extends AbstractFSContractTestBase implements S3ATestConstants { + protected static final Logger LOG = + LoggerFactory.getLogger(AbstractS3ATestBase.class); + @Override protected AbstractFSContract createContract(Configuration conf) { return new S3AContract(conf); @@ -73,6 +78,17 @@ public S3AFileSystem getFileSystem() { return (S3AFileSystem) super.getFileSystem(); } + /** + * Describe a test in the logs. + * @param text text to print + * @param args arguments to format in the printing + */ + protected void describe(String text, Object... args) { + LOG.info("\n\n{}: {}\n", + methodName.getMethodName(), + String.format(text, args)); + } + /** * Write a file, read it back, validate the dataset. 
Overwrites the file * if it is present diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java deleted file mode 100644 index b4b6ce589eaa4..0000000000000 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.fs.s3a; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.contract.ContractTestUtils; -import org.junit.Test; - -/** - * Demonstrate that the threadpool blocks additional client requests if - * its queue is full (rather than throwing an exception) by initiating an - * upload consisting of 4 parts with 2 threads and 1 spot in the queue. The - * 4th part should not trigger an exception as it would with a - * non-blocking threadpool. 
- */ -public class ITestS3ABlockingThreadPool extends AbstractS3ATestBase { - - @Rule - public Timeout testTimeout = new Timeout(30 * 60 * 1000); - - @Override - protected Configuration createConfiguration() { - Configuration conf = new Configuration(); - conf.setLong(Constants.MIN_MULTIPART_THRESHOLD, 5 * 1024 * 1024); - conf.setInt(Constants.MULTIPART_SIZE, 5 * 1024 * 1024); - conf.setBoolean(Constants.FAST_UPLOAD, true); - return conf; - } - - @Test - public void testFastMultiPartUpload() throws Exception { - conf.setBoolean(Constants.FAST_UPLOAD, true); - conf.set(Constants.FAST_UPLOAD_BUFFER, - Constants.FAST_UPLOAD_BYTEBUFFER); - fs = S3ATestUtils.createTestFileSystem(conf); - ContractTestUtils.createAndVerifyFile(fs, - path("testFastMultiPartUpload"), 16 * 1024 * 1024); - - } -} diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java index e8a4e40b18211..30fb53439f325 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java @@ -25,7 +25,10 @@ import org.apache.hadoop.fs.contract.AbstractFSContract; import org.apache.hadoop.fs.contract.AbstractFSContractTestBase; import org.apache.hadoop.fs.contract.s3a.S3AContract; + +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -36,16 +39,11 @@ /** * S3A tests for configuring block size. 
*/ -public class ITestS3ABlocksize extends AbstractFSContractTestBase { +public class ITestS3ABlocksize extends AbstractS3ATestBase { private static final Logger LOG = LoggerFactory.getLogger(ITestS3ABlocksize.class); - @Override - protected AbstractFSContract createContract(Configuration conf) { - return new S3AContract(conf); - } - @Test @SuppressWarnings("deprecation") public void testBlockSize() throws Exception { diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java index 773119b9ee423..68168344d38ea 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java @@ -66,10 +66,10 @@ public class ITestS3AConfiguration { private static final Logger LOG = LoggerFactory.getLogger(ITestS3AConfiguration.class); - private static final String TEST_ENDPOINT = "test.fs.s3a.endpoint"; - @Rule - public Timeout testTimeout = new Timeout(30 * 60 * 1000); + public Timeout testTimeout = new Timeout( + S3ATestConstants.S3A_TEST_TIMEOUT + ); @Rule public final TemporaryFolder tempDir = new TemporaryFolder(); @@ -77,23 +77,26 @@ public class ITestS3AConfiguration { /** * Test if custom endpoint is picked up. *

- * The test expects TEST_ENDPOINT to be defined in the Configuration + * The test expects {@link S3ATestConstants#CONFIGURATION_TEST_ENDPOINT} + * to be defined in the Configuration * describing the endpoint of the bucket to which TEST_FS_S3A_NAME points * (i.e. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland). * Evidently, the bucket has to be hosted in the region denoted by the * endpoint for the test to succeed. *

* More info and the list of endpoint identifiers: - * http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region + * @see endpoint list. * * @throws Exception */ @Test public void testEndpoint() throws Exception { conf = new Configuration(); - String endpoint = conf.getTrimmed(TEST_ENDPOINT, ""); + String endpoint = conf.getTrimmed( + S3ATestConstants.CONFIGURATION_TEST_ENDPOINT, ""); if (endpoint.isEmpty()) { - LOG.warn("Custom endpoint test skipped as " + TEST_ENDPOINT + "config " + + LOG.warn("Custom endpoint test skipped as " + + S3ATestConstants.CONFIGURATION_TEST_ENDPOINT + "config " + "setting was not detected"); } else { conf.set(Constants.ENDPOINT, endpoint); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java index 8616e263ce037..7fb54b1ac7e5a 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java @@ -232,7 +232,8 @@ public void testFakeDirectoryDeletion() throws Throwable { int destDirDepth = directoriesInPath(destDir); directoriesCreated.assertDiffEquals(state, 1); -/* TODO: uncomment once HADOOP-13222 is in +/* TODO: uncomment once HADOOP-13222 "s3a.mkdirs() to delete empty fake parent directories" + is in deleteRequests.assertDiffEquals(state,1); directoriesDeleted.assertDiffEquals(state,0); fakeDirectoriesDeleted.assertDiffEquals(state,destDirDepth); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java index daaf948f9ada3..8c22f478d5c22 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java +++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java @@ -134,18 +134,11 @@ public interface S3ATestConstants { */ int DEFAULT_DIRECTORY_COUNT = 2; - /** - * Default scale test timeout in seconds: {@value}. - */ - int DEFAULT_TEST_TIMEOUT = 30 * 60; - /** * Default policy on scale tests: {@value}. */ boolean DEFAULT_SCALE_TESTS_ENABLED = false; - String KEY_ENCRYPTION_TESTS = TEST_FS_S3A + "encryption.enabled"; - /** * Fork ID passed down from maven if the test is running in parallel. */ @@ -159,7 +152,15 @@ public interface S3ATestConstants { int S3A_TEST_TIMEOUT = 10 * 60 * 1000; /** - * Timeout in Milliseconds for Scale Tests: {@value}. + * Timeout in Seconds for Scale Tests: {@value}. + */ + int SCALE_TEST_TIMEOUT_SECONDS = 30 * 60; + + int SCALE_TEST_TIMEOUT_MILLIS = SCALE_TEST_TIMEOUT_SECONDS * 1000; + /** + * Optional custom endpoint for S3A configuration tests. + * This does not set the endpoint for s3 access elsewhere. */ - int SCALE_TEST_TIMEOUT = 30 * 60 * 1000; + String CONFIGURATION_TEST_ENDPOINT = + "test.fs.s3a.endpoint"; } diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java index 5d52b3722ef2a..a09dcc2d19c3e 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java @@ -59,7 +59,7 @@ public class S3ATestUtils { */ public static S3AFileSystem createTestFileSystem(Configuration conf) throws IOException { - return createTestFileSystem(conf, true); + return createTestFileSystem(conf, false); } /** diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java index a60d0843a4511..fcb6444349ccc 100644 --- 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java @@ -38,6 +38,7 @@ import org.apache.hadoop.fs.StorageStatistics; import org.apache.hadoop.fs.contract.ContractTestUtils; import org.apache.hadoop.fs.s3a.S3AFileStatus; +import org.apache.hadoop.fs.s3a.S3AFileSystem; import org.apache.hadoop.fs.s3a.Statistic; import org.apache.hadoop.util.Progressable; @@ -70,27 +71,22 @@ public abstract class AbstractSTestS3AHugeFiles extends S3AScaleTestBase { private int partitionSize; @Override - public void setUp() throws Exception { - super.setUp(); - + public void setup() throws Exception { + super.setup(); final Path testPath = getTestPath(); scaleTestDir = new Path(testPath, "scale"); hugefile = new Path(scaleTestDir, "hugefile"); hugefileRenamed = new Path(scaleTestDir, "hugefileRenamed"); } - @Override - public void tearDown() throws Exception { - // do nothing. Specifically: do not delete the test dir - } /** * Note that this can get called before test setup. * @return the configuration to use. */ @Override - protected Configuration createConfiguration() { - Configuration conf = super.createConfiguration(); + protected Configuration createScaleConfiguration() { + Configuration conf = super.createScaleConfiguration(); partitionSize = (int)getTestPropertyBytes(conf, KEY_HUGE_PARTITION_SIZE, DEFAULT_PARTITION_SIZE); @@ -155,6 +151,7 @@ public void test_010_CreateHugeFile() throws IOException { // perform the upload. // there's lots of logging here, so that a tail -f on the output log // can give a view of what is happening. 
+ S3AFileSystem fs = getFileSystem(); StorageStatistics storageStatistics = fs.getStorageStatistics(); String putRequests = Statistic.OBJECT_PUT_REQUESTS.getSymbol(); String putBytes = Statistic.OBJECT_PUT_BYTES.getSymbol(); @@ -286,12 +283,13 @@ private void verifyNoFailures(String operation) { } void assumeHugeFileExists() throws IOException { + S3AFileSystem fs = getFileSystem(); ContractTestUtils.assertPathExists(fs, "huge file not created", hugefile); ContractTestUtils.assertIsFile(fs, hugefile); } private void logFSState() { - LOG.info("File System state after operation:\n{}", fs); + LOG.info("File System state after operation:\n{}", getFileSystem()); } @Test @@ -305,6 +303,7 @@ public void test_040_PositionedReadHugeFile() throws Throwable { } String filetype = encrypted ? "encrypted file" : "file"; describe("Positioned reads of %s %s", filetype, hugefile); + S3AFileSystem fs = getFileSystem(); S3AFileStatus status = fs.getFileStatus(hugefile); long filesize = status.getLen(); int ops = 0; @@ -344,6 +343,7 @@ public void test_040_PositionedReadHugeFile() throws Throwable { public void test_050_readHugeFile() throws Throwable { assumeHugeFileExists(); describe("Reading %s", hugefile); + S3AFileSystem fs = getFileSystem(); S3AFileStatus status = fs.getFileStatus(hugefile); long filesize = status.getLen(); long blocks = filesize / uploadBlockSize; @@ -369,6 +369,7 @@ public void test_050_readHugeFile() throws Throwable { public void test_100_renameHugeFile() throws Throwable { assumeHugeFileExists(); describe("renaming %s to %s", hugefile, hugefileRenamed); + S3AFileSystem fs = getFileSystem(); S3AFileStatus status = fs.getFileStatus(hugefile); long filesize = status.getLen(); fs.delete(hugefileRenamed, false); @@ -396,7 +397,7 @@ public void test_100_renameHugeFile() throws Throwable { public void test_999_DeleteHugeFiles() throws IOException { deleteHugeFile(); ContractTestUtils.NanoTimer timer2 = new ContractTestUtils.NanoTimer(); - + S3AFileSystem fs = 
getFileSystem(); fs.delete(hugefileRenamed, false); timer2.end("time to delete %s", hugefileRenamed); ContractTestUtils.rm(fs, getTestPath(), true, true); @@ -405,7 +406,7 @@ public void test_999_DeleteHugeFiles() throws IOException { protected void deleteHugeFile() throws IOException { describe("Deleting %s", hugefile); NanoTimer timer = new NanoTimer(); - fs.delete(hugefile, false); + getFileSystem().delete(hugefile, false); timer.end("time to delete %s", hugefile); } diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java index b054f16300511..10dfa65a15098 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java @@ -27,8 +27,8 @@ public class ITestS3ADeleteFilesOneByOne extends ITestS3ADeleteManyFiles { @Override - protected Configuration createConfiguration() { - Configuration configuration = super.createConfiguration(); + protected Configuration createScaleConfiguration() { + Configuration configuration = super.createScaleConfiguration(); configuration.setBoolean(Constants.ENABLE_MULTI_DELETE, false); return configuration; } diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java index fbbc58f8aa68e..d4b6dd97d55fc 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java @@ -20,6 +20,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.contract.ContractTestUtils; +import org.apache.hadoop.fs.s3a.S3AFileSystem; + 
import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -54,8 +56,8 @@ public void testBulkRenameAndDelete() throws Throwable { final Path srcDir = new Path(scaleTestDir, "src"); final Path finalDir = new Path(scaleTestDir, "final"); final long count = getOperationCount(); + final S3AFileSystem fs = getFileSystem(); ContractTestUtils.rm(fs, scaleTestDir, true, false); - fs.mkdirs(srcDir); fs.mkdirs(finalDir); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java index 1d0f9e1c3a7d6..d71364f4fcd2f 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java @@ -19,6 +19,7 @@ package org.apache.hadoop.fs.s3a.scale; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.s3a.S3AFileSystem; import org.apache.hadoop.fs.s3a.Statistic; import org.junit.Test; import org.slf4j.Logger; @@ -42,6 +43,7 @@ public void testListOperations() throws Throwable { describe("Test recursive list operations"); final Path scaleTestDir = path("testListOperations"); final Path listDir = new Path(scaleTestDir, "lists"); + S3AFileSystem fs = getFileSystem(); // scale factor. 
int scale = getConf().getInt(KEY_DIRECTORY_COUNT, DEFAULT_DIRECTORY_COUNT); @@ -138,7 +140,7 @@ public void testListOperations() throws Throwable { public void testTimeToStatEmptyDirectory() throws Throwable { describe("Time to stat an empty directory"); Path path = path("empty"); - fs.mkdirs(path); + getFileSystem().mkdirs(path); timeToStatPath(path); } @@ -146,6 +148,7 @@ public void testTimeToStatEmptyDirectory() throws Throwable { public void testTimeToStatNonEmptyDirectory() throws Throwable { describe("Time to stat a non-empty directory"); Path path = path("dir"); + S3AFileSystem fs = getFileSystem(); fs.mkdirs(path); touch(fs, new Path(path, "file")); timeToStatPath(path); @@ -155,7 +158,7 @@ public void testTimeToStatNonEmptyDirectory() throws Throwable { public void testTimeToStatFile() throws Throwable { describe("Time to stat a simple file"); Path path = path("file"); - touch(fs, path); + touch(getFileSystem(), path); timeToStatPath(path); } @@ -167,6 +170,7 @@ public void testTimeToStatRoot() throws Throwable { private void timeToStatPath(Path path) throws IOException { describe("Timing getFileStatus(\"%s\")", path); + S3AFileSystem fs = getFileSystem(); MetricDiff metadataRequests = new MetricDiff(fs, Statistic.OBJECT_METADATA_REQUESTS); MetricDiff listRequests = diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesClassicOutput.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesClassicOutput.java index 45eef2491a884..551956bd8d27d 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesClassicOutput.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesClassicOutput.java @@ -29,8 +29,8 @@ public class ITestS3AHugeFilesClassicOutput extends AbstractSTestS3AHugeFiles { @Override - protected Configuration createConfiguration() { - final Configuration conf = 
super.createConfiguration(); + protected Configuration createScaleConfiguration() { + final Configuration conf = super.createScaleConfiguration(); conf.setBoolean(Constants.FAST_UPLOAD, false); return conf; } diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java index 090acc6c3482d..cc8187e6276ea 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java @@ -436,6 +436,7 @@ public void testRandomReadOverBuffer() throws Throwable { describe("read over a buffer, making sure that the requests" + " spans readahead ranges"); int datasetLen = _32K; + S3AFileSystem fs = getFileSystem(); Path dataFile = path("testReadOverBuffer.bin"); byte[] sourceData = dataset(datasetLen, 0, 64); // relies on the field 'fs' referring to the R/W FS diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java index 7a929f3d2a0cd..c4174bfd63be1 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java @@ -21,22 +21,15 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.contract.ContractTestUtils; import org.apache.hadoop.fs.s3a.AbstractS3ATestBase; -import org.apache.hadoop.fs.s3a.S3AFileSystem; import org.apache.hadoop.fs.s3a.S3AInputStream; import org.apache.hadoop.fs.s3a.S3AInstrumentation; import org.apache.hadoop.fs.s3a.S3ATestConstants; import 
org.apache.hadoop.fs.s3a.Statistic; import org.apache.hadoop.metrics2.lib.MutableGaugeLong; -import org.junit.After; + import org.junit.Assert; import org.junit.Assume; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.junit.rules.Timeout; -import org.junit.Assert; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,17 +40,35 @@ /** * Base class for scale tests; here is where the common scale configuration * keys are defined. + *

+ * Configuration setup is a bit more complex than in the parent classes,
+ * as the test timeout is desired prior to the {@link #getTestTimeoutMillis()}
+ * being called to set the test timeout rule; this happens before any of
+ * the methods tagged with {@code @Before} are invoked.
+ * <p>
+ * The algorithm is:
+ * <ol>
+ *   <li>Create a configuration on demand, via
+ *   {@link #demandCreateConfiguration()}</li>
+ *   <li>Have that return the value of {@link #conf} or create a new one
+ *   if that field is null (and set the field to the created value).</li>
+ *   <li>Override the superclass's {@link #createConfiguration()}
+ *   to return the demand created value; make that method final so that
+ *   subclasses don't break things by overriding it.</li>
+ *   <li>Add a new override point {@link #createScaleConfiguration()}
+ *   to create the config, one which subclasses can (and do) override.</li>
+ * </ol>
+ * Bear in mind that this process also takes place during initialization + * of the superclass; the overridden methods are being invoked before + * their instances are fully configured. This is considered + * very bad form in Java code (indeed, in C++ it is actually permitted; + * the base class implementations get invoked instead). */ public class S3AScaleTestBase extends AbstractS3ATestBase { - @Rule - public Timeout testTimeout = createTestTimeout(); - public static final int _1KB = 1024; public static final int _1MB = _1KB * _1KB; - protected S3AFileSystem fs; - protected static final Logger LOG = LoggerFactory.getLogger(S3AScaleTestBase.class); @@ -65,14 +76,7 @@ public class S3AScaleTestBase extends AbstractS3ATestBase { private boolean enabled; - /** - * Configuration generator. May be overridden to inject - * some custom options. - * @return a configuration with which to create FS instances - */ - protected Configuration createConfiguration() { - return new Configuration(); - } + private Path testPath; /** @@ -83,40 +87,49 @@ public Configuration getConf() { return conf; } - /** - * Setup. This triggers creation of the configuration. - */ - @Before - public void setUp() throws Exception { - demandCreateConfiguration(); + @Override + public void setup() throws Exception { + super.setup(); + testPath = path("/tests3ascale"); LOG.debug("Scale test operation count = {}", getOperationCount()); // multipart purges are disabled on the scale tests - fs = createTestFileSystem(conf, false); // check for the test being enabled enabled = getTestPropertyBool( getConf(), KEY_SCALE_TESTS_ENABLED, DEFAULT_SCALE_TESTS_ENABLED); Assume.assumeTrue("Scale test disabled: to enable set property " + - KEY_SCALE_TESTS_ENABLED, enabled); + KEY_SCALE_TESTS_ENABLED, isEnabled()); } /** - * Create the configuration if it is not already set up. + * Create the configuration if it is not already set up, calling + * {@link #createScaleConfiguration()} to do so. 
* @return the configuration. */ private synchronized Configuration demandCreateConfiguration() { if (conf == null) { - conf = createConfiguration(); + conf = createScaleConfiguration(); } - return getConfiguration(); + return conf; } - @Override - public void setup() throws Exception { - super.setup(); - testPath = path("/tests3a"); - fs = getFileSystem(); + /** + * Returns the config created with {@link #demandCreateConfiguration()}. + * Subclasses must override {@link #createScaleConfiguration()} + * in order to customize their configurations. + * @return a configuration with which to create FS instances + */ + protected final Configuration createConfiguration() { + return demandCreateConfiguration(); + } + + /** + * Override point: create a configuration. + * @return a configuration with which to create FS instances + */ + protected Configuration createScaleConfiguration() { + return new Configuration(); } protected Path getTestPath() { @@ -127,35 +140,19 @@ protected long getOperationCount() { return getConf().getLong(KEY_OPERATION_COUNT, DEFAULT_OPERATION_COUNT); } - /** - * Create the timeout for tests. Some large tests may need a larger value. - * @return the test timeout to use - */ - protected Timeout createTestTimeout() { - demandCreateConfiguration(); - return new Timeout( - getTestTimeoutSeconds() * 1000); - } - /** * Get the test timeout in seconds. * @return the test timeout as set in system properties or the default. */ - protected static int getTestTimeoutSeconds() { - return getTestPropertyInt(null, + protected int getTestTimeoutSeconds() { + return getTestPropertyInt(demandCreateConfiguration(), KEY_TEST_TIMEOUT, - DEFAULT_TEST_TIMEOUT); + SCALE_TEST_TIMEOUT_SECONDS); } - /** - * Describe a test in the logs. - * @param text text to print - * @param args arguments to format in the printing - */ - protected void describe(String text, Object... 
args) { - LOG.info("\n\n{}: {}\n", - methodName.getMethodName(), - String.format(text, args)); + @Override + protected int getTestTimeoutMillis() { + return getTestTimeoutSeconds() * 1000; } /** @@ -184,20 +181,25 @@ protected S3AInstrumentation.InputStreamStatistics getInputStreamStatistics( * @return the value. */ public long gaugeValue(Statistic statistic) { - S3AInstrumentation instrumentation = fs.getInstrumentation(); + S3AInstrumentation instrumentation = getFileSystem().getInstrumentation(); MutableGaugeLong gauge = instrumentation.lookupGauge(statistic.getSymbol()); assertNotNull("No gauge " + statistic + " in " + instrumentation.dump("", " = ", "\n", true), gauge); return gauge.value(); } - protected boolean isEnabled() { + /** + * Is the test enabled; this is controlled by the configuration + * and the {@code -Dscale} maven option. + * @return true if the scale tests are enabled. + */ + protected final boolean isEnabled() { return enabled; } /** - * Flag to indicate that this test is being used sequentially. This - * is used by some of the scale tests to validate test time expectations. + * Flag to indicate that this test is being executed in parallel. + * This is used by some of the scale tests to validate test time expectations. * @return true if the build indicates this test is being run in parallel. 
*/ protected boolean isParallelExecution() { diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java index ac8caf8e79ab8..5b42d4d1981c1 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java @@ -28,6 +28,7 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.contract.ContractTestUtils; import org.apache.hadoop.fs.s3a.AbstractS3ATestBase; import org.apache.hadoop.fs.s3a.S3AFileSystem; import org.apache.hadoop.fs.s3a.S3ATestUtils; @@ -71,8 +72,9 @@ public void setup() throws Exception { @Override public void teardown() throws Exception { - fs.delete(rootPath, true); - yarnCluster.stop(); + if (yarnCluster != null) { + yarnCluster.stop(); + } super.teardown(); } @@ -136,15 +138,9 @@ private void writeStringToFile(Path path, String string) throws IOException { /** * helper method. 
*/ - private String readStringFromFile(Path path) { - try (FSDataInputStream in = fs.open(path)) { - long bytesLen = fs.getFileStatus(path).getLen(); - byte[] buffer = new byte[(int) bytesLen]; - IOUtils.readFully(in, buffer, 0, buffer.length); - return new String(buffer); - } catch (IOException e) { - throw new RuntimeException("Failed to read from [" + path + "]", e); - } + private String readStringFromFile(Path path) throws IOException { + return ContractTestUtils.readBytesToString(fs, path, + (int) fs.getFileStatus(path).getLen()); } } From f94d2678f7ce9137e813cd5dec239080100a8f11 Mon Sep 17 00:00:00 2001 From: Steve Loughran Date: Mon, 24 Oct 2016 18:05:11 +0100 Subject: [PATCH 7/7] HADOOP-13614: clean up imports --- .../org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java index 8116c2c80863d..7cd109438253d 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java @@ -21,9 +21,6 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.contract.AbstractFSContract; -import org.apache.hadoop.fs.contract.AbstractFSContractTestBase; -import org.apache.hadoop.fs.contract.s3a.S3AContract; import org.apache.hadoop.test.LambdaTestUtils; import org.junit.Test; @@ -35,7 +32,6 @@ import java.util.concurrent.Callable; import static org.apache.hadoop.fs.contract.ContractTestUtils.*; -import static org.apache.hadoop.fs.s3a.S3ATestUtils.*; /** * Test S3A Failure translation, including a functional test