
Commit

Added test case to test runtime exceptions
Cleaned up the try/catch logic
abh1nay committed Mar 22, 2013
1 parent 25d6298 commit 11dff85
Showing 2 changed files with 102 additions and 8 deletions.
@@ -511,10 +511,18 @@ private CheckSum copyFileWithCheckSum(FileSystem fs,
             }
 
         } catch(Exception e) {
-            e.printStackTrace();
+            logger.error("Error during copying file ", e);
+            return null;
+
         } catch(Throwable te) {
-            te.printStackTrace();
-        } finally {
+            logger.error("Error during copying file ", te);
+            return null;
+
+        }
+        // the finally block _always_ executes even if we have
+        // return in the catch block
+
+        finally {
             IOUtils.closeQuietly(output);
             IOUtils.closeQuietly(input);
             if(success) {
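The comment introduced in the hunk above relies on a Java guarantee worth spelling out: a finally block runs even when the catch block returns, so the streams are still closed quietly after the early return null. Below is a minimal standalone sketch of that behaviour, assuming commons-io on the classpath (the fetcher code already uses IOUtils); the class and method names are hypothetical and not part of this commit.

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.commons.io.IOUtils;

    public class FinallyAfterReturnSketch {

        // Returns null from the catch block; the finally block still runs and
        // closes the stream before control leaves the method.
        static Integer readFirstByte(InputStream input) {
            try {
                return input.read();
            } catch(IOException e) {
                System.err.println("Error during read: " + e);
                return null;
            } finally {
                // Runs on both the normal path and the early-return path.
                IOUtils.closeQuietly(input);
                System.out.println("finally ran, stream closed");
            }
        }

        public static void main(String[] args) {
            // Prints "finally ran, stream closed" followed by 42.
            System.out.println(readFirstByte(new ByteArrayInputStream(new byte[] { 42 })));
        }
    }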
@@ -1,3 +1,19 @@
/*
* Copyright 2008-2009 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package voldemort.store.readonly.fetcher;

import static org.junit.Assert.assertEquals;
@@ -29,10 +45,17 @@
import voldemort.store.readonly.checksum.CheckSumTests;
import voldemort.store.readonly.fetcher.HdfsFetcher.CopyStats;

/*
 * This test suite tests the HdfsFetcher. We test the fetch from Hadoop by
 * simulating exceptions during fetches.
 */
public class HDFSFetcherAdvancedTest {

public static final Random UNSEEDED_RANDOM = new Random();

/*
 * Tests that HdfsFetcher can correctly fetch a file in the happy path
 */
@Test
public void testCheckSumMetadata() throws Exception {

@@ -124,9 +147,13 @@ private Object invokePrivateMethod(Object test, String methodName, Object params
return ret;
}

/*
* Tests that HdfsFetcher can correctly fetch a file when there is an
* IOException, specifically an EofException during the fetch
*/
@Test
public void testEofExceptionIntermittent() throws Exception {
// Generate 0_0.[index | data] and their corresponding metadata

File testSourceDirectory = createTempDir();
File testDestinationDirectory = createTempDir();

@@ -142,7 +169,6 @@ public void testEofExceptionIntermittent() throws Exception {
 
         HdfsFetcher fetcher = new HdfsFetcher();
 
-        HdfsFetcher spyfetcher = Mockito.spy(fetcher);
         Configuration config = new Configuration();
 
         FileSystem fs = source.getFileSystem(config);
@@ -169,9 +195,16 @@
 
     }
 
+    /*
+     * Tests that HdfsFetcher can correctly fetch a file when there is an
+     * IOException, specifically an EofException, during the fetch. This test
+     * case is different from the earlier one since it simulates an exception
+     * midway through a fetch.
+     */
+
     @Test
-    public void testEofExceptionIntermittent2() throws Exception {
-        // Generate 0_0.[index | data] and their corresponding metadata
+    public void testEofExceptionIntermittentDuringFetch() throws Exception {
 
         File testSourceDirectory = createTempDir();
         File testDestinationDirectory = createTempDir();
 
@@ -187,7 +220,6 @@ public void testEofExceptionIntermittent2() throws Exception {
 
         HdfsFetcher fetcher = new HdfsFetcher();
 
-        HdfsFetcher spyfetcher = Mockito.spy(fetcher);
         Configuration config = new Configuration();
 
         FileSystem fs = source.getFileSystem(config);
@@ -221,6 +253,54 @@ public void testEofExceptionIntermittent2() throws Exception {

}

/*
 * Tests that HdfsFetcher can correctly handle the case where there is a
 * RuntimeException during the fetch.
 *
 * Expected: the exception should be consumed without spilling over to the
 * caller.
 */

@Test
public void testIntermittentRuntimeExceptions() throws Exception {

File testSourceDirectory = createTempDir();
File testDestinationDirectory = createTempDir();

File indexFile = new File(testSourceDirectory, "0_0.index");
byte[] indexBytes = TestUtils.randomBytes(100);
FileUtils.writeByteArrayToFile(indexFile, indexBytes);

final Path source = new Path(indexFile.getAbsolutePath());
CheckSum fileCheckSumGenerator = CheckSum.getInstance(CheckSumType.MD5);

fileCheckSumGenerator.update(indexBytes);

HdfsFetcher fetcher = new HdfsFetcher();

Configuration config = new Configuration();

FileSystem fs = source.getFileSystem(config);

FileSystem spyfs = Mockito.spy(fs);
CopyStats stats = new CopyStats(testSourceDirectory.getAbsolutePath(), sizeOfPath(fs,
source));

File destination = new File(testDestinationDirectory.getAbsolutePath() + "1");
File copyLocation = new File(destination, "0_0.index");

Mockito.doThrow(new RuntimeException())
.doAnswer(Mockito.CALLS_REAL_METHODS)
.when(spyfs)
.open(source);

Object[] params = { spyfs, source, copyLocation, stats, CheckSumType.MD5 };

CheckSum ckSum = (CheckSum) this.invokePrivateMethod(fetcher,
"copyFileWithCheckSum",
params);

}

private long sizeOfPath(FileSystem fs, Path path) throws IOException {
long size = 0;
FileStatus[] statuses = fs.listStatus(path);
@@ -235,6 +315,9 @@ private long sizeOfPath(FileSystem fs, Path path) throws IOException {
return size;
}

/*
 * Helper method to delete a non-empty directory
 */
public static boolean deleteDir(File dir) {
if(dir.isDirectory()) {
String[] children = dir.list();
@@ -248,6 +331,9 @@ public static boolean deleteDir(File dir) {
return dir.delete();
}

/*
 * Helper method to calculate the checksum for a single file
 */
private byte[] calculateCheckSumForFile(Path source) throws Exception {
CheckSum fileCheckSumGenerator = CheckSum.getInstance(CheckSumType.MD5);
byte[] buffer = new byte[VoldemortConfig.DEFAULT_BUFFER_SIZE];
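For context on the new testIntermittentRuntimeExceptions above: it stubs consecutive calls on a spied FileSystem so that the first open(source) throws a RuntimeException and later calls fall through to the real method. Below is a minimal self-contained sketch of that Mockito stubbing pattern, assuming JUnit 4 and Mockito on the classpath; the Source class is a hypothetical stand-in for FileSystem and not part of this commit.

    import static org.junit.Assert.assertEquals;

    import org.junit.Test;
    import org.mockito.Mockito;

    public class ConsecutiveStubbingSketch {

        // Hypothetical stand-in for FileSystem.open(), used only for illustration.
        public static class Source {
            public String open() {
                return "real-data";
            }
        }

        @Test
        public void firstCallThrowsThenRealMethodRuns() {
            Source spySource = Mockito.spy(new Source());

            // First call to open() throws; subsequent calls invoke the real method,
            // mirroring the doThrow(...).doAnswer(CALLS_REAL_METHODS) chain above.
            Mockito.doThrow(new RuntimeException("simulated failure"))
                   .doAnswer(Mockito.CALLS_REAL_METHODS)
                   .when(spySource)
                   .open();

            try {
                spySource.open();
            } catch(RuntimeException expected) {
                // Consumed here, just as copyFileWithCheckSum's catch block consumes it.
            }
            assertEquals("real-data", spySource.open());
        }
    }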
