CHUKWA-716. Fixed undefined System Properties for test cases and test case dependency. (Eric Yang)

git-svn-id: https://svn.apache.org/repos/asf/chukwa/trunk@1611855 13f79535-47bb-0310-9956-ffa450edef68
macroadster committed Jul 19, 2014
1 parent 7a30f2d commit 0a2f924aba6c8871cc8a5e72fb05fd7157557172
Showing 2 changed files with 22 additions and 2 deletions.
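The fix in both test files boils down to resolving the HDFS test data directory from the test.build.data system property (falling back to /tmp when it is undefined) and deleting the leftover MiniDFSCluster data directory, so one test case no longer depends on another's on-disk state. A minimal standalone sketch of that pattern, assuming commons-io on the classpath (the CleanupSketch class name is illustrative, not part of the Chukwa source):

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;

public class CleanupSketch {
  public static void main(String[] args) throws IOException {
    // Resolve the test data directory the same way the patched tests do,
    // falling back to /tmp when test.build.data is not defined.
    String testBuildDir = System.getProperty("test.build.data", "/tmp");
    File dfsDataDir = new File(testBuildDir + "/dfs");

    // Remove any MiniDFSCluster state left behind by an earlier run so the
    // next test starts from a clean directory.
    if (dfsDataDir.exists()) {
      FileUtils.deleteDirectory(dfsDataDir);
    }
    System.out.println("cleaned " + dfsDataDir.getAbsolutePath());
  }
}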
@@ -28,11 +28,11 @@
import java.util.Date;
import java.util.regex.*;

import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.database.TableCreator;

import org.apache.hadoop.chukwa.datacollection.DataFactory;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent.AlreadyRunningException;
@@ -101,6 +101,8 @@ public class TestFSMBuilder extends TestCase {
private static String cluster = "demo";
long[] timeWindow = {7, 30, 91, 365, 3650};
long current = 1244617200000L; // 2009-06-10
private String testBuildDir = System.getProperty("test.build.data", "/tmp");
private File dfsDataDir = new File(testBuildDir+"/dfs");

public void setUp() {
// Startup HDFS cluster - stored collector-ed JobHistory chunks
@@ -119,6 +121,9 @@ public void setUp() {

// Startup HDFS cluster - stored collector-ed JobHistory chunks
try {
if(dfsDataDir.exists()) {
FileUtils.deleteDirectory(dfsDataDir);
}
dfs = new MiniDFSCluster(conf, NUM_HADOOP_SLAVES, true, null);
fileSys = dfs.getFileSystem();
DEMUX_INPUT_PATH = new Path(fileSys.getUri().toString()+File.separator+dataSink);
@@ -212,6 +217,9 @@ public void tearDown() {
jettyCollector.stop();
mr.shutdown();
dfs.shutdown();
if(dfsDataDir.exists()) {
FileUtils.deleteDirectory(dfsDataDir);
}
Thread.sleep(2000);
} catch(Exception e) {
e.printStackTrace();
@@ -18,8 +18,11 @@
package org.apache.hadoop.chukwa.extraction.demux;


import java.io.File;
import java.io.IOException;
import java.util.Calendar;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapred.JobConf;
@@ -29,6 +32,7 @@
import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import junit.framework.TestCase;

/**
@@ -112,14 +116,22 @@ public void testDemux() {
+ fileSys.getFileStatus(DEMUX_INPUT_PATH).getLen()
+ " bytes of temp test data");
long ts_start = System.currentTimeMillis();
runDemux(mr.createJobConf(), DEMUX_INPUT_PATH, DEMUX_OUTPUT_PATH);
Path inputPath = new Path(fileSys.getUri().toString()+DEMUX_INPUT_PATH);
Path outputPath = new Path(fileSys.getUri().toString()+DEMUX_OUTPUT_PATH);
runDemux(mr.createJobConf(), inputPath, outputPath);

long time = (System.currentTimeMillis() - ts_start);
long bytes = fileSys.getContentSummary(DEMUX_OUTPUT_PATH).getLength();
System.out.println("result was " + bytes + " bytes long");
System.out.println("processing took " + time + " milliseconds");
System.out.println("aka " + time * 1.0 / LINES + " ms per line or "
+ time * 1000.0 / bytes + " ms per kilobyte of log data");
mr.shutdown();
dfs.shutdown();
String testBuildDir = System.getProperty("test.build.data", "/tmp");
String dfsPath = testBuildDir + "/dfs";
FileUtils.deleteDirectory(new File(dfsPath));
System.out.println(dfsPath);

} catch (Exception e) {
e.printStackTrace();

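The TestDemux hunk above also switches runDemux to paths that are fully qualified against the MiniDFSCluster filesystem URI instead of the bare DEMUX_INPUT_PATH and DEMUX_OUTPUT_PATH constants. A rough illustration of that qualification step, assuming hadoop-common on the classpath (the QualifiedPathSketch class name and /demuxData path are hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class QualifiedPathSketch {
  public static void main(String[] args) throws Exception {
    // Use whatever filesystem the configuration resolves to (the local
    // filesystem by default); the tests use the MiniDFSCluster's FileSystem.
    FileSystem fileSys = FileSystem.get(new Configuration());

    // Prefix the path with the filesystem URI so it is unambiguous even when
    // several filesystems are in play, mirroring the TestDemux change.
    Path inputPath = new Path(fileSys.getUri().toString() + "/demuxData");
    System.out.println(inputPath);
  }
}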