Create JUnit test case for Caffe
fangyiwang committed May 19, 2017
1 parent d1b7ef9 commit 4223cbe1ee8b903bc4df78185842bb3fc31a0b05
Showing 6 changed files with 420 additions and 3 deletions.
@@ -64,6 +64,9 @@ RUN wget http://www-eu.apache.org/dist/maven/maven-3/3.3.9/binaries/apache-maven
RUN git clone https://github.com/yahoo/CaffeOnSpark.git --recursive
RUN bash /tmp/config-caffe.sh

+RUN chmod 755 /caffe-test/train/train.sh
+RUN chmod 755 /caffe-test/tera/tera.sh

RUN wget https://www.apache.org/dist/zookeeper/zookeeper-3.4.6/zookeeper-3.4.6.tar.gz
RUN wget https://www.apache.org/dist/hadoop/common/hadoop-2.7.2/hadoop-2.7.2.tar.gz
RUN wget https://www.apache.org/dist/hbase/1.2.5/hbase-1.2.5-bin.tar.gz
@@ -1,5 +1,5 @@
GLOG_logtostderr=1 /CaffeOnSpark/caffe-public/.build_release/tools/convert_imageset \
---resize_height=200 --resize_width=200 --shuffle --encoded \
+--resize_height=200 --resize_width=1000 --shuffle --encoded \
/caffe-test/train/data/ \
/caffe-test/train/data/labels.txt \
/caffe-test/train/lmdb
@@ -13,7 +13,7 @@ layer {
batch_size: 1
channels: 1
height: 200
-width: 200
+width: 1000
share_in_parallel: false
}
transform_param {
@@ -35,7 +35,7 @@ layer {
batch_size: 1
channels: 1
height: 200
-width: 200
+width: 1000
share_in_parallel: false
}
transform_param {
@@ -0,0 +1,182 @@
package org.apache.hadoop.chukwa.caffe;

import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import javax.imageio.ImageIO;


/**
* Reads csv files to create image files of dimension 1000 x 200
*
*/
public class ImageCreator
{
private static final int X_SIZE = 1000;
private static final int Y_SIZE = 200;
private String dirName = null;

public ImageCreator (String dirName) {
this.dirName = dirName;
}

public void drawImages () throws Exception
{
String outputFileName = dirName + "/labels.txt";
// drawImages already declares throws Exception, so let a failure to open the
// label file propagate instead of swallowing it and hitting a null writer below
BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(outputFileName));

//int start = 1;
File dir = new File (dirName);
File [] files = dir.listFiles ();
Arrays.sort(files);

// find min and max memory usage
double minMem = 0;
double maxMem = 0;
long minTime = 0L;
long maxTime = 0L;

// image size: 1000 x 200
int lineNum = 0;
for (int i = 0; i < files.length; i++) {
String fileName = files [i].getName ();
if (!fileName.endsWith ("csv")) {
continue;
}
//System.out.println (">>>>> " + fileName);
BufferedReader bufferedReader = new BufferedReader(new FileReader(files [i]));
String line = null;

while ((line = bufferedReader.readLine()) != null)
{
lineNum ++;
String [] point = line.split (",");
long time = Long.parseLong (point[0]);
double mem = Double.parseDouble (point[1]);
point [1] = String.valueOf (mem);
if (maxMem == 0 || maxMem < mem){
maxMem = mem;
}
if (minMem == 0 || minMem > mem) {
minMem = mem;
}
if (maxTime == 0 || maxTime < time){
maxTime = time;
}
if (minTime == 0 || minTime > time) {
minTime = time;
}
}
bufferedReader.close ();
}
//System.out.println ("minMem:" + minMem + ", maxMem:" + maxMem + ", total line number: " + lineNum);
//System.out.println ("minTime:" + minTime + ", maxTime:" + maxTime + ", total elapseTime: " + (maxTime - minTime));

List <String []> dataList = new ArrayList<String []> ();
lineNum = 0;
long startTime = 0;
long endTime = 0;
int imageId = 1;
int totalPoint = 0;
for (int i = 0; i < files.length; i++) {
String fileName = files [i].getName ();
if (!fileName.endsWith ("csv")) {
continue;
}
System.out.println (">>>>> " + fileName);
BufferedReader bufferedReader = new BufferedReader(new FileReader(files [i]));
String line = null;

while ((line = bufferedReader.readLine()) != null)
{
lineNum ++;
String [] point = line.split (",");
long time = Long.parseLong (point[0]);
double mem = Double.parseDouble (point[1]);
point [1] = String.valueOf (mem);

if (startTime == 0) {
startTime = time;
}
dataList.add (point);
endTime = time;
long elapseTime = endTime - startTime;
if (elapseTime > X_SIZE) {
totalPoint = totalPoint + dataList.size ();
// use the same unix-style separator as labels.txt above
String imageFileName = dirName + "/image" + imageId + ".png";
System.out.println ("elapseTime: " + elapseTime + ", data size: " + dataList.size () + ", imageFileName: " + imageFileName);
drawImage (dataList, imageFileName, X_SIZE, Y_SIZE);
bufferedWriter.write (imageFileName + " 0\n");
bufferedWriter.flush ();
dataList.clear ();
startTime = 0;
imageId ++;
}
}
bufferedReader.close ();
}
// close the label index only after every csv file has been processed
bufferedWriter.close ();
//System.out.println ("Total points: " + totalPoint + ", lineNum: " + lineNum);
}

private static void drawImage (List <String []> dataList, String imageFileName, int x_size, int y_size) throws Exception
{
int size = dataList.size ();
String [] startPt = dataList.get (0);
//String [] endPt = dataList.get (size - 1);
long startTimeX = Long.parseLong (startPt [0]);
//long endTimeX = Long.parseLong (endPt [0]);
//System.out.println ("x_size: " + x_size + ", y_size: " + y_size + ", startTimeX: " + startTimeX + ", endTimeX: " + endTimeX);
BufferedImage img = new BufferedImage(x_size, y_size, BufferedImage.TYPE_INT_ARGB);

Graphics2D ig2 = img.createGraphics();
ig2.setBackground(Color.WHITE);

ig2.setColor (Color.BLACK);
ig2.setStroke(new BasicStroke(3));

MyPoint prevPoint = null;
for (int i = 0; i < size; i++) {
String [] point = (String []) dataList.get (i);
long time = Long.parseLong (point[0]);
double mem = Double.parseDouble (point[1]);
MyPoint currPoint = new MyPoint (time, mem);
//System.out.println ("time:" + time + ", mem:" + mem);

if (prevPoint != null) {
ig2.drawLine ((int) (prevPoint.time - startTimeX), (int) (y_size - prevPoint.data), (int) (currPoint.time - startTimeX), (int) (y_size - currPoint.data));
}
prevPoint = currPoint;
}
File f = new File(imageFileName);
ImageIO.write(img, "PNG", f);
}
}

class MyPoint
{
public long time;
public double data;

public MyPoint (long time, double data) {
this.time = time;
this.data = data;
}
}
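A minimal JUnit-style usage sketch for ImageCreator follows (the test class, its name, and the directory path are hypothetical and not part of this commit): it assumes a directory of timestamp,value csv files and renders them into 1000 x 200 PNGs plus the labels.txt listing consumed by the convert_imageset step above.

package org.apache.hadoop.chukwa.caffe;

import org.junit.Test;

public class ImageCreatorTest
{
  @Test
  public void drawsImagesFromCsvDirectory () throws Exception
  {
    // hypothetical path; point this at a directory containing *.csv metric dumps
    ImageCreator creator = new ImageCreator ("/caffe-test/train/data");
    // writes image1.png, image2.png, ... plus labels.txt into the same directory
    creator.drawImages ();
  }
}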
@@ -0,0 +1,117 @@
package org.apache.hadoop.chukwa.caffe;

import java.io.BufferedWriter;
import java.io.PrintWriter;
import java.util.Calendar;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TimeZone;
import java.util.Timer;
import java.util.TimerTask;

import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.hadoop.chukwa.hicc.bean.Series;
import org.json.simple.JSONObject;

//export CLASSPATH=/opt/apache/hadoop/etc/hadoop:/opt/apache/hbase/conf:/opt/apache/chukwa-0.8.0/share/chukwa/*:/opt/apache/chukwa-0.8.0/share/chukwa/lib/*:$CLASSPATH

public class MetricsCollector
{
private Timer getMetricSnapshotTimer = null;
private long intervalInMin;
private String hostname;

public MetricsCollector (long intervalInMin, String hostname) {
this.intervalInMin = intervalInMin;
this.hostname = hostname;
getMetricSnapshotTimer = new Timer ("GetMetricSnapshot", true);
}

public void start () {
if (getMetricSnapshotTimer != null) {
// Timer.schedule expects the period in milliseconds, so convert the interval from minutes
getMetricSnapshotTimer.schedule (new GetMetricSnapshotTimerTask (hostname, intervalInMin), 0, intervalInMin * 60 * 1000);
}
}

public void cancel ()
{
if (getMetricSnapshotTimer != null)
getMetricSnapshotTimer.cancel ();
}

class GetMetricSnapshotTimerTask extends TimerTask
{
private String hostname = null;
private BufferedWriter bufferedWriter = null;
private long intervalInMilli;

/**
* Normalize the timestamp in time series data to use seconds
*/
private final static int XSCALE = 1000;

GetMetricSnapshotTimerTask (String hostname, long intervalInMin)
{
this.hostname = hostname;
this.intervalInMilli = intervalInMin * 60 * 1000;
}

public void run ()
{
TimeZone tz = TimeZone.getTimeZone("UTC");
Calendar now = Calendar.getInstance(tz);
long currTime=now.getTimeInMillis();

System.out.println ("currTime in UTC: " + currTime);
System.out.println ("currTime in current time zone" + System.currentTimeMillis ());

long startTime = currTime - intervalInMilli;
long endTime = currTime;
try {
System.out.println ("About to run");
getHadoopMetrics (startTime, endTime);
System.out.println ("Done run");
} catch (Exception e) {
e.printStackTrace ();
}
}

private void getHadoopMetrics(long startTime, long endTime) throws Exception
{
String source = hostname + ":NodeManager";
System.out.println ("source: " + source);
System.out.println ("startTime: " + startTime);
System.out.println ("endTime: " + endTime);
Series series = ChukwaHBaseStore.getSeries ("HadoopMetrics.jvm.JvmMetrics.MemHeapUsedM", source, startTime, endTime);
String value = series.toString ();
System.out.println ("value: " + value);

JSONObject jsonObj = (JSONObject) series.toJSONObject ();
List list = (List) jsonObj.get ("data");
if (list != null) {
int size = list.size ();
System.out.println ("size: " + size);
if (size > 0 ) {
String name = "NodeManager" + "_" + "HadoopMetrics.jvm.JvmMetrics.MemHeapUsedM" + "_" + hostname;
generateCsv (list, name, startTime, bufferedWriter);
}
}
}

private void generateCsv (List list, String name, long startTime, BufferedWriter bufferedWriter) throws Exception
{
String fileName = name + "_" + startTime;
PrintWriter writer = new PrintWriter(fileName + ".csv", "UTF-8");
int size = list.size ();
for (int i = 0; i < size; i++) {
List point = (List) list.get (i);
long time = (Long) point.get (0) / XSCALE;
double val = (Double) point.get (1);
writer.println(time + "," + val);
}
writer.close();
}
}
}
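A similar hedged sketch for MetricsCollector (again hypothetical: the test wrapper, host name, and sleep duration are assumptions, and it needs a reachable Chukwa/HBase backend plus the classpath shown in the comment above): it starts the timer that pulls NodeManager MemHeapUsedM samples into csv files, then stops it.

package org.apache.hadoop.chukwa.caffe;

import org.junit.Test;

public class MetricsCollectorTest
{
  @Test
  public void collectsNodeManagerHeapMetrics () throws Exception
  {
    // 5-minute collection window; "datanode-1" is a placeholder host name
    MetricsCollector collector = new MetricsCollector (5, "datanode-1");
    collector.start ();
    // let the first collection cycle run, then stop the timer
    Thread.sleep (10 * 1000);
    collector.cancel ();
  }
}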
