CHUKWA-635. Collect swap usage. (Eric Yang)
git-svn-id: https://svn.apache.org/repos/asf/incubator/chukwa/trunk@1369521 13f79535-47bb-0310-9956-ffa450edef68
macroadster committed Aug 5, 2012
1 parent b80b08a commit d55104a67f7df6f73133f5542b295126730b1727
Showing 7 changed files with 38 additions and 6 deletions.
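
The swap values wired through this commit all come from Sigar's Swap bean, the same library already used for CPU, memory, disk, and network. For orientation, a minimal standalone sketch of reading those fields directly is shown below; the class name SwapProbe and the println output format are illustrative only and not part of this commit, and it assumes the Sigar native library is on the library path.

import org.hyperic.sigar.Sigar;
import org.hyperic.sigar.SigarException;
import org.hyperic.sigar.Swap;

// Illustrative sketch only; not part of the committed code.
public class SwapProbe {
  public static void main(String[] args) throws SigarException {
    Sigar sigar = new Sigar();
    try {
      Swap swap = sigar.getSwap();
      // The same bean backs swap.toMap() in SigarRunner below; its
      // Total/Used/Free/PageIn/PageOut fields are what flow into HBase.
      System.out.println("Total="   + swap.getTotal());
      System.out.println("Used="    + swap.getUsed());
      System.out.println("Free="    + swap.getFree());
      System.out.println("PageIn="  + swap.getPageIn());
      System.out.println("PageOut=" + swap.getPageOut());
    } finally {
      sigar.close();
    }
  }
}
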
@@ -4,6 +4,8 @@ Trunk (unreleased changes)

NEW FEATURES

CHUKWA-635. Collect swap usage. (Eric Yang)

IMPROVEMENTS

CHUKWA-648. Make Chukwa Reduce Type to support hierarchy format. (Jie Huang via asrabkin)
@@ -22,6 +22,7 @@ create "SystemMetrics",
{NAME => "system", VERSION => 65535},
{NAME => "disk", VERSION => 65535},
{NAME => "memory", VERSION => 65535},
{NAME => "swap", VERSION => 65535},
{NAME => "network", VERSION => 65535},
{NAME => "tags", VERSION => 65535}
create "ClusterSummary",
@@ -30,6 +31,7 @@ create "ClusterSummary",
{NAME => "disk", VERSION => 65535},
{NAME => "memory", VERSION => 65535},
{NAME => "network", VERSION => 65535},
{NAME => "swap", VERSION => 65535},
{NAME => "hdfs", VERSION => 65535},
{NAME => "mapreduce", VERSION => 65535}
create "chukwa",
@@ -16,12 +16,12 @@
* limitations under the License.
*/
%default START '1234567890';
SystemMetrics = load 'hbase://SystemMetrics' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('tags:cluster cpu:combined cpu:idle cpu:sys cpu:user disk:ReadBytes disk:Reads disk:WriteBytes disk:Writes system:LoadAverage.1 memory:FreePercent memory:UsedPercent network:RxBytes network:RxDropped network:RxErrors network:RxPackets network:TxBytes network:TxCollisions network:TxErrors network:TxPackets','-loadKey -gt $START -caster Utf8StorageConverter') AS (rowKey, cluster, cpuCombined, cpuIdle, cpuSys, cpuUser, diskReadBytes, diskReads, diskWriteBytes, diskWrites, LoadAverage, memoryFreePercent, memoryUsedPercent, networkRxBytes, networkRxDropped, networkRxErrors, networkRxPackets, networkTxBytes, networkTxCollisions, networkTxErrors, networkTxPackets);
CleanseBuffer = foreach SystemMetrics generate REGEX_EXTRACT($0,'^\\d+',0) as time, cluster, cpuCombined, cpuIdle, cpuSys, cpuUser, diskReadBytes, diskReads, diskWriteBytes, diskWrites, LoadAverage, memoryFreePercent, memoryUsedPercent, networkRxBytes, networkRxDropped, networkRxErrors, networkRxPackets, networkTxBytes, networkTxCollisions, networkTxErrors, networkTxPackets;
ConcatBuffer = foreach CleanseBuffer generate CONCAT(CONCAT($0, '-'), $1) as rowId, cpuCombined, cpuIdle, cpuSys, cpuUser, diskReadBytes, diskReads, diskWriteBytes, diskWrites, LoadAverage, memoryFreePercent, memoryUsedPercent, networkRxBytes, networkRxDropped, networkRxErrors, networkRxPackets, networkTxBytes, networkTxCollisions, networkTxErrors, networkTxPackets;
SystemMetrics = load 'hbase://SystemMetrics' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('tags:cluster cpu:combined cpu:idle cpu:sys cpu:user disk:ReadBytes disk:Reads disk:WriteBytes disk:Writes system:LoadAverage.1 memory:FreePercent memory:UsedPercent network:RxBytes network:RxDropped network:RxErrors network:RxPackets network:TxBytes network:TxCollisions network:TxErrors network:TxPackets swap:Total swap:Used swap:Free swap:PageIn swap:PageOut','-loadKey -gt $START -caster Utf8StorageConverter') AS (rowKey, cluster, cpuCombined, cpuIdle, cpuSys, cpuUser, diskReadBytes, diskReads, diskWriteBytes, diskWrites, LoadAverage, memoryFreePercent, memoryUsedPercent, networkRxBytes, networkRxDropped, networkRxErrors, networkRxPackets, networkTxBytes, networkTxCollisions, networkTxErrors, networkTxPackets, swapTotal, swapUsed, swapFree, swapPageIn, swapPageOut);
CleanseBuffer = foreach SystemMetrics generate REGEX_EXTRACT($0,'^\\d+',0) as time, cluster, cpuCombined, cpuIdle, cpuSys, cpuUser, diskReadBytes, diskReads, diskWriteBytes, diskWrites, LoadAverage, memoryFreePercent, memoryUsedPercent, networkRxBytes, networkRxDropped, networkRxErrors, networkRxPackets, networkTxBytes, networkTxCollisions, networkTxErrors, networkTxPackets, swapTotal, swapUsed, swapFree, swapPageIn, swapPageOut;
ConcatBuffer = foreach CleanseBuffer generate CONCAT(CONCAT($0, '-'), $1) as rowId, cpuCombined, cpuIdle, cpuSys, cpuUser, diskReadBytes, diskReads, diskWriteBytes, diskWrites, LoadAverage, memoryFreePercent, memoryUsedPercent, networkRxBytes, networkRxDropped, networkRxErrors, networkRxPackets, networkTxBytes, networkTxCollisions, networkTxErrors, networkTxPackets, swapTotal, swapUsed, swapFree, swapPageIn, swapPageOut;
TimeSeries = GROUP ConcatBuffer BY rowId;
ComputeBuffer = FOREACH TimeSeries GENERATE group, AVG(ConcatBuffer.cpuCombined), AVG(ConcatBuffer.cpuIdle), AVG(ConcatBuffer.cpuSys), AVG(ConcatBuffer.cpuUser), AVG(ConcatBuffer.diskReadBytes), AVG(ConcatBuffer.diskReads), AVG(ConcatBuffer.diskWriteBytes), AVG(ConcatBuffer.diskWrites), AVG(ConcatBuffer.LoadAverage), AVG(ConcatBuffer.memoryFreePercent), AVG(ConcatBuffer.memoryUsedPercent), AVG(ConcatBuffer.networkRxBytes), AVG(ConcatBuffer.networkRxDropped), AVG(ConcatBuffer.networkRxErrors), AVG(ConcatBuffer.networkRxPackets), AVG(ConcatBuffer.networkTxBytes), AVG(ConcatBuffer.networkTxCollisions), AVG(ConcatBuffer.networkTxErrors), AVG(ConcatBuffer.networkTxPackets);
STORE ComputeBuffer INTO 'ClusterSummary' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('cpu:Combined cpu:Idle cpu:Sys cpu:User disk:ReadBytes disk:Reads disk:WriteBytes disk:Writes system:LoadAverage memory:FreePercent memory:UsedPercent network:RxBytes network:RxDropped network:RxErrors network:RxPackets network:TxBytes network:TxCollisions network:TxErrors network:TxPackets');
ComputeBuffer = FOREACH TimeSeries GENERATE group, AVG(ConcatBuffer.cpuCombined), AVG(ConcatBuffer.cpuIdle), AVG(ConcatBuffer.cpuSys), AVG(ConcatBuffer.cpuUser), AVG(ConcatBuffer.diskReadBytes), AVG(ConcatBuffer.diskReads), AVG(ConcatBuffer.diskWriteBytes), AVG(ConcatBuffer.diskWrites), AVG(ConcatBuffer.LoadAverage), AVG(ConcatBuffer.memoryFreePercent), AVG(ConcatBuffer.memoryUsedPercent), AVG(ConcatBuffer.networkRxBytes), AVG(ConcatBuffer.networkRxDropped), AVG(ConcatBuffer.networkRxErrors), AVG(ConcatBuffer.networkRxPackets), AVG(ConcatBuffer.networkTxBytes), AVG(ConcatBuffer.networkTxCollisions), AVG(ConcatBuffer.networkTxErrors), AVG(ConcatBuffer.networkTxPackets), AVG(ConcatBuffer.swapTotal), AVG(ConcatBuffer.swapUsed), AVG(ConcatBuffer.swapFree), AVG(ConcatBuffer.swapPageIn), AVG(ConcatBuffer.swapPageOut);
STORE ComputeBuffer INTO 'ClusterSummary' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('cpu:Combined cpu:Idle cpu:Sys cpu:User disk:ReadBytes disk:Reads disk:WriteBytes disk:Writes system:LoadAverage memory:FreePercent memory:UsedPercent network:RxBytes network:RxDropped network:RxErrors network:RxPackets network:TxBytes network:TxCollisions network:TxErrors network:TxPackets swap:Total swap:Used swap:Free swap:PageIn swap:PageOut');
HDFSMetrics = load 'hbase://Hadoop' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('dfs_FSNamesystem:cluster dfs_FSNamesystem:CapacityTotalGB dfs_FSNamesystem:CapacityUsedGB dfs_FSNamesystem:CapacityRemainingGB dfs_FSNamesystem:BlockCapacity dfs_FSNamesystem:BlocksTotal dfs_FSNamesystem:MissingBlocks dfs_FSNamesystem:CorruptBlocks dfs_FSNamesystem:UnderReplicatedBlocks dfs_FSNamesystem:FilesTotal','-loadKey -gt $START -caster Utf8StorageConverter') AS (rowKey, cluster, CapacityTotalGB, CapacityUsedGB, CapacityRemainingGB, BlockCapacity, BlocksTotal, MissingBlocks, CorruptBlocks, UnderReplicatedBlocks, FilesTotal);
CleanseBuffer = foreach HDFSMetrics generate REGEX_EXTRACT($0,'^\\d+',0) as time, cluster, CapacityTotalGB, CapacityUsedGB, CapacityRemainingGB, BlockCapacity, BlocksTotal, MissingBlocks, CorruptBlocks, UnderReplicatedBlocks, FilesTotal;
ConcatBuffer = foreach CleanseBuffer generate CONCAT(CONCAT($0, '-'), $1) as rowId, CapacityTotalGB, CapacityUsedGB, CapacityRemainingGB, BlockCapacity, BlocksTotal, MissingBlocks, CorruptBlocks, UnderReplicatedBlocks, FilesTotal;
@@ -30,6 +30,7 @@
import org.hyperic.sigar.FileSystem;
import org.hyperic.sigar.FileSystemUsage;
import org.hyperic.sigar.Mem;
import org.hyperic.sigar.Swap;
import org.hyperic.sigar.NetInterfaceStat;
import org.hyperic.sigar.Sigar;
import org.hyperic.sigar.Uptime;
@@ -61,6 +62,7 @@ public void run() {
CpuInfo[] cpuinfo = null;
CpuPerc[] cpuPerc = null;
Mem mem = null;
Swap swap = null;
FileSystem[] fs = null;
String[] netIf = null;
Uptime uptime = null;
@@ -103,6 +105,12 @@ public void run() {
JSONObject memMap = new JSONObject();
memMap.putAll(mem.toMap());
json.put("memory", memMap);

// Swap Utilization
swap = sigar.getSwap();
JSONObject swapMap = new JSONObject();
swapMap.putAll(swap.toMap());
json.put("swap", swapMap);

// Network Utilization
netIf = sigar.getNetInterfaceList();
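
The new swap block mirrors the existing memory handling: swap.toMap() serializes the Swap bean's fields, and the adaptor emits them under a "swap" key in the per-host JSON. A hedged sketch of that fragment (field values are illustrative, not taken from a real host):

"swap": {"Total": 2147479552, "Used": 0, "Free": 2147479552, "PageIn": 0, "PageOut": 0}

The demux processor in the next hunk iterates these keys into a ChukwaRecord with reduce type "swap", which is why the SystemMetrics and ClusterSummary tables above gain a matching "swap" column family.
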
@@ -102,6 +102,16 @@ protected void parse(String recordEntry,
}
buildGenericRecord(record, null, cal.getTimeInMillis(), "memory");
output.collect(key, record);

record = new ChukwaRecord();
JSONObject swap = (JSONObject) json.get("swap");
Iterator<String> swapKeys = swap.keySet().iterator();
while(swapKeys.hasNext()) {
String key = swapKeys.next();
record.add(key, swap.get(key).toString());
}
buildGenericRecord(record, null, cal.getTimeInMillis(), "swap");
output.collect(key, record);

double rxBytes = 0;
double rxDropped = 0;
@@ -24,6 +24,11 @@
{"label":"Disk Write Operations","value":"/hicc/v1/metrics/series/ClusterSummary/disk:Writes/session/cluster"},
{"label":"Memory Free Percentage","value":"/hicc/v1/metrics/series/ClusterSummary/memory:FreePercent/session/cluster"},
{"label":"Memory Used Percentage","value":"/hicc/v1/metrics/series/ClusterSummary/memory:UsedPercent/session/cluster"},
{"label":"Swap Total","value":"/hicc/v1/metrics/series/ClusterSummary/memory:SwapTotal/session/cluster"},
{"label":"Swap Used","value":"/hicc/v1/metrics/series/ClusterSummary/swap:Used/session/cluster"},
{"label":"Swap Free","value":"/hicc/v1/metrics/series/ClusterSummary/swap:Free/session/cluster"},
{"label":"Swap Page In","value":"/hicc/v1/metrics/series/ClusterSummary/swap:PageIn/session/cluster"},
{"label":"Swap Page Out","value":"/hicc/v1/metrics/series/ClusterSummary/swap:PageOut/session/cluster"},
{"label":"Network Receive Bytes","value":"/hicc/v1/metrics/series/ClusterSummary/network:RxBytes/session/cluster"},
{"label":"Network Receive Dropped","value":"/hicc/v1/metrics/series/ClusterSummary/network:RxDropped/session/cluster"},
{"label":"Network Receive Errors","value":"/hicc/v1/metrics/series/ClusterSummary/network:RxErrors/session/cluster"},
@@ -32,7 +37,7 @@
{"label":"Network Transfer Collisions","value":"/hicc/v1/metrics/series/ClusterSummary/network:TxCollisions/session/cluster"},
{"label":"Network Transfer Errors","value":"/hicc/v1/metrics/series/ClusterSummary/network:TxErrors/session/cluster"},
{"label":"Network Transfer Packets","value":"/hicc/v1/metrics/series/ClusterSummary/network:TxPackets/session/cluster"},
{"label":"Load Average","value":"/hicc/v1/metrics/series/ClusterSummary/system:LoadAverage/session/cluster"},
{"label":"Load Average","value":"/hicc/v1/metrics/series/ClusterSummary/system:LoadAverage/session/cluster"}
]},
{"name":"width","type":"select","value":"300","label":"Width","options":[
{"label":"300","value":"300"},
@@ -23,6 +23,11 @@
{"label":"Memory Free","value":"/hicc/v1/metrics/series/SystemMetrics/memory:Free/session/hosts"},
{"label":"Memory Used Percent","value":"/hicc/v1/metrics/series/SystemMetrics/memory:UsedPercent/session/hosts"},
{"label":"Memory Free Percent","value":"/hicc/v1/metrics/series/SystemMetrics/memory:FreePercent/session/hosts"},
{"label":"Swap Total","value":"/hicc/v1/metrics/series/SystemMetrics/swap/Total/session/hosts"},
{"label":"Swap Used","value":"/hicc/v1/metrics/series/SystemMetrics/swap/Used/session/hosts"},
{"label":"Swap Free","value":"/hicc/v1/metrics/series/SystemMetrics/swap/Free/session/hosts"},
{"label":"Swap Page In","value":"/hicc/v1/metrics/series/SystemMetrics/swap/PageIn/session/hosts"},
{"label":"Swap Page Out","value":"/hicc/v1/metrics/series/SystemMetrics/swap/PageOut/session/hosts"},
{"label":"Network Card 1 Receive Error/Second","value":"/hicc/v1/metrics/series/SystemMetrics/network:RxErrors.0/session/hosts"},
{"label":"Network Card 1 Receive Bytes/Second","value":"/hicc/v1/metrics/series/SystemMetrics/network:RxBytes.0/session/hosts"},
{"label":"Network Card 1 Receive Packets/Second","value":"/hicc/v1/metrics/series/SystemMetrics/network:RxPackets.0/session/hosts"},
