Skip to content

Commit

Permalink
AMBARI-8566. Remaining changes to Hadoop Sink implementation. (mpapirkovskyy)
Browse files Browse the repository at this point in the history
  • Loading branch information
Myroslav Papirkovskyy committed Dec 5, 2014
1 parent 1e94bc1 commit 9e052ee
Show file tree
Hide file tree
Showing 13 changed files with 216 additions and 83 deletions.
29 changes: 1 addition & 28 deletions ambari-metrics/ambari-metrics-common/pom.xml
Expand Up @@ -31,33 +31,6 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>3.0</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>1.8</version>
<executions>
<execution>
<id>parse-version</id>
<phase>validate</phase>
<goals>
<goal>parse-version</goal>
</goals>
</execution>
<execution>
<id>regex-property</id>
<goals>
<goal>regex-property</goal>
</goals>
<configuration>
<name>ambariVersion</name>
<value>${project.version}</value>
<regex>^([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-).*</regex>
<replacement>$1.$2.$3</replacement>
<failIfNoMatch>false</failIfNoMatch>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

Expand All @@ -78,4 +51,4 @@
<version>1.8.0</version>
</dependency>
</dependencies>
</project>
</project>
47 changes: 35 additions & 12 deletions ambari-metrics/ambari-metrics-hadoop-sink/pom.xml
Expand Up @@ -32,20 +32,36 @@ limitations under the License.
<plugins>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptors>
<descriptor>src/main/assemblies/sink.xml</descriptor>
</descriptors>
<tarLongFileMode>gnu</tarLongFileMode>
</configuration>
<executions>
<execution>
<configuration>
<descriptors>
<descriptor>src/main/assemblies/sink.xml</descriptor>
</descriptors>
<tarLongFileMode>gnu</tarLongFileMode>
</configuration>
<id>build-tarball</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>

<execution>
<configuration>
<descriptors>
<descriptor>src/main/assemblies/sink-jar.xml</descriptor>
</descriptors>
<tarLongFileMode>gnu</tarLongFileMode>
<appendAssemblyId>false</appendAssemblyId>
<finalName>${project.artifactId}-with-common-${project.version}</finalName>
</configuration>
<id>build-jar</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
Expand Down Expand Up @@ -115,11 +131,11 @@ limitations under the License.
<groupname>root</groupname>
<sources>
<source>
<location>target/${project.artifactId}-${project.version}.jar</location>
<location>target/${project.artifactId}-with-common-${project.version}.jar</location>
</source>
<softlinkSource>
<destination>ambari-metrics-hadoop-sink.jar</destination>
<location>/usr/lib/ambari-metrics-hadoop-sink/${project.artifactId}-${project.version}.jar</location>
<location>/usr/lib/ambari-metrics-hadoop-sink/${project.artifactId}-with-common-${project.version}.jar</location>
</softlinkSource>
</sources>

Expand All @@ -135,7 +151,7 @@ limitations under the License.
<dependency>
<groupId>org.apache.ambari</groupId>
<artifactId>ambari-metrics-common</artifactId>
<version>0.1.0-SNAPSHOT</version>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
Expand Down Expand Up @@ -179,9 +195,16 @@ limitations under the License.
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<version>1.8.0</version>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
<version>4.10</version>
</dependency>
<dependency>
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
<version>3.2</version>
<scope>test</scope>
</dependency>
</dependencies>

Expand Down
@@ -0,0 +1,37 @@
<?xml version='1.0'?>

<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->

<!-- Maven Assembly Plugin descriptor: repackages the Hadoop sink together
     with the ambari-metrics-common classes into a single uber jar, so the
     Hadoop daemons only need one artifact on their classpath. Referenced
     from the "build-jar" execution in ambari-metrics-hadoop-sink/pom.xml,
     which names the result ${project.artifactId}-with-common-${project.version}.jar. -->
<assembly>
<id>hadoop-sink-jar</id>
<formats>
<!-- Produce a jar (not a tarball); the two dependencies below are
     unpacked so their classes merge into one archive. -->
<format>jar</format>
</formats>
<!-- No wrapping directory inside the jar: classes must sit at the root
     for the jar to be usable directly on a classpath. -->
<includeBaseDirectory>false</includeBaseDirectory>
<dependencySets>
<dependencySet>
<outputDirectory>/</outputDirectory>
<!-- unpack=true merges class files instead of nesting jars -->
<unpack>true</unpack>
<includes>
<!-- Only the sink itself and the common library are bundled; Hadoop
     and other provided/compile dependencies are intentionally excluded. -->
<include>org.apache.ambari:ambari-metrics-common</include>
<include>org.apache.ambari:ambari-metrics-hadoop-sink</include>
</includes>
</dependencySet>
</dependencySets>
</assembly>
@@ -0,0 +1,115 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.metrics2.sink.timeline;

import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.sink.timeline.base.AbstractTimelineMetricsSink;
import org.easymock.IAnswer;

import java.util.Arrays;
import java.util.Iterator;

import static org.apache.hadoop.metrics2.sink.timeline.base.AbstractTimelineMetricsSink.*;
import static org.easymock.EasyMock.anyInt;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

// Unit test for HadoopTimelineMetricsSink: mocks the sink's configuration and
// HTTP client with EasyMock, pushes a metrics record twice, and verifies the
// recorded expectations on all mocks. (Indentation here reflects the scraped
// diff; only comments were added, code is unchanged.)
public class HadoopTimelineMetricsSinkTest {

@org.junit.Test
public void testPutMetrics() throws Exception {
HadoopTimelineMetricsSink sink = new HadoopTimelineMetricsSink();

// Nice mock: unexpected calls return type defaults instead of failing,
// so only the explicitly expected interactions are pinned down.
SubsetConfiguration conf = createNiceMock(SubsetConfiguration.class);
expect(conf.getString(eq("slave.host.name"))).andReturn("testhost").anyTimes();
expect(conf.getParent()).andReturn(null).anyTimes();
expect(conf.getPrefix()).andReturn("service").anyTimes();
// Collector endpoint the sink should POST to (host:port read from config).
expect(conf.getString(eq(COLLECTOR_HOST_PROPERTY))).andReturn("localhost:63188").anyTimes();

// Note: eq()/anyInt() are both EasyMock matchers for the two-arg
// getInt(key, default) overload — only the key is constrained.
expect(conf.getInt(eq(MAX_METRIC_ROW_CACHE_SIZE), anyInt())).andReturn(10).anyTimes();
// 1000 ms send interval; the Thread.sleep(1500) below is chosen to exceed it.
expect(conf.getInt(eq(METRICS_SEND_INTERVAL), anyInt())).andReturn(1000).anyTimes();

// void method: expectation is registered via expectLastCall().
conf.setListDelimiter(eq(','));
expectLastCall().anyTimes();

// Empty key iterator: the sink sees no extra configuration entries.
expect(conf.getKeys()).andReturn(new Iterator() {
@Override
public boolean hasNext() {
return false;
}

@Override
public Object next() {
return null;
}

@Override
public void remove() {

}
}).once();


HttpClient httpClient = createNiceMock(HttpClient.class);

// Expect exactly one HTTP POST (status 200); the original author's note
// says caching collapses the two putMetrics() calls into a single send —
// NOTE(review): with a nice mock an extra call would not fail verify(),
// so this asserts at-least-once rather than exactly-once; confirm intent.
expect(httpClient.executeMethod(anyObject(PostMethod.class))).andReturn(200).once(); //metrics send only once due to caching

AbstractMetric metric = createNiceMock(AbstractMetric.class);
expect(metric.name()).andReturn("metricName").anyTimes();
expect(metric.value()).andReturn(9.5687).anyTimes();
//TODO currently only numeric metrics are supported

// A single record carrying the one metric above, with a live timestamp
// produced per call via IAnswer (so repeated reads advance in time).
MetricsRecord record = createNiceMock(MetricsRecord.class);
expect(record.name()).andReturn("testName").anyTimes();
expect(record.context()).andReturn("testContext").anyTimes();
expect(record.timestamp()).andAnswer(new IAnswer<Long>() {
@Override
public Long answer() throws Throwable {
return System.currentTimeMillis();
}
}).anyTimes();

expect(record.metrics()).andReturn(Arrays.asList(metric)).anyTimes();


// Switch all mocks from record mode to replay mode before use.
replay(conf, httpClient, record, metric);

// Inject the mocked HTTP client so no real network traffic occurs.
sink.setHttpClient(httpClient);
sink.init(conf);

sink.putMetrics(record);

// Sleep past the 1000 ms send interval so the second put is eligible to flush.
Thread.sleep(1500L);

sink.putMetrics(record);

// Verify every explicit expectation above was satisfied.
verify(conf, httpClient, record, metric);


}
}
Expand Up @@ -67,6 +67,7 @@ function write_pidfile
fi
}

#TODO decide if rebuild on each start (pretty quickly) to tolerate major node changes (like kernel update)
#build psutil
if [ ! "$(ls -A ${RESOURCE_MONITORING_DIR}/psutil/build)" ]; then
echo "Building psutil..."
Expand Down
7 changes: 6 additions & 1 deletion ambari-metrics/ambari-metrics-host-monitoring/pom.xml
Expand Up @@ -120,6 +120,7 @@
<autoRequires>false</autoRequires>
<requires>
<require>ambari-metrics-hadoop-sink</require>
<require>ambari-metrics-flume-sink</require>
<require>${python.ver}</require>
<require>gcc</require>
<require>python-devel</require>
Expand Down Expand Up @@ -172,6 +173,10 @@
<location>
${project.basedir}/src/main/python/psutil
</location>
<excludes>
<exclude>build/**</exclude>
<exclude>build/*</exclude>
</excludes>
</source>
</sources>
</mapping>
Expand Down Expand Up @@ -208,7 +213,7 @@
<executions>
<execution>
<id>psutils-compile</id>
<phase>process-classes</phase>
<phase>process-test-classes</phase>
<goals>
<goal>run</goal>
</goals>
Expand Down
27 changes: 0 additions & 27 deletions ambari-metrics/pom.xml
Expand Up @@ -66,33 +66,6 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>1.8</version>
<executions>
<execution>
<id>parse-version</id>
<phase>validate</phase>
<goals>
<goal>parse-version</goal>
</goals>
</execution>
<execution>
<id>regex-property</id>
<goals>
<goal>regex-property</goal>
</goals>
<configuration>
<name>ambariVersion</name>
<value>${project.version}</value>
<regex>^([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-).*</regex>
<replacement>$1.$2.$3</replacement>
<failIfNoMatch>false</failIfNoMatch>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.0</version>
Expand Down
Expand Up @@ -67,7 +67,7 @@ resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue
{% if has_metric_collector %}

*.period=60
*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.TimelineMetricsSink
*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
*.sink.timeline.period=10
datanode.sink.timeline.collector={{metric_collector_host}}:8188
namenode.sink.timeline.collector={{metric_collector_host}}:8188
Expand All @@ -80,4 +80,4 @@ supervisor.sink.timeline.collector={{metric_collector_host}}:8188
maptask.sink.timeline.collector={{metric_collector_host}}:8188
reducetask.sink.timeline.collector={{metric_collector_host}}:8188

{% endif %}
{% endif %}
Expand Up @@ -86,20 +86,20 @@ hbase.sink.ganglia.servers={{ganglia_server_host}}:8663
# If this variable is left out, then the default is no expiration.
hbase.extendedperiod = 3600

hbase.class=org.apache.hadoop.metrics2.sink.timeline.TimelineMetricsSink
hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
hbase.period=10
hbase.collector={{metric_collector_host}}:8188

jvm.class=org.apache.hadoop.metrics2.sink.timeline.TimelineMetricsSink
jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
jvm.period=10
jvm.collector={{metric_collector_host}}:8188

rpc.class=org.apache.hadoop.metrics2.sink.timeline.TimelineMetricsSink
rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
rpc.period=10
rpc.collector={{metric_collector_host}}:8188

hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.TimelineMetricsSink
hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
hbase.sink.timeline.period=10
hbase.sink.timeline.collector={{metric_collector_host}}:8188

{% endif %}
{% endif %}

0 comments on commit 9e052ee

Please sign in to comment.