Skip to content
This repository has been archived by the owner. It is now read-only.
Permalink
Browse files
[AMBARI-24180] Ambari metrics Service Check fails. (#1615)
* [AMBARI-24180] Ambari metrics Service Check fails post EU. Error - 401 Authentication required in response.

* [AMBARI-24180] Ambari metrics Service Check fails.

* [AMBARI-24180] Ambari metrics Service Check fails. - 3
  • Loading branch information
avijayanhwx committed Jun 26, 2018
1 parent 9ef2bb9 commit bd577e1740ae725975a6f4500d80bb44881bdae2
Showing 10 changed files with 50 additions and 7 deletions.
@@ -396,8 +396,10 @@ static Multimap<String, List<Function>> parseMetricNamesToAggregationFunctions(L
return metricsFunctions;
}

public void putMetricsSkipCache(TimelineMetrics metrics) throws SQLException, IOException {
public TimelinePutResponse putMetricsSkipCache(TimelineMetrics metrics) throws SQLException, IOException {
TimelinePutResponse response = new TimelinePutResponse();
hBaseAccessor.insertMetricRecordsWithMetadata(metricMetadataManager, metrics, true);
return response;
}

@Override
@@ -1105,6 +1105,9 @@ private void getTimelineMetricsFromResultSet(TimelineMetrics metrics, Function f
throws SQLException, IOException {
if (condition.getPrecision().equals(Precision.SECONDS)) {
TimelineMetric metric = TIMELINE_METRIC_READ_HELPER.getTimelineMetricFromResultSet(rs);
if (metric == null) {
return;
}
if (f != null && f.getSuffix() != null) { //Case : Requesting "._rate" for precision data
metric.setMetricName(metric.getMetricName() + f.getSuffix());
}
@@ -68,6 +68,16 @@ TimelineMetrics getTimelineMetrics(List<String> metricNames, List<String> hostna
*/
TimelinePutResponse putMetrics(TimelineMetrics metrics) throws SQLException, IOException;

/**
 * Stores metric information to the timeline store without any buffering of
 * data, so the written values become readable immediately.
 *
 * @param metrics a {@link TimelineMetrics} instance holding the metrics to persist
 * @return an {@link org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse}
 * @throws SQLException if the underlying store rejects the write
 * @throws IOException if serialization or store I/O fails
 */
TimelinePutResponse putMetricsSkipCache(TimelineMetrics metrics) throws SQLException, IOException;


/**
 * Stores container metrics into the timeline store
 */
@@ -98,7 +98,9 @@ private Map<TimelineClusterMetric, MetricHostAggregate> aggregateMetricsFromResu

while (rs.next()) {
TimelineClusterMetric currentMetric = readHelper.fromResultSet(rs);

if (currentMetric == null) {
continue;
}
MetricClusterAggregate currentHostAggregate =
isClusterPrecisionInputTable ?
readHelper.getMetricClusterAggregateFromResultSet(rs) :
@@ -160,6 +160,10 @@ Map<TimelineClusterMetric, MetricClusterAggregate> aggregateMetricsFromResultSet
// If rows belong to same host combine them before slicing. This
// avoids issues across rows that belong to same hosts but get
// counted as coming from different ones.
if (nextMetric == null) {
continue;
}

if (metric.equalsExceptTime(nextMetric)) {
metric.addMetricValues(nextMetric.getMetricValues());
} else {
@@ -92,6 +92,9 @@ private Map<TimelineMetric, MetricHostAggregate> aggregateMetricsFromResultSet(R
while (rs.next()) {
TimelineMetric currentMetric =
readHelper.getTimelineMetricKeyFromResultSet(rs);
if (currentMetric == null) {
continue;
}
MetricHostAggregate currentHostAggregate =
readHelper.getMetricHostAggregateFromResultSet(rs);

@@ -53,6 +53,9 @@ public TimelineMetricReadHelper(TimelineMetricMetadataManager timelineMetricMeta
public TimelineMetric getTimelineMetricFromResultSet(ResultSet rs)
throws SQLException, IOException {
TimelineMetric metric = getTimelineMetricCommonsFromResultSet(rs);
if (metric == null) {
return null;
}
TreeMap<Long, Double> sortedByTimeMetrics = PhoenixHBaseAccessor.readMetricFromJSON(rs.getString("METRICS"));
metric.setMetricValues(sortedByTimeMetrics);
return metric;
@@ -110,6 +113,9 @@ public TimelineMetric getTimelineMetricCommonsFromResultSet(ResultSet rs)

byte[] uuid = rs.getBytes("UUID");
TimelineMetric metric = metadataManagerInstance.getMetricFromUuid(uuid);
if (metric == null) {
return null;
}
if (ignoreInstance) {
metric.setInstanceId(null);
}
@@ -147,7 +153,9 @@ public TimelineClusterMetric fromResultSet(ResultSet rs) throws SQLException {

byte[] uuid = rs.getBytes("UUID");
TimelineMetric timelineMetric = metadataManagerInstance.getMetricFromUuid(uuid);

if (timelineMetric == null) {
return null;
}
return new TimelineClusterMetric(
timelineMetric.getMetricName(),
timelineMetric.getAppId(),
@@ -48,6 +48,7 @@
import javax.xml.bind.annotation.XmlRootElement;

import org.apache.ambari.metrics.core.timeline.TimelineMetricServiceSummary;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Public;
@@ -77,6 +78,7 @@ public class TimelineWebServices {
private static final Log LOG = LogFactory.getLog(TimelineWebServices.class);

private TimelineMetricStore timelineMetricStore;
private static final String SMOKETEST_METRIC_APP_ID = "amssmoketestfake";

@Inject
public TimelineWebServices(TimelineMetricStore timelineMetricStore) {
@@ -149,7 +151,11 @@ public TimelinePutResponse postMetrics(
TimelineUtils.dumpTimelineRecordtoJSON(metrics, true));
}

return timelineMetricStore.putMetrics(metrics);
if (CollectionUtils.isNotEmpty(metrics.getMetrics()) && metrics.getMetrics().get(0).getAppId().equals(SMOKETEST_METRIC_APP_ID)) {
return timelineMetricStore.putMetricsSkipCache(metrics);
} else {
return timelineMetricStore.putMetrics(metrics);
}

} catch (Exception e) {
LOG.error("Error saving metrics.", e);
@@ -81,6 +81,11 @@ public TimelinePutResponse putMetrics(TimelineMetrics metrics)
return new TimelinePutResponse();
}

@Override
public TimelinePutResponse putMetricsSkipCache(TimelineMetrics metrics) throws SQLException, IOException {
  // Test stub: nothing is persisted; simply acknowledge the write with an
  // empty response, mirroring the no-op behavior of putMetrics above.
  TimelinePutResponse emptyResponse = new TimelinePutResponse();
  return emptyResponse;
}

@Override
public TimelinePutResponse putContainerMetrics(List<ContainerMetric> metrics)
throws SQLException, IOException {
@@ -50,9 +50,9 @@ public class TimelineMetricStoreWatcherTest {
public void testRunPositive() throws Exception {
HBaseTimelineMetricsService metricStore = createNiceMock(HBaseTimelineMetricsService.class);

metricStore.putMetricsSkipCache(anyObject(TimelineMetrics.class));
expectLastCall().once();

expect(metricStore.putMetricsSkipCache(anyObject(TimelineMetrics.class)))
.andReturn(new TimelinePutResponse());
// metric found
expect(metricStore.getTimelineMetrics(EasyMock.<List<String>>anyObject(),
EasyMock.<List<String>>anyObject(), anyObject(String.class),

0 comments on commit bd577e1

Please sign in to comment.