Skip to content
Permalink
Browse files
AMBARI-23825 - Log feeder fails to parse date (#1248)
  • Loading branch information
kasakrisz authored and oleewere committed May 11, 2018
1 parent 6b01d0c commit 4824afa12afb41afeb3d785ed594b6d4322bdfe0
Show file tree
Hide file tree
Showing 17 changed files with 325 additions and 145 deletions.
@@ -18,23 +18,13 @@
*/
package org.apache.ambari.logsearch.patterns;

import static org.junit.Assume.assumeTrue;

import java.io.File;
import java.nio.file.Paths;

import org.junit.Before;
import org.junit.Test;

public class AtlasLogPatternIT extends PatternITBase {

@Override
@Before
// Per-test guard: JUnit's assumeTrue() marks the test as ignored (not failed)
// when the local HDP service-definition checkout is absent, so these
// integration tests silently skip on machines without the definitions.
public void setUp() throws Exception {
super.setUp();
assumeTrue(HDP_SERVICES_FOLDER.exists());
}

@Test
public void testAtlasLogLayout() {
String layout = Log4jXml.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get(
@@ -18,23 +18,13 @@
*/
package org.apache.ambari.logsearch.patterns;

import static org.junit.Assume.assumeTrue;

import java.io.File;
import java.nio.file.Paths;

import org.junit.Before;
import org.junit.Test;

public class HBaseLogPatternIT extends PatternITBase {

@Override
@Before
// Skip (rather than fail) every test in this class when the HDP service
// definitions are not checked out locally; assumeTrue() reports the test
// as ignored in that case.
public void setUp() throws Exception {
super.setUp();
assumeTrue(HDP_SERVICES_FOLDER.exists());
}

@Test
public void testHBaseLogLayout() {
String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get(
@@ -18,23 +18,13 @@
*/
package org.apache.ambari.logsearch.patterns;

import static org.junit.Assume.assumeTrue;

import java.io.File;
import java.nio.file.Paths;

import org.junit.Before;
import org.junit.Test;

public class HDFSLogPatternIT extends PatternITBase {

@Override
@Before
// Guard: these pattern ITs require the HDP service-definition folder on disk;
// assumeTrue() turns a missing checkout into a skipped test instead of a failure.
public void setUp() throws Exception {
super.setUp();
assumeTrue(HDP_SERVICES_FOLDER.exists());
}

@Test
public void testHDFSLogLayout() {
String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get(
@@ -20,7 +20,6 @@

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.junit.Assume.assumeTrue;

import java.io.File;
import java.nio.file.Paths;
@@ -31,18 +30,10 @@
import java.util.Map;

import org.apache.log4j.PatternLayout;
import org.junit.Before;
import org.junit.Test;

public class HdfsAuditLogPatternIT extends PatternITBase {

@Override
@Before
// Skip the HDFS-audit pattern tests when the HDP service-definition checkout
// is unavailable; assumeTrue() marks the tests ignored rather than failing.
public void setUp() throws Exception {
super.setUp();
assumeTrue(HDP_SERVICES_FOLDER.exists());
}

@Test
public void testHDFSAudit() throws Exception {
// given
@@ -18,28 +18,99 @@
*/
package org.apache.ambari.logsearch.patterns;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;

import java.io.File;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;
import java.util.Map;

import org.hamcrest.MatcherAssert;
import org.junit.Test;

public class HiveLogPatterntIT extends PatternITBase {
// TODO: use hdp_ambari_definitions

@Test
public void testHiveLogLayout() {
String layout = Log4jProperties.unwrapFrom(new File(AMBARI_STACK_DEFINITIONS, "HIVE/0.12.0.2.0/configuration/hive-log4j.xml")).getLayout("DRFA");
assertThatDateIsISO8601(layout);
// Parses one real HiveServer2 log line (ISO-8601 date with comma-separated
// millis) through the Log Feeder input config template for type
// "hive_hiveserver2" and checks every extracted field.
public void testHiveServer2LogEntry() throws Exception {
String logEntry = "2018-05-11T07:46:01,087 WARN [main]: metastore.HiveMetaStoreClient (:()) - Failed to connect to the MetaStore Server...";
Map<String, Object> result = testLogEntry(logEntry,"hive_hiveserver2", inputConfigTemplate(
new File(HDP_SERVICES_FOLDER, "HIVE/package/templates/input.config-hive.json.j2")));

assertThat(result.isEmpty(), is(false));
assertThat(result.get("cluster"), is(CLUSTER));
assertThat(result.get("level"), is("WARN"));
assertThat(result.get("event_count"), is(1));
assertThat(result.get("type"), is("hive_hiveserver2"));
assertThat(result.containsKey("seq_num"), is(true));
assertThat(result.containsKey("id"), is(true));
assertThat(result.containsKey("message_md5"), is(true));
assertThat(result.containsKey("event_md5"), is(true));
assertThat(result.containsKey("ip"), is(true));
assertThat(result.containsKey("host"), is(true));
assertThat(result.get("log_message"), is("Failed to connect to the MetaStore Server..."));
// Note the trailing space in the expected logger name — presumably an
// artifact of how the parser splits the "logger (file:method(line))" group;
// TODO confirm this is intended rather than a grok-pattern bug.
assertThat(result.get("logger_name"), is("metastore.HiveMetaStoreClient "));
// NOTE(review): "HW13201.local" looks like a developer-machine hostname.
// Verify testLogEntry() stubs the host field; otherwise this assertion
// only passes on the author's machine.
assertThat(result.get("host"), is("HW13201.local"));
// Expect the parsed timestamp to equal the one in the raw line:
// 2018-05-11 07:46:01.087 (87 ms expressed as 87_000_000 ns).
Date logTime = (Date) result.get("logtime");
LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault());
MatcherAssert.assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 11, 7, 46, 1, 87000000)));
}

@Test
public void testHiveServer2() throws Exception {
String layout = Log4jProperties.unwrapFrom(new File(AMBARI_STACK_DEFINITIONS, "HIVE/0.12.0.2.0/configuration/hive-log4j.xml")).getLayout("DRFA");
testServiceLog("hive_hiveserver2", layout, inputConfigTemplate(new File(AMBARI_STACK_DEFINITIONS, "HIVE/0.12.0.2.0/package/templates/input.config-hive.json.j2")));
// Parses a HiveServer2-interactive log line that includes the full
// "(File.java:method(line))" location group and verifies that file, method
// and line_number are extracted in addition to the common fields.
public void testHiveServer2InteractiveLogEntry() throws Exception {
String logEntry = "2018-05-11T08:48:02,973 WARN [main]: conf.HiveConf (HiveConf.java:initialize(5193)) - HiveConf of name hive.hook.proto.base-directory does not exist";
Map<String, Object> result = testLogEntry(logEntry,"hive_hiveserver2", inputConfigTemplate(
new File(HDP_SERVICES_FOLDER, "HIVE/package/templates/input.config-hive.json.j2")));

assertThat(result.isEmpty(), is(false));
assertThat(result.get("cluster"), is(CLUSTER));
assertThat(result.get("level"), is("WARN"));
assertThat(result.get("event_count"), is(1));
assertThat(result.get("type"), is("hive_hiveserver2"));
assertThat(result.containsKey("seq_num"), is(true));
assertThat(result.containsKey("id"), is(true));
assertThat(result.containsKey("message_md5"), is(true));
assertThat(result.containsKey("event_md5"), is(true));
assertThat(result.containsKey("ip"), is(true));
assertThat(result.containsKey("host"), is(true));
assertThat(result.get("log_message"), is("HiveConf of name hive.hook.proto.base-directory does not exist"));
// Trailing space in the expected logger name — see the raw line's
// "conf.HiveConf (..." segment; TODO confirm the parser is meant to keep it.
assertThat(result.get("logger_name"), is("conf.HiveConf "));
// NOTE(review): hard-coded developer hostname — confirm testLogEntry()
// injects/stubs the host, otherwise this is machine-dependent.
assertThat(result.get("host"), is("HW13201.local"));
// Location group "(HiveConf.java:initialize(5193))" split into three fields.
assertThat(result.get("file"), is("HiveConf.java"));
assertThat(result.get("method"), is("initialize"));
assertThat(result.get("line_number"), is("5193"));
// Timestamp from the raw line: 2018-05-11 08:48:02.973 (973 ms in ns).
Date logTime = (Date) result.get("logtime");
LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault());
MatcherAssert.assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 11, 8, 48, 2, 973000000)));
}

@Test
public void testHiveMetastore() throws Exception {
String layout = Log4jProperties.unwrapFrom(new File(AMBARI_STACK_DEFINITIONS, "HIVE/0.12.0.2.0/configuration/hive-log4j.xml")).getLayout("DRFA");
testServiceLog("hive_metastore", layout, inputConfigTemplate(new File(AMBARI_STACK_DEFINITIONS, "HIVE/0.12.0.2.0/package/templates/input.config-hive.json.j2")));
// Parses a Hive Metastore log line through the same input config template
// (type "hive_metastore") and verifies all extracted fields, including the
// file/method/line location group.
public void testHiveMetastoreLogEntry() throws Exception {
// "WRTIE_SET" is not a typo in this test: it reproduces the raw message
// text verbatim and the log_message assertion below must match it.
String logEntry = "2018-05-11T09:13:14,706 INFO [pool-7-thread-6]: txn.TxnHandler (TxnHandler.java:performWriteSetGC(1588)) - Deleted 0 obsolete rows from WRTIE_SET";
Map<String, Object> result = testLogEntry(logEntry,"hive_metastore", inputConfigTemplate(
new File(HDP_SERVICES_FOLDER, "HIVE/package/templates/input.config-hive.json.j2")));

assertThat(result.isEmpty(), is(false));
assertThat(result.get("cluster"), is(CLUSTER));
assertThat(result.get("level"), is("INFO"));
assertThat(result.get("event_count"), is(1));
assertThat(result.get("type"), is("hive_metastore"));
assertThat(result.containsKey("seq_num"), is(true));
assertThat(result.containsKey("id"), is(true));
assertThat(result.containsKey("message_md5"), is(true));
assertThat(result.containsKey("event_md5"), is(true));
assertThat(result.containsKey("ip"), is(true));
assertThat(result.containsKey("host"), is(true));
assertThat(result.get("log_message"), is("Deleted 0 obsolete rows from WRTIE_SET"));
// Trailing space kept in the expected logger name — TODO confirm intended.
assertThat(result.get("logger_name"), is("txn.TxnHandler "));
// NOTE(review): hard-coded developer hostname — verify the harness stubs
// the host field, otherwise this assertion is machine-dependent.
assertThat(result.get("host"), is("HW13201.local"));
assertThat(result.get("line_number"), is("1588"));
assertThat(result.get("file"), is("TxnHandler.java"));
assertThat(result.get("method"), is("performWriteSetGC"));
// Timestamp from the raw line: 2018-05-11 09:13:14.706 (706 ms in ns).
Date logTime = (Date) result.get("logtime");
LocalDateTime localDateTime = LocalDateTime.ofInstant(logTime.toInstant(), ZoneId.systemDefault());
MatcherAssert.assertThat(localDateTime, is(LocalDateTime.of(2018, 5, 11, 9, 13, 14, 706000000)));
}
}

@@ -18,23 +18,13 @@
*/
package org.apache.ambari.logsearch.patterns;

import static org.junit.Assume.assumeTrue;

import java.io.File;
import java.nio.file.Paths;

import org.junit.Before;
import org.junit.Test;

public class KafkaLogPatternIT extends PatternITBase {

@Override
@Before
// Skip the Kafka pattern tests (via JUnit assumeTrue, i.e. ignored not
// failed) when the HDP service-definition checkout is not present locally.
public void setUp() throws Exception {
super.setUp();
assumeTrue(HDP_SERVICES_FOLDER.exists());
}

@Test
public void testKafkaRequestAppenderLayout() {
testKafkaAppenderLayout("requestAppender");
@@ -18,23 +18,13 @@
*/
package org.apache.ambari.logsearch.patterns;

import static org.junit.Assume.assumeTrue;

import java.io.File;
import java.nio.file.Paths;

import org.junit.Before;
import org.junit.Test;

public class KnoxLogPatternIT extends PatternITBase {

@Override
@Before
// Guard: requires the HDP service-definition folder on disk; assumeTrue()
// downgrades a missing checkout from a failure to a skipped test.
public void setUp() throws Exception {
super.setUp();
assumeTrue(HDP_SERVICES_FOLDER.exists());
}

@Test
public void testKnoxGatewayAppenderLayout() {
String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get(
@@ -18,23 +18,13 @@
*/
package org.apache.ambari.logsearch.patterns;

import static org.junit.Assume.assumeTrue;

import java.io.File;
import java.nio.file.Paths;

import org.junit.Before;
import org.junit.Test;

public class MetricsLogPatternIT extends PatternITBase {

@Override
@Before
// Skip (not fail) the metrics pattern tests on machines without the local
// HDP service-definition checkout.
public void setUp() throws Exception {
super.setUp();
assumeTrue(HDP_SERVICES_FOLDER.exists());
}

@Test
public void testMetricsLogLayout() {
String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get(
@@ -18,7 +18,7 @@
*/
package org.apache.ambari.logsearch.patterns;

import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.junit.Assume.assumeTrue;
@@ -51,12 +51,10 @@
public class PatternITBase {
protected final static Logger LOG = Logger.getLogger(PatternITBase.class);

public static final String HDP_AMBARI_DEFINITIONS_PATH = "/Users/kkasa/project/hdp_ambari_definitions/";
public static final File HDP_AMBARI_DEFINITIONS = new File(
isBlank(System.getProperty("hdp.ambari.definitions.path")) ? HDP_AMBARI_DEFINITIONS_PATH : System.getProperty("hdp.ambari.definitions.path"));
public static File HDP_AMBARI_DEFINITIONS;
public static File AMBARI_STACK_DEFINITIONS;
public static File AMBARI_FOLDER;
public static final File HDP_SERVICES_FOLDER = new File(HDP_AMBARI_DEFINITIONS, Paths.get( "src", "main", "resources", "stacks", "HDP", "3.0", "services").toString());
public static File HDP_SERVICES_FOLDER;
public static final String CLUSTER = "cl1";
public static final String GLOBAL_CONFIG = "[\n" +
" {\n" +
@@ -75,6 +73,14 @@ public class PatternITBase {

@BeforeClass
public static void setupGlobal() throws Exception {
String hdpAmbariDefinitionsPath = System.getProperty("hdp.ambari.definitions.path");
if (isNotBlank(hdpAmbariDefinitionsPath)) {
HDP_AMBARI_DEFINITIONS = new File(hdpAmbariDefinitionsPath);
HDP_SERVICES_FOLDER = new File(HDP_AMBARI_DEFINITIONS, Paths.get( "src", "main", "resources", "stacks", "HDP", "3.0", "services").toString());
}

assumeTrue(HDP_SERVICES_FOLDER != null && HDP_SERVICES_FOLDER.exists());

URL location = PatternITBase.class.getProtectionDomain().getCodeSource().getLocation();

AMBARI_FOLDER = new File(new File(location.toURI()).getParentFile().getParentFile().getParentFile().getParent());
@@ -20,23 +20,14 @@

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.junit.Assume.assumeTrue;

import java.io.File;
import java.nio.file.Paths;

import org.junit.Before;
import org.junit.Test;

public class RangerLogPatternIT extends PatternITBase {

@Override
@Before
// Guard: the Ranger pattern tests need the HDP service definitions on disk;
// assumeTrue() marks them ignored when the folder does not exist.
public void setUp() throws Exception {
super.setUp();
assumeTrue(HDP_SERVICES_FOLDER.exists());
}

@Test
public void testRangerAdminLogLayout() {
String layout = Log4jProperties.unwrapFrom(new File(HDP_SERVICES_FOLDER, Paths.get(

0 comments on commit 4824afa

Please sign in to comment.