Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

HIVE-2691 : Specify location of log4j configuration files via configuration properties (Zhenxiao Luo via Ashutosh Chauhan)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1418858 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information...
commit 0539db1d1e15746f814c0b18ee989260d2559bf6 1 parent 27b1db4
@ashutoshc ashutoshc authored
View
107 common/src/java/org/apache/hadoop/hive/common/LogUtils.java
@@ -19,17 +19,26 @@
package org.apache.hadoop.hive.common;
import java.net.URL;
+import java.io.File;
+import java.io.IOException;
+import java.io.FileNotFoundException;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.log4j.LogManager;
import org.apache.log4j.PropertyConfigurator;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
/**
* Utilities common to logging operations.
*/
public class LogUtils {
- public static final String HIVE_L4J = "hive-log4j.properties";
- public static final String HIVE_EXEC_L4J = "hive-exec-log4j.properties";
+ private static final String HIVE_L4J = "hive-log4j.properties";
+ private static final String HIVE_EXEC_L4J = "hive-exec-log4j.properties";
+ private static final Log l4j = LogFactory.getLog(LogUtils.class);
@SuppressWarnings("serial")
public static class LogInitializationException extends Exception {
@@ -39,21 +48,101 @@ public LogInitializationException(String msg) {
}
/**
- * Initialize log4j based on hive-log4j.properties.
+ * Initialize log4j.
*
* @return a message suitable for display to the user
* @throws LogInitializationException if log4j fails to initialize correctly
*/
- public static String initHiveLog4j() throws LogInitializationException {
- // allow hive log4j to override any normal initialized one
- URL hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_L4J);
+ public static String initHiveLog4j()
+ throws LogInitializationException {
+ return initHiveLog4jCommon(HiveConf.ConfVars.HIVE_LOG4J_FILE);
+ }
+
+ /**
+ * Initialize log4j for execution mode.
+ *
+ * @return a message suitable for display to the user
+ * @throws LogInitializationException if log4j-exec fails to initialize correctly
+ */
+ public static String initHiveExecLog4j()
+ throws LogInitializationException {
+ return initHiveLog4jCommon(HiveConf.ConfVars.HIVE_EXEC_LOG4J_FILE);
+ }
+
+ private static String initHiveLog4jCommon(ConfVars confVarName)
+ throws LogInitializationException {
+ HiveConf conf = new HiveConf();
+ if (HiveConf.getVar(conf, confVarName).equals("")) {
+      // if log4j configuration file not set, or could not be found, use default setting
+ return initHiveLog4jDefault(conf, "", confVarName);
+ } else {
+ // if log4j configuration file found successfully, use HiveConf property value
+ String log4jFileName = HiveConf.getVar(conf, confVarName);
+ File log4jConfigFile = new File(log4jFileName);
+ boolean fileExists = log4jConfigFile.exists();
+ if (!fileExists) {
+ // if property specified file not found in local file system
+ // use default setting
+ return initHiveLog4jDefault(
+ conf, "Not able to find conf file: " + log4jConfigFile, confVarName);
+ } else {
+        // property specified file found in local file system
+ // use the specified file
+ if (confVarName == HiveConf.ConfVars.HIVE_EXEC_LOG4J_FILE) {
+ System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(),
+ HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID));
+ }
+ LogManager.resetConfiguration();
+ PropertyConfigurator.configure(log4jFileName);
+ logConfigLocation(conf);
+ return ("Logging initialized using configuration in " + log4jConfigFile);
+ }
+ }
+ }
+
+ private static String initHiveLog4jDefault(
+ HiveConf conf, String logMessage, ConfVars confVarName)
+ throws LogInitializationException {
+ URL hive_l4j = null;
+ switch (confVarName) {
+ case HIVE_EXEC_LOG4J_FILE:
+ hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_EXEC_L4J);
+ if (hive_l4j == null) {
+ hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_L4J);
+ }
+ System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(),
+ HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID));
+ break;
+ case HIVE_LOG4J_FILE:
+ hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_L4J);
+ break;
+ default:
+ break;
+ }
if (hive_l4j != null) {
LogManager.resetConfiguration();
PropertyConfigurator.configure(hive_l4j);
- return "Logging initialized using configuration in " + hive_l4j;
+ logConfigLocation(conf);
+ return (logMessage + "\n" + "Logging initialized using configuration in " + hive_l4j);
+ } else {
+ throw new LogInitializationException(
+ logMessage + "Unable to initialize logging using "
+ + LogUtils.HIVE_L4J + ", not found on CLASSPATH!");
+ }
+ }
+
+ private static void logConfigLocation(HiveConf conf) throws LogInitializationException {
+ // Log a warning if hive-default.xml is found on the classpath
+ if (conf.getHiveDefaultLocation() != null) {
+ l4j.warn("DEPRECATED: Ignoring hive-default.xml found on the CLASSPATH at "
+ + conf.getHiveDefaultLocation().getPath());
+ }
+ // Look for hive-site.xml on the CLASSPATH and log its location if found.
+ if (conf.getHiveSiteLocation() == null) {
+ l4j.warn("hive-site.xml not found on CLASSPATH");
} else {
- throw new LogInitializationException("Unable to initialize logging using "
- + LogUtils.HIVE_L4J + ", not found on CLASSPATH!");
+ l4j.debug("Using hive-site.xml found on CLASSPATH at "
+ + conf.getHiveSiteLocation().getPath());
}
}
}
View
31 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -37,6 +37,8 @@
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.LogUtils;
+import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.JobConf;
@@ -52,6 +54,7 @@
protected Properties origProp;
protected String auxJars;
private static final Log l4j = LogFactory.getLog(HiveConf.class);
+ private static URL hiveDefaultURL = null;
private static URL hiveSiteURL = null;
private static byte[] confVarByteArray = null;
@@ -63,20 +66,10 @@
classLoader = HiveConf.class.getClassLoader();
}
- // Log a warning if hive-default.xml is found on the classpath
- URL hiveDefaultURL = classLoader.getResource("hive-default.xml");
- if (hiveDefaultURL != null) {
- l4j.warn("DEPRECATED: Ignoring hive-default.xml found on the CLASSPATH at " +
- hiveDefaultURL.getPath());
- }
+ hiveDefaultURL = classLoader.getResource("hive-default.xml");
// Look for hive-site.xml on the CLASSPATH and log its location if found.
hiveSiteURL = classLoader.getResource("hive-site.xml");
- if (hiveSiteURL == null) {
- l4j.warn("hive-site.xml not found on CLASSPATH");
- } else {
- l4j.debug("Using hive-site.xml found on CLASSPATH at " + hiveSiteURL.getPath());
- }
for (ConfVars confVar : ConfVars.values()) {
vars.put(confVar.varname, confVar);
}
@@ -640,6 +633,10 @@
HIVE_CONCATENATE_CHECK_INDEX ("hive.exec.concatenate.check.index", true),
HIVE_IO_EXCEPTION_HANDLERS("hive.io.exception.handlers", ""),
+ // logging configuration
+ HIVE_LOG4J_FILE("hive.log4j.file", ""),
+ HIVE_EXEC_LOG4J_FILE("hive.exec.log4j.file", ""),
+
// prefix used to auto generated column aliases (this should be started with '_')
HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL("hive.autogen.columnalias.prefix.label", "_c"),
HIVE_AUTOGEN_COLUMNALIAS_PREFIX_INCLUDEFUNCNAME(
@@ -1081,10 +1078,6 @@ public Properties getChangedProperties() {
return (ret);
}
- public String getHiveSitePath() {
- return hiveSiteURL.getPath();
- }
-
public String getJar() {
return hiveJar;
}
@@ -1104,6 +1097,14 @@ public void setAuxJars(String auxJars) {
setVar(this, ConfVars.HIVEAUXJARS, auxJars);
}
+ public URL getHiveDefaultLocation() {
+ return hiveDefaultURL;
+ }
+
+ public URL getHiveSiteLocation() {
+ return hiveSiteURL;
+ }
+
/**
* @return the user name set in hadoop.job.ugi param or the current user from System
* @throws IOException
View
2  common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
@@ -35,7 +35,7 @@
public void testHiveSitePath() throws Exception {
String expectedPath =
new Path(System.getProperty("test.build.resources") + "/hive-site.xml").toUri().getPath();
- assertEquals(expectedPath, new HiveConf().getHiveSitePath());
+ assertEquals(expectedPath, new HiveConf().getHiveSiteLocation().getPath());
}
private void checkHadoopConf(String name, String expectedHadoopVal) throws Exception {
View
125 common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.conf;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.common.LogUtils;
+import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+/**
+ * TestHiveLogging
+ *
+ * Test cases for HiveLogging, which is initialized in HiveConf.
+ * Loads configuration files located in common/src/test/resources.
+ */
+public class TestHiveLogging extends TestCase {
+ private Runtime runTime;
+ private Process process;
+
+ public TestHiveLogging() {
+ super();
+ runTime = Runtime.getRuntime();
+ process = null;
+ }
+
+ private void configLog(String hiveLog4jTest, String hiveExecLog4jTest) {
+ System.setProperty(ConfVars.HIVE_LOG4J_FILE.varname,
+ System.getProperty("test.build.resources") + "/" + hiveLog4jTest);
+ System.setProperty(ConfVars.HIVE_EXEC_LOG4J_FILE.varname,
+ System.getProperty("test.build.resources") + "/" + hiveExecLog4jTest);
+
+ String expectedLog4jPath = System.getProperty("test.build.resources")
+ + "/" + hiveLog4jTest;
+ String expectedLog4jExecPath = System.getProperty("test.build.resources")
+ + "/" + hiveExecLog4jTest;
+
+ try {
+ LogUtils.initHiveLog4j();
+ } catch (LogInitializationException e) {
+ }
+
+ HiveConf conf = new HiveConf();
+ assertEquals(expectedLog4jPath, conf.getVar(ConfVars.HIVE_LOG4J_FILE));
+ assertEquals(expectedLog4jExecPath, conf.getVar(ConfVars.HIVE_EXEC_LOG4J_FILE));
+ }
+
+ private void runCmd(String cmd) {
+ try {
+ process = runTime.exec(cmd);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ try {
+ process.waitFor();
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+
+ private void getCmdOutput(String logFile) {
+ boolean logCreated = false;
+ BufferedReader buf = new BufferedReader(
+ new InputStreamReader(process.getInputStream()));
+ String line = "";
+ try {
+ while((line = buf.readLine()) != null) {
+ if (line.equals(logFile))
+ logCreated = true;
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ assertEquals(true, logCreated);
+ }
+
+ private void RunTest(String cleanCmd, String findCmd, String logFile,
+ String hiveLog4jProperty, String hiveExecLog4jProperty) throws Exception {
+ // clean test space
+ runCmd(cleanCmd);
+
+ // config log4j with customized files
+ // check whether HiveConf initialize log4j correctly
+ configLog(hiveLog4jProperty, hiveExecLog4jProperty);
+
+ // check whether log file is created on test running
+ runCmd(findCmd);
+ getCmdOutput(logFile);
+
+ // clean test space
+ runCmd(cleanCmd);
+ }
+
+ public void testHiveLogging() throws Exception {
+ // customized log4j config log file to be: /tmp/hiveLog4jTest.log
+ String customLogPath = "/tmp/";
+ String customLogName = "hiveLog4jTest.log";
+ String customLogFile = customLogPath + customLogName;
+ String customCleanCmd = "rm -rf " + customLogFile;
+ String customFindCmd = "find /tmp -name " + customLogName;
+ RunTest(customCleanCmd, customFindCmd, customLogFile,
+ "hive-log4j-test.properties", "hive-exec-log4j-test.properties");
+ }
+}
View
55 common/src/test/resources/hive-exec-log4j-test.properties
@@ -0,0 +1,55 @@
+# Define some default values that can be overridden by system properties
+hive.root.logger=INFO,FA
+hive.log.dir=/tmp
+hive.log.file=hiveExecLog4jTest.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hive.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshhold=WARN
+
+#
+# File Appender
+#
+
+log4j.appender.FA=org.apache.log4j.FileAppender
+log4j.appender.FA.File=${hive.log.dir}/${hive.log.file}
+log4j.appender.FA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#custom logging levels
+#log4j.logger.xxx=DEBUG
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
+
+
+log4j.category.DataNucleus=ERROR,FA
+log4j.category.Datastore=ERROR,FA
+log4j.category.Datastore.Schema=ERROR,FA
+log4j.category.JPOX.Datastore=ERROR,FA
+log4j.category.JPOX.Plugin=ERROR,FA
+log4j.category.JPOX.MetaData=ERROR,FA
+log4j.category.JPOX.Query=ERROR,FA
+log4j.category.JPOX.General=ERROR,FA
+log4j.category.JPOX.Enhancer=ERROR,FA
+
View
67 common/src/test/resources/hive-log4j-test.properties
@@ -0,0 +1,67 @@
+# Define some default values that can be overridden by system properties
+hive.root.logger=WARN,DRFA
+hive.log.dir=/tmp
+hive.log.file=hiveLog4jTest.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hive.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshhold=WARN
+
+#
+# Daily Rolling File Appender
+#
+# Use the PidDailyRollingFileAppender class instead if you want to use separate log files
+# for different CLI session.
+#
+# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+
+log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+log4j.appender.console.encoding=UTF-8
+
+#custom logging levels
+#log4j.logger.xxx=DEBUG
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
+
+
+log4j.category.DataNucleus=ERROR,DRFA
+log4j.category.Datastore=ERROR,DRFA
+log4j.category.Datastore.Schema=ERROR,DRFA
+log4j.category.JPOX.Datastore=ERROR,DRFA
+log4j.category.JPOX.Plugin=ERROR,DRFA
+log4j.category.JPOX.MetaData=ERROR,DRFA
+log4j.category.JPOX.Query=ERROR,DRFA
+log4j.category.JPOX.General=ERROR,DRFA
+log4j.category.JPOX.Enhancer=ERROR,DRFA
+
View
15 conf/hive-default.xml.template
@@ -1586,5 +1586,20 @@
<description>Whether to show the unquoted partition names in query results.</description>
</property>
+<property>
+ <name>hive.log4j.file</name>
+ <value></value>
+ <description>Hive log4j configuration file.
+ If the property is not set, then logging will be initialized using hive-log4j.properties found on the classpath.
+ If the property is set, the value must be a valid URI (java.net.URI, e.g. "file:///tmp/my-logging.properties"), which you can then extract a URL from and pass to PropertyConfigurator.configure(URL).</description>
+</property>
+
+<property>
+ <name>hive.exec.log4j.file</name>
+ <value></value>
+ <description>Hive log4j configuration file for execution mode(sub command).
+ If the property is not set, then logging will be initialized using hive-exec-log4j.properties found on the classpath.
+ If the property is set, the value must be a valid URI (java.net.URI, e.g. "file:///tmp/my-logging.properties"), which you can then extract a URL from and pass to PropertyConfigurator.configure(URL).</description>
+</property>
</configuration>
View
4 metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -4097,8 +4097,8 @@ public static void main(String[] args) throws Throwable {
// any log specific settings via hiveconf will be ignored
Properties hiveconf = cli.addHiveconfToSystemProperties();
- // If the log4j.configuration property hasn't already been explicitly set, use Hive's default
- // log4j configuration
+ // If the log4j.configuration property hasn't already been explicitly set,
+ // use Hive's default log4j configuration
if (System.getProperty("log4j.configuration") == null) {
// NOTE: It is critical to do this here so that log4j is reinitialized
// before any of the other core hive classes are loaded
View
17 ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
@@ -48,6 +48,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.CompressionUtils;
import org.apache.hadoop.hive.common.LogUtils;
+import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.Context;
@@ -563,18 +564,10 @@ private static void printUsage() {
*/
private static void setupChildLog4j(Configuration conf) {
- URL hive_l4j = ExecDriver.class.getClassLoader().getResource(LogUtils.HIVE_EXEC_L4J);
- if (hive_l4j == null) {
- hive_l4j = ExecDriver.class.getClassLoader().getResource(LogUtils.HIVE_L4J);
- }
-
- if (hive_l4j != null) {
- // setting queryid so that log4j configuration can use it to generate
- // per query log file
- System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), HiveConf.getVar(conf,
- HiveConf.ConfVars.HIVEQUERYID));
- LogManager.resetConfiguration();
- PropertyConfigurator.configure(hive_l4j);
+ try {
+ LogUtils.initHiveExecLog4j();
+ } catch (LogInitializationException e) {
+ System.err.println(e.getMessage());
}
}
View
4 ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
@@ -119,12 +119,10 @@ public void testSimpleQuery() {
try {
// NOTE: It is critical to do this here so that log4j is reinitialized
- // before
- // any of the other core hive classes are loaded
+ // before any of the other core hive classes are loaded
try {
LogUtils.initHiveLog4j();
} catch (LogInitializationException e) {
- // ignore
}
CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
Please sign in to comment.
Something went wrong with that request. Please try again.