Permalink
Browse files

Updating hadoop_cluster to use log_integration::logrotate instead of native rotation
  • Loading branch information...
1 parent 1b39d03 commit 83a44bd217fd1fca5a2a578ba06a3d97acd0a060 @temujin9 temujin9 committed Dec 4, 2012
@@ -2,7 +2,7 @@
maintainer_email "coders@infochimps.com"
license "Apache 2.0"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
-version "3.3.0"
+version "3.4.0"
description "Hadoop: distributed massive-scale data processing framework. Store and analyze terabyte-scale datasets with ease"
@@ -31,6 +31,7 @@
%w[ core-site.xml hdfs-site.xml mapred-site.xml
hadoop-env.sh fairscheduler.xml hadoop-metrics.properties
+ log4j.properties
].each do |conf_file|
template "#{node[:hadoop][:conf_dir]}/#{conf_file}" do
owner "root"
@@ -25,6 +25,9 @@
hadoop_service(:datanode)
announce(:hadoop, :datanode, {
+ :logs => { :datanode => {
+ :glob => node[:hadoop][:log_dir] + '/hadoop-hadoop-datanode-*.log'
+ } },
:ports => {
:xcvr_port => { :port => node[:hadoop][:datanode][:xcvr_port] },
:ipc_port => { :port => node[:hadoop][:datanode][:ipc_port], },
@@ -24,7 +24,35 @@
hadoop_service(:jobtracker)
+# Don't attempt to do a rolling rotate of single files created, just drop
+# them if they're over two weeks old.
+logrotate_single_files = {
+ :daily => nil,
+ :dateext => nil,
+ :dateformat => nil,
+ :delaycompress => nil,
+ :copytruncate => nil,
+ :compress => nil,
+ :missingok => true,
+ :maxage => 14,
+ :olddir => nil,
+ :rotate => 0
+}
+
announce(:hadoop, :jobtracker, {
+ :logs => {
+ :jobtracker => {
+ :glob => node[:hadoop][:log_dir] + '/hadoop-hadoop-jobtracker-*.log'
+ },
+ :jobs => logrotate_single_files.merge({
+ :glob => node[:hadoop][:log_dir] + '/job_*_conf.xml',
+ }),
+ :history => logrotate_single_files.merge({
+ :path => node[:hadoop][:log_dir],
+ :glob => node[:hadoop][:log_dir] + '/history/done/* ' \
+ + node[:hadoop][:log_dir] + '/history/done/.*.crc ',
+ })
+ },
:ports => {
:dash_port => { :port => node[:hadoop][:jobtracker][:dash_port],
:dashboard => true, :protocol => 'http' },
@@ -43,6 +43,9 @@
end
announce(:hadoop, :namenode, {
+ :logs => { :namenode => {
+ :glob => node[:hadoop][:log_dir] + '/hadoop-hadoop-namenode-*.log'
+ } },
:ports => {
:dash_port => { :port => node[:hadoop][:namenode][:dash_port], :protocol => 'http' },
:jmx_dash_port => { :port => node[:hadoop][:namenode][:jmx_dash_port],
@@ -29,6 +29,9 @@
end
announce(:hadoop, :secondarynn, {
+ :logs => { :secondarynn => {
+ :glob => node[:hadoop][:log_dir] + '/hadoop-hadoop-secondarynn-*.log'
+ } },
:ports => {
:dash_port => { :port => node[:hadoop][:secondarynn][:dash_port],
:dashboard => true, :protocol => 'http' },
@@ -25,6 +25,9 @@
hadoop_service(:tasktracker)
announce(:hadoop, :tasktracker, {
+ :logs => { :tasktracker => {
+ :glob => node[:hadoop][:log_dir] + '/hadoop-hadoop-tasktracker-*.log'
+ } },
:ports => {
:dash_port => { :port => node[:hadoop][:tasktracker][:dash_port],
:dashboard => true, :protocol => 'http' },
@@ -0,0 +1,98 @@
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=INFO,console
+hadoop.security.logger=INFO,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+#
+# Job Summary Appender
+#
+# Use following logger to send summary to separate file defined by
+# hadoop.mapreduce.jobsummary.log.file rolled daily:
+# hadoop.mapreduce.jobsummary.logger=INFO,JSA
+#
+hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
+hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshold=ALL
+
+#
+# File Appender (no rolling - logrotate handles this)
@mrflip
mrflip Dec 7, 2012 Member

... This caused Hadoop to bomb out because the runit scripts were still referring to a different logger strategy.

templates/default/sv-hadoop_tasktracker-run.erb:export HADOOP_ROOT_LOGGER="INFO,DRFA"

As the log amenities march of joy proceeds, make sure you look out for similar logger settings in each cookbook's runit scripts

+#
+
+log4j.appender.FA=org.apache.log4j.FileAppender
+log4j.appender.FA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+log4j.appender.FA.layout=org.apache.log4j.PatternLayout
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this
+#
+
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+#Security audit appender
+#
+hadoop.security.log.file=SecurityAuth.audit
+log4j.appender.FAS=org.apache.log4j.FileAppender
+log4j.appender.FAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
+
+log4j.appender.FAS.layout=org.apache.log4j.PatternLayout
+log4j.appender.FAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+#new logger
+log4j.category.SecurityLogger=${hadoop.security.logger}
+
+#
+# FSNamesystem Audit logging
+# All audit events are logged at INFO level
+#
+log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=WARN
+
+
+# Custom Logging levels
+
+#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
+#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
+#log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=DEBUG
+
+# Jets3t library
+log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
+
+#
+# Job Summary Appender
+#
+log4j.appender.JSA=org.apache.log4j.FileAppender
+log4j.appender.JSA.File=${hadoop.log.dir}/${hadoop.mapreduce.jobsummary.log.file}
+log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
+log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${hadoop.mapreduce.jobsummary.logger}
+log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false

0 comments on commit 83a44bd

Please sign in to comment.