Skip to content

Commit

Permalink
MAPREDUCE-6983. Moving logging APIs over to slf4j in hadoop-mapreduce-client-core. Contributed by Jinjiang Ling.
Browse files Browse the repository at this point in the history
  • Loading branch information
aajisaka committed Nov 2, 2017
1 parent 940ffe3 commit 178751e
Show file tree
Hide file tree
Showing 94 changed files with 352 additions and 318 deletions.
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -26,8 +26,6 @@
import java.util.List; import java.util.List;
import java.util.NoSuchElementException; import java.util.NoSuchElementException;


import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
Expand All @@ -45,6 +43,8 @@
import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.CryptoUtils; import org.apache.hadoop.mapreduce.CryptoUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/** /**
* <code>BackupStore</code> is an utility class that is used to support * <code>BackupStore</code> is an utility class that is used to support
Expand All @@ -60,7 +60,8 @@
@InterfaceStability.Unstable @InterfaceStability.Unstable
public class BackupStore<K,V> { public class BackupStore<K,V> {


private static final Log LOG = LogFactory.getLog(BackupStore.class.getName()); private static final Logger LOG =
LoggerFactory.getLogger(BackupStore.class.getName());
private static final int MAX_VINT_SIZE = 9; private static final int MAX_VINT_SIZE = 9;
private static final int EOF_MARKER_SIZE = 2 * MAX_VINT_SIZE; private static final int EOF_MARKER_SIZE = 2 * MAX_VINT_SIZE;
private final TaskAttemptID tid; private final TaskAttemptID tid;
Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -21,16 +21,16 @@
import java.io.IOException; import java.io.IOException;
import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.LinkedBlockingQueue;


import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;


import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;


class CleanupQueue { class CleanupQueue {


public static final Log LOG = public static final Logger LOG =
LogFactory.getLog(CleanupQueue.class); LoggerFactory.getLogger(CleanupQueue.class);


private static PathCleanupThread cleanupThread; private static PathCleanupThread cleanupThread;


Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
import java.util.Iterator; import java.util.Iterator;


import org.apache.commons.collections.IteratorUtils; import org.apache.commons.collections.IteratorUtils;
import org.apache.commons.logging.Log;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.FileSystemCounter; import org.apache.hadoop.mapreduce.FileSystemCounter;
Expand All @@ -44,6 +43,7 @@
import org.apache.hadoop.mapreduce.counters.Limits; import org.apache.hadoop.mapreduce.counters.Limits;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter; import org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormatCounter; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormatCounter;
import org.slf4j.Logger;


import com.google.common.collect.Iterators; import com.google.common.collect.Iterators;


Expand Down Expand Up @@ -596,7 +596,7 @@ public static Counters sum(Counters a, Counters b) {
* Logs the current counter values. * Logs the current counter values.
* @param log The log to use. * @param log The log to use.
*/ */
public void log(Log log) { public void log(Logger log) {
log.info("Counters: " + size()); log.info("Counters: " + size());
for(Group group: this) { for(Group group: this) {
log.info(" " + group.getDisplayName()); log.info(" " + group.getDisplayName());
Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@
import org.apache.hadoop.mapreduce.QueueState; import org.apache.hadoop.mapreduce.QueueState;
import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.authorize.AccessControlList;
import static org.apache.hadoop.mapred.QueueManager.*; import static org.apache.hadoop.mapred.QueueManager.*;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;


import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
Expand All @@ -37,8 +37,8 @@
* *
*/ */
class DeprecatedQueueConfigurationParser extends QueueConfigurationParser { class DeprecatedQueueConfigurationParser extends QueueConfigurationParser {
private static final Log LOG = private static final Logger LOG =
LogFactory.getLog(DeprecatedQueueConfigurationParser.class); LoggerFactory.getLogger(DeprecatedQueueConfigurationParser.class);
static final String MAPRED_QUEUE_NAMES_KEY = "mapred.queue.names"; static final String MAPRED_QUEUE_NAMES_KEY = "mapred.queue.names";


DeprecatedQueueConfigurationParser(Configuration conf) { DeprecatedQueueConfigurationParser(Configuration conf) {
Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -30,8 +30,6 @@
import java.util.Set; import java.util.Set;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;


import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.BlockLocation;
Expand All @@ -50,6 +48,8 @@
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;


import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/** /**
* A base class for file-based {@link InputFormat}. * A base class for file-based {@link InputFormat}.
Expand All @@ -68,8 +68,8 @@
@InterfaceStability.Stable @InterfaceStability.Stable
public abstract class FileInputFormat<K, V> implements InputFormat<K, V> { public abstract class FileInputFormat<K, V> implements InputFormat<K, V> {


public static final Log LOG = public static final Logger LOG =
LogFactory.getLog(FileInputFormat.class); LoggerFactory.getLogger(FileInputFormat.class);


@Deprecated @Deprecated
public enum Counter { public enum Counter {
Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -20,12 +20,12 @@


import java.io.IOException; import java.io.IOException;


import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/** An {@link OutputCommitter} that commits files specified /** An {@link OutputCommitter} that commits files specified
* in job output directory i.e. ${mapreduce.output.fileoutputformat.outputdir}. * in job output directory i.e. ${mapreduce.output.fileoutputformat.outputdir}.
Expand All @@ -34,7 +34,7 @@
@InterfaceStability.Stable @InterfaceStability.Stable
public class FileOutputCommitter extends OutputCommitter { public class FileOutputCommitter extends OutputCommitter {


public static final Log LOG = LogFactory.getLog( public static final Logger LOG = LoggerFactory.getLogger(
"org.apache.hadoop.mapred.FileOutputCommitter"); "org.apache.hadoop.mapred.FileOutputCommitter");


/** /**
Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -43,8 +43,8 @@
import org.apache.hadoop.io.serializer.SerializationFactory; import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer; import org.apache.hadoop.io.serializer.Serializer;


import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;


/** /**
* <code>IFile</code> is the simple &lt;key-len, value-len, key, value&gt; format * <code>IFile</code> is the simple &lt;key-len, value-len, key, value&gt; format
Expand All @@ -56,7 +56,7 @@
@InterfaceAudience.Private @InterfaceAudience.Private
@InterfaceStability.Unstable @InterfaceStability.Unstable
public class IFile { public class IFile {
private static final Log LOG = LogFactory.getLog(IFile.class); private static final Logger LOG = LoggerFactory.getLogger(IFile.class);
public static final int EOF_MARKER = -1; // End of File Marker public static final int EOF_MARKER = -1; // End of File Marker


/** /**
Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -25,8 +25,6 @@
import java.io.InputStream; import java.io.InputStream;


import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.ChecksumException; import org.apache.hadoop.fs.ChecksumException;
Expand All @@ -36,6 +34,8 @@
import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest; import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
import org.apache.hadoop.mapreduce.MRConfig; import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.util.DataChecksum; import org.apache.hadoop.util.DataChecksum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* A checksum input stream, used for IFiles. * A checksum input stream, used for IFiles.
* Used to validate the checksum of files created by {@link IFileOutputStream}. * Used to validate the checksum of files created by {@link IFileOutputStream}.
Expand All @@ -59,7 +59,8 @@ public class IFileInputStream extends InputStream {
private boolean readahead; private boolean readahead;
private int readaheadLength; private int readaheadLength;


public static final Log LOG = LogFactory.getLog(IFileInputStream.class); public static final Logger LOG =
LoggerFactory.getLogger(IFileInputStream.class);


private boolean disableChecksumValidation = false; private boolean disableChecksumValidation = false;


Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -22,17 +22,17 @@
import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;


import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig; import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


class IndexCache { class IndexCache {


private final JobConf conf; private final JobConf conf;
private final int totalMemoryAllowed; private final int totalMemoryAllowed;
private AtomicInteger totalMemoryUsed = new AtomicInteger(); private AtomicInteger totalMemoryUsed = new AtomicInteger();
private static final Log LOG = LogFactory.getLog(IndexCache.class); private static final Logger LOG = LoggerFactory.getLogger(IndexCache.class);


private final ConcurrentHashMap<String,IndexInformation> cache = private final ConcurrentHashMap<String,IndexInformation> cache =
new ConcurrentHashMap<String,IndexInformation>(); new ConcurrentHashMap<String,IndexInformation>();
Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -20,20 +20,20 @@
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;


import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobACL; import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.MRConfig; import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.authorize.AccessControlList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


@InterfaceAudience.Private @InterfaceAudience.Private
public class JobACLsManager { public class JobACLsManager {


static final Log LOG = LogFactory.getLog(JobACLsManager.class); static final Logger LOG = LoggerFactory.getLogger(JobACLsManager.class);
Configuration conf; Configuration conf;
private final AccessControlList adminAcl; private final AccessControlList adminAcl;


Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -24,8 +24,6 @@
import java.util.regex.Pattern; import java.util.regex.Pattern;


import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
Expand Down Expand Up @@ -53,6 +51,8 @@
import org.apache.hadoop.util.ClassUtil; import org.apache.hadoop.util.ClassUtil;
import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.Tool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/** /**
* A map/reduce job configuration. * A map/reduce job configuration.
Expand Down Expand Up @@ -115,7 +115,7 @@
@InterfaceStability.Stable @InterfaceStability.Stable
public class JobConf extends Configuration { public class JobConf extends Configuration {


private static final Log LOG = LogFactory.getLog(JobConf.class); private static final Logger LOG = LoggerFactory.getLogger(JobConf.class);
private static final Pattern JAVA_OPTS_XMX_PATTERN = private static final Pattern JAVA_OPTS_XMX_PATTERN =
Pattern.compile(".*(?:^|\\s)-Xmx(\\d+)([gGmMkK]?)(?:$|\\s).*"); Pattern.compile(".*(?:^|\\s)-Xmx(\\d+)([gGmMkK]?)(?:$|\\s).*");


Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -24,20 +24,20 @@
import java.util.concurrent.Delayed; import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;


import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.params.ClientPNames; import org.apache.http.client.params.ClientPNames;
import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.CoreConnectionPNames; import org.apache.http.params.CoreConnectionPNames;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


@InterfaceAudience.Private @InterfaceAudience.Private
@InterfaceStability.Unstable @InterfaceStability.Unstable
public class JobEndNotifier { public class JobEndNotifier {
private static final Log LOG = private static final Logger LOG =
LogFactory.getLog(JobEndNotifier.class.getName()); LoggerFactory.getLogger(JobEndNotifier.class.getName());






Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -24,13 +24,13 @@


import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.Writable;
import org.apache.commons.logging.Log; import org.slf4j.Logger;
import org.apache.commons.logging.LogFactory; import org.slf4j.LoggerFactory;


class JvmContext implements Writable { class JvmContext implements Writable {


public static final Log LOG = public static final Logger LOG =
LogFactory.getLog(JvmContext.class); LoggerFactory.getLogger(JvmContext.class);


JVMId jvmId; JVMId jvmId;
String pid; String pid;
Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -39,17 +39,17 @@
import org.apache.hadoop.mapreduce.lib.input.CompressedSplitLineReader; import org.apache.hadoop.mapreduce.lib.input.CompressedSplitLineReader;
import org.apache.hadoop.mapreduce.lib.input.SplitLineReader; import org.apache.hadoop.mapreduce.lib.input.SplitLineReader;
import org.apache.hadoop.mapreduce.lib.input.UncompressedSplitLineReader; import org.apache.hadoop.mapreduce.lib.input.UncompressedSplitLineReader;
import org.apache.commons.logging.LogFactory; import org.slf4j.Logger;
import org.apache.commons.logging.Log; import org.slf4j.LoggerFactory;


/** /**
* Treats keys as offset in file and value as line. * Treats keys as offset in file and value as line.
*/ */
@InterfaceAudience.LimitedPrivate({"MapReduce", "Pig"}) @InterfaceAudience.LimitedPrivate({"MapReduce", "Pig"})
@InterfaceStability.Unstable @InterfaceStability.Unstable
public class LineRecordReader implements RecordReader<LongWritable, Text> { public class LineRecordReader implements RecordReader<LongWritable, Text> {
private static final Log LOG private static final Logger LOG =
= LogFactory.getLog(LineRecordReader.class.getName()); LoggerFactory.getLogger(LineRecordReader.class.getName());


private CompressionCodecFactory compressionCodecs = null; private CompressionCodecFactory compressionCodecs = null;
private long start; private long start;
Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -32,8 +32,6 @@
import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantLock;


import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
Expand Down Expand Up @@ -74,6 +72,8 @@
import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringInterner; import org.apache.hadoop.util.StringInterner;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/** A Map task. */ /** A Map task. */
@InterfaceAudience.LimitedPrivate({"MapReduce"}) @InterfaceAudience.LimitedPrivate({"MapReduce"})
Expand All @@ -87,7 +87,8 @@ public class MapTask extends Task {
private TaskSplitIndex splitMetaInfo = new TaskSplitIndex(); private TaskSplitIndex splitMetaInfo = new TaskSplitIndex();
private final static int APPROX_HEADER_LENGTH = 150; private final static int APPROX_HEADER_LENGTH = 150;


private static final Log LOG = LogFactory.getLog(MapTask.class.getName()); private static final Logger LOG =
LoggerFactory.getLogger(MapTask.class.getName());


private Progress mapPhase; private Progress mapPhase;
private Progress sortPhase; private Progress sortPhase;
Expand Down
Original file line number Original file line Diff line number Diff line change
Expand Up @@ -23,8 +23,6 @@
import java.util.Comparator; import java.util.Comparator;
import java.util.List; import java.util.List;


import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
Expand All @@ -45,6 +43,8 @@
import org.apache.hadoop.util.PriorityQueue; import org.apache.hadoop.util.PriorityQueue;
import org.apache.hadoop.util.Progress; import org.apache.hadoop.util.Progress;
import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.Progressable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/** /**
* Merger is an utility class used by the Map and Reduce tasks for merging * Merger is an utility class used by the Map and Reduce tasks for merging
Expand All @@ -53,7 +53,7 @@
@InterfaceAudience.Private @InterfaceAudience.Private
@InterfaceStability.Unstable @InterfaceStability.Unstable
public class Merger { public class Merger {
private static final Log LOG = LogFactory.getLog(Merger.class); private static final Logger LOG = LoggerFactory.getLogger(Merger.class);


// Local directories // Local directories
private static LocalDirAllocator lDirAlloc = private static LocalDirAllocator lDirAlloc =
Expand Down
Loading

0 comments on commit 178751e

Please sign in to comment.