MAPREDUCE-6998. Moving logging APIs over to slf4j in hadoop-mapreduce-client-jobclient. Contributed by Gergely Novák.
aajisaka committed Dec 7, 2017
1 parent 6cca5b3 commit d4cae97
Showing 98 changed files with 411 additions and 394 deletions.
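
The change is mechanical and identical across the files: drop the commons-logging Log/LogFactory pair, import org.slf4j.Logger and org.slf4j.LoggerFactory, and rebind the static LOG field. A minimal sketch of the pattern, using a hypothetical ExampleClass rather than a class from this commit:

    // Removed in each file:
    //   import org.apache.commons.logging.Log;
    //   import org.apache.commons.logging.LogFactory;
    //   private static final Log LOG = LogFactory.getLog(ExampleClass.class);

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleClass {
      // Added in each file: the slf4j equivalent, still keyed by class
      private static final Logger LOG =
          LoggerFactory.getLogger(ExampleClass.class);
    }

Call sites that pass a String (or a String plus a Throwable) compile unchanged against both APIs, which is consistent with the hunks below touching only imports and the LOG declaration.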
ClientCache.java
@@ -23,8 +23,6 @@
 import java.util.HashMap;
 import java.util.Map;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.mapreduce.JobID;
@@ -35,13 +33,15 @@
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 public class ClientCache {

   private final Configuration conf;
   private final ResourceMgrDelegate rm;

-  private static final Log LOG = LogFactory.getLog(ClientCache.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ClientCache.class);

   private Map<JobID, ClientServiceDelegate> cache =
       new HashMap<JobID, ClientServiceDelegate>();
ClientServiceDelegate.java
@@ -29,8 +29,6 @@
 import java.util.concurrent.atomic.AtomicBoolean;

 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.ipc.RPC;
@@ -79,11 +77,14 @@
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.apache.hadoop.yarn.security.client.ClientToAMTokenIdentifier;
 import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import com.google.common.annotations.VisibleForTesting;

 public class ClientServiceDelegate {
-  private static final Log LOG = LogFactory.getLog(ClientServiceDelegate.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ClientServiceDelegate.class);
   private static final String UNAVAILABLE = "N/A";

   // Caches for per-user NotRunningJobs
ResourceMgrDelegate.java
@@ -25,8 +25,6 @@
 import java.util.Map;
 import java.util.Set;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -78,11 +76,14 @@
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
 import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import com.google.common.annotations.VisibleForTesting;

 public class ResourceMgrDelegate extends YarnClient {
-  private static final Log LOG = LogFactory.getLog(ResourceMgrDelegate.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ResourceMgrDelegate.class);

   private YarnConfiguration conf;
   private ApplicationSubmissionContext application;
YARNRunner.java
@@ -36,8 +36,6 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
@@ -99,6 +97,8 @@
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.util.UnitsConversionUtil;
 import org.apache.hadoop.yarn.util.resource.ResourceUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import com.google.common.annotations.VisibleForTesting;

@@ -108,7 +108,7 @@
 @SuppressWarnings("unchecked")
 public class YARNRunner implements ClientProtocol {

-  private static final Log LOG = LogFactory.getLog(YARNRunner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(YARNRunner.class);

   private static final String RACK_GROUP = "rack";
   private static final String NODE_IF_RACK_GROUP = "node1";
ProbabilityModel.java
@@ -19,9 +19,9 @@

 import java.util.Random;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * This class is responsible for the decision of when a fault
@@ -42,7 +42,8 @@
  */
 public class ProbabilityModel {
   private static Random generator = new Random();
-  private static final Log LOG = LogFactory.getLog(ProbabilityModel.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ProbabilityModel.class);

   static final String FPROB_NAME = "fi.";
   private static final String ALL_PROBABILITIES = FPROB_NAME + "*";
AccumulatingReducer.java
@@ -20,10 +20,10 @@
 import java.io.IOException;
 import java.util.Iterator;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Reducer that accumulates values based on their type.
@@ -47,7 +47,9 @@ public class AccumulatingReducer extends MapReduceBase
   static final String VALUE_TYPE_LONG = "l:";
   static final String VALUE_TYPE_FLOAT = "f:";
   static final String VALUE_TYPE_STRING = "s:";
-  private static final Log LOG = LogFactory.getLog(AccumulatingReducer.class);
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AccumulatingReducer.class);

   protected String hostName;

DFSCIOTest.java
@@ -28,8 +28,6 @@
 import java.util.Date;
 import java.util.StringTokenizer;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.SequenceFile;
@@ -38,6 +36,8 @@
 import org.apache.hadoop.mapred.*;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Distributed i/o benchmark.
@@ -69,7 +69,7 @@
 @Ignore
 public class DFSCIOTest {
   // Constants
-  private static final Log LOG = LogFactory.getLog(DFSCIOTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DFSCIOTest.class);
   private static final int TEST_TYPE_READ = 0;
   private static final int TEST_TYPE_WRITE = 1;
   private static final int TEST_TYPE_CLEANUP = 2;
DistributedFSCheck.java
@@ -33,15 +33,15 @@

 import junit.framework.TestCase;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.*;
 import org.junit.Ignore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Distributed checkup of the file system consistency.
@@ -56,7 +56,8 @@
 @Ignore
 public class DistributedFSCheck extends TestCase {
   // Constants
-  private static final Log LOG = LogFactory.getLog(DistributedFSCheck.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DistributedFSCheck.class);
   private static final int TEST_TYPE_READ = 0;
   private static final int TEST_TYPE_CLEANUP = 2;
   private static final int DEFAULT_BUFFER_SIZE = 1000000;
JHLogAnalyzer.java
@@ -34,8 +34,6 @@
 import java.util.StringTokenizer;
 import java.util.HashMap;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.SequenceFile;
@@ -46,6 +44,8 @@
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Job History Log Analyzer.
@@ -144,7 +144,8 @@
  */
 @SuppressWarnings("deprecation")
 public class JHLogAnalyzer {
-  private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(JHLogAnalyzer.class);
   // Constants
   private static final String JHLA_ROOT_DIR =
       System.getProperty("test.build.data", "stats/JHLA");
TestDFSIO.java
@@ -33,8 +33,6 @@
 import java.util.Date;
 import java.util.Random;
 import java.util.StringTokenizer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -62,6 +60,8 @@
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Distributed i/o benchmark.
@@ -92,7 +92,7 @@
  */
 public class TestDFSIO implements Tool {
   // Constants
-  private static final Log LOG = LogFactory.getLog(TestDFSIO.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestDFSIO.class);
   private static final int DEFAULT_BUFFER_SIZE = 1000000;
   private static final String BASE_FILE_NAME = "test_io_";
   private static final String DEFAULT_RES_FILE_NAME = "TestDFSIO_results.log";
TestJHLA.java
@@ -23,19 +23,20 @@
 import java.io.OutputStreamWriter;
 import java.io.File;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Test Job History Log Analyzer.
  *
  * @see JHLogAnalyzer
  */
 public class TestJHLA {
-  private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(JHLogAnalyzer.class);
   private String historyLog = System.getProperty("test.build.data",
       "build/test/data") + "/history/test.log";

LoadGeneratorMR.java
@@ -26,8 +26,6 @@
 import java.util.EnumSet;
 import java.util.Iterator;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.CreateFlag;
@@ -50,6 +48,8 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /** The load generator is a tool for testing NameNode behavior under
  * different client loads.
@@ -63,7 +63,7 @@
  *
  */
 public class LoadGeneratorMR extends LoadGenerator {
-  public static final Log LOG = LogFactory.getLog(LoadGenerator.class);
+  public static final Logger LOG = LoggerFactory.getLogger(LoadGenerator.class);
   private static int numMapTasks = 1;
   private String mrOutDir;

AppendOp.java
@@ -24,12 +24,12 @@
 import java.util.List;
 import java.util.Random;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.slive.DataWriter.GenerateOutput;
 import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Operation which selects a random file and appends a random amount of bytes
@@ -41,7 +41,7 @@
  */
 class AppendOp extends Operation {

-  private static final Log LOG = LogFactory.getLog(AppendOp.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AppendOp.class);

   AppendOp(ConfigExtractor cfg, Random rnd) {
     super(AppendOp.class.getSimpleName(), cfg, rnd);
ConfigExtractor.java
@@ -22,20 +22,21 @@
 import java.util.HashMap;
 import java.util.Map;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.slive.Constants.OperationType;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Simple access layer onto of a configuration object that extracts the slive
  * specific configuration values needed for slive running
  */
 class ConfigExtractor {

-  private static final Log LOG = LogFactory.getLog(ConfigExtractor.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ConfigExtractor.class);

   private Configuration config;

CreateOp.java
@@ -22,13 +22,13 @@
 import java.util.List;
 import java.util.Random;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.slive.DataWriter.GenerateOutput;
 import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * Operation which selects a random file and a random number of bytes to create
@@ -42,7 +42,7 @@
  */
 class CreateOp extends Operation {

-  private static final Log LOG = LogFactory.getLog(CreateOp.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CreateOp.class);

   private static int DEF_IO_BUFFER_SIZE = 4096;

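
Beyond the one-for-one swap shown above, the slf4j API permits call-site cleanups that commons-logging could not express: {} placeholders defer message formatting until the level is known to be enabled. A sketch of that follow-up, not taken from this commit (CallSiteExample is a hypothetical class):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class CallSiteExample {
      private static final Logger LOG =
          LoggerFactory.getLogger(CallSiteExample.class);

      void report(long bytes) {
        // commons-logging style: explicit guard to avoid building the
        // message string when DEBUG is off
        if (LOG.isDebugEnabled()) {
          LOG.debug("read " + bytes + " bytes");
        }
        // slf4j style: the message is formatted only if DEBUG is enabled
        LOG.debug("read {} bytes", bytes);
      }
    }

One general caveat of such migrations: the slf4j Logger has no fatal() level, so any commons-logging fatal() call sites must be rewritten, typically as error().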
