Skip to content

Commit

Permalink
[HOTFIX] Replace Apache Commons Logging with CarbonData's Log4j-based LogServiceFactory
Browse files Browse the repository at this point in the history
Replace the Apache Commons Logging `Log`/`LogFactory` usages with CarbonData's `LogServiceFactory`, which returns an `org.apache.log4j.Logger`.

This closes apache#2999
  • Loading branch information
brijoobopanna authored and qiuchenjian committed Jun 14, 2019
1 parent 9b8ebb6 commit 02de389
Show file tree
Hide file tree
Showing 9 changed files with 33 additions and 27 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import java.util.concurrent.TimeUnit;

import org.apache.carbondata.common.annotations.InterfaceAudience;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.dev.BlockletSerializer;
import org.apache.carbondata.core.datamap.dev.DataMap;
Expand All @@ -50,8 +51,7 @@
import org.apache.carbondata.events.OperationContext;
import org.apache.carbondata.events.OperationEventListener;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.Logger;

/**
* Index at the table level, user can add any number of DataMap for one table, by
Expand All @@ -74,7 +74,8 @@ public final class TableDataMap extends OperationEventListener {

private SegmentPropertiesFetcher segmentPropertiesFetcher;

private static final Log LOG = LogFactory.getLog(TableDataMap.class);
private static final Logger LOG =
LogServiceFactory.getLogService(TableDataMap.class.getName());

/**
* It is called to initialize and load the required table datamap metadata.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,8 +54,6 @@
import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.carbondata.events.Event;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
Expand All @@ -67,7 +65,6 @@
public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
implements BlockletDetailsFetcher, SegmentPropertiesFetcher, CacheableDataMap {

private static final Log LOG = LogFactory.getLog(BlockletDataMapFactory.class);
private static final String NAME = "clustered.btree.blocklet";
/**
* variable for cache level BLOCKLET
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
import java.util.Set;
import java.util.TreeMap;

import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.Segment;
import org.apache.carbondata.core.datastore.block.SegmentProperties;
Expand All @@ -60,15 +61,15 @@
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
import org.apache.carbondata.core.util.path.CarbonTablePath;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.log4j.Logger;

public class BlockletDataMapUtil {

private static final Log LOG = LogFactory.getLog(BlockletDataMapUtil.class);
private static final Logger LOG =
LogServiceFactory.getLogService(BlockletDataMapUtil.class.getName());

public static Map<String, BlockMetaInfo> getBlockMetaInfoMap(
TableBlockIndexUniqueIdentifierWrapper identifierWrapper,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,15 +24,17 @@
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.carbondata.common.logging.LogServiceFactory;

import org.apache.log4j.Logger;

/**
* It provides methods to convert object to Base64 string and vice versa.
*/
public class ObjectSerializationUtil {

private static final Log LOG = LogFactory.getLog(ObjectSerializationUtil.class);
private static final Logger LOG =
LogServiceFactory.getLogService(ObjectSerializationUtil.class.getName());

/**
* Convert object to Base64 String
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import java.lang.reflect.Constructor;
import java.util.*;

import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonCommonConstantsInternal;
import org.apache.carbondata.core.datamap.DataMapChooser;
Expand Down Expand Up @@ -69,8 +70,6 @@
import org.apache.carbondata.hadoop.readsupport.impl.DictionaryDecodeReadSupport;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
Expand All @@ -82,6 +81,7 @@
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.log4j.Logger;

/**
* Base class for carbondata input format, there are two input format implementations:
Expand All @@ -100,7 +100,8 @@ public abstract class CarbonInputFormat<T> extends FileInputFormat<Void, T> {
"mapreduce.input.carboninputformat.validsegments";
// comma separated list of input files
private static final String ALTER_PARTITION_ID = "mapreduce.input.carboninputformat.partitionid";
private static final Log LOG = LogFactory.getLog(CarbonInputFormat.class);
private static final Logger LOG =
LogServiceFactory.getLogService(CarbonInputFormat.class.getName());
private static final String FILTER_PREDICATE =
"mapreduce.input.carboninputformat.filter.predicate";
private static final String COLUMN_PROJECTION = "mapreduce.input.carboninputformat.projection";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import java.util.List;
import java.util.Map;

import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datamap.DataMapStoreManager;
import org.apache.carbondata.core.datamap.Segment;
import org.apache.carbondata.core.datamap.TableDataMap;
Expand Down Expand Up @@ -60,8 +61,6 @@
import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.carbondata.hadoop.CarbonInputSplit;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
Expand All @@ -71,6 +70,7 @@
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.log4j.Logger;

/**
* InputFormat for reading carbondata files with table level metadata support,
Expand All @@ -86,7 +86,8 @@ public class CarbonTableInputFormat<T> extends CarbonInputFormat<T> {
// comma separated list of input files
public static final String INPUT_FILES = "mapreduce.input.carboninputformat.files";
private static final String ALTER_PARTITION_ID = "mapreduce.input.carboninputformat.partitionid";
private static final Log LOG = LogFactory.getLog(CarbonTableInputFormat.class);
private static final Logger LOG =
LogServiceFactory.getLogService(CarbonTableInputFormat.class.getName());
private static final String CARBON_READ_SUPPORT = "mapreduce.input.carboninputformat.readsupport";
private static final String CARBON_CONVERTER = "mapreduce.input.carboninputformat.converter";
private static final String CARBON_TRANSACTIONAL_TABLE =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonLoadOptionConstants;
import org.apache.carbondata.core.datastore.compression.CompressorFactory;
Expand All @@ -46,15 +47,14 @@
import org.apache.carbondata.processing.loading.model.CarbonLoadModel;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.log4j.Logger;

/**
* This is table level output format which writes the data to store in new segment. Each load
Expand Down Expand Up @@ -116,7 +116,8 @@ public class CarbonTableOutputFormat extends FileOutputFormat<NullWritable, Obje
*/
public static final String OPERATION_CONTEXT = "mapreduce.carbontable.operation.context";

private static final Log LOG = LogFactory.getLog(CarbonTableOutputFormat.class);
private static final Logger LOG =
LogServiceFactory.getLogService(CarbonTableOutputFormat.class.getName());

private CarbonOutputCommitter committer;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,8 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.carbondata.common.logging.LogServiceFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
Expand All @@ -39,6 +39,7 @@
import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.cli.SessionHandle;
import org.apache.hive.service.server.HiveServer2;
import org.apache.log4j.Logger;

/**
* Utility starting a local/embedded Hive org.apache.carbondata.hive.server for testing purposes.
Expand All @@ -48,7 +49,7 @@
*/
public class HiveEmbeddedServer2 {
private static final String SCRATCH_DIR = "/tmp/hive";
private static final Log log = LogFactory.getLog(Hive.class);
private static final Logger log = LogServiceFactory.getLogService(Hive.class.getName());
private HiveServer2 hiveServer;
private HiveConf config;
private int port;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,18 +21,19 @@
import java.util.concurrent.TimeUnit;

import org.apache.carbondata.common.CarbonIterator;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.util.CarbonProperties;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.Logger;

/**
* It is wrapper class to hold the rows in batches when record writer writes the data and allows
* to iterate on it during data load. It uses blocking queue to coordinate between read and write.
*/
public class CarbonOutputIteratorWrapper extends CarbonIterator<Object[]> {

private static final Log LOG = LogFactory.getLog(CarbonOutputIteratorWrapper.class);
private static final Logger LOG =
LogServiceFactory.getLogService(CarbonOutputIteratorWrapper.class.getName());

private boolean close;

Expand Down

0 comments on commit 02de389

Please sign in to comment.