Skip to content
This repository has been archived by the owner on Jan 29, 2022. It is now read-only.

Commit

Permalink
Emit debug-level log messages only when debug logging is enabled (guard LOG.debug calls with LOG.isDebugEnabled() to avoid string-concatenation cost).
Browse files Browse the repository at this point in the history
  • Loading branch information
Luke Lovett committed Feb 3, 2015
1 parent 435ec5e commit 73ca504
Show file tree
Hide file tree
Showing 8 changed files with 38 additions and 16 deletions.
4 changes: 3 additions & 1 deletion core/src/main/java/com/mongodb/hadoop/MongoInputFormat.java
Expand Up @@ -54,7 +54,9 @@ public List<InputSplit> getSplits(final JobContext context) throws IOException {
final Configuration conf = context.getConfiguration();
try {
MongoSplitter splitterImpl = MongoSplitterFactory.getSplitter(conf);
LOG.debug("Using " + splitterImpl.toString() + " to calculate splits.");
if (LOG.isDebugEnabled()) {
LOG.debug("Using " + splitterImpl.toString() + " to calculate splits.");
}
return splitterImpl.calculateSplits();
} catch (SplitFailedException spfe) {
throw new IOException(spfe);
Expand Down
Expand Up @@ -84,7 +84,7 @@ public boolean next(final NullWritable key, final BSONWritable value) throws IOE
value.setDoc(bo);

numDocsRead++;
if (numDocsRead % 5000 == 0) {
if (LOG.isDebugEnabled() && numDocsRead % 5000 == 0) {
LOG.debug(String.format("read %d docs from %s at %d", numDocsRead, fileSplit, in.getPos()));
}
return true;
Expand Down
4 changes: 3 additions & 1 deletion core/src/main/java/com/mongodb/hadoop/util/BSONLoader.java
Expand Up @@ -65,7 +65,9 @@ private synchronized void checkHeader() {
nextHdr = l;
hasMore.set(true);
} catch (Exception e) {
LOG.debug("Failed to get next header: " + e, e);
if (LOG.isDebugEnabled()) {
LOG.debug("Failed to get next header: " + e, e);
}
hasMore.set(false);
try {
input.close();
Expand Down
12 changes: 8 additions & 4 deletions core/src/main/java/com/mongodb/hadoop/util/MongoTool.java
Expand Up @@ -115,8 +115,10 @@ private int runMapredJob(final Configuration conf) {
job.setJarByClass(getClass());
final Class<? extends org.apache.hadoop.mapred.Mapper> mapper = MapredMongoConfigUtil.getMapper(conf);

LOG.debug("Mapper Class: " + mapper);
LOG.debug("Input URI: " + conf.get(MapredMongoConfigUtil.INPUT_URI));
if (LOG.isDebugEnabled()) {
LOG.debug("Mapper Class: " + mapper);
LOG.debug("Input URI: " + conf.get(MapredMongoConfigUtil.INPUT_URI));
}
job.setMapperClass(mapper);
Class<? extends org.apache.hadoop.mapred.Reducer> combiner = MapredMongoConfigUtil.getCombiner(conf);
if (combiner != null) {
Expand Down Expand Up @@ -179,8 +181,10 @@ private int runMapReduceJob(final Configuration conf) throws IOException {
job.setJarByClass(getClass());
final Class<? extends Mapper> mapper = MongoConfigUtil.getMapper(conf);

LOG.debug("Mapper Class: " + mapper);
LOG.debug("Input URI: " + conf.get(MongoConfigUtil.INPUT_URI));
if (LOG.isDebugEnabled()) {
LOG.debug("Mapper Class: " + mapper);
LOG.debug("Input URI: " + conf.get(MongoConfigUtil.INPUT_URI));
}
job.setMapperClass(mapper);
Class<? extends Reducer> combiner = MongoConfigUtil.getCombiner(conf);
if (combiner != null) {
Expand Down
Expand Up @@ -32,8 +32,10 @@ public void reduce(final Text pKey, final Iterable<IntWritable> pValues, final C

BasicBSONObject query = new BasicBSONObject("devices", new ObjectId(pKey.toString()));
BasicBSONObject update = new BasicBSONObject("$inc", new BasicBSONObject("logs_count", count));
LOG.debug("query: " + query);
LOG.debug("update: " + update);
if (LOG.isDebugEnabled()) {
LOG.debug("query: " + query);
LOG.debug("update: " + update);
}
pContext.write(null, new MongoUpdateWritable(query, update, true, false));
}

Expand All @@ -47,8 +49,10 @@ public void reduce(final Text key, final Iterator<IntWritable> values, final Out

BasicBSONObject query = new BasicBSONObject("devices", new ObjectId(key.toString()));
BasicBSONObject update = new BasicBSONObject("$inc", new BasicBSONObject("logs_count", count));
LOG.debug("query: " + query);
LOG.debug("update: " + update);
if (LOG.isDebugEnabled()) {
LOG.debug("query: " + query);
LOG.debug("update: " + update);
}
output.collect(null, new MongoUpdateWritable(query, update, true, false));
}

Expand Down
Expand Up @@ -51,7 +51,9 @@ public void reduce(final IntWritable pKey, final Iterable<DoubleWritable> pValue

final double avg = sum / count;

LOG.debug("Average 10 Year Treasury for " + pKey.get() + " was " + avg);
if (LOG.isDebugEnabled()) {
LOG.debug("Average 10 Year Treasury for " + pKey.get() + " was " + avg);
}

BasicBSONObject output = new BasicBSONObject();
output.put("count", count);
Expand All @@ -73,7 +75,9 @@ public void reduce(final IntWritable key, final Iterator<DoubleWritable> values,

final double avg = sum / count;

LOG.debug("Average 10 Year Treasury for " + key.get() + " was " + avg);
if (LOG.isDebugEnabled()) {
LOG.debug("Average 10 Year Treasury for " + key.get() + " was " + avg);
}

BasicBSONObject bsonObject = new BasicBSONObject();
bsonObject.put("count", count);
Expand Down
Expand Up @@ -51,7 +51,9 @@ public void reduce(final IntWritable pKey,

final double avg = sum / count;

LOG.debug("Average 10 Year Treasury for " + pKey.get() + " was " + avg);
if (LOG.isDebugEnabled()) {
LOG.debug("Average 10 Year Treasury for " + pKey.get() + " was " + avg);
}

BasicBSONObject query = new BasicBSONObject("_id", pKey.get());
BasicBSONObject modifiers = new BasicBSONObject();
Expand Down
8 changes: 6 additions & 2 deletions pig/src/main/java/com/mongodb/hadoop/pig/MongoStorage.java
Expand Up @@ -99,15 +99,19 @@ public void storeStatistics(final ResourceStatistics stats, final String locatio


public void putNext(final Tuple tuple) throws IOException {
LOG.debug("writing " + tuple.toString());
if (LOG.isDebugEnabled()) {
LOG.debug("writing " + tuple.toString());
}
final BasicDBObjectBuilder builder = BasicDBObjectBuilder.start();

ResourceFieldSchema[] fields = this.schema.getFields();
for (int i = 0; i < fields.length; i++) {
writeField(builder, fields[i], tuple.get(i));
}

LOG.debug("writing out:" + builder.get().toString());
if (LOG.isDebugEnabled()) {
LOG.debug("writing out:" + builder.get().toString());
}
//noinspection unchecked
recordWriter.write(null, builder.get());
}
Expand Down

0 comments on commit 73ca504

Please sign in to comment.