Skip to content

Commit

Permalink
Send Lucene's IndexWriter infoStream messages to Logger lucene.iw, level=TRACE

Browse files Browse the repository at this point in the history

Lucene's IndexWriter logs many low-level details to its infoStream
which can be helpful for diagnosing; with this change, if you enable
TRACE logging for the "lucene.iw" logger name then IndexWriter's
infoStream output will be captured and can later be scrutinized
e.g. using https://code.google.com/a/apache-extras.org/p/luceneutil/source/browse/src/python/iwLogToGraphs.py
to generate graphs like http://people.apache.org/~mikemccand/lucenebench/iw.html

Closes #5891
  • Loading branch information
mikemccand committed Apr 24, 2014
1 parent 9a68e60 commit ba73877
Show file tree
Hide file tree
Showing 5 changed files with 84 additions and 57 deletions.
5 changes: 5 additions & 0 deletions src/main/java/org/elasticsearch/common/logging/Loggers.java
Expand Up @@ -62,6 +62,11 @@ public static ESLogger getLogger(Class clazz, Settings settings, ShardId shardId
return getLogger(clazz, settings, shardId.index(), Lists.asList(Integer.toString(shardId.id()), prefixes).toArray(new String[0]));
}

/** Just like {@link #getLogger(Class,Settings,ShardId,String...)} but String loggerName instead of Class. */
public static ESLogger getLogger(String loggerName, Settings settings, ShardId shardId, String... prefixes) {
    // Prepend the index name and shard id so every message is tagged with its shard.
    String[] allPrefixes =
            Lists.asList(shardId.index().name(), Integer.toString(shardId.id()), prefixes)
                 .toArray(new String[0]);
    return getLogger(loggerName, settings, allPrefixes);
}

/** Returns a logger for {@code clazz} whose messages are prefixed with the given index name. */
public static ESLogger getLogger(Class clazz, Settings settings, Index index, String... prefixes) {
    String[] allPrefixes = Lists.asList(SPACE, index.name(), prefixes).toArray(new String[0]);
    return getLogger(clazz, settings, allPrefixes);
}
Expand Down
73 changes: 17 additions & 56 deletions src/main/java/org/elasticsearch/common/lucene/LoggerInfoStream.java
Expand Up @@ -19,72 +19,33 @@

package org.elasticsearch.common.lucene;

import org.elasticsearch.common.io.Streams;
import org.apache.lucene.util.InfoStream;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.ShardId;

import com.google.common.base.Charsets;
/** An InfoStream (for Lucene's IndexWriter) that redirects
* messages to Logger.trace. */

import java.io.OutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;

/**
* A {@link java.io.PrintStream} that logs each {@link #println(String)} into a logger
* under trace level.
* <p/>
* <p>Provides also factory methods that basically append to the logger name provide the
* {@link #SUFFIX}.
*
*
*/
public class LoggerInfoStream extends PrintStream {

public static final String SUFFIX = ".lucene";
public final class LoggerInfoStream extends InfoStream {
private final ESLogger logger;

/**
* Creates a new {@link LoggerInfoStream} based on the provided logger
* by appending to its <tt>NAME</tt> the {@link #SUFFIX}.
*/
public static LoggerInfoStream getInfoStream(ESLogger logger) {
try {
return new LoggerInfoStream(Loggers.getLogger(logger, SUFFIX));
} catch (UnsupportedEncodingException e) {
// no UTF-8 ?
throw new RuntimeException(e);
}
/**
 * Creates an info stream that routes IndexWriter messages to the
 * "lucene.iw" logger, tagged with the given shard.
 */
public LoggerInfoStream(Settings settings, ShardId shardId) {
    this.logger = Loggers.getLogger("lucene.iw", settings, shardId);
}

/**
* Creates a new {@link LoggerInfoStream} based on the provided name
* by appending to it the {@link #SUFFIX}.
*/
public static LoggerInfoStream getInfoStream(String name) {
try {
return new LoggerInfoStream(Loggers.getLogger(name + SUFFIX));
} catch (UnsupportedEncodingException e) {
// no UTF-8 ?
throw new RuntimeException(e);
}
/**
 * Forwards one IndexWriter infoStream message to the trace log,
 * prefixed with the Lucene component ("IW", "DW", ...) that emitted it.
 */
@Override
public void message(String component, String message) {
    logger.trace("{}: {}", component, message);
}

private final ESLogger logger;

/**
* Constucts a new instance based on the provided logger. Will output
* each {@link #println(String)} operation as a trace level.
* @throws UnsupportedEncodingException
*/
public LoggerInfoStream(ESLogger logger) throws UnsupportedEncodingException {
super((OutputStream) null, false, Charsets.UTF_8.name());
this.logger = logger;

/**
 * Messages are only produced when TRACE is enabled for this logger;
 * the "TP" component is always suppressed regardless of level.
 */
@Override
public boolean isEnabled(String component) {
    // TP is a special "test point" component; we don't want
    // to log it:
    return logger.isTraceEnabled() && component.equals("TP") == false;
}

/**
* Override only the method Lucene actually uses.
*/
@Override
public void println(String x) {
logger.trace(x);
/** Nothing to release: messages go straight to the logger, so close is a no-op. */
@Override
public void close() {
}
}
Expand Up @@ -40,6 +40,7 @@
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.HashedBytesRef;
import org.elasticsearch.common.lucene.LoggerInfoStream;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.SegmentReaderUtils;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
Expand Down Expand Up @@ -1228,6 +1229,7 @@ private IndexWriter createWriter() throws IOException {
IndexWriterConfig config = new IndexWriterConfig(Lucene.VERSION, analysisService.defaultIndexAnalyzer());
config.setOpenMode(create ? IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND);
config.setIndexDeletionPolicy(deletionPolicy);
config.setInfoStream(new LoggerInfoStream(indexSettings, shardId));
config.setMergeScheduler(mergeScheduler.newMergeScheduler());
MergePolicy mergePolicy = mergePolicyProvider.newMergePolicy();
// Give us the opportunity to upgrade old segments while performing
Expand Down
Expand Up @@ -32,7 +32,7 @@ public class LuceneTest {


/*
* simple test that ensures that we bumb the version on Upgrade
* simple test that ensures that we bump the version on Upgrade
*/
@Test
public void testVersion() {
Expand Down
Expand Up @@ -19,6 +19,11 @@

package org.elasticsearch.index.engine.internal;

import org.apache.log4j.Appender;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
Expand Down Expand Up @@ -1133,6 +1138,60 @@ public void testCreatedFlagAfterFlush() {
assertTrue(index.created());
}

/**
 * Log4j appender that records whether IndexWriter infoStream output
 * for shard [index][1] was routed to the "lucene.iw" logger at TRACE.
 */
private static class MockAppender extends AppenderSkeleton {
    public boolean sawIndexWriterMessage;

    @Override
    protected void append(LoggingEvent event) {
        // Only TRACE events on the "lucene.iw" logger are of interest.
        if (event.getLevel() != Level.TRACE || !event.getLoggerName().equals("lucene.iw")) {
            return;
        }
        String rendered = event.getMessage().toString();
        if (rendered.contains("IW: apply all deletes during flush")
                && rendered.contains("[index][1] ")) {
            sawIndexWriterMessage = true;
        }
    }

    @Override
    public boolean requiresLayout() {
        return false;
    }

    @Override
    public void close() {
        // Nothing to release.
    }
}

// #5891: make sure IndexWriter's infoStream output is
// sent to lucene.iw with log level TRACE:

// #5891: verifies that IndexWriter's infoStream output is forwarded to the
// "lucene.iw" logger at TRACE level, and only at TRACE level.
@Test
public void testIndexWriterInfoStream() {
MockAppender mockAppender = new MockAppender();

// Attach the spy appender to the root logger; save the level so the
// global logging configuration can be restored afterwards.
Logger rootLogger = Logger.getRootLogger();
Level savedLevel = rootLogger.getLevel();
rootLogger.addAppender(mockAppender);
rootLogger.setLevel(Level.DEBUG);

try {
// First, with DEBUG, which should NOT log IndexWriter output:
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_1, false);
engine.create(new Engine.Create(null, newUid("1"), doc));
// Flush triggers the "apply all deletes during flush" infoStream message.
engine.flush(new Engine.Flush());
assertFalse(mockAppender.sawIndexWriterMessage);

// Again, with TRACE, which should log IndexWriter output:
rootLogger.setLevel(Level.TRACE);
engine.create(new Engine.Create(null, newUid("2"), doc));
engine.flush(new Engine.Flush());
assertTrue(mockAppender.sawIndexWriterMessage);

} finally {
// Always restore global log state so other tests are unaffected.
rootLogger.removeAppender(mockAppender);
rootLogger.setLevel(savedLevel);
}
}

/** Builds the _uid term that addresses a document by its id. */
protected Term newUid(String id) {
    Term uid = new Term("_uid", id);
    return uid;
}
Expand Down

0 comments on commit ba73877

Please sign in to comment.