Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Separately log file deletions #8662

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
Expand Up @@ -19,30 +19,47 @@

package org.elasticsearch.common.lucene;

import java.util.HashMap;

import org.apache.lucene.util.InfoStream;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.ShardId;

/** An InfoStream (for Lucene's IndexWriter) that redirects
 * messages to the "lucene.iw" and "lucene.iw.ifd" loggers at trace level. */

public final class LoggerInfoStream extends InfoStream {
/** Used for component-specific logging: */

/** Logger for everything */
private final ESLogger logger;

/** Logger for IndexFileDeleter */
private final ESLogger ifdLogger;

public LoggerInfoStream(Settings settings, ShardId shardId) {
    // Deletion-related output (IndexFileDeleter, component "IFD") gets its own
    // child logger so it can be enabled independently of the rest of
    // IndexWriter's trace output.
    ifdLogger = Loggers.getLogger("lucene.iw.ifd", settings, shardId);
    logger = Loggers.getLogger("lucene.iw", settings, shardId);
}

/**
 * Forwards an IndexWriter infoStream message to the trace logger chosen for
 * its component (IFD messages go to the dedicated "lucene.iw.ifd" logger).
 *
 * Note: the stray duplicate call to the shared {@code logger} (a leftover of
 * the old implementation) is removed so each message is logged exactly once.
 */
public void message(String component, String message) {
    getLogger(component).trace("{} {}: {}", Thread.currentThread().getName(), component, message);
}

/**
 * Returns true if messages for this component should be emitted: the
 * component's logger must have trace enabled, and the component must not be
 * Lucene's special "TP" (test point) component, which we never want to log.
 *
 * Note: the duplicated, unreachable second {@code return} (a leftover of the
 * old implementation in the diff) is removed — Java rejects unreachable code.
 */
public boolean isEnabled(String component) {
    return getLogger(component).isTraceEnabled() && component.equals("TP") == false;
}

/** Picks the logger for a component: IndexFileDeleter ("IFD") messages are
 * routed to the dedicated deletions logger, everything else to the shared one. */
private ESLogger getLogger(String component) {
    return component.equals("IFD") ? ifdLogger : logger;
}

@Override
Expand Down
Expand Up @@ -806,6 +806,7 @@ public void restore() throws IOException {
for (String storeFile : store.directory().listAll()) {
if (!Store.isChecksum(storeFile) && !snapshot.containPhysicalIndexFile(storeFile)) {
try {
store.logDeleteFile("restore", storeFile);
store.directory().deleteFile(storeFile);
} catch (IOException e) {
// ignore
Expand Down
25 changes: 24 additions & 1 deletion src/main/java/org/elasticsearch/index/store/Store.java
Expand Up @@ -539,8 +539,11 @@ private static final void failIfCorrupted(Directory directory, ShardId shardId)
*/
public final class StoreDirectory extends FilterDirectory {

public final ESLogger deletesLogger;

// Wraps the delegate directory and sets up a shard-scoped logger dedicated to
// file deletions, so deletes can be traced independently of other store output.
StoreDirectory(Directory delegateDirectory) throws IOException {
super(delegateDirectory);
// "index.store.deletes" is its own logging component; enabling trace on it
// logs every file deletion that goes through this directory.
deletesLogger = Loggers.getLogger("index.store.deletes", indexSettings, shardId);
}

public ShardId shardId() {
Expand All @@ -553,6 +556,12 @@ public void close() throws IOException {
assert false : "Nobody should close this directory except of the Store itself";
}

// Intercepts every deletion so it is traced (to index.store.deletes) BEFORE
// being delegated to the wrapped directory — order matters: logging after a
// failed delete would never happen.
@Override
public void deleteFile(String name) throws IOException {
logDeleteFile("StoreDirectory.deleteFile", name);
super.deleteFile(name);
}

// Actually closes the delegate directory. The public close() is blocked (it
// asserts that nobody but the Store itself closes this directory), so the
// Store uses this private entry point when it really shuts down.
private void innerClose() throws IOException {
super.close();
}
Expand All @@ -563,8 +572,22 @@ public String toString() {
}
}

/** Log that we are about to delete this file, to the index.store.deletes component. */
public void logDeleteFile(String message, String fileName) {
// Delegates to the static variant using this store's own directory.
logDeleteFile(directory(), message, fileName);
}

/** Log that we are about to delete this file, to the index.store.deletes component. */
/** Log that we are about to delete this file, to the index.store.deletes component. */
public static void logDeleteFile(Directory dir, String message, String fileName) {
    assert dir instanceof StoreDirectory;
    if (dir instanceof StoreDirectory == false) {
        // A non-StoreDirectory has no deletes logger; in production (asserts
        // disabled) the deletion simply goes untraced.
        return;
    }
    ((StoreDirectory) dir).deletesLogger.trace("{}: delete file {}", message, fileName);
}

/**
Represents a snapshot of the current directory built from the latest Lucene commit.
Only files that are part of the last commit are considered in this data structure.
* For backwards compatibility the snapshot might include legacy checksums that
* are derived from a dedicated checksum file written by older elasticsearch version pre 1.3
Expand Down
Expand Up @@ -385,6 +385,7 @@ public void messageReceived(RecoveryCleanFilesRequest request, TransportChannel
// don't delete snapshot file, or the checksums file (note, this is extra protection since the Store won't delete checksum)
if (!request.snapshotFiles().contains(existingFile) && !Store.isChecksum(existingFile)) {
try {
store.logDeleteFile("recovery CleanFilesRequestHandler", existingFile);
store.directory().deleteFile(existingFile);
} catch (Exception e) {
// ignore, we don't really care, will get deleted later on
Expand Down
Expand Up @@ -57,6 +57,8 @@ public void testSetIndexCompoundOnFlush() {
private void assertTotalCompoundSegments(int i, int t, String index) {
IndicesSegmentResponse indicesSegmentResponse = client().admin().indices().prepareSegments(index).get();
IndexSegments indexSegments = indicesSegmentResponse.getIndices().get(index);
assertNotNull(indexSegments);
assertNotNull(indexSegments.getShards());
Collection<IndexShardSegments> values = indexSegments.getShards().values();
int compounds = 0;
int total = 0;
Expand Down
Expand Up @@ -1255,27 +1255,30 @@ public void testCreatedFlagAfterFlush() {
}

private static class MockAppender extends AppenderSkeleton {
public boolean sawIndexWriterMessage;

@Override
protected void append(LoggingEvent event) {
if (event.getLevel() == Level.TRACE &&
event.getLoggerName().endsWith("lucene.iw") &&
event.getMessage().toString().contains("IW: apply all deletes during flush") &&
event.getMessage().toString().contains("[index][1] ")) {

sawIndexWriterMessage = true;
public boolean sawIndexWriterMessage;
public boolean sawIndexWriterIFDMessage;

@Override
protected void append(LoggingEvent event) {
if (event.getLevel() == Level.TRACE && event.getMessage().toString().contains("[index][1] ")) {
if (event.getLoggerName().endsWith("lucene.iw") &&
event.getMessage().toString().contains("IW: apply all deletes during flush")) {
sawIndexWriterMessage = true;
}
if (event.getLoggerName().endsWith("lucene.iw.ifd")) {
sawIndexWriterIFDMessage = true;
}
}
}
}

@Override
public boolean requiresLayout() {
return false;
}
@Override
public boolean requiresLayout() {
return false;
}

@Override
public void close() {
}
@Override
public void close() {
}
}

// #5891: make sure IndexWriter's infoStream output is
Expand Down Expand Up @@ -1309,6 +1312,36 @@ public void testIndexWriterInfoStream() {
}
}

// #8603: make sure we can separately log IFD's messages
public void testIndexWriterIFDInfoStream() {
    MockAppender mockAppender = new MockAppender();

    Logger iwIFDLogger = Logger.getLogger("lucene.iw.ifd");
    Level savedLevel = iwIFDLogger.getLevel();
    iwIFDLogger.addAppender(mockAppender);
    iwIFDLogger.setLevel(Level.DEBUG);

    try {
        // First, with DEBUG, which should NOT log IndexWriter output:
        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_1, false);
        engine.create(new Engine.Create(null, newUid("1"), doc));
        engine.flush(new Engine.Flush());
        assertFalse(mockAppender.sawIndexWriterMessage);
        assertFalse(mockAppender.sawIndexWriterIFDMessage);

        // Again, with TRACE, which should only log IndexWriter IFD output
        // (nothing should reach the plain "lucene.iw" logger):
        iwIFDLogger.setLevel(Level.TRACE);
        engine.create(new Engine.Create(null, newUid("2"), doc));
        engine.flush(new Engine.Flush());
        assertFalse(mockAppender.sawIndexWriterMessage);
        assertTrue(mockAppender.sawIndexWriterIFDMessage);

    } finally {
        iwIFDLogger.removeAppender(mockAppender);
        // Fix: restore the level that was in effect before the test.
        // The original captured savedLevel but never used it, unconditionally
        // clearing the level with setLevel(null) instead.
        iwIFDLogger.setLevel(savedLevel);
    }
}

@Slow
@Test
public void testEnableGcDeletes() throws Exception {
Expand Down