Skip to content

Commit

Permalink
Implementation of tryMappedPagedFile
Browse files Browse the repository at this point in the history
Remove EXCLUSIVE map option for PageCache
  • Loading branch information
burqen committed Aug 18, 2016
1 parent c08d2e5 commit a58e93a
Show file tree
Hide file tree
Showing 8 changed files with 42 additions and 130 deletions.
Expand Up @@ -51,9 +51,6 @@ public interface PageCache extends AutoCloseable
* the {@link StandardOpenOption#TRUNCATE_EXISTING} will truncate any existing file <em>iff</em> it has not already * the {@link StandardOpenOption#TRUNCATE_EXISTING} will truncate any existing file <em>iff</em> it has not already
* been mapped. * been mapped.
* The {@link StandardOpenOption#DELETE_ON_CLOSE} will cause the file to be deleted after the last unmapping. * The {@link StandardOpenOption#DELETE_ON_CLOSE} will cause the file to be deleted after the last unmapping.
* The {@link PageCacheOpenOptions#EXCLUSIVE} will cause the {@code map} method to throw if the file is already
* mapped. Otherwise, the file will be mapped exclusively, and subsequent attempts at mapping the file will fail
* with an exception until the exclusively mapped file is closed.
* All other options are either silently ignored, or will cause an exception to be thrown. * All other options are either silently ignored, or will cause an exception to be thrown.
* @throws java.nio.file.NoSuchFileException if the given file does not exist, and the * @throws java.nio.file.NoSuchFileException if the given file does not exist, and the
* {@link StandardOpenOption#CREATE} option was not specified. * {@link StandardOpenOption#CREATE} option was not specified.
Expand All @@ -70,11 +67,13 @@ public interface PageCache extends AutoCloseable
* {@link #map(File, int, OpenOption...)}. * {@link #map(File, int, OpenOption...)}.
 * If no mapping exists for this file, then the returned {@link Optional} will report {@link Optional#isPresent()} * If no mapping exists for this file, then the returned {@link Optional} will report {@link Optional#isPresent()}
 * false. * false.
* <p>
* NOTE! User is responsible for closing the returned paged file.
* *
* @param file The file to try to get the mapped paged file for. * @param file The file to try to get the mapped paged file for.
 * @return {@link Optional} containing the {@link PagedFile} mapped by this {@link PageCache} for the given file, or an * @return {@link Optional} containing the {@link PagedFile} mapped by this {@link PageCache} for the given file, or an
 * empty {@link Optional} if no mapping exists. * empty {@link Optional} if no mapping exists.
 * @throws IOException // TODO * @throws IOException if the page cache has been closed or page eviction problems occur.
*/ */
Optional<PagedFile> tryMappedPagedFile( File file ) throws IOException; Optional<PagedFile> tryMappedPagedFile( File file ) throws IOException;


Expand Down
Expand Up @@ -28,10 +28,6 @@
*/ */
public enum PageCacheOpenOptions implements OpenOption public enum PageCacheOpenOptions implements OpenOption
{ {
/**
* Only allow a single mapping of the given file.
*/
EXCLUSIVE,
/** /**
* Map the file even if the specified file page size conflicts with an existing mapping of that file. * Map the file even if the specified file page size conflicts with an existing mapping of that file.
* If so, the given file page size will be ignored and a {@link PagedFile} will be returned that uses the * If so, the given file page size will be ignored and a {@link PagedFile} will be returned that uses the
Expand Down
Expand Up @@ -285,7 +285,6 @@ public synchronized PagedFile map( File file, int filePageSize, OpenOption... op
boolean createIfNotExists = false; boolean createIfNotExists = false;
boolean truncateExisting = false; boolean truncateExisting = false;
boolean deleteOnClose = false; boolean deleteOnClose = false;
boolean exclusiveMapping = false;
boolean anyPageSize = false; boolean anyPageSize = false;
for ( OpenOption option : openOptions ) for ( OpenOption option : openOptions )
{ {
Expand All @@ -301,10 +300,6 @@ else if ( option.equals( StandardOpenOption.DELETE_ON_CLOSE ) )
{ {
deleteOnClose = true; deleteOnClose = true;
} }
else if ( option.equals( PageCacheOpenOptions.EXCLUSIVE ) )
{
exclusiveMapping = true;
}
else if ( option.equals( PageCacheOpenOptions.ANY_PAGE_SIZE ) ) else if ( option.equals( PageCacheOpenOptions.ANY_PAGE_SIZE ) )
{ {
anyPageSize = true; anyPageSize = true;
Expand Down Expand Up @@ -336,19 +331,6 @@ else if ( !ignoredOpenOptions.contains( option ) )
{ {
throw new UnsupportedOperationException( "Cannot truncate a file that is already mapped" ); throw new UnsupportedOperationException( "Cannot truncate a file that is already mapped" );
} }
if ( exclusiveMapping || pagedFile.isExclusiveMapping() )
{
String msg;
if ( exclusiveMapping )
{
msg = "Cannot exclusively map file because it is already mapped: " + file;
}
else
{
msg = "Cannot map file because it is already exclusively mapped: " + file;
}
throw new IOException( msg );
}
pagedFile.incrementRefCount(); pagedFile.incrementRefCount();
pagedFile.markDeleteOnClose( deleteOnClose ); pagedFile.markDeleteOnClose( deleteOnClose );
return pagedFile; return pagedFile;
Expand All @@ -371,8 +353,7 @@ else if ( !ignoredOpenOptions.contains( option ) )
swapperFactory, swapperFactory,
tracer, tracer,
createIfNotExists, createIfNotExists,
truncateExisting, truncateExisting );
exclusiveMapping );
pagedFile.incrementRefCount(); pagedFile.incrementRefCount();
pagedFile.markDeleteOnClose( deleteOnClose ); pagedFile.markDeleteOnClose( deleteOnClose );
current = new FileMapping( file, pagedFile ); current = new FileMapping( file, pagedFile );
Expand All @@ -383,10 +364,29 @@ else if ( !ignoredOpenOptions.contains( option ) )
} }


@Override @Override
public Optional<PagedFile> tryMappedPagedFile( File file ) throws IOException public synchronized Optional<PagedFile> tryMappedPagedFile( File file ) throws IOException
{ {
// TODO assertHealthy();
return null; ensureThreadsInitialised();

file = file.getCanonicalFile();

FileMapping current = mappedFiles;

// find an existing mapping
while ( current != null )
{
if ( current.file.equals( file ) )
{
MuninnPagedFile pagedFile = current.pagedFile;
pagedFile.incrementRefCount();
return Optional.of( current.pagedFile );
}
current = current.next;
}

// no mapping exists
return Optional.empty();
} }


/** /**
Expand Down
Expand Up @@ -66,7 +66,6 @@ final class MuninnPagedFile implements PagedFile, Flushable


final PageSwapper swapper; final PageSwapper swapper;
private final CursorPool cursorPool; private final CursorPool cursorPool;
private final boolean exclusiveMapping;


// Guarded by the monitor lock on MuninnPageCache (map and unmap) // Guarded by the monitor lock on MuninnPageCache (map and unmap)
private boolean deleteOnClose; private boolean deleteOnClose;
Expand Down Expand Up @@ -95,14 +94,12 @@ final class MuninnPagedFile implements PagedFile, Flushable
PageSwapperFactory swapperFactory, PageSwapperFactory swapperFactory,
PageCacheTracer tracer, PageCacheTracer tracer,
boolean createIfNotExists, boolean createIfNotExists,
boolean truncateExisting, boolean truncateExisting ) throws IOException
boolean exclusiveMapping ) throws IOException
{ {
this.pageCache = pageCache; this.pageCache = pageCache;
this.filePageSize = filePageSize; this.filePageSize = filePageSize;
this.cursorPool = new CursorPool( this ); this.cursorPool = new CursorPool( this );
this.tracer = tracer; this.tracer = tracer;
this.exclusiveMapping = exclusiveMapping;


// The translation table is an array of arrays of references to either null, MuninnPage objects, or Latch // The translation table is an array of arrays of references to either null, MuninnPage objects, or Latch
// objects. The table only grows the outer array, and all the inner "chunks" all stay the same size. This // objects. The table only grows the outer array, and all the inner "chunks" all stay the same size. This
Expand Down Expand Up @@ -432,11 +429,6 @@ void increaseLastPageIdTo( long newLastPageId )
&& !UnsafeUtil.compareAndSwapLong( this, headerStateOffset, current, update ) ); && !UnsafeUtil.compareAndSwapLong( this, headerStateOffset, current, update ) );
} }


boolean isExclusiveMapping()
{
return exclusiveMapping;
}

/** /**
* Atomically increment the reference count for this mapped file. * Atomically increment the reference count for this mapped file.
*/ */
Expand Down
Expand Up @@ -71,8 +71,13 @@ public PagedFile map( File file, int pageSize, OpenOption... openOptions ) throw
@Override @Override
public Optional<PagedFile> tryMappedPagedFile( File file ) throws IOException public Optional<PagedFile> tryMappedPagedFile( File file ) throws IOException
{ {
// TODO adversary.injectFailure( IOException.class, SecurityException.class );
return null; final Optional<PagedFile> optional = delegate.tryMappedPagedFile( file );
if ( optional.isPresent() )
{
return Optional.of( new AdversarialPagedFile( optional.get(), adversary ) );
}
return optional;
} }


@Override @Override
Expand Down
Expand Up @@ -41,8 +41,7 @@ public PagedFile map( File file, int pageSize, OpenOption... openOptions ) throw
@Override @Override
public Optional<PagedFile> tryMappedPagedFile( File file ) throws IOException public Optional<PagedFile> tryMappedPagedFile( File file ) throws IOException
{ {
// TODO return delegate.tryMappedPagedFile( file );
return null;
} }


public int pageSize() public int pageSize()
Expand Down
Expand Up @@ -67,7 +67,6 @@


import static java.lang.Long.toHexString; import static java.lang.Long.toHexString;
import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.both;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
Expand Down Expand Up @@ -1042,12 +1041,14 @@ public void writeToPreviouslyBoundCursorAfterNextReturnsFalseMustThrow() throws
public void tryMappedPagedFileShouldReportMappedFilePresent() throws Exception public void tryMappedPagedFileShouldReportMappedFilePresent() throws Exception
{ {
PageCache cache = getPageCache( fs, maxPages, pageCachePageSize, PageCacheTracer.NULL ); PageCache cache = getPageCache( fs, maxPages, pageCachePageSize, PageCacheTracer.NULL );
final File file = file( "file" ); final File file = file( "a" );
try ( PagedFile pf = cache.map( file, filePageSize ) ) try ( PagedFile pf = cache.map( file, filePageSize ) )
{ {
final Optional<PagedFile> mappedPagedFile = cache.tryMappedPagedFile( file ); final Optional<PagedFile> optional = cache.tryMappedPagedFile( file );
assertTrue( mappedPagedFile.isPresent() ); assertTrue( optional.isPresent() );
assertThat( mappedPagedFile.get(), sameInstance( pf ) ); final PagedFile actual = optional.get();
assertThat( actual, sameInstance( pf ) );
actual.close();
} }
} }


Expand Down Expand Up @@ -3688,85 +3689,6 @@ public void fileMappedWithDeleteOnCloseMustNotExistAfterLastUnmap() throws Excep
pageCache.map( file, filePageSize ); pageCache.map( file, filePageSize );
} }


@Test
public void mustAllowMappingFileWithExclusiveOpenOption() throws Exception
{
getPageCache( fs, maxPages, pageCachePageSize, PageCacheTracer.NULL );
try ( PagedFile pf = pageCache.map( file( "a" ), filePageSize, PageCacheOpenOptions.EXCLUSIVE );
PageCursor cursor = pf.io( 0, PF_SHARED_WRITE_LOCK ))
{
assertTrue( cursor.next() );
}
}

@Test( expected = IOException.class )
public void mustThrowWhenMappingFileAlreadyMappedWithExclusive() throws Exception
{
getPageCache( fs, maxPages, pageCachePageSize, PageCacheTracer.NULL );
try ( PagedFile ignore = pageCache.map( file( "a" ), filePageSize, PageCacheOpenOptions.EXCLUSIVE ) )
{
pageCache.map( file( "a" ), filePageSize );
fail( "mapping should have thrown" );
}
}

@Test( expected = IOException.class )
public void mustThrowWhenExclusivelyMappingAlreadyMappedFile() throws Exception
{
getPageCache( fs, maxPages, pageCachePageSize, PageCacheTracer.NULL );
try ( PagedFile ignore = pageCache.map( file( "a" ), filePageSize ) )
{
pageCache.map( file( "a" ), filePageSize, PageCacheOpenOptions.EXCLUSIVE );
fail( "mapping should have thrown" );
}
}

@Test
public void mappingExclusivityMustApplyOnCanonicalPath() throws Exception
{
getPageCache( fs, maxPages, pageCachePageSize, PageCacheTracer.NULL );
File dir = new File( file( "a" ).getParent(), "dir" );
fs.mkdirs( dir );
File base = new File( dir, "a" );
File abs1 = new File( base.getParentFile(), "a" );
File abs2 = abs1.getAbsoluteFile();
File abs3 = new File( base, "b" ).getParentFile();
File abs4 = new File( base.getParent() + File.separator + ".." + File.separator + "dir" + File.separator + "a" );
File abs5 = abs4.getAbsoluteFile();

try ( PagedFile ignore = pageCache.map( base, filePageSize,
PageCacheOpenOptions.EXCLUSIVE, StandardOpenOption.CREATE ) )
{
assertCannotMap( abs1 );
assertCannotMap( abs2 );
assertCannotMap( abs3 );
assertCannotMap( abs4 );
assertCannotMap( abs5 );
}
}

private void assertCannotMap( File file )
{
try
{
pageCache.map( file, filePageSize, PageCacheOpenOptions.ANY_PAGE_SIZE, StandardOpenOption.CREATE ).close();
fail( "Should not have been able to map file: " + file );
}
catch ( IOException e )
{
try
{
String message = "Cannot map file because it is already exclusively mapped";
assertThat( e.getMessage(), containsString( message ) );
}
catch ( AssertionError assertionError )
{
assertionError.addSuppressed( e );
throw assertionError;
}
}
}

@Test @Test
public void mustNotThrowWhenMappingFileWithDifferentFilePageSizeAndAnyPageSizeIsSpecified() throws Exception public void mustNotThrowWhenMappingFileWithDifferentFilePageSizeAndAnyPageSizeIsSpecified() throws Exception
{ {
Expand Down
Expand Up @@ -71,8 +71,7 @@ public PagedFile map( File file, int pageSize, OpenOption... openOptions ) throw
@Override @Override
public Optional<PagedFile> tryMappedPagedFile( File file ) throws IOException public Optional<PagedFile> tryMappedPagedFile( File file ) throws IOException
{ {
// TODO return delegate.tryMappedPagedFile( file );
return null;
} }


@Override @Override
Expand Down

0 comments on commit a58e93a

Please sign in to comment.