Skip to content

Commit

Permalink
Introduce synonyms for empty config constructor to clarify usage
Browse files Browse the repository at this point in the history
  • Loading branch information
benbc committed Feb 29, 2016
1 parent 2b38ddb commit baa6285
Show file tree
Hide file tree
Showing 75 changed files with 181 additions and 158 deletions.
Expand Up @@ -434,7 +434,7 @@ public void shouldNotReportIndexInconsistenciesIfIndexIsFailed() throws Exceptio
IndexRule rule = rules.next();
IndexDescriptor descriptor = new IndexDescriptor( rule.getLabel(), rule.getPropertyKey() );
IndexConfiguration indexConfig = IndexConfiguration.NON_UNIQUE;
IndexSamplingConfig samplingConfig = new IndexSamplingConfig( new Config() );
IndexSamplingConfig samplingConfig = new IndexSamplingConfig( Config.empty() );
IndexPopulator populator =
storeAccess.indexes().getPopulator( rule.getId(), descriptor, indexConfig, samplingConfig );
populator.markAsFailed( "Oh noes! I was a shiny index and then I was failed" );
Expand Down Expand Up @@ -541,7 +541,7 @@ protected void transactionData( GraphStoreFixture.TransactionDataBuilder tx,
public void shouldReportNodesThatAreNotIndexed() throws Exception
{
// given
IndexSamplingConfig samplingConfig = new IndexSamplingConfig( new Config() );
IndexSamplingConfig samplingConfig = new IndexSamplingConfig( Config.empty() );
for ( IndexRule indexRule : loadAllIndexRules( fixture.directStoreAccess().nativeStores().getSchemaStore() ) )
{
IndexAccessor accessor = fixture.directStoreAccess().indexes().getOnlineAccessor(
Expand All @@ -565,7 +565,7 @@ public void shouldReportNodesWithDuplicatePropertyValueInUniqueIndex() throws Ex
{
// given
IndexConfiguration indexConfig = IndexConfiguration.NON_UNIQUE;
IndexSamplingConfig samplingConfig = new IndexSamplingConfig( new Config() );
IndexSamplingConfig samplingConfig = new IndexSamplingConfig( Config.empty() );
for ( IndexRule indexRule : loadAllIndexRules( fixture.directStoreAccess().nativeStores().getSchemaStore() ) )
{
IndexAccessor accessor = fixture.directStoreAccess()
Expand Down
Expand Up @@ -149,7 +149,7 @@ public void shouldImportCsvData() throws Exception
ExecutionMonitor processorAssigner = eagerRandomSaturation( config.maxNumberOfProcessors() );
final BatchImporter inserter = new ParallelBatchImporter( directory.graphDbDir(),
new DefaultFileSystemAbstraction(), config, NullLogService.getInstance(),
processorAssigner, EMPTY, new Config() );
processorAssigner, EMPTY, Config.empty() );

boolean successful = false;
IdGroupDistribution groups = new IdGroupDistribution( NODE_COUNT, 5, random.random() );
Expand Down
Expand Up @@ -475,7 +475,7 @@ public void shouldNotReportIndexInconsistenciesIfIndexIsFailed() throws Exceptio
IndexRule rule = rules.next();
IndexDescriptor descriptor = new IndexDescriptor( rule.getLabel(), rule.getPropertyKey() );
IndexConfiguration indexConfig = IndexConfiguration.NON_UNIQUE;
IndexSamplingConfig samplingConfig = new IndexSamplingConfig( new Config() );
IndexSamplingConfig samplingConfig = new IndexSamplingConfig( Config.empty() );
IndexPopulator populator =
storeAccess.indexes().getPopulator( rule.getId(), descriptor, indexConfig, samplingConfig );
populator.markAsFailed( "Oh noes! I was a shiny index and then I was failed" );
Expand Down Expand Up @@ -582,7 +582,7 @@ protected void transactionData( GraphStoreFixture.TransactionDataBuilder tx,
public void shouldReportNodesThatAreNotIndexed() throws Exception
{
// given
IndexSamplingConfig samplingConfig = new IndexSamplingConfig( new Config() );
IndexSamplingConfig samplingConfig = new IndexSamplingConfig( Config.empty() );
for ( IndexRule indexRule : loadAllIndexRules( fixture.directStoreAccess().nativeStores().getSchemaStore() ) )
{
IndexAccessor accessor = fixture.directStoreAccess().indexes().getOnlineAccessor(
Expand All @@ -606,7 +606,7 @@ public void shouldReportNodesWithDuplicatePropertyValueInUniqueIndex() throws Ex
{
// given
IndexConfiguration indexConfig = IndexConfiguration.NON_UNIQUE;
IndexSamplingConfig samplingConfig = new IndexSamplingConfig( new Config() );
IndexSamplingConfig samplingConfig = new IndexSamplingConfig( Config.empty() );
for ( IndexRule indexRule : loadAllIndexRules( fixture.directStoreAccess().nativeStores().getSchemaStore() ) )
{
IndexAccessor accessor = fixture.directStoreAccess()
Expand Down
Expand Up @@ -19,12 +19,6 @@
*/
package org.neo4j.unsafe.impl.batchimport;

import org.apache.commons.lang3.mutable.MutableLong;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
Expand All @@ -40,6 +34,12 @@
import java.util.Set;
import java.util.UUID;

import org.apache.commons.lang3.mutable.MutableLong;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import org.neo4j.consistency.ConsistencyCheckService;
import org.neo4j.consistency.ConsistencyCheckService.Result;
import org.neo4j.consistency.checking.full.ConsistencyCheckIncompleteException;
Expand Down Expand Up @@ -72,6 +72,7 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import static org.neo4j.helpers.collection.IteratorUtil.asSet;
import static org.neo4j.helpers.collection.MapUtil.stringMap;
import static org.neo4j.unsafe.impl.batchimport.AdditionalInitialIds.EMPTY;
Expand Down Expand Up @@ -151,7 +152,7 @@ public void shouldImportCsvData() throws Exception
ExecutionMonitor processorAssigner = eagerRandomSaturation( config.maxNumberOfProcessors() );
final BatchImporter inserter = new ParallelBatchImporter( directory.graphDbDir(),
new DefaultFileSystemAbstraction(), config, NullLogService.getInstance(),
processorAssigner, EMPTY, new Config() );
processorAssigner, EMPTY, Config.empty() );

boolean successful = false;
IdGroupDistribution groups = new IdGroupDistribution( NODE_COUNT, 5, random.random() );
Expand Down
Expand Up @@ -196,7 +196,7 @@ class LazyTest extends ExecutionEngineFunSuite {
val dependencies = mock[DependencyResolver]
val bridge = mock[ThreadToStatementContextBridge]
val monitors = new org.neo4j.kernel.monitoring.Monitors()
val config = new Config()
val config = Config.empty()

val fakeDataStatement = mock[OperationsFacade]
val fakeReadStatement = mock[ReadOperations]
Expand Down
Expand Up @@ -74,6 +74,7 @@
import static org.neo4j.helpers.Exceptions.launderedException;
import static org.neo4j.helpers.Format.bytes;
import static org.neo4j.helpers.Strings.TAB;
import static org.neo4j.kernel.configuration.Config.defaults;
import static org.neo4j.kernel.impl.util.Converters.withDefault;
import static org.neo4j.unsafe.impl.batchimport.Configuration.BAD_FILE_NAME;
import static org.neo4j.unsafe.impl.batchimport.input.Collectors.badCollector;
Expand Down Expand Up @@ -430,7 +431,7 @@ public static void main( String[] incomingArguments, boolean defaultSettingsSuit

private static Config loadDbConfig( File file ) throws IOException
{
return file != null && file.exists() ? new Config( MapUtil.load( file ) ) : new Config();
return file != null && file.exists() ? new Config( MapUtil.load( file ) ) : Config.defaults();
}

private static void printOverview( File storeDir, Collection<Option<File[]>> nodesFiles,
Expand Down
Expand Up @@ -86,7 +86,7 @@ public static void main( String[] arguments ) throws IOException
COMMAS, nodeCount, relationshipCount, new Groups(), idType, labelCount, relationshipTypeCount,
silentBadCollector( 0 ));
BatchImporter importer = new ParallelBatchImporter( dir, DEFAULT,
new SimpleLogService( sysoutLogProvider, sysoutLogProvider ), defaultVisible(), new Config() );
new SimpleLogService( sysoutLogProvider, sysoutLogProvider ), defaultVisible(), Config.defaults() );
importer.doImport( input );
}

Expand Down
Expand Up @@ -58,7 +58,7 @@
public class Config implements DiagnosticsProvider, Configuration
{
private final List<ConfigurationChangeListener> listeners = new CopyOnWriteArrayList<>();
private final Map<String, String> params = new ConcurrentHashMap<>( );
private final Map<String, String> params = new ConcurrentHashMap<>();
private final ConfigValues settingsFunction;

// Messages to this log get replayed into a real logger once logging has been
Expand All @@ -70,6 +70,16 @@ public class Config implements DiagnosticsProvider, Configuration
private ConfigurationMigrator migrator;
private ConfigurationValidator validator;

/**
 * Creates a {@code Config} containing no settings, for call sites that
 * deliberately want an empty configuration (e.g. tests that exercise
 * behaviour independent of any configured values).
 * <p>
 * Currently delegates to the no-arg constructor and is therefore
 * behaviourally identical to {@link #defaults()}; the two factory names
 * exist only to make the caller's intent explicit.
 *
 * @return a new, empty {@code Config} instance
 */
public static Config empty()
{
return new Config();
}

/**
 * Creates a {@code Config} carrying only default values, for call sites
 * that want "a config with the defaults" rather than an explicitly empty
 * one.
 * <p>
 * Currently delegates to the no-arg constructor (no explicit parameters;
 * settings fall back to their declared defaults on lookup) and is
 * therefore behaviourally identical to {@link #empty()}; the distinct
 * factory names exist only to clarify intent at the call site.
 *
 * @return a new {@code Config} instance with no explicit settings
 */
public static Config defaults()
{
return new Config();
}

public Config()
{
this( new HashMap<>(), Collections.<Class<?>>emptyList() );
Expand All @@ -94,6 +104,7 @@ public Config( Map<String, String> inputParams, Iterable<Class<?>> settingsClass

/**
* Returns a copy of this config with the given modifications.
*
* @return a new modified config, leaves this config unchanged.
*/
public Config with( Map<String, String> additionalConfig )
Expand Down Expand Up @@ -125,19 +136,20 @@ public <T> T get( Setting<T> setting )
* the raw setting data, meaning it can provide functionality that cross multiple settings
* and other more advanced use cases.
*/
public <T> T view( Function<ConfigValues,T> projection )
public <T> T view( Function<ConfigValues, T> projection )
{
return projection.apply( settingsFunction );
}

/**
* Augment the existing config with new settings, overriding any conflicting settings, but keeping all old
* non-overlapping ones.
*
* @param changes settings to add and override
*/
public Config augment( Map<String,String> changes )
public Config augment( Map<String, String> changes )
{
Map<String,String> params = getParams();
Map<String, String> params = getParams();
params.putAll( changes );
applyChanges( params );
return this;
Expand Down Expand Up @@ -204,7 +216,9 @@ public synchronized Config applyChanges( Map<String, String> newConfiguration )
return this;
}

/** Add more settings classes. */
/**
* Add more settings classes.
*/
public Config registerSettingsClasses( Iterable<Class<?>> settingsClasses )
{
this.settingsClasses = Iterables.concat( settingsClasses, this.settingsClasses );
Expand Down Expand Up @@ -281,24 +295,27 @@ public String toString()
}

/**
* This mechanism can be used as an argument to {@link #view(Function)} to view a set of config options that share a common base config key as a group.
* This specific version handles multiple groups, so the common base key should be followed by a number denoting the group, followed by the group config
* This mechanism can be used as an argument to {@link #view(Function)} to view a set of config options that
* share a common base config key as a group.
* This specific version handles multiple groups, so the common base key should be followed by a number denoting
* the group, followed by the group config
* values, eg:
*
* <p>
* {@code <base name>.<group key>.<config key>}
*
* The config of each group can then be accessed as if the {@code config key} in the pattern above was the entire config key. For example, given the
* <p>
* The config of each group can then be accessed as if the {@code config key} in the pattern above was the entire
* config key. For example, given the
* following configuration:
*
* <p>
* <pre>
* dbms.books.0.name=Hansel & Gretel
* dbms.books.0.author=JJ Abrams
* dbms.books.1.name=NKJV
* dbms.books.1.author=Jesus
* </pre>
*
* <p>
* We can then access these config values as groups:
*
* <p>
* <pre>
* {@code
* Setting<String> bookName = setting("name", STRING); // note that the key here is only 'name'
Expand All @@ -309,27 +326,28 @@ public String toString()
* }
* </pre>
*
* @param baseName the base name for the groups, this will be the first part of the config key, followed by a grouping number, followed by the group
* @param baseName the base name for the groups, this will be the first part of the config key, followed by a
* grouping number, followed by the group
* config options
* @return a list of grouped config options
*/
public static Function<ConfigValues,List<Configuration>> groups( String baseName )
public static Function<ConfigValues, List<Configuration>> groups( String baseName )
{
Pattern pattern = Pattern.compile( Pattern.quote( baseName ) + "\\.(\\d+)\\.(.+)" );

return ( values ) -> {
Map<String,Map<String,String>> groups = new HashMap<>();
for ( Pair<String,String> entry : values.rawConfiguration() )
Map<String, Map<String, String>> groups = new HashMap<>();
for ( Pair<String, String> entry : values.rawConfiguration() )
{
Matcher matcher = pattern.matcher( entry.first() );

if( matcher.matches() )
if ( matcher.matches() )
{
String index = matcher.group( 1 );
String configName = matcher.group( 2 );
String value = entry.other();

Map<String,String> groupConfig = groups.get( index );
Map<String, String> groupConfig = groups.get( index );
if ( groupConfig == null )
{
groupConfig = new HashMap<>();
Expand All @@ -339,7 +357,7 @@ public static Function<ConfigValues,List<Configuration>> groups( String baseName
}
}

Function<Map<String,String>,Configuration> mapper = m -> new Configuration()
Function<Map<String, String>, Configuration> mapper = m -> new Configuration()
{
@Override
public <T> T get( Setting<T> setting )
Expand Down
Expand Up @@ -43,7 +43,7 @@ private StandalonePageCacheFactory()

public static PageCache createPageCache( FileSystemAbstraction fileSystem )
{
return createPageCache( fileSystem, new Config() );
return createPageCache( fileSystem, Config.defaults() );
}

public static PageCache createPageCache(
Expand Down
Expand Up @@ -79,7 +79,7 @@ public StoreAccess( PageCache pageCache, File storeDir )

public StoreAccess( FileSystemAbstraction fileSystem, PageCache pageCache, File storeDir )
{
this( fileSystem, pageCache, storeDir, new Config() );
this( fileSystem, pageCache, storeDir, Config.defaults() );
}

private StoreAccess( FileSystemAbstraction fileSystem, PageCache pageCache, File storeDir, Config config )
Expand Down
Expand Up @@ -76,7 +76,7 @@ public StoreFactory()

public StoreFactory( FileSystemAbstraction fileSystem, File storeDir, PageCache pageCache, LogProvider logProvider )
{
this( storeDir, new Config(), new DefaultIdGeneratorFactory( fileSystem ), pageCache, fileSystem,
this( storeDir, Config.defaults(), new DefaultIdGeneratorFactory( fileSystem ), pageCache, fileSystem,
logProvider );
}

Expand Down
Expand Up @@ -52,7 +52,7 @@ public IndexLookup( SchemaStore store, SchemaIndexProvider schemaIndexProvider )
indexAccessors = new ArrayList<>();
readerCache = new HashMap<>();
indexRuleIndex = buildIndexRuleIndex( store );
samplingConfig = new IndexSamplingConfig( new Config() );
samplingConfig = new IndexSamplingConfig( Config.defaults() );
}

private PrimitiveIntObjectMap<List<IndexRule>> buildIndexRuleIndex( SchemaStore schemaStore )
Expand Down
Expand Up @@ -41,7 +41,7 @@ public class GraphDatabaseSettingsTest
@Test
public void mustHaveReasonableDefaultPageCacheMemorySizeInBytes() throws Exception
{
long bytes = new Config().get( GraphDatabaseSettings.pagecache_memory );
long bytes = Config.defaults().get( GraphDatabaseSettings.pagecache_memory );
assertThat( bytes, greaterThanOrEqualTo( ByteUnit.mebiBytes( 32 ) ) );
assertThat( bytes, lessThanOrEqualTo( ByteUnit.tebiBytes( 1 ) ) );
}
Expand All @@ -58,7 +58,7 @@ public void pageCacheSettingMustAcceptArbitraryUserSpecifiedValue() throws Excep
@Test( expected = InvalidSettingException.class )
public void pageCacheSettingMustRejectOverlyConstrainedMemorySetting() throws Exception
{
long pageSize = new Config().get( GraphDatabaseSettings.mapped_memory_page_size );
long pageSize = Config.defaults().get( GraphDatabaseSettings.mapped_memory_page_size );
Setting<Long> setting = GraphDatabaseSettings.pagecache_memory;
String name = setting.name();
// We configure the page cache to have one byte less than two pages worth of memory. This must throw:
Expand Down
Expand Up @@ -59,7 +59,7 @@ public IndexAccessorCompatibility( IndexProviderCompatibilityTestSuite testSuite
public void before() throws Exception
{
IndexConfiguration indexConfig = IndexConfiguration.of( isUnique );
IndexSamplingConfig indexSamplingConfig = new IndexSamplingConfig( new Config() );
IndexSamplingConfig indexSamplingConfig = new IndexSamplingConfig( Config.empty() );
IndexPopulator populator = indexProvider.getPopulator( 17, descriptor, indexConfig, indexSamplingConfig );
populator.create();
populator.close( true );
Expand Down

0 comments on commit baa6285

Please sign in to comment.