This repository has been archived by the owner on Jan 19, 2022. It is now read-only.

Commit

whitespace, javadoc, cleanup
burtbeckwith committed Mar 15, 2013
1 parent 3284d91 commit e7ae440
Showing 45 changed files with 275 additions and 330 deletions.
27 changes: 10 additions & 17 deletions build.gradle
@@ -95,7 +95,6 @@ subprojects {
def isGormDatasource = project.name.startsWith("grails-datastore-gorm-") && !project.name.endsWith("tck") && !project.name.endsWith("plugin-support")
def isDocumentationProject = project.name.startsWith("grails-documentation")


dependencies {
if (isStandardGroovyMavenProject) {
groovy group: 'org.codehaus.groovy', name: 'groovy-all', version: groovyVersion
@@ -110,8 +109,8 @@
compile spockDependency
}
}
if(isDocumentationProject) {

if (isDocumentationProject) {
configurations {
documentation
}
@@ -121,7 +120,7 @@
documentation "org.slf4j:jcl-over-slf4j:$slf4jVersion"
documentation "org.slf4j:slf4j-api:$slf4jVersion"
documentation "org.slf4j:slf4j-simple:$slf4jVersion"
}
}
task docs << {
ant.taskdef (name: 'docs', classname : 'grails.doc.ant.DocPublisherTask') {
classpath {
@@ -131,16 +130,14 @@
}

}
ant.docs(src:"src/docs", dest:destinationDir, properties:"src/docs/doc.properties")
ant.docs(src:"src/docs", dest:destinationDir, properties:"src/docs/doc.properties")
}
docs.ext.destinationDir = "${buildDir}/docs"


task clean << {
ant.delete(dir:buildDir)
}


}

if (isGormDatasource) {
@@ -204,7 +201,6 @@ subprojects {
from javadoc.destinationDir
}


artifacts {
archives jar
archives sourcesJar
@@ -326,34 +322,33 @@ task allDocs(dependsOn: getTasksByName("docs", true)) << {
for(task in docTasks) {
def dir = task.destinationDir
def projectName = task.project.name
if(projectName.endsWith("core")) {
if (projectName.endsWith("core")) {
mkdir "$allDocsDir/manual"
fileTree { from dir }.copy { into "$allDocsDir/manual" }
def groovydocTask = groovydocTasks.find { it.project.name.endsWith "core" }
if(groovydocTask != null) {
if (groovydocTask != null) {
mkdir "$allDocsDir/api"
groovydocTask.actions.each { it.execute(groovydocTask) }

fileTree { from groovydocTask.destinationDir }.copy { into "$allDocsDir/api"}
}

}
else {
def storeName = projectName["grails-documentation-".size()..-1]
stores << storeName
def docsDir = "$allDocsDir/$storeName"
mkdir docsDir
def groovydocTask = groovydocTasks.find { it.project.name == "grails-datastore-$storeName" }
if(groovydocTask == null) groovydocTask = groovydocTasks.find { it.project.name == "grails-datastore-gorm-$storeName" }
if(groovydocTask != null) {
if (groovydocTask == null) groovydocTask = groovydocTasks.find { it.project.name == "grails-datastore-gorm-$storeName" }
if (groovydocTask != null) {
mkdir "$docsDir/api"
groovydocTask.actions.each { it.execute(groovydocTask) }
fileTree { from groovydocTask.destinationDir }.copy { into "$docsDir/api"}
}
mkdir "$docsDir/manual"
fileTree { from dir }.copy { into "$docsDir/manual" }
}

def engine = new groovy.text.SimpleTemplateEngine()
def binding = [
datastores:stores.collect { "<li><a href=\"$it/index.html\">GORM for ${it[0].toUpperCase()}${it[1..-1]}</a></li>" }.join(System.getProperty("line.separator"))
@@ -369,11 +364,10 @@ task allDocs(dependsOn: getTasksByName("docs", true)) << {
]
template = engine.createTemplate(new File("src/docs/resources/datastore.template")).make( binding )
new File(index).text = template.toString()

}
}

}

task test(dependsOn: getTasksByName("test", true)) << {
def reportsDir = "${buildDir}/reports"

@@ -422,4 +416,3 @@ task test(dependsOn: getTasksByName("test", true)) << {
ant."clover-html-report"(initstring: mergedDb, outdir:"${cloverReportsDir}/html")
}
}

@@ -30,16 +30,16 @@ public interface TPCacheAdapter<T> {
* In cases when there is no transaction or no transactional support by the implementation, if there are any problems
* storing the entry the caller is notified about it via exception in the calling thread; also, if this method returns
* successfully it means that the logistics of putting the specified value in the cache are fully done.
* @param key
* @param entry
* @param key the entry key
* @param entry the entry
* @throws CacheException runtime exception indicating any cache-related problems
*/
void cacheEntry(Serializable key, T entry) throws CacheException;

/**
* Returns the stored value for the specified key.
* @param key
* @return
* @param key the entry key
* @return the entry
* @throws CacheException runtime exception indicating any cache-related problems
*/
T getCachedEntry(Serializable key) throws CacheException;
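Taken together, the two methods above describe a small synchronous cache contract: cacheEntry must not return until the value is fully stored, and getCachedEntry reads it back by key. A minimal in-memory sketch (the map-backed class is hypothetical, not part of this commit; imports of TPCacheAdapter and CacheException from this module are assumed):

    import java.io.Serializable;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class InMemoryCacheAdapter<T> implements TPCacheAdapter<T> {

        private final Map<Serializable, T> entries = new ConcurrentHashMap<Serializable, T>();

        public void cacheEntry(Serializable key, T entry) throws CacheException {
            // returning normally signals that the value is fully stored, per the javadoc above
            entries.put(key, entry);
        }

        public T getCachedEntry(Serializable key) throws CacheException {
            return entries.get(key);
        }
    }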
@@ -10,32 +10,32 @@
public interface TPCacheAdapterRepository<T> {
/**
* Returns {@link TPCacheAdapter} for the specified {@link PersistentEntity}.
* @param entity
* @param entity the entity
* @return null if no {@link TPCacheAdapter} is found for the specified entity
*/
TPCacheAdapter<T> getTPCacheAdapter(PersistentEntity entity);

/**
* Sets {@link TPCacheAdapter} for the specified {@link PersistentEntity}.
* If the specified entity had another cache adapter before, the old one is ignored after this call.
* @param entity
* @param cacheAdapter
* @param entity the entity
* @param cacheAdapter the adapter
*/
void setTPCacheAdapter(PersistentEntity entity, TPCacheAdapter<T> cacheAdapter);

/**
* Sets {@link TPCacheAdapter} for the specified java class of {@link PersistentEntity}.
* If the specified entity had another cache adapter before, the old one is ignored after this call.
* @param entityJavaClass equivalent to {@link PersistentEntity.getJavaClass()}
* @param cacheAdapter
* @param entityJavaClass equivalent to {@link PersistentEntity#getJavaClass()}
* @param cacheAdapter the adapter
*/
void setTPCacheAdapter(@SuppressWarnings("rawtypes") Class entityJavaClass, TPCacheAdapter<T> cacheAdapter);

/**
* Sets {@link TPCacheAdapter} for the specified FQN java class of {@link PersistentEntity}.
* If the specified entity had another cache adapter before, the old one is ignored after this call.
* @param entityJavaClassFQN equivalent to {@link PersistentEntity.getJavaClass().getName()}
* @param cacheAdapter
* @param entityJavaClassFQN equivalent to {@link PersistentEntity#getJavaClass().getName()}
* @param cacheAdapter the adapter
*/
void setTPCacheAdapter(String entityJavaClassFQN, TPCacheAdapter<T> cacheAdapter);
}
@@ -167,7 +167,6 @@ public PersistentEntity addPersistentEntity(Class javaClass, boolean override) {
return addPersistentEntity(javaClass);
}

@Override
public Collection<PersistentEntity> addPersistentEntities(Class... javaClasses) {
Collection<PersistentEntity> entities = new ArrayList<PersistentEntity>();

@@ -235,15 +234,13 @@ private void registerEntityWithContext(PersistentEntity entity) {
persistentEntitiesByName.put(entity.getName(), entity);
}

@Override
public void initialize() {
for(PersistentEntity entity : persistentEntities) {
initializePersistentEntity(entity);
}
this.initialized = true;
}

@Override
public boolean isInitialized() {
return initialized;
}
@@ -72,9 +72,8 @@ public MappingContext getMappingContext() {
return context;
}

@Override
public boolean isInitialized() {
return this.initialized;
return initialized;
}

public void initialize() {
@@ -129,7 +129,6 @@ public IdentityMapping getIdentityMapping(ClassMapping classMapping) {
return getDefaultIdentityMapping(classMapping);
}

@Override
public void setCanExpandMappingContext(boolean canExpandMappingContext) {
// noop
}
@@ -88,7 +88,6 @@ public GormMappingConfigurationStrategy(MappingFactory propertyFactory) {
/**
* Whether the strategy can add new entities to the mapping context
*/
@Override
public void setCanExpandMappingContext(boolean canExpandMappingContext) {
this.canExpandMappingContext = canExpandMappingContext;
}
@@ -633,7 +632,7 @@ else if (!embedded && Collection.class.isAssignableFrom(relatedClassPropertyType
* check if mappedBy is set explicitly to null for the given property.
* @param property
* @param mappedBy
* @return
* @return true if mappedBy is set explicitly to null
*/
private boolean forceUnidirectional(PropertyDescriptor property, Map mappedBy) {
return mappedBy.containsKey(property.getName()) && (mappedBy.get(property.getName())==null);
@@ -14,22 +14,23 @@
*/
package org.grails.datastore.mapping.validation;

import javax.persistence.FlushModeType;

import org.grails.datastore.mapping.core.Datastore;
import org.grails.datastore.mapping.core.Session;
import org.grails.datastore.mapping.engine.EntityAccess;
import org.grails.datastore.mapping.engine.event.AbstractPersistenceEvent;
import org.grails.datastore.mapping.engine.event.AbstractPersistenceEventListener;
import org.grails.datastore.mapping.engine.event.PersistenceEventListener;
import org.grails.datastore.mapping.engine.event.PreInsertEvent;
import org.grails.datastore.mapping.engine.event.PreUpdateEvent;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.springframework.context.ApplicationEvent;
import org.springframework.validation.Errors;
import org.springframework.validation.Validator;

import javax.persistence.FlushModeType;

/**
* An {@link org.grails.datastore.mapping.engine.EntityInterceptor} that uses
* A {@link PersistenceEventListener} that uses
* Spring's validation mechanism to evict objects if an error occurs
*
* @author Graeme Rocher
@@ -180,7 +180,7 @@ public DynamoDBAssociationInfo getAssociationInfo(Association<?> association) {
/**
* Returns table resolver for the specified entity.
* @param entity
* @return
* @return the resolver
*/
public DynamoDBTableResolver getEntityDomainResolver(PersistentEntity entity) {
return entityDomainResolverMap.get(entity);
@@ -189,7 +189,7 @@ public DynamoDBTableResolver getEntityDomainResolver(PersistentEntity entity) {
/**
* Returns id generator for the specified entity.
* @param entity
* @return
* @return the generator
*/
public DynamoDBIdGenerator getEntityIdGenerator(PersistentEntity entity) {
return entityIdGeneratorMap.get(entity);
@@ -33,16 +33,16 @@ public interface DynamoDBTableResolver {
/**
* Returns domain name for the specified primary key value.
*
* @param id
* @return
* @param id the id
* @return the domain name
*/
String resolveTable(String id);

/**
* Returns all domain names for this type of entity. Without sharding this
* list contains always one element.
*
* @return
* @return the names
*/
List<String> getAllTablesForEntity();
}
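For the common non-sharded case described in the javadoc above, a resolver can simply return one fixed table name. A hypothetical sketch (assuming the two methods shown here are the whole contract and that DynamoDBTableResolver is imported from this module):

    import java.util.Collections;
    import java.util.List;

    public class SingleTableResolver implements DynamoDBTableResolver {

        private final String tableName;

        public SingleTableResolver(String tableName) {
            this.tableName = tableName;
        }

        public String resolveTable(String id) {
            return tableName; // every primary key value maps to the same table
        }

        public List<String> getAllTablesForEntity() {
            return Collections.singletonList(tableName); // always one element without sharding
        }
    }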
@@ -408,7 +408,7 @@ private boolean validateProjectionsAndCheckIfCountIsPresent(List<Projection> pro
* (a and b) or c = dis(con(a,b), c) ==> [ [a,b], [c] ] //2 queries
*
* @param criteria
* @return
* @return the results
*/
private List<List<PropertyCriterion>> flattenAndReplaceDisjunction(Junction criteria) {
List<List<PropertyCriterion>> result = new ArrayList<List<PropertyCriterion>>();
@@ -534,7 +534,7 @@ protected static String extractPropertyKey(String propertyName, PersistentEntity
*
* @param entity
* @param propertyName
* @return
* @return the key
*/
protected static String getKey(PersistentEntity entity, String propertyName) {
return extractPropertyKey(propertyName, entity);
@@ -49,7 +49,7 @@ public interface DynamoDBTemplate {
* Same as get but with consistent read flag.
* @param tableName complete name of the table in DynamoDB, will be used as-is
* @param key the key for which to retrieve the data
* @return
* @return null if the item is not found
* @throws org.springframework.dao.DataAccessException
*/
Map<String,AttributeValue> getConsistent(String tableName, Key key) throws DataAccessException;
@@ -74,9 +74,9 @@ public interface DynamoDBTemplate {
* http://docs.amazonwebservices.com/amazondynamodb/latest/developerguide/LowLevelJavaItemCRUD.html#PutLowLevelAPIJava
* @param tableName complete name of the table in DynamoDB, will be used as-is
* @param key
*@param attributes
* @param attributes
* @param expectedVersion
* @throws org.springframework.dao.DataAccessException
* @throws org.springframework.dao.DataAccessException
*/
void putItemVersioned(String tableName, Key key, Map<String, AttributeValue> attributes, String expectedVersion, PersistentEntity persistentEntity) throws DataAccessException;

@@ -116,25 +116,25 @@ public interface DynamoDBTemplate {
/**
* Returns true if any item was deleted, in other words if domain was empty it returns false.
* @param tableName complete name of the table in DynamoDB, will be used as-is
* @return
* @return true if any item was deleted
* @throws org.springframework.dao.DataAccessException
*/
boolean deleteAllItems(String tableName) throws DataAccessException;

/**
* Executes scan Dynamo DB operation (note this operation does not scale well with the growth of the table).
* @param max maximum amount of items to return (inclusive)
* @return
* @return the scan results
* @throws org.springframework.dao.DataAccessException
*/
List<Map<String, AttributeValue>> scan(String tableName, Map<String, Condition> filter, int max) throws DataAccessException;

/**
* Executes scan Dynamo DB operation and returns the count of matched items
* (note this operation does not scale well with the growth of the table)
* @param tableName
* @param filter
* @return
* @param tableName the table name
* @param filter filters
* @return the count of matched items
*/
int scanCount(String tableName, Map<String, Condition> filter);

@@ -148,17 +148,17 @@

/**
* Blocking call - internally will wait until the table is successfully created and is in ACTIVE state.
* @param tableName
* @param ks
* @param provisionedThroughput
* @param tableName the table name
* @param ks the schema
* @param provisionedThroughput the throughput
* @throws DataAccessException
*/
void createTable(String tableName, KeySchema ks, ProvisionedThroughput provisionedThroughput) throws DataAccessException;

/**
* Returns table description object containing throughput and key scheme information
* @param tableName
* @return
* @param tableName the table name
* @return the description
* @throws org.springframework.dao.DataAccessException
*/
TableDescription describeTable(String tableName) throws DataAccessException;
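A brief usage sketch of two of the methods declared above (the wrapper class and the "Person" table name are assumptions, not part of the commit; the template implementation and the DynamoDBTemplate import are assumed to be provided by this module):

    import org.springframework.dao.DataAccessException;

    public class DynamoDBTemplateUsageExample {

        private final DynamoDBTemplate template;

        public DynamoDBTemplateUsageExample(DynamoDBTemplate template) {
            this.template = template;
        }

        public boolean wipe(String tableName) throws DataAccessException {
            // the description object carries throughput and key schema information (see javadoc above)
            System.out.println(template.describeTable(tableName));
            // deleteAllItems returns false when the table was already empty
            return template.deleteAllItems(tableName);
        }
    }

    // e.g. new DynamoDBTemplateUsageExample(template).wipe("Person");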
