
Updating logback config to use HSQLDB for logging events, minor changes to FedoraContentMapper.java to get code convention enforcement to pass
escowles committed Jul 1, 2013
1 parent 5e57cb9 commit 5497546d5fbaadc8be9f9320b3f435aa9840b035
48 pom.xml
@@ -193,6 +193,13 @@
<version>${modeshape.version}</version>
</dependency>
<!-- hsqldb for db logging -->
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>2.2.9</version>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>org.apache.chemistry.opencmis</groupId>
@@ -338,6 +345,47 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>sql-maven-plugin</artifactId>
<version>1.5</version>
<dependencies>
<!-- specify the dependent jdbc driver here -->
<dependency>
<groupId>org.hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>2.2.9</version>
</dependency>
</dependencies>
<!-- common configuration shared by all executions -->
<configuration>
<driver>org.hsqldb.jdbcDriver</driver>
<url>jdbc:hsqldb:file:/tmp/audit.db;shutdown=true</url>
<username>sa</username>
<password></password>
<settingsKey>sensibleKey</settingsKey>
<!--all executions are ignored if -Dmaven.test.skip=true-->
<skip>${maven.test.skip}</skip>
</configuration>
<executions>
<execution>
<id>create-schema</id>
<phase>process-test-resources</phase>
<goals>
<goal>execute</goal>
</goals>
<configuration>
<autocommit>true</autocommit>
<srcFiles>
<srcFile>src/main/resources/logback-hsqldb.sql</srcFile>
</srcFiles>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
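
For illustration only: the create-schema execution above just runs src/main/resources/logback-hsqldb.sql against the HSQLDB file database during process-test-resources, before the tests run. A rough, hypothetical Java equivalent of that step (assuming the hsqldb jar declared above is on the classpath, so the JDBC 4 driver auto-registers, and that the script contains only semicolon-terminated statements) would be:

import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// Hypothetical sketch of what the sql-maven-plugin execution does; not part of the commit.
public class CreateAuditSchema {
    public static void main(final String[] args) throws Exception {
        // Same script the plugin points at.
        final String script = new String(Files.readAllBytes(
                Paths.get("src/main/resources/logback-hsqldb.sql")),
                StandardCharsets.UTF_8);
        // Same URL and credentials as the plugin configuration above.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hsqldb:file:/tmp/audit.db;shutdown=true", "sa", "");
             Statement stmt = conn.createStatement()) {
            for (final String sql : script.split(";")) {
                if (!sql.trim().isEmpty()) {
                    stmt.execute(sql);  // DROP/CREATE statements run one by one
                }
            }
        }
    }
}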
@@ -20,8 +20,9 @@
import org.modeshape.web.jcr.webdav.ContentMapper;
/**
* This class is almost entirely borrowed from {@link org.modeshape.web.jcr.webdav.DefaultContentMapper}
* except for the Fedora-specific behaviors.
* This class is almost entirely borrowed from
* {@link org.modeshape.web.jcr.webdav.DefaultContentMapper} except for the
* Fedora-specific behaviors.
*
*/
public class FedoraContentMapper implements ContentMapper {
@@ -60,7 +61,7 @@
private String newFolderPrimaryType;
private final Logger logger = Logger.getLogger(getClass());
private final Logger log = Logger.getLogger(getClass());
@Override
public void initialize(ServletContext servletContext) {
@@ -76,15 +77,15 @@ public void initialize(ServletContext servletContext) {
String newContentPrimaryType =
getParam(servletContext, INIT_NEW_CONTENT_PRIMARY_TYPE_NAME);
logger.debug("FedoraContentMapper initial content primary types = " +
log.debug("FedoraContentMapper initial content primary types = " +
contentPrimaryTypes);
logger.debug("FedoraContentMapper initial file primary types = " +
log.debug("FedoraContentMapper initial file primary types = " +
filePrimaryTypes);
logger.debug("FedoraContentMapper initial new folder primary types = " +
log.debug("FedoraContentMapper initial new folder primary types = " +
newFolderPrimaryType);
logger.debug("FedoraContentMapper initial new resource primary types = " +
log.debug("FedoraContentMapper initial new resource primary types = " +
newResourcePrimaryType);
logger.debug("FedoraContentMapper initial new content primary types = " +
log.debug("FedoraContentMapper initial new content primary types = " +
newContentPrimaryType);
this.contentPrimaryTypes =
@@ -103,10 +104,11 @@ protected String getParam(ServletContext servletContext, String name) {
}
/**
* Returns an unmodifiable set containing the elements passed in to this method
* Returns an unmodifiable set containing the elements passed to this method
*
* @param elements a set of elements; may not be null
* @return an unmodifiable set containing all of the elements in {@code elements}; never null
* @return an unmodifiable set containing all the elements in
* {@code elements}; never null
*/
private static Set<String> setFor(String... elements) {
Set<String> set = new HashSet<String>(elements.length);
@@ -116,19 +118,23 @@ protected String getParam(ServletContext servletContext, String name) {
}
/**
* Splits a comma-delimited string into an unmodifiable set containing the substrings between the commas in the source string.
* The elements in the set will be {@link String#trim() trimmed}.
* Splits a comma-delimited string into an unmodifiable set containing the
* substrings between the commas in the source string. The elements in the
* set will be {@link String#trim() trimmed}.
*
* @param commaDelimitedString input string; may not be null, but need not contain any commas
* @return an unmodifiable set whose elements are the trimmed substrings of the source string; never null
* @param commaDelimitedString input string; may not be null, but need not
* contain any commas
* @return an unmodifiable set whose elements are the trimmed substrings
* of the source string; never null
*/
private static Set<String> split(String commaDelimitedString) {
return setFor(commaDelimitedString.split("\\s*,\\s*"));
}
@Override
public InputStream getResourceContent(Node node) throws RepositoryException {
if (!node.hasNode(CONTENT_NODE_NAME)) return null;
public InputStream getResourceContent(Node node)
throws RepositoryException {
if (!node.hasNode(CONTENT_NODE_NAME)) { return null; }
return node.getProperty(CONTENT_NODE_NAME + "/" + DATA_PROP_NAME)
.getBinary().getStream();
}
@@ -144,7 +150,7 @@ public long getResourceLength(Node node) throws RepositoryException {
@Override
public Date getLastModified(Node node) throws RepositoryException {
if (!node.hasNode(CONTENT_NODE_NAME)) return null;
if (!node.hasNode(CONTENT_NODE_NAME)) { return null; }
return node.getProperty(CONTENT_NODE_NAME + "/" + MODIFIED_PROP_NAME)
.getDate().getTime();
@@ -157,53 +163,54 @@ public boolean isFolder(Node node) throws RepositoryException {
/**
* @param node the node to check
* @return true if {@code node}'s primary type is one of the types in {@link #filePrimaryTypes}; may not be null
* @throws RepositoryException if an error occurs checking the node's primary type
* @return true if {@code node}'s primary type is one of the types in
* {@link #filePrimaryTypes}; may not be null
* @throws RepositoryException if an error occurs checking the node's
* primary type
*/
@Override
public boolean isFile(Node node) throws RepositoryException {
for (String nodeType : filePrimaryTypes) {
if (node.isNodeType(nodeType)) return true;
if (node.isNodeType(nodeType)) { return true; }
}
return false;
}
/**
* @param node the node to check
* @return true if {@code node}'s primary type is one of the types in {@link #contentPrimaryTypes}; may not be null
* @throws RepositoryException if an error occurs checking the node's primary type
* @return true if {@code node}'s primary type is one of the types in
* {@link #contentPrimaryTypes}; may not be null
* @throws RepositoryException if an error occurs checking the node's
* primary type
*/
private boolean isContent(Node node) throws RepositoryException {
for (String nodeType : contentPrimaryTypes) {
if (node.isNodeType(nodeType)) return true;
if (node.isNodeType(nodeType)) { return true; }
}
return false;
}
@Override
public void createFile(Node parentNode, String fileName)
throws RepositoryException {
throws RepositoryException {
new Datastream(parentNode.getSession(), parentNode.getPath() +
"/" + fileName);
}
@Override
public void createFolder(Node parentNode, String folderName)
throws RepositoryException {
throws RepositoryException {
Node newFolder = parentNode.addNode(folderName, newFolderPrimaryType);
new FedoraObject(newFolder);
}
@Override
public long
setContent(Node parentNode, String resourceName,
InputStream newContent, String contentType,
String characterEncoding) throws RepositoryException,
IOException {
public long setContent(Node parentNode, String resourceName,
InputStream newContent, String contentType, String characterEncoding)
throws RepositoryException, IOException {
Datastream ds = new Datastream(parentNode);
try {
ds.setContent(newContent, contentType, null, null, null);
@@ -0,0 +1,31 @@
DROP TABLE logging_event_exception IF EXISTS;
DROP TABLE logging_event_property IF EXISTS;
DROP TABLE logging_event IF EXISTS;
CREATE TABLE logging_event (
timestmp BIGINT NOT NULL,
formatted_message LONGVARCHAR NOT NULL,
logger_name VARCHAR(256) NOT NULL,
level_string VARCHAR(256) NOT NULL,
thread_name VARCHAR(256),
reference_flag SMALLINT,
arg0 VARCHAR(256),
arg1 VARCHAR(256),
arg2 VARCHAR(256),
arg3 VARCHAR(256),
caller_filename VARCHAR(256),
caller_class VARCHAR(256),
caller_method VARCHAR(256),
caller_line CHAR(4),
event_id BIGINT NOT NULL IDENTITY);
CREATE TABLE logging_event_property (
event_id BIGINT NOT NULL,
mapped_key VARCHAR(254) NOT NULL,
mapped_value LONGVARCHAR,
PRIMARY KEY(event_id, mapped_key),
FOREIGN KEY (event_id) REFERENCES logging_event(event_id));
CREATE TABLE logging_event_exception (
event_id BIGINT NOT NULL,
i SMALLINT NOT NULL,
trace_line VARCHAR(256) NOT NULL,
PRIMARY KEY(event_id, i),
FOREIGN KEY (event_id) REFERENCES logging_event(event_id));
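
These are the tables the DBAppender configured below writes into. A quick, hypothetical way to confirm that events are actually landing in the database (assuming the schema has been created by the build and the hsqldb jar is on the classpath; the AuditEventDump class name is made up for illustration) is a plain JDBC query such as:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Hypothetical helper, not part of the commit: dumps the persisted log events.
public class AuditEventDump {
    public static void main(final String[] args) throws Exception {
        // Same URL and credentials used by the sql-maven-plugin and the logback config.
        final String url = "jdbc:hsqldb:file:/tmp/audit.db;shutdown=true";
        try (Connection conn = DriverManager.getConnection(url, "sa", "");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(
                     "SELECT timestmp, level_string, logger_name, formatted_message "
                     + "FROM logging_event ORDER BY event_id")) {
            while (rs.next()) {
                System.out.printf("%d %s %s %s%n", rs.getLong(1),
                        rs.getString(2), rs.getString(3), rs.getString(4));
            }
        }
    }
}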
@@ -2,10 +2,23 @@
<!DOCTYPE configuration>
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%p %d{HH:mm:ss.SSS} \(%c{0}\) %m%n</pattern>
<encoder>
<pattern>%p %d{HH:mm:ss.SSS} \(%c{0}\) %m%n</pattern>
</encoder>
</appender>
<appender name="HSQLDB" class="ch.qos.logback.classic.db.DBAppender">
<connectionSource
class="ch.qos.logback.core.db.DriverManagerConnectionSource">
<sqlDialect class="ch.qos.logback.core.db.dialect.HSQLDialect"/>
<driverClass>org.hsqldb.jdbc.JDBCDriver</driverClass>
<url>jdbc:hsqldb:file:/tmp/audit.db;shutdown=true</url>
<user>sa</user>
<password></password>
</connectionSource>
</appender>
<logger name="org.fcrepo.audit" additivity="false" level="DEBUG">
<appender-ref ref="HSQLDB"/>
</logger>
<logger name="org.fcrepo" additivity="false" level="DEBUG">
<appender-ref ref="STDOUT"/>
</logger>
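
With this configuration, anything logged under org.fcrepo.audit at DEBUG or above goes to the HSQLDB DBAppender (and, since additivity is off, nowhere else), while the rest of org.fcrepo keeps logging to STDOUT. A minimal sketch of a caller, assuming the usual slf4j API that logback backs (the AuditExample class name is made up for illustration):

package org.fcrepo.audit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical example class; any logger named under org.fcrepo.audit behaves the same way.
public class AuditExample {

    private static final Logger LOGGER = LoggerFactory.getLogger(AuditExample.class);

    public void record(final String pid) {
        // DEBUG is enabled for org.fcrepo.audit above, so this event becomes a row in logging_event.
        LOGGER.debug("audit event for object {}", pid);
    }
}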
@@ -18,6 +18,6 @@
</util:set>
<context:component-scan
base-package="org.fcrepo.syndication, org.fcrepo.serialization.bagit, org.fcrepo.webhooks, org.fcrepo.legacy"/>
base-package="org.fcrepo.syndication, org.fcrepo.serialization.bagit, org.fcrepo.webhooks, org.fcrepo.legacy, org.fcrepo.audit"/>
</beans>
