diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000000..a3e764ae7b9
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,4 @@
+.settings
+.classpath
+.project
+target
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 00000000000..c9baaba9066
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,22 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+script: mvn verify -PintegrationTesting
+language: java
+jdk:
+ - openjdk6
+notifications:
+ email:
+ - oak-dev@jackrabbit.apache.org
diff --git a/README.md b/README.md
new file mode 100644
index 00000000000..c086ae241db
--- /dev/null
+++ b/README.md
@@ -0,0 +1,72 @@
+=======================================================
+Jackrabbit Oak - the next generation content repository
+=======================================================
+
+Jackrabbit Oak is an effort to implement a scalable and performant
+hierarchical content repository for use as the foundation of modern
+world-class web sites and other demanding content applications.
+
+The Oak effort is a part of the Apache Jackrabbit project.
+Apache Jackrabbit is a project of the Apache Software Foundation.
+
+Oak is currently alpha-level software. Use at your own risk with no
+stability or compatibility guarantees.
+
+Getting Started
+---------------
+
+To get started with Oak, build the latest sources with
+Maven 3 and Java 6 (or higher) like this:
+
+ mvn clean install
+
+To enable all integration tests, including the JCR TCK, use:
+
+ mvn clean install -PintegrationTesting
+
+Before committing changes or submitting a patch, please make sure that
+the above integration testing build passes without errors. If you like,
+you can enable integration tests by default by setting the
+`OAK_INTEGRATION_TESTING` environment variable.
+
+The build consists of the following main components:
+
+ - oak-parent - parent POM
+ - oak-commons - shared utility code
+ - oak-mk-api - MicroKernel API
+ - oak-mk - default MicroKernel implementation
+ - oak-mk-remote - MicroKernel remoting
+ - [oak-core][1] - Oak repository API and implementation
+ - oak-jcr - JCR binding for the Oak repository
+ - oak-sling - integration with Apache Sling
+ - oak-http - HTTP binding for Oak
+ - oak-run - runnable jar packaging
+ - oak-it - integration tests
+ - oak-it/mk - integration tests for MicroKernel
+ - oak-it/osgi - integration tests for OSGi
+ - oak-bench - performance tests
+
+ [1]: oak-core/README.md
+
+License
+-------
+
+(see [LICENSE.txt](LICENSE.txt) for full license details)
+
+Collective work: Copyright 2012 The Apache Software Foundation.
+
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
diff --git a/README.txt b/README.txt
deleted file mode 100644
index a09961ef883..00000000000
--- a/README.txt
+++ /dev/null
@@ -1,48 +0,0 @@
-============================================
-Oak - the next generation content repository
-============================================
-
-Oak is an effort implement a scalable and performant hierarchical content
-repository for use as the foundation of modern world-class web sites and
-other demanding content applications.
-
-The Oak effort is a part of the Apache Jackrabbit project.
-Apache Jackrabbit is a project of the Apache Software Foundation.
-
-
-Getting Started
----------------
-
-To get started with Oak, build the latest sources with
-Maven 3 and Java 6 (or higher) like this:
-
- mvn clean install
-
-The build consists of the following main components:
-
- oak-parent - parent POM
- oak-core - main codebase (incl. unit tests)
- oak-run - runnable jar packaging
- oak-it - integration tests
- oak-bench - performance tests
-
-
-License (see also LICENSE.txt)
-------------------------------
-
-Collective work: Copyright 2012 The Apache Software Foundation.
-
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements. See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/RELEASE-NOTES.txt b/RELEASE-NOTES.txt
new file mode 100644
index 00000000000..7847d0643f5
--- /dev/null
+++ b/RELEASE-NOTES.txt
@@ -0,0 +1,301 @@
+Release Notes -- Apache Jackrabbit Oak -- Version 0.5
+
+Introduction
+------------
+
+Jackrabbit Oak is an effort to implement a scalable and performant hierarchical content
+repository for use as the foundation of modern world-class web sites and
+other demanding content applications.
+
+The Oak effort is a part of the Apache Jackrabbit project.
+Apache Jackrabbit is a project of the Apache Software Foundation.
+
+Jackrabbit Oak 0.5 is to be considered alpha-level software. Use at your own risk
+with no stability or compatibility guarantees.
+
+Changes in Oak 0.5
+------------------
+
+Improvements
+
+[OAK-239] - MicroKernel.getRevisionHistory: maxEntries behavior should be documented
+[OAK-255] - Implement Node#getReferences() both for REFERENCE and WEAKREFERENCE
+[OAK-258] - Dummy implementation for session scoped locks
+[OAK-263] - Type of bindings should be covariant in SessionQueryEngine.executeQuery()
+[OAK-264] - MicroKernel.diff for depth limited, unspecified changes
+[OAK-274] - Split NodeFilter into its own class
+[OAK-275] - Introduce TreeLocation interface
+[OAK-282] - Use random port in oak-run tests
+[OAK-284] - Reduce memory usage of KernelNodeState
+[OAK-285] - Split CommitEditor into CommitEditor and Validator interfaces
+[OAK-289] - Remove TreeImpl.Children
+[OAK-290] - Move Query related interfaces in oak.spi.query
+[OAK-292] - Use Guava preconditions instead of asserts to enforce contract
+[OAK-315] - Separate built-in node types from ReadWriteNodeTypeManager
+
+Bug fixes
+
+[OAK-136] - NodeDelegate leakage from NodeImpl
+[OAK-221] - Clarify nature of 'path' parameter in oak-api
+[OAK-228] - inconsistent paths used in oak tests
+[OAK-229] - Review root-node shortcut in NamePathMapperImpl
+[OAK-230] - Review and fix inconsistent usage of oak-path in oak-jcr
+[OAK-238] - ValueFactory: Missing identifier validation when creating (weak)reference value from String
+[OAK-240] - mix:mergeConflict violates naming convention
+[OAK-242] - Mixin rep:MergeConflict is not a registered node type
+[OAK-243] - NodeImpl.getParent() not fully encapsulated in a SessionOperation
+[OAK-245] - Add import for org.h2 in oak-mk bundle
+[OAK-248] - Review path constants in the oak source code
+[OAK-252] - Stop sending observation events on shutdown
+[OAK-254] - waitForCommit returns null in certain situations
+[OAK-256] - JAAS Authentication failing in OSGi env due to classloading issue
+[OAK-257] - NPE in o.a.j.oak.security.privilege.PrivilegeDefinitionImpl constructor
+[OAK-265] - waitForCommit gets wrongly triggered on private branch commits
+[OAK-268] - XPathQueryEvaluator generates incorrect XPath query
+[OAK-272] - every session login causes a mk.branch operation
+[OAK-278] - Tree.getStatus() and Tree.getPropertyStatus() fail for items whose parent has been removed
+[OAK-279] - ChangeProcessor getting stuck while shutdown
+[OAK-286] - Possible NPE in LuceneIndex
+[OAK-287] - PrivilegeManagerImplTest.testJcrAll assumes that there are no custom privileges
+[OAK-291] - Clarify paths in Root and Tree
+[OAK-294] - nt:propertyDefinition has incorrect value constraints for property types
+[OAK-296] - PathUtils.isAncestor("/", "/") should return false but returns true
+[OAK-299] - Node Type support: SQL2QueryResultTest fails
+[OAK-311] - Remapping a namespace breaks existing content
+[OAK-313] - Trailing slash not removed for simple path in JCR to Oak path conversion
+[OAK-316] - CommitFailedException.throwRepositoryException swallows parts of the stack traces
+[OAK-330] - Some MongoMK tests do not use CommitImpl constructor correctly
+[OAK-332] - [MongoMK] Node is not visible in head revision
+[OAK-334] - Add read-only lucene directory
+
+Changes in Oak 0.4
+------------------
+
+New Features
+
+ [OAK-182] - Support for "invisible" internal content
+ [OAK-193] - TODO class for partially implemented features
+ [OAK-227] - MicroKernel API: add depth parameter to diff method
+
+Improvements
+
+ [OAK-153] - Split the CommitHook interface
+ [OAK-156] - Observation events need Session.refresh
+ [OAK-158] - Specify fixed memory settings for unit and integration tests
+ [OAK-161] - Refactor Tree#getChildStatus
+ [OAK-163] - Move the JCR TCK back to the integrationTesting profile
+ [OAK-164] - Replace Tree.remove(String) with Tree.remove()
+ [OAK-165] - NodeDelegate should not use Tree.getChild() but rather Root.getTree()
+ [OAK-166] - Add Tree.isRoot() method instead of relying on Tree.getParent() == null
+ [OAK-171] - Add NodeState.compareAgainstBaseState()
+ [OAK-172] - Optimize KernelNodeState equality checks
+ [OAK-174] - Refactor RootImpl and TreeImpl to take advantage of the child node state builder introduced with OAK-170
+ [OAK-176] - Reduce CoreValueFactoryImpl footprint
+ [OAK-183] - Remove duplicate fields from NodeImpl and PropertyImpl which are already in the ItemImpl super class
+ [OAK-184] - Allow PropertyState.getValues() to work on single-valued properties
+ [OAK-186] - Avoid unnecessary rebase operations
+ [OAK-192] - Define behavior of Tree#getParent() if the parent is not accessible
+ [OAK-194] - Define behavior of Tree#getProperty(String) in case of lack of access
+ [OAK-195] - State that Tree#hasProperty returns false of the property is not accessible
+ [OAK-196] - Make Root interface permission aware
+ [OAK-198] - Refactor RootImpl#merge
+ [OAK-199] - KernelNodeStore defines 2 access methods for the CommitEditor
+ [OAK-200] - Replace Commons Collections with Guava
+ [OAK-232] - Hardcoded "childOrder" in NodeDelegate
+
+Bug fixes
+
+ [OAK-155] - Query: limited support for the deprecated JCR 1.0 query language Query.SQL
+ [OAK-173] - MicroKernel filter syntax is not proper JSON
+ [OAK-177] - Too fast timeout in MicroKernelIT.waitForCommit
+ [OAK-179] - Tests should not fail if there is a jcr:system node
+ [OAK-185] - Trying to remove a missing property throws PathNotFoundException
+ [OAK-187] - ConcurrentModificationException during gc run
+ [OAK-188] - Invalid JSOP encoding in CommitBuilder and KernelNodeStoreBranch
+ [OAK-207] - TreeImpl#getStatus() never returns REMOVED
+ [OAK-208] - RootImplFuzzIT test failures
+ [OAK-209] - BlobStore: use SHA-256 instead of SHA-1, and use two directory levels for FileBlobStore
+ [OAK-211] - CompositeEditor should keep the base node state stable
+ [OAK-213] - Misleading exception message in NodeImpl#getParent
+ [OAK-215] - Make definition of ItemDelegate#getParent permission aware
+ [OAK-219] - SessionDelegate#getRoot throws IllegalStateException if the root node is not accessible
+ [OAK-224] - Allow the ContentRepositoryImpl to receive a CommitEditor in the constructor
+
+Changes in Oak 0.3
+------------------
+
+New Features
+
+ [OAK-9] - Internal tree builder
+ [OAK-12] - Implement a test suite for the MicroKernel
+ [OAK-33] - Values in oak-core
+ [OAK-45] - Add support for branching and merging of private copies to MicroKernel
+ [OAK-68] - Extension point for commit validation
+ [OAK-75] - specify format and semantics of 'filter' parameter in MicroKernel API
+ [OAK-100] - Proper CommitHook handling in NodeStore
+ [OAK-119] - Oak performance benchmark
+ [OAK-133] - Session.refresh(true) should allow for manual conflict reconciliation
+
+Improvements
+
+ [OAK-15] - Clean up oak-jcr
+ [OAK-19] - Consolidate JSON utilities
+ [OAK-32] - Drop MicroKernel.dispose()
+ [OAK-40] - Define session-info like user identification for communication with oak-api
+ [OAK-54] - IOUtils.readVarInt and readVarLong can result in an endless loop on EOF
+ [OAK-65] - Naming of NodeState and related classes
+ [OAK-80] - Implement batched writing for KernelNodeStore
+ [OAK-84] - Delegates for Session, Node, Property and Item
+ [OAK-86] - Make setProperty methods of NodeStateBuilder and Tree return the affected property
+ [OAK-87] - Declarative services and OSGi configuration
+ [OAK-89] - Improve exception handling
+ [OAK-92] - Remove org.apache.jackrabbit.mk.HelloWorld
+ [OAK-96] - PathUtils should use assertions to enable validation instead of system property
+ [OAK-97] - Implement Item.toString() for logging and debugging purposes
+ [OAK-102] - Expose the branch feature from NodeStore
+ [OAK-106] - Use NodeStateBuilder instances to record changes in TreeImpl
+ [OAK-109] - Efficient diffing against the base node state
+ [OAK-112] - Refactor ModifiedNodeState and related classes to use type safe iterator utilities
+ [OAK-113] - drop MicroKernel getNodes(String, String) convenience signature
+ [OAK-115] - ItemDelegate and sub classes should throw IllegalItemStateException on stale items
+ [OAK-116] - MicroKernel API: clarify semantics of getNodes depth, offset and count parameters
+ [OAK-120] - MicroKernel API: specific retention policy of binaries
+ [OAK-122] - Performance test suite
+ [OAK-126] - remove unused code
+ [OAK-138] - Move client/server package in oak-mk to separate project
+ [OAK-145] - Set up Travis CI builds
+ [OAK-142] - MicroKernel API: returning the :hash property should be optional
+ [OAK-143] - Refactor conflict reconciliation from OAK-133: move inner classes to o.a.j.oak.plugins.value
+ [OAK-148] - Drop feature checks from WorkspaceImpl
+ [OAK-149] - Automatic session refresh after namespace registry changes
+ [OAK-151] - Merge oak-it-jcr to oak-jcr
+ [OAK-159] - Do not use in memory Microkernel for TCK
+
+Bug fixes
+
+ [OAK-16] - Proper ValueFactory implementation and Value handling
+ [OAK-43] - Incomplete journal when move and copy operations are involved
+ [OAK-47] - Wrong results and NPE with copy operation
+ [OAK-49] - Session.getRepository() should return the object through which the Session was acquired
+ [OAK-55] - Provide reasonable way to set property on NodeStateEditor
+ [OAK-58] - connection leak in h2 persistence
+ [OAK-60] - occasional test case failure DbBlobStoreTest#testGarbageCollection
+ [OAK-73] - JsopReader and JsopWriter lack javadocs
+ [OAK-79] - Copy operation misses some child nodes
+ [OAK-83] - Copy operation would recurse indefinitely if memory permitted
+ [OAK-85] - NPE and wrong result on copy operation
+ [OAK-93] - Tree has wrong parent after move
+ [OAK-94] - oak-it/osgi fails due to required packages not being exported
+ [OAK-95] - path mapping needs to deal with relative paths
+ [OAK-99] - reading binary content fails for certain types of content
+ [OAK-105] - Workspace move operation should not do sanity checks in the scope of the current session
+ [OAK-110] - NPE in KernelNodeStoreBranch.diffToJsop
+ [OAK-121] - Occasional test failure in MicroKernelIT.testBlobs: java.net.SocketException: Broken pipe
+ [OAK-130] - Unexpected result of MicroKernel#getJournal after MicroKernel#merge
+ [OAK-131] - Session.save() silently discards pending changes
+ [OAK-134] - Session.save() should do an implicit refresh(true)
+ [OAK-135] - Better support for RangeIterators
+ [OAK-139] - Remove JsonBuilder
+ [OAK-146] - Wrong value passed to before parameter of CommitHook.afterCommit in KernelNodeStore.merge
+ [OAK-147] - Incorrect Comparator in CommitBuilder.persistStagedNodes
+
+Changes in Oak 0.2.1
+------------------
+
+New features
+
+ [OAK-59] - Implement Session.move
+ [OAK-63] - Implement workspace copy and move
+
+Improvements
+
+ [OAK-29] - Simplify SessionContext
+ [OAK-30] - Strongly typed wrapper for the MicroKernel
+ [OAK-31] - In-memory MicroKernel for testing
+ [OAK-44] - Release managements tweaks
+ [OAK-46] - Efficient diffing of large child node lists
+ [OAK-48] - MicroKernel.getNodes() should return null for not existing nodes instead of throwing an exception
+ [OAK-52] - Create smoke-test build profile
+ [OAK-53] - exclude longer running tests in the default maven profile
+ [OAK-67] - Initial OSGi Bundle Setup
+ [OAK-70] - MicroKernelInputStream test and optimization
+ [OAK-71] - Logging dependencies
+ [OAK-81] - Remove offset and count parameters from NodeState.getChildNodeEntries()
+
+Bug fixes
+
+ [OAK-20] - Remove usages of MK API from oak-jcr
+ [OAK-62] - ConnectionImpl should not acquire Microkernel instance
+ [OAK-69] - oak-run fails with NPE
+ [OAK-78] - waitForCommit() test failure for MK remoting
+ [OAK-82] - Running MicroKernelIT test with the InMem persistence creates a lot of GC threads
+
+Changes in Oak 0.1
+------------------
+
+New features
+
+ [OAK-3] - Internal tree model
+ [OAK-4] - Runnable jar packaging
+ [OAK-5] - JCR bindings for Oak
+ [OAK-6] - Setup integration tests and TCK tests
+ [OAK-7] - In-memory persistence
+
+Improvements
+
+ [OAK-1] - Setup basic build structure
+ [OAK-2] - Use Java 6 as base platform
+ [OAK-8] - Make return types of NodeState#getProperties() and NodeState#getChildNodeEntries() covariant
+ [OAK-10] - Impedance mismatch between signatures of NodeState#getChildeNodeEntries and MicroKernel#getNodes
+ [OAK-24] - Separate component for the microkernel
+ [OAK-25] - Factor repository descriptors into separate class
+ [OAK-26] - MVCC causes write skew
+ [OAK-42] - Prepare for first release
+
+Bug fixes
+
+ [OAK-27] - Remove Authenticator and CredentialsInfo in oak-jcr
+ [OAK-38] - KernelNodeState should handle multi valued properties
+ [OAK-39] - KernelNodeState does not handle boolean values correctly
+
+
+For more detailed information about all the changes in this and other
+Oak releases, please see the Oak issue tracker at
+
+ https://issues.apache.org/jira/browse/OAK
+
+Release Contents
+----------------
+
+This release consists of a single source archive packaged as a zip file.
+The archive can be unpacked with the jar tool from your JDK installation.
+See the README.md file for instructions on how to build this release.
+
+The source archive is accompanied by SHA1 and MD5 checksums and a PGP
+signature that you can use to verify the authenticity of your download.
+The public key used for the PGP signature can be found at
+https://svn.apache.org/repos/asf/jackrabbit/dist/KEYS.
+
+About Apache Jackrabbit Oak
+---------------------------
+
+Oak is an effort to implement a scalable and performant hierarchical content
+repository for use as the foundation of modern world-class web sites and
+other demanding content applications.
+
+The Oak effort is a part of the Apache Jackrabbit project.
+Apache Jackrabbit is a project of the Apache Software Foundation.
+
+For more information, visit http://jackrabbit.apache.org/oak
+
+About The Apache Software Foundation
+------------------------------------
+
+Established in 1999, The Apache Software Foundation provides organizational,
+legal, and financial support for more than 100 freely-available,
+collaboratively-developed Open Source projects. The pragmatic Apache License
+enables individual and commercial users to easily deploy Apache software;
+the Foundation's intellectual property framework limits the legal exposure
+of its 2,500+ contributors.
+
+For more information, visit http://www.apache.org/
diff --git a/assembly.xml b/assembly.xml
new file mode 100644
index 00000000000..3cff072e8a6
--- /dev/null
+++ b/assembly.xml
@@ -0,0 +1,32 @@
+
+
+ src
+
+ zip
+
+
+
+ ${project.basedir}
+
+
+ **/target/**
+ **/.*/**
+
+
+
+
\ No newline at end of file
diff --git a/check-release.sh b/check-release.sh
new file mode 100755
index 00000000000..30e339e938d
--- /dev/null
+++ b/check-release.sh
@@ -0,0 +1,115 @@
+#!/bin/sh
+
+##
+## Licensed to the Apache Software Foundation (ASF) under one or more
+## contributor license agreements. See the NOTICE file distributed with
+## this work for additional information regarding copyright ownership.
+## The ASF licenses this file to You under the Apache License, Version 2.0
+## (the "License"); you may not use this file except in compliance with
+## the License. You may obtain a copy of the License at
+##
+## http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
+
+USERNAME=${1}
+VERSION=${2}
+SHA=${3}
+
+if [ -z "$USERNAME" -o -z "$VERSION" -o -z "$SHA" ]
+then
+ echo "Usage: $0 <apache-username> <version> <sha-checksum> [temp-directory]"
+ exit
+fi
+
+STAGING="http://people.apache.org/~$USERNAME/oak/$VERSION/"
+
+WORKDIR=${4:-target/oak-staging-`date +%s`}
+mkdir $WORKDIR -p -v
+
+echo "[INFO] ------------------------------------------------------------------------"
+echo "[INFO] DOWNLOAD STAGED REPOSITORY "
+echo "[INFO] ------------------------------------------------------------------------"
+echo "[INFO] "
+
+if [ `wget --help | grep "no-check-certificate" | wc -l` -eq 1 ]
+then
+ CHECK_SSL=--no-check-certificate
+fi
+
+wget $CHECK_SSL --wait 1 -nv -r -np "--reject=html,txt" -P "$WORKDIR" -nH "--cut-dirs=3" --ignore-length "${STAGING}"
+
+echo "[INFO] ------------------------------------------------------------------------"
+echo "[INFO] CHECK SIGNATURES AND DIGESTS "
+echo "[INFO] ------------------------------------------------------------------------"
+echo "[INFO] "
+
+## 1. check sha from release email against src.zip.sha file
+
+downloaded_sha=$(cat `find $WORKDIR -type f | grep jackrabbit-oak-$VERSION-src.zip.sha`)
+if [ "$SHA" = "$downloaded_sha" ]; then echo "[INFO] Step 1. Release checksum matches provided checksum."; else echo "[ERROR] Step 1. Release checksum does not match provided checksum!"; fi
+echo "[INFO] "
+
+## 2. check signatures on the artifacts
+echo "[INFO] Step 2. Check individual files"
+
+for f in `find ${WORKDIR} -type f | grep '\.\(zip\|rar\|jar\|war\)$'`
+do
+ echo "[INFO] $f"
+ gpg --verify $f.asc 2>/dev/null
+ if [ "$?" = "0" ]; then CHKSUM="GOOD"; else CHKSUM="BAD!!!!!!!!"; fi
+ if [ ! -f "$f.asc" ]; then CHKSUM="----"; fi
+ echo "gpg: ${CHKSUM}"
+
+ for hash in md5 sha1
+ do
+ tp=`echo $hash | cut -c 1-3`
+ if [ ! -f "$f.$tp" ]
+ then
+ CHKSUM="----"
+ else
+ A="`cat $f.$tp 2>/dev/null`"
+ B="`openssl $hash $f 2>/dev/null | sed 's/.*= *//' `"
+ if [ "$A" = "$B" ]; then CHKSUM="GOOD (`cat $f.$tp`)"; else CHKSUM="BAD!! : $A not equal to $B"; fi
+ fi
+ echo "$tp : ${CHKSUM}"
+ done
+done
+
+## 3. check tag contents vs src archive contents
+echo "[INFO] "
+echo "[INFO] Step 3. Check SVN Tag for version $VERSION with src zip file contents"
+
+echo "[INFO] doing svn checkout, please wait..."
+SVNTAGDIR="$WORKDIR/tag-svn/jackrabbit-oak-$VERSION"
+svn --quiet export http://svn.apache.org/repos/asf/jackrabbit/oak/tags/jackrabbit-oak-$VERSION $SVNTAGDIR
+
+echo "[INFO] unzipping src zip file, please wait..."
+ZIPTAG="$WORKDIR/tag-zip"
+unzip -q $WORKDIR/jackrabbit-oak-$VERSION-src.zip -d $ZIPTAG
+ZIPTAGDIR="$ZIPTAG/jackrabbit-oak-$VERSION"
+
+DIFFOUT=`diff -r $SVNTAGDIR $ZIPTAGDIR`
+if [ -n "$DIFFOUT" ]
+then
+ echo "[ERROR] Found some differences!"
+ echo "$DIFFOUT"
+else
+ echo "[INFO] No differences found."
+fi
+
+## 4. run the build with the pedantic profile to have the rat licence check enabled
+
+echo "[INFO] ------------------------------------------------------------------------"
+echo "[INFO] RUNNING MAVEN BUILD "
+echo "[INFO] ------------------------------------------------------------------------"
+echo "[INFO] "
+
+cd "$ZIPTAGDIR"
+mvn package -Ppedantic
+
diff --git a/doc/construct.md b/doc/construct.md
new file mode 100644
index 00000000000..a21e9cd3e5f
--- /dev/null
+++ b/doc/construct.md
@@ -0,0 +1,78 @@
+
+
+# Repository construction
+
+Oak comes with a simple mechanism for constructing content repositories
+for use in embedded deployments and test cases. This article describes this
+mechanism. Deployments in managed environments like OSGi should use the native
+construction/configuration mechanism of the environment.
+
+The core class to use is called `Oak` and can be found in the
+`org.apache.jackrabbit.oak` package inside `oak-core`. It takes a
+`MicroKernel` instance and wraps it into a `ContentRepository`:
+
+ MicroKernel kernel = ...;
+ ContentRepository repository = new Oak(kernel).createContentRepository();
+
+For test purposes you can use the default constructor that
+automatically instantiates an in-memory `MicroKernel` for use with the
+repository. And if you're only using the test repository for a single
+`ContentSession` or just a single `Root`, then you can shortcut the login
+steps by using either of the last two statements below:
+
+ ContentRepository repository = new Oak().createContentRepository();
+ ContentSession session = new Oak().createContentSession();
+ Root root = new Oak().createRoot();
+
+By default no pluggable components are associated with the created
+repository, so all login attempts will work and result in full write
+access. There's also no need to close the sessions or otherwise
+release acquired resources, as normal garbage collection will take
+care of everything.
+
+To add extra functionality like type validation or indexing support,
+use the `with()` method. The method takes all kinds of Oak plugins and
+adds them to the repository to be created. The method returns the Oak
+instance being used, so you can chain method calls like this:
+
+ ContentRepository repository = new Oak(kernel)
+ .with(new InitialContent()) // add initial content
+ .with(new DefaultTypeEditor()) // automatically set default types
+ .with(new NameValidatorProvider()) // allow only valid JCR names
+ .with(new SecurityProviderImpl()) // use the default security
+ .with(new PropertyIndexHook()) // simple indexing support
+ .with(new PropertyIndexProvider()) // search support for the indexes
+ .createContentRepository();
+
+As you can see, constructing a fully featured JCR repository like this
+will require quite a few plugins. To avoid having to specify them all
+whenever constructing a new repository, we also have a class called
+`Jcr` in the `org.apache.jackrabbit.oak.jcr` package in `oak-jcr`. That
+class works much like the `Oak` class, but it constructs
+`javax.jcr.Repository` instances instead of `ContentRepositories` and
+automatically includes all the plugin components needed for proper JCR
+functionality:
+
+ MicroKernel kernel = ...;
+ Repository repository = new Jcr(kernel).createRepository();
+
+The `Jcr` class supports all the same `with()` methods as the `Oak` class
+does, so you can easily extend the constructed JCR repository with custom
+functionality if you like. For test purposes the `Jcr` class also has an
+empty default constructor that works like the one in the `Oak` class.
+
diff --git a/doc/nodestate.md b/doc/nodestate.md
new file mode 100644
index 00000000000..b730795f86c
--- /dev/null
+++ b/doc/nodestate.md
@@ -0,0 +1,325 @@
+
+
+# Understanding the node state model
+
+This article describes the node state model that is the core design
+abstraction inside the oak-core component. Understanding the node state
+model is essential to working with Oak internals and to building custom
+Oak extensions.
+
+## Background
+
+Oak organizes all content in a large tree hierarchy that consists of nodes
+and properties. Each snapshot or revision of this content tree is immutable,
+and changes to the tree are expressed as a sequence of new revisions. The
+MicroKernel of an Oak repository is responsible for managing the content
+tree and its revisions.
+
+The JSON-based MicroKernel API works well as a part of a remote protocol
+but is cumbersome to use directly in oak-core. There are also many cases
+where transient or virtual content that doesn't (yet) exist in the
+MicroKernel needs to be managed by Oak. The node state model as expressed
+in the NodeState interface in oak-core is designed for these purposes. It
+provides a unified low-level abstraction for managing all tree content and
+lays the foundation for the higher-level Oak API that's visible to clients.
+
+## The state of a node
+
+A _node_ in Oak is an unordered collection of named properties and child
+nodes. As the content tree evolves through a sequence of revisions, a node
+in it will go through a series of different states. A _node state_ then is
+an _immutable_ snapshot of a specific state of a node and the subtree beneath
+it.
+
+To avoid making a special case of the root node and therefore to make it
+easy to write algorithms that can recursively process each subtree as a
+standalone content tree, a node state is _unnamed_ and does not contain
+information about its location within a larger content tree. Instead each
+property and child node state is uniquely named within a parent node state.
+An algorithm that needs to know the path of a node can construct it from
+the encountered names as it descends the tree structure.
+
+Since node states are immutable, they are also easy to keep _thread-safe_.
+Implementations that use mutable data structures like caches or otherwise
+aren't thread-safe by default, are expected to use other mechanisms like
+synchronization to ensure thread-safety.
+
+## The NodeState interface
+
+The above design principles are reflected in the `NodeState` interface
+in the `org.apache.jackrabbit.oak.spi.state` package of oak-core. The
+interface consists of four sets of methods:
+
+ * Methods for accessing properties
+ * Methods for accessing child nodes
+ * The `builder` method for building modified states
+ * The `compareAgainstBaseState` method for comparing states
+
+You can request a property or a child node by name, get the number of
+properties or child nodes, or iterate through all of them. Even though
+properties and child nodes are accessed through separate methods, they
+share the same namespace so a given name can either refer to a property
+or a child node, but not to both at the same time.
+
+Iteration order of properties and child nodes is _unspecified but stable_,
+so that re-iterating through the items of a _specific NodeState instance_
+will return the items in the same order as before, but the specific ordering
+is not defined nor does it necessarily remain the same across different
+instances.
+
+The last two methods, `builder` and `compareAgainstBaseState`, are
+covered in the next two sections. See also the `NodeState` javadocs for
+more details about this interface and all its methods.
+
+## Building new node states
+
+Since node states are immutable, a separate builder interface,
+`NodeBuilder`, is used to construct new, modified node states. Calling
+the `builder` method on a node state returns such a builder for
+modifying that node and the subtree below it.
+
+A node builder can be thought of as a _mutable_ version of a node state.
+In addition to property and child node access methods like the ones that
+are already present in the `NodeState` interface, the `NodeBuilder`
+interface contains the following key methods:
+
+ * The `setProperty` and `removeProperty` methods for modifying properties
+ * The `removeNode` method for removing a subtree
+ * The `setNode` method for adding or replacing a subtree
+ * The `child` method for creating or modifying a subtree with
+ a connected child builder
+ * The `getNodeState` method for getting a frozen snapshot of the modified
+ content tree
+
+The concept of _connected builders_ is designed to make it easy to manage
+complex content changes. Since individual node states are always immutable,
+modifying a particular node at a path like `/foo/bar` using the `setNode`
+method would require the following overly verbose code:
+
+ NodeState root = …;
+ NodeState foo = root.getChildNode("foo")
+ NodeState bar = foo.getChildNode("bar");
+ NodeBuilder barBuilder = bar.builder();
+ barBuilder.setProperty("test", …);
+ NodeBuilder fooBuilder = foo.builder();
+ fooBuilder.setNode("bar", barBuilder.getNodeState());
+ NodeBuilder rootBuilder = root.builder();
+ rootBuilder.setNode("foo", fooBuilder.getNodeState());
+ root = rootBuilder.getNodeState();
+
+The complexity here is caused by the need to explicitly construct and
+re-connect each modified node state along the path from the root to the
+modified content in `/foo/bar`. This is because each `NodeBuilder` instance
+created by the `builder` method is independent and can only be used to
+affect other builders in the manner shown above. In contrast the
+`child` method returns a builder instance that is "connected" to
+the parent builder in a way that any changes recorded in the child builder
+will automatically show up also in the node states created by the parent
+builder. With connected builders the above code can be simplified to:
+
+ NodeState root = …;
+ NodeBuilder rootBuilder = root.builder();
+ rootBuilder
+ .child("foo")
+ .child("bar")
+ .setProperty("test", …);
+ root = rootBuilder.getNodeState();
+
+Typically the only case where the `setNode` method is preferable over
+`child` is when moving or copying subtrees from one location
+to another. For example, the following code copies the `/orig` subtree
+to `/copy`:
+
+ NodeState root = …;
+ NodeBuilder rootBuilder = root.builder();
+ rootBuilder.setNode("copy", root.getChildNode("orig"));
+ root = rootBuilder.getNodeState();
+
+The node states constructed by a builder often retain an internal reference
+to the base state used by the builder. This allows common node state
+comparisons to perform really well as described in the next section.
+
+## Comparing node states
+
+As a node evolves through a sequence of states, it's often important to
+be able to tell what has changed between two states of the node. This
+functionality is available through the `compareAgainstBaseState` method.
+The method takes two arguments:
+
+ * A _base state_ for the comparison. The comparison will report all
+ changes necessary for moving from the given base state to the node
+ state on which the comparison method is invoked.
+ * A `NodeStateDiff` instance to which all detected changes are reported.
+ The diff interface contains callback methods for reporting added,
+ modified or removed properties or child nodes.
+
+The comparison method can actually be used to compare any two nodes, but the
+implementations of the method are typically heavily optimized for the case
+when the given base state actually is an earlier version of the same node.
+In practice this is by far the most common scenario for node state comparisons,
+and can typically be executed in `O(d)` time where `d` is the number of
+changes between the two states. The fallback strategy for comparing two
+completely unrelated node states can be much more expensive.
+
+An important detail of the `NodeStateDiff` mechanism is the `childNodeChanged`
+method that will get called if there are _any_ changes in the subtree starting
+at the named child node. The comparison method should thus be able to
+efficiently detect differences at any depth below the given nodes. On the
+other hand the `childNodeChanged` method is called only for the direct child
+node, and the diff implementation should explicitly recurse down the tree
+if it wants to know what exactly did change under that subtree. The code
+for such recursion typically looks something like this:
+
+ public void childNodeChanged(
+ String name, NodeState before, NodeState after) {
+ after.compareAgainstBaseState(before, ...);
+ }
+
+## The commit hook mechanism
+
+TODO
+
+## Commit validation
+
+TODO
+
+TODO: Basic validator class
+
+ class DenyContentWithName extends DefaultValidator {
+
+ private final String name;
+
+ public DenyContentWithName(String name) {
+ this.name = name;
+ }
+
+ @Override
+ public void propertyAdded(PropertyState after)
+ throws CommitFailedException {
+ if (name.equals(after.getName())) {
+ throw new CommitFailedException(
+ "Properties named " + name + " are not allowed");
+ }
+ }
+
+
+ }
+
+TODO: Example of how the validator works
+
+ Repository repository = new Jcr()
+ .with(new DenyContentWithName("bar"))
+ .createRepository();
+
+ Session session = repository.login();
+ Node root = session.getRootNode();
+ root.setProperty("foo", "abc");
+ session.save();
+ root.setProperty("bar", "def");
+ session.save(); // will throw an exception
+
+TODO: Extended example that also works below root and covers also node names
+
+ class DenyContentWithName extends DefaultValidator {
+
+ private final String name;
+
+ public DenyContentWithName(String name) {
+ this.name = name;
+ }
+
+ private void testName(String addedName) throws CommitFailedException {
+ if (name.equals(addedName)) {
+ throw new CommitFailedException(
+ "Content named " + name + " is not allowed");
+ }
+ }
+
+ @Override
+ public void propertyAdded(PropertyState after)
+ throws CommitFailedException {
+ testName(after.getName());
+ }
+
+ @Override
+ public Validator childNodeAdded(String name, NodeState after)
+ throws CommitFailedException {
+ testName(name);
+ return this;
+ }
+
+ @Override
+ public Validator childNodeChanged(
+ String name, NodeState before, NodeState after)
+ throws CommitFailedException {
+ return this;
+ }
+
+ }
+
+## Commit modification
+
+TODO
+
+TODO: Basic commit hook example
+
+ class RenameContentHook implements CommitHook {
+
+ private final String name;
+
+ private final String rename;
+
+ public RenameContentHook(String name, String rename) {
+ this.name = name;
+ this.rename = rename;
+ }
+
+ @Override @Nonnull
+ public NodeState processCommit(NodeState before, NodeState after)
+ throws CommitFailedException {
+ PropertyState property = after.getProperty(name);
+ if (property != null) {
+ NodeBuilder builder = after.builder();
+ builder.removeProperty(name);
+ if (property.isArray()) {
+ builder.setProperty(rename, property.getValues());
+ } else {
+ builder.setProperty(rename, property.getValue());
+ }
+ return builder.getNodeState();
+ }
+ return after;
+ }
+
+ }
+
+TODO: Using the commit hook to avoid the exception from a validator
+
+ Repository repository = new Jcr()
+ .with(new RenameContentHook("bar", "foo"))
+ .with(new DenyContentWithName("bar"))
+ .createRepository();
+
+ Session session = repository.login();
+ Node root = session.getRootNode();
+ root.setProperty("foo", "abc");
+ session.save();
+ root.setProperty("bar", "def");
+ session.save(); // will not throw an exception!
+ System.out.println(root.getProperty("foo").getString()); // Prints "def"!
+
diff --git a/oak-bench/README.txt b/oak-bench/README.txt
new file mode 100644
index 00000000000..bc4dbdd66c7
--- /dev/null
+++ b/oak-bench/README.txt
@@ -0,0 +1,85 @@
+---------------------------------
+Oak Performance Test Suite
+---------------------------------
+
+This directory contains a simple performance test suite that can be
+extended for ongoing Oak versions and micro kernels. Use the following
+command to run this test suite:
+
+ mvn clean install
+
+Note that the test suite will take more than an hour to complete, and to
+avoid distorting the results you should avoid putting any extra load on
+the computer while the test suite is running.
+
+The results are stored as oak*/target/*.txt report files and can
+be combined into an HTML report by running the following command on a
+(Unix) system where gnuplot is installed.
+
+ sh plot.sh
+
+Mac OS X note : if you want to execute the above script, you will need
+to install gnuplot and imagemagick2-svg from the Fink project. For
+more information : http://finkproject.org
+
+Selecting which tests to run
+----------------------------
+
+The -Donly command line parameter allows you to specify a regexp for
+selecting which performance test cases to run. To run a single test
+case, use a command like this:
+
+ mvn clean install -Donly=ConcurrentReadTest
+
+To run all concurrency tests, use:
+
+ mvn clean install -Donly=Concurrent.*Test
+
+Selecting which micro kernel to test
+----------------------------------------------------------
+
+The -Dmk command line parameter allows you to specify a regexp for
+selecting the micro kernel and configurations against which the
+performance tests are run. The default setting selects only the default
+micro kernel:
+
+ mvn clean install -Dmk=\d\.\d
+
+To run the tests against all included configurations, use:
+
+ mvn clean install -Dmk=.*
+
+Using a profiler
+----------------
+
+To enable a profiler, use the -Dagentlib= command line parameter:
+
+ mvn clean install -Dagentlib=hprof=cpu=samples,depth=10
+
+Adding a new performance test
+-----------------------------
+
+The tests run by this performance test suite are listed in the
+testPerformance() method of the AbstractPerformanceTest class in
+the org.apache.jackrabbit.oak.performance package of the oak-perf-base
+component that you can find in the ./base directory.
+
+Each test is a subclass of the AbstractTest class in that same package,
+and you need to implement at least the abstract runTest() method when
+creating a new test. The runTest() method should contain the code whose
+performance you want to measure. For best measurement results the method
+should normally take something between 0.1 to 10 seconds to execute, so
+you may need to add a constant-size loop around your code like is done
+for example in the LoginTest class. The test suite compares relative
+performance between different Oak versions, so the absolute time
+taken by the test method is irrelevant.
+
+Many performance tests need some setup and teardown code for things like
+building the content tree against which the test is being run. Such work
+should not be included in the runTest() method to prevent affecting the
+performance measurements. Instead you can override the before/afterTest()
+and before/afterSuite() methods that get called respectively before and
+after each individual test iteration and the entire test suite. See for
+example the SetPropertyTest class for an example of how these methods
+are best used.
+
diff --git a/oak-bench/base/pom.xml b/oak-bench/base/pom.xml
new file mode 100644
index 00000000000..a1a15e2b247
--- /dev/null
+++ b/oak-bench/base/pom.xml
@@ -0,0 +1,77 @@
+
+
+
+
+
+ 4.0.0
+
+
+ org.apache.jackrabbit
+ oak-bench-parent
+ 0.6-SNAPSHOT
+ ../parent/pom.xml
+
+
+ oak-bench-base
+ Oak Performance Test Utilities
+
+
+
+ javax.jcr
+ jcr
+ 2.0
+
+
+ org.apache.commons
+ commons-math
+ 2.0
+
+
+ org.apache.jackrabbit
+ oak-jcr
+ ${project.version}
+ provided
+
+
+ com.h2database
+ h2
+ 1.3.158
+
+
+ commons-io
+ commons-io
+ 1.4
+
+
+ org.slf4j
+ slf4j-api
+ 1.5.8
+
+
+ org.slf4j
+ slf4j-nop
+ 1.5.8
+
+
+ junit
+ junit
+
+
+
+
+
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/AbstractPerformanceTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/AbstractPerformanceTest.java
new file mode 100644
index 00000000000..54891999fcb
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/AbstractPerformanceTest.java
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.regex.Pattern;
+import javax.jcr.Credentials;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.SimpleCredentials;
+
+import org.apache.commons.io.output.FileWriterWithEncoding;
+import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
+import org.apache.jackrabbit.mk.api.MicroKernel;
+import org.apache.jackrabbit.mk.core.MicroKernelImpl;
+import org.apache.jackrabbit.oak.jcr.Jcr;
+
+/**
+ * This class calls all known performance tests.
+ */
+public abstract class AbstractPerformanceTest {
+
+ /**
+ * The warmup time, in ms.
+ */
+ private final int warmup = Integer.getInteger("oak.performanceTest.warmup", 0);
+
+ /**
+ * How long each test is repeated, in ms.
+ */
+ private final int runtime = Integer.getInteger("oak.performanceTest.runtime", 100);
+
+ private final Credentials credentials = new SimpleCredentials("admin",
+ "admin".toCharArray());
+
+ private final Pattern microKernelPattern = Pattern.compile(System
+ .getProperty("mk", ".*"));
+ private final Pattern testPattern = Pattern.compile(System.getProperty(
+ "only", ".*"));
+
+ protected void testPerformance(String name, String microKernel)
+ throws Exception {
+
+ runTest(new LoginTest(), name, microKernel);
+ runTest(new LoginLogoutTest(), name, microKernel);
+ runTest(new ReadPropertyTest(), name, microKernel);
+ runTest(new SetPropertyTest(), name, microKernel);
+ runTest(new SmallFileReadTest(), name, microKernel);
+ runTest(new SmallFileWriteTest(), name, microKernel);
+ runTest(new ConcurrentReadTest(), name, microKernel);
+ runTest(new ConcurrentReadWriteTest(), name, microKernel);
+ runTest(new SimpleSearchTest(), name, microKernel);
+ runTest(new SQL2SearchTest(), name, microKernel);
+ runTest(new DescendantSearchTest(), name, microKernel);
+ runTest(new SQL2DescendantSearchTest(), name, microKernel);
+ runTest(new CreateManyChildNodesTest(), name, microKernel);
+ runTest(new UpdateManyChildNodesTest(), name, microKernel);
+ runTest(new TransientManyChildNodesTest(), name, microKernel);
+
+ }
+
+ private void runTest(AbstractTest test, String name, String microKernel) {
+ if (microKernelPattern.matcher(microKernel).matches()
+ && testPattern.matcher(test.toString()).matches()) {
+
+ MicroKernel mk = createMicroKernel(microKernel);
+ try {
+ Repository repository= createRepository(mk);
+
+ // Run the test
+ DescriptiveStatistics statistics = runTest(test, repository);
+ if (statistics.getN() > 0) {
+ writeReport(test.toString(), name, microKernel, statistics);
+ }
+ } catch (RepositoryException re) {
+ re.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ disposeMicroKernel(mk);
+ }
+ }
+ }
+
+ private DescriptiveStatistics runTest(AbstractTest test,
+ Repository repository) throws Exception {
+ DescriptiveStatistics statistics = new DescriptiveStatistics();
+
+ test.setUp(repository, credentials);
+ try {
+ // Run a few iterations to warm up the system
+ if (warmup > 0) {
+ long warmupEnd = System.currentTimeMillis() + warmup;
+ while (System.currentTimeMillis() < warmupEnd) {
+ test.execute();
+ }
+ }
+
+ // Run test iterations, and capture the execution times
+ long runtimeEnd = System.currentTimeMillis() + runtime;
+ while (System.currentTimeMillis() < runtimeEnd) {
+ statistics.addValue(test.execute());
+ }
+ } finally {
+ test.tearDown();
+ }
+
+ return statistics;
+ }
+
+ private static void writeReport(String test, String name, String microKernel,
+ DescriptiveStatistics statistics) throws IOException {
+ File report = new File("target", test + "-" + microKernel + ".txt");
+
+ boolean needsPrefix = !report.exists();
+ PrintWriter writer = new PrintWriter(new FileWriterWithEncoding(report,
+ "UTF-8", true));
+ try {
+ if (needsPrefix) {
+ writer.format(
+ "# %-34.34s min 10%% 50%% 90%% max%n",
+ test);
+ }
+
+ writer.format("%-36.36s %6.0f %6.0f %6.0f %6.0f %6.0f%n",
+ name, statistics.getMin(), statistics.getPercentile(10.0),
+ statistics.getPercentile(50.0),
+ statistics.getPercentile(90.0), statistics.getMax());
+ } finally {
+ writer.close();
+ }
+ }
+
+ protected MicroKernel createMicroKernel(String microKernel) {
+
+ // TODO: depending on the microKernel string a particular repository
+ // with that MK must be returned
+
+ return new MicroKernelImpl("target/mk-tck-" + System.currentTimeMillis());
+
+ }
+
+ protected void disposeMicroKernel(MicroKernel kernel) {
+ ((MicroKernelImpl) kernel).dispose();
+ }
+
+ protected Repository createRepository(MicroKernel mk) {
+ return new Jcr(mk).createRepository();
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/AbstractTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/AbstractTest.java
new file mode 100644
index 00000000000..cf70617b66a
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/AbstractTest.java
@@ -0,0 +1,222 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import java.util.LinkedList;
+import java.util.List;
+
+import javax.jcr.Credentials;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+/**
+ * Abstract base class for individual performance benchmarks.
+ */
+public abstract class AbstractTest {
+
+ private Repository repository;
+
+ private Credentials credentials;
+
+ private List sessions;
+
+ private List threads;
+
+ private volatile boolean running;
+
+ protected static int getScale(int def) {
+ int scale = Integer.getInteger("scale", 0);
+ if (scale == 0) {
+ scale = def;
+ }
+ return scale;
+ }
+
+ /**
+ * Prepares this performance benchmark.
+ *
+ * @param repository the repository to use
+ * @param credentials credentials of a user with write access
+ * @throws Exception if the benchmark can not be prepared
+ */
+ public void setUp(Repository repository, Credentials credentials)
+ throws Exception {
+ this.repository = repository;
+ this.credentials = credentials;
+ this.sessions = new LinkedList();
+ this.threads = new LinkedList();
+
+ this.running = true;
+
+ beforeSuite();
+ }
+
+ /**
+ * Executes a single iteration of this test.
+ *
+ * @return number of milliseconds spent in this iteration
+ * @throws Exception if an error occurs
+ */
+ public long execute() throws Exception {
+ beforeTest();
+ try {
+ long start = System.currentTimeMillis();
+ // System.out.println("execute " + this);
+ runTest();
+ return System.currentTimeMillis() - start;
+ } finally {
+ afterTest();
+ }
+ }
+ /**
+ * Cleans up after this performance benchmark.
+ *
+ * @throws Exception if the benchmark can not be cleaned up
+ */
+ public void tearDown() throws Exception {
+ this.running = false;
+ for (Thread thread : threads) {
+ thread.join();
+ }
+
+ afterSuite();
+
+ for (Session session : sessions) {
+ if (session.isLive()) {
+ session.logout();
+ }
+ }
+
+ this.threads = null;
+ this.sessions = null;
+ this.credentials = null;
+ this.repository = null;
+ }
+
+ /**
+ * Run before any iterations of this test get executed. Subclasses can
+ * override this method to set up static test content.
+ *
+ * @throws Exception if an error occurs
+ */
+ protected void beforeSuite() throws Exception {
+ }
+
+ protected void beforeTest() throws Exception {
+ }
+
+ protected abstract void runTest() throws Exception;
+
+ protected void afterTest() throws Exception {
+ }
+
+ /**
+ * Run after all iterations of this test have been executed. Subclasses can
+ * override this method to clean up static test content.
+ *
+ * @throws Exception if an error occurs
+ */
+ protected void afterSuite() throws Exception {
+ }
+
+ protected void failOnRepositoryVersions(String... versions)
+ throws RepositoryException {
+ String repositoryVersion =
+ repository.getDescriptor(Repository.REP_VERSION_DESC);
+ for (String version : versions) {
+ if (repositoryVersion.startsWith(version)) {
+ throw new RepositoryException(
+ "Unable to run " + getClass().getName()
+ + " on repository version " + version);
+ }
+ }
+ }
+
+ protected Repository getRepository() {
+ return repository;
+ }
+
+ protected Credentials getCredentials() {
+ return credentials;
+ }
+
+ /**
+ * Returns a new reader session that will be automatically closed once
+ * all the iterations of this test have been executed.
+ *
+ * @return reader session
+ */
+ protected Session loginReader() {
+ try {
+ Session session = repository.login();
+ sessions.add(session);
+ return session;
+ } catch (RepositoryException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Returns a new writer session that will be automatically closed once
+ * all the iterations of this test have been executed.
+ *
+ * @return writer session
+ */
+ protected Session loginWriter() {
+ try {
+ Session session = repository.login(credentials);
+ sessions.add(session);
+ return session;
+ } catch (RepositoryException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Adds a background thread that repeatedly executes the given job
+ * until all the iterations of this test have been executed.
+ *
+ * @param job background job
+ */
+ protected void addBackgroundJob(final Runnable job) {
+ Thread thread = new Thread("Background job " + job) {
+ @Override
+ public void run() {
+ while (running) {
+ try {
+ // rate-limit, to avoid 100% cpu usage
+ Thread.sleep(10);
+ } catch (InterruptedException e) {
+ // ignore
+ }
+ job.run();
+ }
+ }
+ };
+ thread.setDaemon(true);
+ thread.setPriority(Thread.MIN_PRIORITY);
+ thread.start();
+ threads.add(thread);
+ }
+
+ public String toString() {
+ String name = getClass().getName();
+ return name.substring(name.lastIndexOf('.') + 1);
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/ConcurrentReadTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/ConcurrentReadTest.java
new file mode 100644
index 00000000000..1964fd0bac2
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/ConcurrentReadTest.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import java.util.Random;
+
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.SimpleCredentials;
+
+/**
+ * Test case that traverses 10k unstructured nodes (100x100) while 50 concurrent
+ * readers randomly access nodes from within this tree.
+ */
+public class ConcurrentReadTest extends AbstractTest {
+
+ protected static final int NODE_COUNT = 100;
+
+ private static final int READER_COUNT = getScale(20);
+
+ private Session session;
+
+ protected Node root;
+
+ @Override
+ public void beforeSuite() throws Exception {
+ session = getRepository().login(
+ new SimpleCredentials("admin", "admin".toCharArray()));
+ root = session.getRootNode().addNode("testroot", "nt:unstructured");
+ for (int i = 0; i < NODE_COUNT; i++) {
+ Node node = root.addNode("node" + i, "nt:unstructured");
+ for (int j = 0; j < NODE_COUNT; j++) {
+ node.addNode("node" + j, "nt:unstructured");
+ }
+ session.save();
+ }
+
+ for (int i = 0; i < READER_COUNT; i++) {
+ addBackgroundJob(new Reader());
+ }
+ }
+
+ class Reader implements Runnable {
+
+ private Session session;
+
+ private final Random random = new Random();
+
+ public void run() {
+
+ try {
+ session = getRepository().login(
+ new SimpleCredentials("admin", "admin".toCharArray()));
+ int i = random.nextInt(NODE_COUNT);
+ int j = random.nextInt(NODE_COUNT);
+ session.getRootNode()
+ .getNode("testroot/node" + i + "/node" + j);
+ } catch (RepositoryException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ }
+
+ @Override
+ public void runTest() throws Exception {
+ Reader reader = new Reader();
+ for (int i = 0; i < 1000; i++) {
+ reader.run();
+ }
+ }
+
+ @Override
+ public void afterSuite() throws Exception {
+ for (int i = 0; i < NODE_COUNT; i++) {
+ root.getNode("node" + i).remove();
+ session.save();
+ }
+
+ root.remove();
+ session.save();
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/ConcurrentReadWriteTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/ConcurrentReadWriteTest.java
new file mode 100644
index 00000000000..03b89af3510
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/ConcurrentReadWriteTest.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import java.util.Random;
+
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.SimpleCredentials;
+
+/**
+ * A {@link ConcurrentReadTest} with a single writer thread that continuously
+ * updates the nodes being accessed by the readers.
+ */
+public class ConcurrentReadWriteTest extends ConcurrentReadTest {
+
+ @Override
+ public void beforeSuite() throws Exception {
+ super.beforeSuite();
+
+ addBackgroundJob(new Writer());
+ }
+
+ class Writer implements Runnable {
+
+ private Session session;
+
+ private final Random random = new Random();
+
+ private long count;
+
+ public void run() {
+ try {
+ session = getRepository().login(
+ new SimpleCredentials("admin", "admin".toCharArray()));
+ int i = random.nextInt(NODE_COUNT);
+ int j = random.nextInt(NODE_COUNT);
+ Node node = session.getRootNode().getNode(
+ "testroot/node" + i + "/node" + j);
+ node.setProperty("count", count++);
+ session.save();
+ } catch (RepositoryException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/CreateManyChildNodesTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/CreateManyChildNodesTest.java
new file mode 100644
index 00000000000..5c0b367ce87
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/CreateManyChildNodesTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+/**
+ * Test for measuring the performance of creating a node with
+ * {@value #CHILD_COUNT} child nodes.
+ */
+public class CreateManyChildNodesTest extends AbstractTest {
+
+ private static final int CHILD_COUNT = 10 * 1000;
+
+ private Session session;
+
+ @Override
+ public void beforeSuite() throws RepositoryException {
+ session = loginWriter();
+ }
+
+ @Override
+ public void beforeTest() throws RepositoryException {
+ }
+
+ @Override
+ public void runTest() throws Exception {
+ Node node = session.getRootNode().addNode("testnode", "nt:unstructured");
+ for (int i = 0; i < CHILD_COUNT; i++) {
+ node.addNode("node" + i, "nt:unstructured");
+ }
+ session.save();
+ }
+
+ @Override
+ public void afterTest() throws RepositoryException {
+ session.getRootNode().getNode("testnode").remove();
+ session.save();
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/DescendantSearchTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/DescendantSearchTest.java
new file mode 100644
index 00000000000..cd7cc1bc648
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/DescendantSearchTest.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import javax.jcr.Node;
+import javax.jcr.NodeIterator;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.query.Query;
+import javax.jcr.query.QueryManager;
+
+/**
+ * Performance test to check performance of queries on sub-trees.
+ */
+public class DescendantSearchTest extends AbstractTest {
+
+ private static final int NODE_COUNT = 100;
+
+ private Session session;
+
+ private Node root;
+
+ protected Query createQuery(QueryManager manager, int i)
+ throws RepositoryException {
+ @SuppressWarnings("deprecation")
+ String xpath = Query.XPATH;
+ return manager.createQuery("/jcr:root/testroot//element(*,nt:base)[@testcount=" + i + "]", xpath);
+ }
+
+ @Override
+ public void beforeSuite() throws RepositoryException {
+ session = getRepository().login(getCredentials());
+
+ root = session.getRootNode().addNode("testroot", "nt:unstructured");
+ for (int i = 0; i < NODE_COUNT; i++) {
+ Node node = root.addNode("node" + i, "nt:unstructured");
+ for (int j = 0; j < NODE_COUNT; j++) {
+ Node child = node.addNode("node" + j, "nt:unstructured");
+ child.setProperty("testcount", j);
+ }
+ session.save();
+ }
+
+ IndexManager.createPropertyIndex(session, "testcount");
+
+ }
+
+ @Override
+ public void runTest() throws Exception {
+ QueryManager manager = session.getWorkspace().getQueryManager();
+ for (int i = 0; i < NODE_COUNT; i++) {
+ Query query = createQuery(manager, i);
+ NodeIterator iterator = query.execute().getNodes();
+ while (iterator.hasNext()) {
+ Node node = iterator.nextNode();
+ if (node.getProperty("testcount").getLong() != i) {
+ throw new Exception("Invalid test result: " + node.getPath());
+ }
+ }
+ }
+ }
+
+ @Override
+ public void afterSuite() throws RepositoryException {
+ for (int i = 0; i < NODE_COUNT; i++) {
+ root.getNode("node" + i).remove();
+ session.save();
+ }
+
+ root.remove();
+ session.save();
+ session.logout();
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/IndexManager.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/IndexManager.java
new file mode 100644
index 00000000000..6843712c503
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/IndexManager.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+import org.apache.jackrabbit.oak.commons.PathUtils;
+import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
+
+/**
+ * A utility class to manage indexes in Oak.
+ */
+public class IndexManager {
+
+ /**
+ * The root node of the index definition (configuration) nodes.
+ */
+ public static final String INDEX_CONFIG_PATH = '/' + IndexConstants.INDEX_DEFINITIONS_NAME + "/indexes";
+
+ /**
+ * Creates a property index for the given property if such an index doesn't
+ * exist yet, and if the repository supports property indexes. The session
+ * may not have pending changes.
+ *
+ * @param session the session
+ * @param propertyName the property name
+ * @return true if the index was created or already existed
+ */
+ public static boolean createPropertyIndex(Session session,
+ String propertyName) throws RepositoryException {
+ return createIndex(session, "property@" + propertyName);
+ }
+
+ private static Node getIndexNode(Session session)
+ throws RepositoryException {
+ Node n = session.getRootNode();
+ for (String e : PathUtils.elements(INDEX_CONFIG_PATH)) {
+ if (!n.hasNode(e)) {
+ return null;
+ }
+ n = n.getNode(e);
+ }
+ return n;
+ }
+
+ private static boolean createIndex(Session session, String indexNodeName)
+ throws RepositoryException {
+ if (session.hasPendingChanges()) {
+ throw new RepositoryException("The session has pending changes");
+ }
+ Node indexes = getIndexNode(session);
+ if (indexes == null) {
+ return false;
+ }
+ if (!indexes.hasNode(indexNodeName)) {
+ indexes.addNode(indexNodeName);
+ session.save();
+ }
+ return true;
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/LoginLogoutTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/LoginLogoutTest.java
new file mode 100644
index 00000000000..a5aa2ba6a6d
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/LoginLogoutTest.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import javax.jcr.Credentials;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.SimpleCredentials;
+
+public class LoginLogoutTest extends AbstractTest {
+
+ @Override
+ public void setUp(Repository repository, Credentials credentials)
+ throws Exception {
+ super.setUp(repository,
+ new SimpleCredentials("admin", "admin".toCharArray()));
+ }
+
+ @Override
+ public void runTest() throws RepositoryException {
+ Repository repository = getRepository();
+ for (int i = 0; i < 1000; i++) {
+ Session session = repository.login(getCredentials());
+ try {
+ session.getRootNode();
+ } finally {
+ session.logout();
+ }
+ }
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/LoginTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/LoginTest.java
new file mode 100644
index 00000000000..5cfc1108519
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/LoginTest.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import javax.jcr.Credentials;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.SimpleCredentials;
+
+public class LoginTest extends AbstractTest {
+
+ private final Session[] sessions = new Session[1000];
+
+ @Override
+ public void setUp(Repository repository, Credentials credentials)
+ throws Exception {
+ super.setUp(repository,
+ new SimpleCredentials("admin", "admin".toCharArray()));
+ }
+
+ @Override
+ public void runTest() throws RepositoryException {
+ for (int i = 0; i < sessions.length; i++) {
+ sessions[i] = getRepository().login(getCredentials(), "default");
+ }
+
+ }
+
+ @Override
+ public void afterTest() throws RepositoryException {
+ for (Session session : sessions) {
+ session.logout();
+ }
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/ReadPropertyTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/ReadPropertyTest.java
new file mode 100644
index 00000000000..f3221c3beb0
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/ReadPropertyTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import javax.jcr.Node;
+import javax.jcr.Session;
+
+/**
+ * {@code ReadPropertyTest} implements a performance test, which reads
+ * three properties: one with a jcr prefix, one with the empty prefix and a
+ * third one, which does not exist.
+ */
+public class ReadPropertyTest extends AbstractTest {
+
+ private Session session;
+
+ private Node root;
+
+ @Override
+ protected void beforeSuite() throws Exception {
+ session = getRepository().login(getCredentials());
+ root = session.getRootNode().addNode(
+ getClass().getSimpleName(), "nt:unstructured");
+ root.setProperty("property", "value");
+ session.save();
+ }
+
+ @Override
+ protected void runTest() throws Exception {
+ for (int i = 0; i < 10000; i++) {
+ root.getProperty("jcr:primaryType");
+ root.getProperty("property");
+ root.hasProperty("does-not-exist");
+ }
+ }
+
+ @Override
+ protected void afterSuite() throws Exception {
+ root.remove();
+ session.save();
+ session.logout();
+ }
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SQL2DescendantSearchTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SQL2DescendantSearchTest.java
new file mode 100644
index 00000000000..6919a81198c
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SQL2DescendantSearchTest.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import javax.jcr.RepositoryException;
+import javax.jcr.query.Query;
+import javax.jcr.query.QueryManager;
+
+/**
+ * SQL-2 version of the sub-tree performance test.
+ */
+public class SQL2DescendantSearchTest extends DescendantSearchTest {
+
+ @Override
+ protected Query createQuery(QueryManager manager, int i)
+ throws RepositoryException {
+ return manager.createQuery(
+ "SELECT * FROM [nt:base] AS n WHERE ISDESCENDANTNODE(n, '/testroot') AND testcount=" + i,
+ "JCR-SQL2");
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SQL2SearchTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SQL2SearchTest.java
new file mode 100644
index 00000000000..1fc4749a079
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SQL2SearchTest.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import javax.jcr.RepositoryException;
+import javax.jcr.query.Query;
+import javax.jcr.query.QueryManager;
+
+public class SQL2SearchTest extends SimpleSearchTest {
+
+ @Override
+ protected Query createQuery(QueryManager manager, int i)
+ throws RepositoryException {
+ return manager.createQuery(
+ "SELECT * FROM [nt:base] WHERE testcount=" + i,
+ "JCR-SQL2");
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SetPropertyTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SetPropertyTest.java
new file mode 100644
index 00000000000..dff5fef800b
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SetPropertyTest.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+/**
+ * Test for measuring the performance of setting a single property and
+ * saving the change.
+ */
+public class SetPropertyTest extends AbstractTest {
+
+ private Session session;
+
+ private Node node;
+
+ @Override
+ public void beforeSuite() throws RepositoryException {
+ session = getRepository().login(getCredentials());
+ node = session.getRootNode().addNode("testnode", "nt:unstructured");
+ session.save();
+ }
+
+ @Override
+ public void beforeTest() throws RepositoryException {
+ node.setProperty("count", -1);
+ session.save();
+ }
+
+ @Override
+ public void runTest() throws Exception {
+ for (int i = 0; i < 1000; i++) {
+ node.setProperty("count", i);
+ session.save();
+ }
+ }
+
+ @Override
+ public void afterTest() throws RepositoryException {
+ }
+
+ @Override
+ public void afterSuite() throws RepositoryException {
+ session.getRootNode().getNode("testnode").remove();
+ session.save();
+ session.logout();
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SimpleSearchTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SimpleSearchTest.java
new file mode 100644
index 00000000000..18edc0029aa
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SimpleSearchTest.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import javax.jcr.Node;
+import javax.jcr.NodeIterator;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.query.Query;
+import javax.jcr.query.QueryManager;
+
+/**
+ * Run a simple query of the form "//*[@testcount=...]".
+ */
+public class SimpleSearchTest extends AbstractTest {
+
+ private static final int NODE_COUNT = 100;
+
+ private Session session;
+
+ private Node root;
+
+ protected Query createQuery(QueryManager manager, int i)
+ throws RepositoryException {
+ @SuppressWarnings("deprecation")
+ String xpath = Query.XPATH;
+ return manager.createQuery("//*[@testcount=" + i + "]", xpath);
+ }
+
+ @Override
+ public void beforeSuite() throws RepositoryException {
+ session = getRepository().login(getCredentials());
+
+ root = session.getRootNode().addNode("testroot", "nt:unstructured");
+ for (int i = 0; i < NODE_COUNT; i++) {
+ Node node = root.addNode("node" + i, "nt:unstructured");
+ for (int j = 0; j < NODE_COUNT; j++) {
+ Node child = node.addNode("node" + j, "nt:unstructured");
+ child.setProperty("testcount", j);
+ }
+ session.save();
+ }
+
+ IndexManager.createPropertyIndex(session, "testcount");
+
+ }
+
+ @Override
+ public void runTest() throws Exception {
+ QueryManager manager = session.getWorkspace().getQueryManager();
+ for (int i = 0; i < NODE_COUNT; i++) {
+ Query query = createQuery(manager, i);
+ NodeIterator iterator = query.execute().getNodes();
+ while (iterator.hasNext()) {
+ Node node = iterator.nextNode();
+ if (node.getProperty("testcount").getLong() != i) {
+ throw new Exception("Invalid test result: " + node.getPath());
+ }
+ }
+ }
+ }
+
+ @Override
+ public void afterSuite() throws RepositoryException {
+ for (int i = 0; i < NODE_COUNT; i++) {
+ root.getNode("node" + i).remove();
+ session.save();
+ }
+
+ root.remove();
+ session.save();
+ session.logout();
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SmallFileReadTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SmallFileReadTest.java
new file mode 100644
index 00000000000..947cfd05fcf
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SmallFileReadTest.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import java.io.InputStream;
+import java.util.Calendar;
+
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.output.NullOutputStream;
+
+public class SmallFileReadTest extends AbstractTest {
+
+ private static final int FILE_COUNT = 1000;
+
+ private static final int FILE_SIZE = 10;
+
+ private Session session;
+
+ private Node root;
+
+ @Override
+ public void beforeSuite() throws RepositoryException {
+ session = getRepository().login(getCredentials());
+
+ root = session.getRootNode().addNode(
+ "SmallFileReadTest", "nt:folder");
+ for (int i = 0; i < FILE_COUNT; i++) {
+ Node file = root.addNode("file" + i, "nt:file");
+ Node content = file.addNode("jcr:content", "nt:resource");
+ content.setProperty("jcr:mimeType", "application/octet-stream");
+ content.setProperty("jcr:lastModified", Calendar.getInstance());
+ content.setProperty(
+ "jcr:data", new TestInputStream(FILE_SIZE * 1024));
+ }
+ session.save();
+ }
+
+ @Override
+ public void runTest() throws Exception {
+ for (int i = 0; i < FILE_COUNT; i++) {
+ Node file = root.getNode("file" + i);
+ Node content = file.getNode("jcr:content");
+ InputStream stream = content.getProperty("jcr:data").getStream();
+ try {
+ IOUtils.copy(stream, new NullOutputStream());
+ } finally {
+ stream.close();
+ }
+ }
+ }
+
+ @Override
+ public void afterSuite() throws RepositoryException {
+ root.remove();
+ session.save();
+ session.logout();
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SmallFileWriteTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SmallFileWriteTest.java
new file mode 100644
index 00000000000..73d1770bba0
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/SmallFileWriteTest.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import java.util.Calendar;
+
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+public class SmallFileWriteTest extends AbstractTest {
+
+ private static final int FILE_COUNT = 100;
+
+ private static final int FILE_SIZE = 10;
+
+ private Session session;
+
+ private Node root;
+
+ @Override
+ public void beforeSuite() throws RepositoryException {
+ session = loginWriter();
+ }
+
+ @Override
+ public void beforeTest() throws RepositoryException {
+ root = session.getRootNode().addNode("SmallFileWriteTest", "nt:folder");
+ session.save();
+ }
+
+ @Override
+ public void runTest() throws Exception {
+ for (int i = 0; i < FILE_COUNT; i++) {
+ Node file = root.addNode("file" + i, "nt:file");
+ Node content = file.addNode("jcr:content", "nt:resource");
+ content.setProperty("jcr:mimeType", "application/octet-stream");
+ content.setProperty("jcr:lastModified", Calendar.getInstance());
+ content.setProperty(
+ "jcr:data", new TestInputStream(FILE_SIZE * 1024));
+ }
+ session.save();
+ }
+
+ @Override
+ public void afterTest() throws RepositoryException {
+ root.remove();
+ session.save();
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/TestInputStream.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/TestInputStream.java
new file mode 100644
index 00000000000..18957c0b08b
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/TestInputStream.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import java.io.InputStream;
+import java.util.Random;
+
+/**
+ * An input stream that returns a given number of dummy data. The returned
+ * data is designed to be non-compressible to prevent possible compression
+ * mechanisms from affecting performance measurements.
+ */
class TestInputStream extends InputStream {

    /** Total number of bytes this stream produces before reporting EOF. */
    private final int n;

    /** Number of bytes produced so far. */
    private int i;

    /**
     * Source of the random stream of bytes. No fixed seed is used to
     * prevent a solution like the Jackrabbit data store from using just
     * a single storage location for multiple streams.
     */
    private final Random random = new Random();

    /**
     * @param length number of bytes the stream will produce
     */
    public TestInputStream(int length) {
        n = length;
        i = 0;
    }

    @Override
    public int read() {
        if (i < n) {
            i++;
            byte[] b = new byte[1];
            random.nextBytes(b);
            // Mask to the 0..255 range required by the InputStream contract;
            // returning the raw (possibly negative) byte would make roughly
            // half of all values invalid and let -1 masquerade as EOF.
            return b[0] & 0xFF;
        } else {
            return -1;
        }
    }

    @Override
    public int read(byte[] b, int off, int len) {
        if (len == 0) {
            // Per the InputStream contract a zero-length read returns 0,
            // even at end of stream.
            return 0;
        }
        if (i < n) {
            byte[] data = new byte[Math.min(len, n - i)];
            random.nextBytes(data);
            System.arraycopy(data, 0, b, off, data.length);
            i += data.length;
            return data.length;
        } else {
            return -1;
        }
    }

}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/TransientManyChildNodesTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/TransientManyChildNodesTest.java
new file mode 100644
index 00000000000..ae327efb2a6
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/TransientManyChildNodesTest.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+/**
+ * Test for measuring the performance of {@value #ITERATIONS} iterations of
+ * transiently adding a child node to, and removing it from, a node that already has
+ * {@value #CHILD_COUNT} existing child nodes.
+ */
+public class TransientManyChildNodesTest extends AbstractTest {
+
+ private static final int CHILD_COUNT = 10 * 1000;
+
+ private static final int ITERATIONS = 10;
+
+ private Session session;
+
+ private Node node;
+
+ @Override
+ public void beforeSuite() throws RepositoryException {
+ session = getRepository().login(getCredentials());
+ node = session.getRootNode().addNode("testnode", "nt:unstructured");
+ for (int i = 0; i < CHILD_COUNT; i++) {
+ node.addNode("node" + i, "nt:unstructured");
+ }
+ }
+
+ @Override
+ public void beforeTest() throws RepositoryException {
+ }
+
+ @Override
+ public void runTest() throws Exception {
+ for (int i = 0; i < ITERATIONS; i++) {
+ node.addNode("onemore", "nt:unstructured").remove();
+ }
+ }
+
+ @Override
+ public void afterTest() throws RepositoryException {
+ }
+
+ @Override
+ public void afterSuite() throws RepositoryException {
+ session.getRootNode().getNode("testnode").remove();
+ session.save();
+ session.logout();
+ }
+
+}
diff --git a/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/UpdateManyChildNodesTest.java b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/UpdateManyChildNodesTest.java
new file mode 100644
index 00000000000..a306c52e3e4
--- /dev/null
+++ b/oak-bench/base/src/main/java/org/apache/jackrabbit/oak/performance/UpdateManyChildNodesTest.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+/**
+ * Test for measuring the performance of adding one extra child node to
+ * a node with {@value #CHILD_COUNT} existing child nodes.
+ */
+public class UpdateManyChildNodesTest extends AbstractTest {
+
+ private static final int CHILD_COUNT = 10 * 1000;
+
+ private Session session;
+
+ private Node node;
+
+ @Override
+ public void beforeSuite() throws RepositoryException {
+ session = getRepository().login(getCredentials());
+ node = session.getRootNode().addNode("testnode", "nt:unstructured");
+ for (int i = 0; i < CHILD_COUNT; i++) {
+ node.addNode("node" + i, "nt:unstructured");
+ }
+ }
+
+ @Override
+ public void beforeTest() throws RepositoryException {
+ }
+
+ @Override
+ public void runTest() throws Exception {
+ node.addNode("onemore", "nt:unstructured");
+ session.save();
+ }
+
+ @Override
+ public void afterTest() throws RepositoryException {
+ node.getNode("onemore").remove();
+ session.save();
+ }
+
+ @Override
+ public void afterSuite() throws RepositoryException {
+ session.getRootNode().getNode("testnode").remove();
+ session.save();
+ session.logout();
+ }
+
+}
diff --git a/oak-bench/latest/pom.xml b/oak-bench/latest/pom.xml
new file mode 100644
index 00000000000..75fb19d4490
--- /dev/null
+++ b/oak-bench/latest/pom.xml
@@ -0,0 +1,58 @@
+
+
+
+
+
+ 4.0.0
+
+
+ org.apache.jackrabbit
+ oak-bench-parent
+ 0.6-SNAPSHOT
+ ../parent/pom.xml
+
+
+ oak-bench-latest
+ Oak Performance Test
+
+
+ true
+
+
+
+
+ org.apache.jackrabbit
+ oak-bench-base
+ ${project.version}
+ test
+
+
+ javax.jcr
+ jcr
+ 2.0
+ test
+
+
+ org.apache.jackrabbit
+ oak-jcr
+ ${project.version}
+ test
+
+
+
+
diff --git a/oak-bench/latest/src/test/java/org/apache/jackrabbit/oak/performance/PerformanceBenchmark.java b/oak-bench/latest/src/test/java/org/apache/jackrabbit/oak/performance/PerformanceBenchmark.java
new file mode 100644
index 00000000000..383f8eb2e2f
--- /dev/null
+++ b/oak-bench/latest/src/test/java/org/apache/jackrabbit/oak/performance/PerformanceBenchmark.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.performance;
+
+import org.junit.Test;
+
+public class PerformanceBenchmark extends AbstractPerformanceTest {
+
+ @Test
+ public void testPerformance() throws Exception {
+ testPerformance("0.3", "default");
+ // testPerformance("0.3", "other_mk");
+ }
+}
diff --git a/oak-bench/parent/pom.xml b/oak-bench/parent/pom.xml
new file mode 100644
index 00000000000..9d1c2a0cf29
--- /dev/null
+++ b/oak-bench/parent/pom.xml
@@ -0,0 +1,119 @@
+
+
+
+
+
+ 4.0.0
+
+
+
+
+
+
+ org.apache.jackrabbit
+ oak-parent
+ 0.6-SNAPSHOT
+ ../../oak-parent/pom.xml
+
+
+ oak-bench-parent
+ Oak Performance Test Parent
+ pom
+
+
+ \d\.\d
+ .*
+ 0
+
+
+
+
+
+
+ maven-surefire-plugin
+
+ -Xms256m -Xmx512m
+ false
+
+
+ repo
+ ${repo}
+
+
+ only
+ ${only}
+
+
+ scale
+ ${scale}
+
+
+
+
+
+
+
+
+
+
+ profiler
+
+
+ agentlib
+
+
+
+
+
+
+ maven-surefire-plugin
+
+ -Xmx512m -XX:MaxPermSize=512m -agentlib:${agentlib}
+
+
+
+
+
+
+
+ benchmark
+
+
+
+ maven-failsafe-plugin
+ 2.12
+
+
+
+ integration-test
+ verify
+
+
+
+ **/*Benchmark.java
+
+
+
+
+
+
+
+
+
+
+
diff --git a/oak-bench/plot.sh b/oak-bench/plot.sh
new file mode 100644
index 00000000000..d69187be503
--- /dev/null
+++ b/oak-bench/plot.sh
@@ -0,0 +1,61 @@
+#!/bin/sh
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This is an example Gnuplot script for plotting the performance results
+# produced by the Jackrabbit performance test suite. Before you run this
+# script you need to preprocess the individual performance reports.
+
+cat <target/report.html
+
+
+ Jackrabbit performance
+
+
+
Jackrabbit performance
+
+HTML
+
+for dat in */target/*.txt; do
+ cat "$dat" >>target/`basename "$dat"`
+done
+
+for dat in target/*.txt; do
+ name=`basename "$dat" .txt`
+ rows=`grep -v "#" "$dat" | wc -l`
+ gnuplot <>target/report.html
+
+HTML
+done
+
+cat <>target/report.html
+
+
+
+HTML
+
+echo file://`pwd`/target/report.html
+
diff --git a/oak-bench/pom.xml b/oak-bench/pom.xml
index d40d53f39d2..2d4b5e0fe4b 100644
--- a/oak-bench/pom.xml
+++ b/oak-bench/pom.xml
@@ -17,24 +17,32 @@
limitations under the License.
-->
-
+4.0.0
+
+
+
+
org.apache.jackrabbit
- oak-parent
- 0.1-SNAPSHOT
- ../oak-parent/pom.xml
+ oak-bench-parent
+ 0.6-SNAPSHOT
+ parent/pom.xmloak-bench
- Oak Benchmark
+ Oak Performance Benchmark
+ pomtrue
+
+ parent
+ base
+ latest
+
+
diff --git a/oak-commons/pom.xml b/oak-commons/pom.xml
new file mode 100644
index 00000000000..b371616edf2
--- /dev/null
+++ b/oak-commons/pom.xml
@@ -0,0 +1,69 @@
+
+
+
+
+
+ 4.0.0
+
+
+ org.apache.jackrabbit
+ oak-parent
+ 0.6-SNAPSHOT
+ ../oak-parent/pom.xml
+
+
+ oak-commons
+ Oak Commons
+ bundle
+
+
+
+
+ org.apache.felix
+ maven-bundle-plugin
+
+
+
+
+
+
+
+ org.slf4j
+ slf4j-api
+ 1.6.4
+
+
+ com.google.code.findbugs
+ jsr305
+ 2.0.0
+
+
+
+
+ junit
+ junit
+ test
+
+
+ ch.qos.logback
+ logback-classic
+ 1.0.1
+ test
+
+
+
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/util/PathUtils.java b/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/PathUtils.java
similarity index 73%
rename from oak-core/src/main/java/org/apache/jackrabbit/mk/util/PathUtils.java
rename to oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/PathUtils.java
index cc5de50ba17..083deaea0e5 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/util/PathUtils.java
+++ b/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/PathUtils.java
@@ -14,14 +14,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.jackrabbit.mk.util;
+package org.apache.jackrabbit.oak.commons;
-import java.util.ArrayList;
import java.util.Iterator;
import java.util.NoSuchElementException;
+import javax.annotation.Nonnull;
+
/**
- * Utility methods to parse a JCR path.
+ * Utility methods to parse a path.
*
* Each method validates the input, except if the system property
* {packageName}.SKIP_VALIDATION is set, in which case only minimal validation
@@ -30,15 +31,9 @@
*/
public class PathUtils {
- /**
- * Controls whether paths passed into methods of this class are validated or
- * not. By default, paths are validated for each method call, which
- * potentially slows down processing. To disable validation, set the system
- * property org.apache.jackrabbit.mk.util.PathUtils.SKIP_VALIDATION.
- */
- private static final boolean SKIP_VALIDATION = Boolean.getBoolean(PathUtils.class.getName() + ".SKIP_VALIDATION");
-
- private static final String[] EMPTY_ARRAY = new String[0];
+ private PathUtils() {
+ // utility class
+ }
/**
* Whether the path is the root path ("/").
@@ -47,7 +42,7 @@ public class PathUtils {
* @return whether this is the root
*/
public static boolean denotesRoot(String path) {
- assertValid(path);
+ assert isValid(path);
return denotesRootPath(path);
}
@@ -63,13 +58,13 @@ private static boolean denotesRootPath(String path) {
* @return true if it starts with a slash
*/
public static boolean isAbsolute(String path) {
- assertValid(path);
+ assert isValid(path);
return isAbsolutePath(path);
}
private static boolean isAbsolutePath(String path) {
- return path.length() > 0 && path.charAt(0) == '/';
+ return !path.isEmpty() && path.charAt(0) == '/';
}
/**
@@ -79,6 +74,7 @@ private static boolean isAbsolutePath(String path) {
* @param path the path
* @return the parent path
*/
+ @Nonnull
public static String getParentPath(String path) {
return getAncestorPath(path, 1);
}
@@ -92,10 +88,11 @@ public static String getParentPath(String path) {
* @param path the path
* @return the ancestor path
*/
+ @Nonnull
public static String getAncestorPath(String path, int nth) {
- assertValid(path);
+ assert isValid(path);
- if (path.length() == 0 || denotesRootPath(path)
+ if (path.isEmpty() || denotesRootPath(path)
|| nth <= 0) {
return path;
}
@@ -123,10 +120,11 @@ public static String getAncestorPath(String path, int nth) {
* @param path the complete path
* @return the last element
*/
+ @Nonnull
public static String getName(String path) {
- assertValid(path);
+ assert isValid(path);
- if (path.length() == 0 || denotesRootPath(path)) {
+ if (path.isEmpty() || denotesRootPath(path)) {
return "";
}
int end = path.length() - 1;
@@ -145,7 +143,7 @@ public static String getName(String path) {
* @return the number of elements
*/
public static int getDepth(String path) {
- assertValid(path);
+ assert isValid(path);
int count = 1, i = 0;
if (isAbsolutePath(path)) {
@@ -164,93 +162,56 @@ public static int getDepth(String path) {
}
/**
- * Split a path into elements. The root path ("/") and the empty path ("")
- * is zero elements.
- *
- * @param path the path
- * @return the path elements
- */
- public static String[] split(String path) {
- assertValid(path);
-
- if (path.length() == 0) {
- return EMPTY_ARRAY;
- } else if (isAbsolutePath(path)) {
- if (path.length() == 1) {
- return EMPTY_ARRAY;
- }
- path = path.substring(1);
- }
- ArrayList list = new ArrayList();
- while (true) {
- int index = path.indexOf('/');
- if (index < 0) {
- list.add(path);
- break;
- }
- String s = path.substring(0, index);
- list.add(s);
- path = path.substring(index + 1);
- }
- String[] array = new String[list.size()];
- list.toArray(array);
- return array;
- }
-
- /**
- * Split a path into elements. The root path ("/") and the empty path ("")
- * is zero elements.
+ * Returns an {@code Iterable} for the path elements. The root path ("/") and the
+ * empty path ("") have zero elements.
*
* @param path the path
* @return an Iterable for the path elements
*/
+ @Nonnull
public static Iterable elements(final String path) {
- assertValid(path);
+ assert isValid(path);
final Iterator it = new Iterator() {
int pos = PathUtils.isAbsolute(path) ? 1 : 0;
String next;
+ @Override
public boolean hasNext() {
if (next == null) {
if (pos >= path.length()) {
return false;
}
- else {
- int i = path.indexOf('/', pos);
- if (i < 0) {
- next = path.substring(pos);
- pos = path.length();
- }
- else {
- next = path.substring(pos, i);
- pos = i + 1;
- }
- return true;
+ int i = path.indexOf('/', pos);
+ if (i < 0) {
+ next = path.substring(pos);
+ pos = path.length();
+ } else {
+ next = path.substring(pos, i);
+ pos = i + 1;
}
}
- else {
- return true;
- }
+ return true;
}
+ @Override
public String next() {
if (hasNext()) {
String next = this.next;
this.next = null;
return next;
}
- else {
- throw new NoSuchElementException();
- }
+ throw new NoSuchElementException();
}
+ @Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
};
return new Iterable() {
+ @Override
public Iterator iterator() {
return it;
}
@@ -264,20 +225,20 @@ public Iterator iterator() {
* @param relativePaths the relative path elements to add
* @return the concatenated path
*/
+ @Nonnull
public static String concat(String parentPath, String... relativePaths) {
- assertValid(parentPath);
+ assert isValid(parentPath);
int parentLen = parentPath.length();
int size = relativePaths.length;
StringBuilder buff = new StringBuilder(parentLen + size * 5);
buff.append(parentPath);
boolean needSlash = parentLen > 0 && !denotesRootPath(parentPath);
- for (int i = 0; i < size; i++) {
- String s = relativePaths[i];
- assertValid(s);
+ for (String s : relativePaths) {
+ assert isValid(s);
if (isAbsolutePath(s)) {
throw new IllegalArgumentException("Cannot append absolute path " + s);
}
- if (s.length() > 0) {
+ if (!s.isEmpty()) {
if (needSlash) {
buff.append('/');
}
@@ -295,13 +256,14 @@ public static String concat(String parentPath, String... relativePaths) {
* @param subPath the subPath path to add
* @return the concatenated path
*/
+ @Nonnull
public static String concat(String parentPath, String subPath) {
- assertValid(parentPath);
- assertValid(subPath);
+ assert isValid(parentPath);
+ assert isValid(subPath);
// special cases
- if (parentPath.length() == 0) {
+ if (parentPath.isEmpty()) {
return subPath;
- } else if (subPath.length() == 0) {
+ } else if (subPath.isEmpty()) {
return parentPath;
} else if (isAbsolutePath(subPath)) {
throw new IllegalArgumentException("Cannot append absolute path " + subPath);
@@ -322,12 +284,16 @@ public static String concat(String parentPath, String subPath) {
* @return true if the path is an offspring of the ancestor
*/
public static boolean isAncestor(String ancestor, String path) {
- assertValid(ancestor);
- assertValid(path);
- if (ancestor.length() == 0 || path.length() == 0) {
+ assert isValid(ancestor);
+ assert isValid(path);
+ if (ancestor.isEmpty() || path.isEmpty()) {
return false;
}
- if (!denotesRoot(ancestor)) {
+ if (denotesRoot(ancestor)) {
+ if (denotesRoot(path)) {
+ return false;
+ }
+ } else {
ancestor += "/";
}
return path.startsWith(ancestor);
@@ -335,16 +301,17 @@ public static boolean isAncestor(String ancestor, String path) {
/**
* Relativize a path wrt. a parent path such that
- * relativize(parentPath, concat(parentPath, path)) == paths
+ * {@code relativize(parentPath, concat(parentPath, path)) == paths}
* holds.
*
* @param parentPath parent pth
* @param path path to relativize
* @return relativized path
*/
+ @Nonnull
public static String relativize(String parentPath, String path) {
- assertValid(parentPath);
- assertValid(path);
+ assert isValid(parentPath);
+ assert isValid(path);
if (parentPath.equals(path)) {
return "";
@@ -369,7 +336,7 @@ public static String relativize(String parentPath, String path) {
* if not found
*/
public static int getNextSlash(String path, int index) {
- assertValid(path);
+ assert isValid(path);
return path.indexOf('/', index);
}
@@ -384,7 +351,7 @@ public static int getNextSlash(String path, int index) {
* @param path the path
*/
public static void validate(String path) {
- if (path.length() == 0 || denotesRootPath(path)) {
+ if (path.isEmpty() || denotesRootPath(path)) {
return;
} else if (path.charAt(path.length() - 1) == '/') {
throw new IllegalArgumentException("Path may not end with '/': " + path);
@@ -401,12 +368,32 @@ public static void validate(String path) {
}
}
- //------------------------------------------< private >---
-
- private static void assertValid(String path) {
- if (!SKIP_VALIDATION) {
- validate(path);
+ /**
+ * Check if the path is valid. A valid path is absolute (starts with a '/')
+ * or relative (doesn't start with '/'), and contains zero or more elements.
+ * A path may not end with '/', except for the root path. Each element must
+ * be at least one character long.
+ *
+ * @param path the path
+ * @return {@code true} iff the path is valid.
+ */
+ public static boolean isValid(String path) {
+ if (path.isEmpty() || denotesRootPath(path)) {
+ return true;
+ } else if (path.charAt(path.length() - 1) == '/') {
+ return false;
+ }
+ char last = 0;
+ for (int index = 0, len = path.length(); index < len; index++) {
+ char c = path.charAt(index);
+ if (c == '/') {
+ if (last == '/') {
+ return false;
+ }
+ }
+ last = c;
}
+ return true;
}
}
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/mk/util/PathTest.java b/oak-commons/src/test/java/org/apache/jackrabbit/oak/commons/PathTest.java
similarity index 65%
rename from oak-core/src/test/java/org/apache/jackrabbit/mk/util/PathTest.java
rename to oak-commons/src/test/java/org/apache/jackrabbit/oak/commons/PathTest.java
index 486d13e87ef..791b3b9f85b 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/mk/util/PathTest.java
+++ b/oak-commons/src/test/java/org/apache/jackrabbit/oak/commons/PathTest.java
@@ -14,27 +14,42 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.jackrabbit.mk.util;
+package org.apache.jackrabbit.oak.commons;
+import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
-import java.util.Iterator;
-
+/**
+ * Test the PathUtils class.
+ */
public class PathTest extends TestCase {
+ static boolean assertsEnabled;
+
+ static {
+ assert assertsEnabled = true;
+ }
public void test() {
try {
PathUtils.getParentPath("invalid/path/");
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
try {
PathUtils.getName("invalid/path/");
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
@@ -42,29 +57,59 @@ public void test() {
test("x", "y");
}
- private void test(String parent, String child) {
+ private static int getElementCount(String path) {
+ int count = 0;
+ for (String p : PathUtils.elements(path)) {
+ assertFalse(PathUtils.isAbsolute(p));
+ count++;
+ }
+ return count;
+ }
+
+ private static String getElement(String path, int index) {
+ int count = 0;
+ for (String p : PathUtils.elements(path)) {
+ if (index == count++) {
+ return p;
+ }
+ }
+ fail();
+ return "";
+ }
+
+ private static void test(String parent, String child) {
// split
- assertEquals(0, PathUtils.split("").length);
- assertEquals(0, PathUtils.split("/").length);
- assertEquals(1, PathUtils.split(parent).length);
- assertEquals(2, PathUtils.split(parent + "/" + child).length);
- assertEquals(1, PathUtils.split("/" + parent).length);
- assertEquals(2, PathUtils.split("/" + parent + "/" + child).length);
- assertEquals(3, PathUtils.split("/" + parent + "/" + child + "/" + child).length);
- assertEquals(parent, PathUtils.split(parent)[0]);
- assertEquals(parent, PathUtils.split(parent + "/" + child)[0]);
- assertEquals(child, PathUtils.split(parent + "/" + child)[1]);
- assertEquals(child, PathUtils.split(parent + "/" + child + "/" + child + "1")[1]);
- assertEquals(child + "1", PathUtils.split(parent + "/" + child + "/" + child + "1")[2]);
+ assertEquals(0, getElementCount(""));
+ assertEquals(0, getElementCount("/"));
+ assertEquals(1, getElementCount(parent));
+ assertEquals(2, getElementCount(parent + "/" + child));
+ assertEquals(1, getElementCount("/" + parent));
+ assertEquals(2, getElementCount("/" + parent + "/" + child));
+ assertEquals(3, getElementCount("/" + parent + "/" + child + "/" + child));
+ assertEquals(parent, getElement(parent, 0));
+ assertEquals(parent, getElement(parent + "/" + child, 0));
+ assertEquals(child, getElement(parent + "/" + child, 1));
+ assertEquals(child, getElement(parent + "/" + child + "/" + child + "1", 1));
+ assertEquals(child + "1", getElement(parent + "/" + child + "/" + child + "1", 2));
// concat
assertEquals(parent + "/" + child, PathUtils.concat(parent, child));
try {
assertEquals(parent + "/" + child, PathUtils.concat(parent + "/", "/" + child));
- fail();
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
} catch (IllegalArgumentException e) {
- // expected
+ if (assertsEnabled) {
+ throw e;
+ }
+ } catch (AssertionError e) {
+ if (!assertsEnabled) {
+ throw e;
+ }
}
try {
assertEquals(parent + "/" + child, PathUtils.concat(parent, "/" + child));
@@ -90,13 +135,21 @@ private void test(String parent, String child) {
}
try {
PathUtils.concat("", "//");
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
try {
PathUtils.concat("/", "/");
- fail();
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
} catch (IllegalArgumentException e) {
// expected
}
@@ -143,6 +196,9 @@ private void test(String parent, String child) {
assertEquals(false, PathUtils.isAbsolute(parent + "/" + child));
// isAncestor
+ assertFalse(PathUtils.isAncestor("/", "/"));
+ assertFalse(PathUtils.isAncestor("/" + parent, "/" + parent));
+ assertFalse(PathUtils.isAncestor(parent, parent));
assertTrue(PathUtils.isAncestor("/", "/" + parent));
assertTrue(PathUtils.isAncestor(parent, parent + "/" + child));
assertFalse(PathUtils.isAncestor("/", parent + "/" + child));
@@ -252,102 +308,145 @@ public void testValidateEverything() {
String invalid = "/test/test//test/test";
try {
PathUtils.denotesRoot(invalid);
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
try {
PathUtils.concat(invalid, "x");
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
try {
PathUtils.concat("/x", invalid);
- fail();
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
} catch (IllegalArgumentException e) {
- // expected
+ if (assertsEnabled) {
+ throw e;
+ }
+ } catch (AssertionError e) {
+ if (!assertsEnabled) {
+ throw e;
+ }
}
try {
PathUtils.concat("/x", "y", invalid);
- fail();
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
} catch (IllegalArgumentException e) {
- // expected
+ if (assertsEnabled) {
+ throw e;
+ }
+ } catch (AssertionError e) {
+ if (!assertsEnabled) {
+ throw e;
+ }
}
try {
PathUtils.concat(invalid, "y", "z");
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
try {
PathUtils.getDepth(invalid);
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
try {
PathUtils.getName(invalid);
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
try {
PathUtils.getNextSlash(invalid, 0);
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
try {
PathUtils.getParentPath(invalid);
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
try {
PathUtils.isAbsolute(invalid);
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
try {
PathUtils.relativize(invalid, invalid);
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
try {
PathUtils.relativize("/test", invalid);
- fail();
- } catch (IllegalArgumentException e) {
- // expected
- }
- try {
- PathUtils.split(invalid);
- fail();
- } catch (IllegalArgumentException e) {
+ if (assertsEnabled) {
+ fail();
+ }
+ } catch (AssertionFailedError e) {
+ throw e;
+ } catch (AssertionError e) {
// expected
}
}
public void testPathElements() {
- String[] paths = new String[]{"", "/", "/a", "a", "/abc/def/ghj", "abc/def/ghj"};
- for (String path : paths) {
- String[] elements = PathUtils.split(path);
- Iterator it = PathUtils.elements(path).iterator();
- for (String element : elements) {
- assertTrue(it.hasNext());
- assertEquals(element, it.next());
- }
- assertFalse(it.hasNext());
- }
-
String[] invalidPaths = new String[]{"//", "/a/", "a/", "/a//", "a//b"};
for (String path: invalidPaths) {
try {
PathUtils.elements(path);
fail();
- } catch (IllegalArgumentException e) {
+ } catch (AssertionError e) {
// expected
}
}
diff --git a/oak-core/README.md b/oak-core/README.md
new file mode 100644
index 00000000000..f94fdc92c3e
--- /dev/null
+++ b/oak-core/README.md
@@ -0,0 +1,120 @@
+Oak Core
+========
+
+Oak API
+-------
+
+The API for accessing core Oak functionality is located in the
+`org.apache.jackrabbit.oak.api` package and consists of the following
+key interfaces:
+
+ * ContentRepository
+ * ContentSession
+ * Root / Tree
+
+The `ContentRepository` interface represents an entire Oak content repository.
+The repository may local or remote, or a cluster of any size. These deployment
+details are all hidden behind this interface.
+
+Starting and stopping `ContentRepository` instances is the responsibility of
+each particular deployment and not covered by these interfaces. Repository
+clients should use a deployment-specific mechanism (JNDI, OSGi service, etc.)
+to acquire references to `ContentRepository` instances.
+
+All content in the repository is accessed through authenticated sessions
+acquired through the `ContentRepository.login()` method. The method takes
+explicit access credentials and other login details and, assuming the
+credentials are valid, returns a `ContentSession` instance that encapsulates
+this information. Session instances are `Closeable` and need to be closed
+to release associated resources once no longer used. The recommended access
+pattern is:
+
+ ContentRepository repository = ...;
+ ContentSession session = repository.login(...);
+ try {
+ ...; // Use the session
+ } finally {
+ session.close();
+ }
+
+All `ContentRepository` and `ContentSession` instances are thread-safe.
+
+The authenticated `ContentSession` gives you properly authorized access to
+the hierarchical content tree inside the repository through instances of the
+`Root` and `Tree` interfaces. The `getCurrentRoot()` method returns a
+snapshot of the current state of the content tree:
+
+ ContentSession session = ...;
+ Root root = session.getCurrentRoot();
+ Tree tree = root.getTree("/");
+
+The returned `Tree` instance belongs to the client and its state is only
+modified in response to method calls made by the client. `Tree` instances
+are *not* thread-safe for write access, so writing clients need to ensure
+that they are not accessed concurrently from multiple threads. `Tree`
+instances *are* however thread-safe for read access, so implementations
+need to ensure that all reading clients see a coherent state.
+
+Content trees are recursive data structures that consist of named properties
+and subtrees that share the same namespace, but are accessed through separate
+methods like outlined below:
+
+ Tree tree = ...;
+ for (PropertyState property : tree.getProperties()) {
+ ...;
+ }
+ for (Tree subtree : tree.getChildren()) {
+ ...;
+ }
+
+The repository content snapshot exposed by a `Tree` instance may become
+invalid over time due to garbage collection of old content, at which point
+an outdated snapshot will start throwing `IllegalStateExceptions` to
+indicate that the snapshot is no longer available. To access more recent
+content, a client should either call `ContentSession.getCurrentRoot()` to
+acquire a fresh new content snapshot or use the `refresh()` method to update
+a given `Root` to the latest state of the content repository:
+
+ Root root = ...;
+ root.refresh();
+
+In addition to reading repository content, the client can also make
+modifications to the content tree. Such content changes remain local to the
+particular `Root` instance (and related subtrees) until explicitly committed.
+For example, the following code creates and commits a new subtree containing
+nothing but a simple property:
+
+ ContentSession session = ...;
+ Root root = session.getCurrentRoot();
+ Tree tree = root.getTree("/");
+ Tree subtree = tree.addSubtree("hello");
+ subtree.setProperty("message", "Hello, World!");
+ root.commit();
+
+Even other `Root` instances acquired from the same `ContentSession` won't
+see such changes until they've been committed and the other trees refreshed.
+This allows a client to track multiple parallel sets of changes with just a
+single authenticated session.
+
+License
+-------
+
+(see the top-level [LICENSE.txt](../LICENSE.txt) for full license details)
+
+Collective work: Copyright 2012 The Apache Software Foundation.
+
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
diff --git a/oak-core/pom.xml b/oak-core/pom.xml
index d509544c163..c4e840fbbb0 100644
--- a/oak-core/pom.xml
+++ b/oak-core/pom.xml
@@ -17,36 +17,137 @@
limitations under the License.
-->
-
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.jackrabbit</groupId>
+    <artifactId>oak-parent</artifactId>
-    <version>0.1-SNAPSHOT</version>
+    <version>0.6-SNAPSHOT</version>
+    <relativePath>../oak-parent/pom.xml</relativePath>
+  </parent>
+
+  <artifactId>oak-core</artifactId>
+  <name>Oak Core</name>
+  <packaging>bundle</packaging>
+
+
+
+
+ org.apache.felix
+ maven-bundle-plugin
+
+
+
+ org.apache.jackrabbit.oak,
+ org.apache.jackrabbit.oak.api,
+ org.apache.jackrabbit.oak.core,
+ org.apache.jackrabbit.oak.kernel,
+ org.apache.jackrabbit.oak.util,
+ org.apache.jackrabbit.oak.namepath,
+ org.apache.jackrabbit.oak.plugins.value,
+ org.apache.jackrabbit.oak.plugins.commit,
+ org.apache.jackrabbit.oak.plugins.identifier,
+ org.apache.jackrabbit.oak.plugins.index,
+ org.apache.jackrabbit.oak.plugins.index.lucene,
+ org.apache.jackrabbit.oak.plugins.index.property,
+ org.apache.jackrabbit.oak.plugins.memory,
+ org.apache.jackrabbit.oak.plugins.name,
+ org.apache.jackrabbit.oak.plugins.nodetype,
+ org.apache.jackrabbit.oak.plugins.observation,
+ org.apache.jackrabbit.oak.spi.query,
+ org.apache.jackrabbit.oak.spi.commit,
+ org.apache.jackrabbit.oak.spi.lifecycle,
+ org.apache.jackrabbit.oak.spi.state,
+ org.apache.jackrabbit.oak.spi.security,
+ org.apache.jackrabbit.oak.spi.security.authentication,
+ org.apache.jackrabbit.oak.spi.security.principal,
+ org.apache.jackrabbit.oak.spi.security.privilege,
+ org.apache.jackrabbit.oak.spi.security.user,
+ org.apache.jackrabbit.oak.spi.security.user.action,
+ org.apache.jackrabbit.oak.spi.security.user.util,
+ org.apache.jackrabbit.oak.security
+
+
+ org.apache.jackrabbit.oak.osgi.Activator
+
+
+
+
+
+ org.apache.felix
+ maven-scr-plugin
+
+
+
+
+
+ org.apache.rat
+ apache-rat-plugin
+
+
+ src/test/resources/org/apache/jackrabbit/oak/util/test.json
+
+
+
+
+
+
-
+
- com.h2database
- h2
- 1.3.158
- true
+ org.osgi
+ org.osgi.core
+ provided
- com.sleepycat
- je
- 4.0.92
- true
+ org.osgi
+ org.osgi.compendium
+ provided
+
+
+ biz.aQute
+ bndlib
+ provided
+
+
+ org.apache.felix
+ org.apache.felix.scr.annotations
+ provided
+
+
+
+ org.apache.jackrabbit
+ oak-mk-api
+ ${project.version}
+
+
+ org.apache.jackrabbit
+ oak-mk
+ ${project.version}
+
+
+
+ org.apache.jackrabbit
+ oak-mk-remote
+ ${project.version}
+
+
+
+ org.apache.jackrabbit
+ oak-commons
+ ${project.version}
+
+
+
+ com.google.guava
+ guava
+ ${guava.version}
+
+
+
    <dependency>
      <groupId>org.mongodb</groupId>
      <artifactId>mongo-java-driver</artifactId>
@@ -54,18 +155,81 @@
true
-
+
- com.googlecode.json-simple
- json-simple
- 1.1
- test
+ javax.jcr
+ jcr
+ 2.0
+
+
+ org.apache.jackrabbit
+ jackrabbit-api
+ ${jackrabbit.version}
+
+ org.apache.jackrabbit
+ jackrabbit-jcr-commons
+ ${jackrabbit.version}
+
+
+
+
+ org.apache.lucene
+ lucene-core
+ 4.0.0-BETA
+ true
+
+
+ org.apache.lucene
+ lucene-analyzers-common
+ 4.0.0-BETA
+ true
+
+
+ org.apache.tika
+ tika-core
+ 1.2
+ true
+
+
+
+
+ org.slf4j
+ slf4j-api
+ 1.6.4
+
+
+
+
+ com.google.code.findbugs
+ jsr305
+ 2.0.0
+ provided
+
+
+
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <scope>test</scope>
    </dependency>
+
+ org.apache.jackrabbit
+ oak-it-mk
+ ${project.version}
+ test
+
+
+ com.h2database
+ h2
+ 1.3.158
+ test
+
+
+ ch.qos.logback
+ logback-classic
+ 1.0.1
+ test
+
-
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/MicroKernelFactory.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/MicroKernelFactory.java
deleted file mode 100644
index 5629032f695..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/MicroKernelFactory.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk;
-
-import java.io.IOException;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.client.Client;
-import org.apache.jackrabbit.mk.fs.FileUtils;
-import org.apache.jackrabbit.mk.simple.SimpleKernelImpl;
-import org.apache.jackrabbit.mk.util.ExceptionFactory;
-import org.apache.jackrabbit.mk.wrapper.IndexWrapper;
-import org.apache.jackrabbit.mk.wrapper.LogWrapper;
-import org.apache.jackrabbit.mk.wrapper.SecurityWrapper;
-import org.apache.jackrabbit.mk.wrapper.VirtualRepositoryWrapper;
-
-/**
- * A factory to create a MicroKernel instance.
- */
-public class MicroKernelFactory {
-
- /**
- * Get an instance. Supported URLs:
- *
- *
fs:target/mk-test (using the directory ./target/mk-test)
- *
fs:target/mk-test;clean (same, but delete the old repository first)
- *
fs:{homeDir} (use the system property homeDir or '.' if not set)
- *
simple: (in-memory implementation)
- *
simple:fs:target/temp (using the directory ./target/temp)
- *
- *
- * @param url the repository URL
- * @return a new instance
- */
- public static MicroKernel getInstance(String url) {
- if (url.startsWith("mem:")) {
- return SimpleKernelImpl.get(url);
- } else if (url.startsWith("simple:")) {
- return SimpleKernelImpl.get(url);
- } else if (url.startsWith("log:")) {
- return LogWrapper.get(url);
- } else if (url.startsWith("sec:")) {
- return SecurityWrapper.get(url);
- } else if (url.startsWith("virtual:")) {
- return VirtualRepositoryWrapper.get(url);
- } else if (url.startsWith("index:")) {
- return IndexWrapper.get(url);
- } else if (url.startsWith("fs:")) {
- boolean clean = false;
- if (url.endsWith(";clean")) {
- url = url.substring(0, url.length() - ";clean".length());
- clean = true;
- }
- String dir = url.substring("fs:".length());
- dir = dir.replaceAll("\\{homeDir\\}", System.getProperty("homeDir", "."));
- if (clean) {
- try {
- FileUtils.deleteRecursive(dir + "/" + ".mk", false);
- } catch (IOException e) {
- throw ExceptionFactory.convert(e);
- }
- }
- return new MicroKernelImpl(dir);
- } else if (url.startsWith("http:")) {
- return Client.createHttpClient(url);
- } else if (url.startsWith("http-bridge:")) {
- MicroKernel mk = MicroKernelFactory.getInstance(url.substring("http-bridge:".length()));
- return Client.createHttpBridge(mk);
- } else {
- throw new IllegalArgumentException(url);
- }
- }
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/MicroKernelImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/MicroKernelImpl.java
deleted file mode 100644
index b079e0ea4bf..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/MicroKernelImpl.java
+++ /dev/null
@@ -1,657 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.api.MicroKernelException;
-import org.apache.jackrabbit.mk.json.JsopBuilder;
-import org.apache.jackrabbit.mk.json.JsopTokenizer;
-import org.apache.jackrabbit.mk.model.Commit;
-import org.apache.jackrabbit.mk.model.CommitBuilder;
-import org.apache.jackrabbit.mk.model.Id;
-import org.apache.jackrabbit.mk.model.StoredCommit;
-import org.apache.jackrabbit.mk.model.TraversingNodeDiffHandler;
-import org.apache.jackrabbit.mk.store.NotFoundException;
-import org.apache.jackrabbit.mk.store.RevisionProvider;
-import org.apache.jackrabbit.mk.util.CommitGate;
-import org.apache.jackrabbit.mk.util.PathUtils;
-import org.apache.jackrabbit.mk.util.SimpleLRUCache;
-import org.apache.jackrabbit.oak.model.NodeState;
-import org.apache.jackrabbit.oak.model.PropertyState;
-
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-/**
- *
- */
-public class MicroKernelImpl implements MicroKernel {
-
- protected Repository rep;
- private final CommitGate gate = new CommitGate();
-
- /**
- * Key: revision id, Value: diff string
- */
- private final Map diffCache = Collections.synchronizedMap(SimpleLRUCache.newInstance(100));
-
- public MicroKernelImpl(String homeDir) throws MicroKernelException {
- init(homeDir);
- }
-
- /**
- * Alternate constructor, used for testing.
- *
- * @param rep repository, already initialized
- */
- public MicroKernelImpl(Repository rep) {
- this.rep = rep;
- }
-
- protected void init(String homeDir) throws MicroKernelException {
- try {
- rep = new Repository(homeDir);
- rep.init();
- } catch (Exception e) {
- throw new MicroKernelException(e);
- }
- }
-
- public void dispose() {
- gate.commit("end");
- if (rep != null) {
- try {
- rep.shutDown();
- } catch (Exception ignore) {
- // fail silently
- }
- rep = null;
- }
- diffCache.clear();
- }
-
- public String getHeadRevision() throws MicroKernelException {
- if (rep == null) {
- throw new IllegalStateException("this instance has already been disposed");
- }
- return getHeadRevisionId().toString();
- }
-
- /**
- * Same as getHeadRevisionId, with typed Id return value instead of string.
- *
- * @see #getHeadRevision()
- */
- private Id getHeadRevisionId() throws MicroKernelException {
- try {
- return rep.getHeadRevision();
- } catch (Exception e) {
- throw new MicroKernelException(e);
- }
- }
-
- public String getRevisions(long since, int maxEntries) throws MicroKernelException {
- if (rep == null) {
- throw new IllegalStateException("this instance has already been disposed");
- }
- maxEntries = maxEntries < 0 ? Integer.MAX_VALUE : maxEntries;
- List history = new ArrayList();
- try {
- StoredCommit commit = rep.getHeadCommit();
- while (commit != null
- && history.size() < maxEntries
- && commit.getCommitTS() >= since) {
- history.add(commit);
-
- Id commitId = commit.getParentId();
- if (commitId == null) {
- break;
- }
- commit = rep.getCommit(commitId);
- }
- } catch (Exception e) {
- throw new MicroKernelException(e);
- }
-
- JsopBuilder buff = new JsopBuilder().array();
- for (int i = history.size() - 1; i >= 0; i--) {
- StoredCommit commit = history.get(i);
- buff.object().
- key("id").value(commit.getId().toString()).
- key("ts").value(commit.getCommitTS()).
- endObject();
- }
- return buff.endArray().toString();
- }
-
- public String waitForCommit(String oldHeadRevision, long maxWaitMillis) throws MicroKernelException, InterruptedException {
- return gate.waitForCommit(oldHeadRevision, maxWaitMillis);
- }
-
- public String getJournal(String fromRevision, String toRevision, String filter) throws MicroKernelException {
- if (rep == null) {
- throw new IllegalStateException("this instance has already been disposed");
- }
-
- Id fromRevisionId = Id.fromString(fromRevision);
- Id toRevisionId = toRevision == null ? getHeadRevisionId() : Id.fromString(toRevision);
-
- List commits = new ArrayList();
- try {
- StoredCommit toCommit = rep.getCommit(toRevisionId);
-
- Commit fromCommit;
- if (toRevisionId.equals(fromRevisionId)) {
- fromCommit = toCommit;
- } else {
- fromCommit = rep.getCommit(fromRevisionId);
- if (fromCommit.getCommitTS() > toCommit.getCommitTS()) {
- // negative range, return empty array
- return "[]";
- }
- }
-
- // collect commits, starting with toRevisionId
- // and traversing parent commit links until we've reached
- // fromRevisionId
- StoredCommit commit = toCommit;
- while (commit != null) {
- commits.add(commit);
- if (commit.getId().equals(fromRevisionId)) {
- break;
- }
- Id commitId = commit.getParentId();
- if (commitId == null) {
- break;
- }
- commit = rep.getCommit(commitId);
- }
- } catch (Exception e) {
- throw new MicroKernelException(e);
- }
-
- JsopBuilder commitBuff = new JsopBuilder().array();
- // iterate over commits in chronological order,
- // starting with oldest commit
- for (int i = commits.size() - 1; i >= 0; i--) {
- StoredCommit commit = commits.get(i);
- if (commit.getParentId() == null) {
- continue;
- }
- commitBuff.object().
- key("id").value(commit.getId().toString()).
- key("ts").value(commit.getCommitTS()).
- key("msg").value(commit.getMsg());
- String diff = diffCache.get(commit.getId());
- if (diff == null) {
- diff = diff(commit.getParentId(), commit.getId(), filter);
- diffCache.put(commit.getId(), diff);
- }
- commitBuff.key("changes").value(diff).endObject();
- }
- return commitBuff.endArray().toString();
- }
-
- public String diff(String fromRevision, String toRevision, String filter) throws MicroKernelException {
- Id toRevisionId = toRevision == null ? getHeadRevisionId() : Id.fromString(toRevision);
-
- return diff(Id.fromString(fromRevision), toRevisionId, filter);
- }
-
- /**
- * Same as diff, with typed Id arguments instead of strings.
- *
- * @see #diff(String, String, String)
- */
- private String diff(Id fromRevisionId, Id toRevisionId, String filter) throws MicroKernelException {
- // TODO extract and evaluate filter criteria (such as e.g. 'path') specified in 'filter' parameter
- String path = "/";
-
- try {
- final JsopBuilder buff = new JsopBuilder();
- final RevisionProvider rp = rep.getRevisionStore();
- // maps (key: id of target node, value: path/to/target)
- // for tracking added/removed nodes; this allows us
- // to detect 'move' operations
- final HashMap addedNodes = new HashMap();
- final HashMap removedNodes = new HashMap();
- NodeState node1, node2;
- try {
- node1 = rep.getNodeState(fromRevisionId, path);
- } catch (NotFoundException e) {
- node1 = null;
- }
- try {
- node2 = rep.getNodeState(toRevisionId, path);
- } catch (NotFoundException e) {
- node2 = null;
- }
-
- if (node1 == null) {
- if (node2 != null) {
- buff.tag('+').key(path).object();
- toJson(buff, node2, Integer.MAX_VALUE, 0, -1, false);
- return buff.endObject().newline().toString();
- } else {
- throw new MicroKernelException("path doesn't exist in the specified revisions: " + path);
- }
- } else if (node2 == null) {
- buff.tag('-');
- buff.value(path);
- return buff.newline().toString();
- }
-
- TraversingNodeDiffHandler diffHandler = new TraversingNodeDiffHandler() {
- @Override
- public void propertyAdded(PropertyState after) {
- buff.tag('+').
- key(PathUtils.concat(getCurrentPath(), after.getName())).
- encodedValue(after.getEncodedValue()).
- newline();
- }
-
- @Override
- public void propertyChanged(PropertyState before, PropertyState after) {
- buff.tag('^').
- key(PathUtils.concat(getCurrentPath(), after.getName())).
- encodedValue(after.getEncodedValue()).
- newline();
- }
-
- @Override
- public void propertyDeleted(PropertyState before) {
- // since property and node deletions can't be distinguished
- // using the "- " notation we're representing
- // property deletions as "^ :null"
- buff.tag('^').
- key(PathUtils.concat(getCurrentPath(), before.getName())).
- value(null).
- newline();
- }
-
- @Override
- public void childNodeAdded(String name, NodeState after) {
- addedNodes.put(rp.getId(after), PathUtils.concat(getCurrentPath(), name));
- buff.tag('+').
- key(PathUtils.concat(getCurrentPath(), name)).object();
- toJson(buff, after, Integer.MAX_VALUE, 0, -1, false);
- buff.endObject().newline();
- }
-
- @Override
- public void childNodeDeleted(String name, NodeState before) {
- removedNodes.put(rp.getId(before), PathUtils.concat(getCurrentPath(), name));
- buff.tag('-');
- buff.value(PathUtils.concat(getCurrentPath(), name));
- buff.newline();
- }
- };
- diffHandler.start(node1, node2, path);
-
- // check if this commit includes 'move' operations
- // by building intersection of added and removed nodes
- addedNodes.keySet().retainAll(removedNodes.keySet());
- if (!addedNodes.isEmpty()) {
- // this commit includes 'move' operations
- removedNodes.keySet().retainAll(addedNodes.keySet());
- // addedNodes & removedNodes now only contain information about moved nodes
-
- // re-build the diff in a 2nd pass, this time representing moves correctly
- buff.resetWriter();
-
- // TODO refactor code, avoid duplication
-
- diffHandler = new TraversingNodeDiffHandler() {
- @Override
- public void propertyAdded(PropertyState after) {
- buff.tag('+').
- key(PathUtils.concat(getCurrentPath(), after.getName())).
- encodedValue(after.getEncodedValue()).
- newline();
- }
-
- @Override
- public void propertyChanged(PropertyState before, PropertyState after) {
- buff.tag('^').
- key(PathUtils.concat(getCurrentPath(), after.getName())).
- encodedValue(after.getEncodedValue()).
- newline();
- }
-
- @Override
- public void propertyDeleted(PropertyState before) {
- // since property and node deletions can't be distinguished
- // using the "- " notation we're representing
- // property deletions as "^ :null"
- buff.tag('^').
- key(PathUtils.concat(getCurrentPath(), before.getName())).
- value(null).
- newline();
- }
-
- @Override
- public void childNodeAdded(String name, NodeState after) {
- if (addedNodes.containsKey(rp.getId(after))) {
- // moved node, will be processed separately
- return;
- }
- buff.tag('+').
- key(PathUtils.concat(getCurrentPath(), name)).object();
- toJson(buff, after, Integer.MAX_VALUE, 0, -1, false);
- buff.endObject().newline();
- }
-
- @Override
- public void childNodeDeleted(String name, NodeState before) {
- if (addedNodes.containsKey(rp.getId(before))) {
- // moved node, will be processed separately
- return;
- }
- buff.tag('-');
- buff.value(PathUtils.concat(getCurrentPath(), name));
- buff.newline();
- }
-
- };
- diffHandler.start(node1, node2, path);
-
- // finally process moved nodes
- for (Map.Entry entry : addedNodes.entrySet()) {
- buff.tag('>').
- // path/to/deleted/node
- key(removedNodes.get(entry.getKey())).
- // path/to/added/node
- value(entry.getValue()).
- newline();
- }
- }
- return buff.toString();
-
- } catch (Exception e) {
- throw new MicroKernelException(e);
- }
- }
-
- public boolean nodeExists(String path, String revision) throws MicroKernelException {
- if (rep == null) {
- throw new IllegalStateException("this instance has already been disposed");
- }
-
- Id revisionId = revision == null ? getHeadRevisionId() : Id.fromString(revision);
- return rep.nodeExists(revisionId, path);
- }
-
- public long getChildNodeCount(String path, String revision) throws MicroKernelException {
- if (rep == null) {
- throw new IllegalStateException("this instance has already been disposed");
- }
-
- Id revisionId = revision == null ? getHeadRevisionId() : Id.fromString(revision);
-
- try {
- return rep.getNodeState(revisionId, path).getChildNodeCount();
- } catch (Exception e) {
- throw new MicroKernelException(e);
- }
- }
-
- public String getNodes(String path, String revision) throws MicroKernelException {
- return getNodes(path, revision, 1, 0, -1, null);
- }
-
- public String getNodes(String path, String revision, int depth, long offset, int count, String filter) throws MicroKernelException {
- if (rep == null) {
- throw new IllegalStateException("this instance has already been disposed");
- }
-
- Id revisionId = revision == null ? getHeadRevisionId() : Id.fromString(revision);
-
- // TODO extract and evaluate filter criteria (such as e.g. ':hash') specified in 'filter' parameter
-
- try {
- JsopBuilder buf = new JsopBuilder().object();
- toJson(buf, rep.getNodeState(revisionId, path), depth, (int) offset, count, true);
- return buf.endObject().toString();
- } catch (Exception e) {
- throw new MicroKernelException(e);
- }
- }
-
- public String commit(String path, String jsonDiff, String revision, String message) throws MicroKernelException {
- if (rep == null) {
- throw new IllegalStateException("this instance has already been disposed");
- }
- if (path.length() > 0 && !PathUtils.isAbsolute(path)) {
- throw new IllegalArgumentException("absolute path expected: " + path);
- }
-
- Id revisionId = revision == null ? getHeadRevisionId() : Id.fromString(revision);
-
- try {
- JsopTokenizer t = new JsopTokenizer(jsonDiff);
- CommitBuilder cb = rep.getCommitBuilder(revisionId, message);
- while (true) {
- int r = t.read();
- if (r == JsopTokenizer.END) {
- break;
- }
- int pos; // used for error reporting
- switch (r) {
- case '+': {
- pos = t.getLastPos();
- String subPath = t.readString();
- t.read(':');
- if (t.matches('{')) {
- String nodePath = PathUtils.concat(path, subPath);
- if (!PathUtils.isAbsolute(nodePath)) {
- throw new Exception("absolute path expected: " + nodePath + ", pos: " + pos);
- }
- String parentPath = PathUtils.getParentPath(nodePath);
- String nodeName = PathUtils.getName(nodePath);
- // build the list of added nodes recursively
- LinkedList list = new LinkedList();
- addNode(list, parentPath, nodeName, t);
- for (AddNodeOperation op : list) {
- cb.addNode(op.path, op.name, op.props);
- }
- } else {
- String value;
- if (t.matches(JsopTokenizer.NULL)) {
- value = null;
- } else {
- value = t.readRawValue().trim();
- }
- String targetPath = PathUtils.concat(path, subPath);
- if (!PathUtils.isAbsolute(targetPath)) {
- throw new Exception("absolute path expected: " + targetPath + ", pos: " + pos);
- }
- String parentPath = PathUtils.getParentPath(targetPath);
- String propName = PathUtils.getName(targetPath);
- cb.setProperty(parentPath, propName, value);
- }
- break;
- }
- case '-': {
- pos = t.getLastPos();
- String subPath = t.readString();
- String targetPath = PathUtils.concat(path, subPath);
- if (!PathUtils.isAbsolute(targetPath)) {
- throw new Exception("absolute path expected: " + targetPath + ", pos: " + pos);
- }
- cb.removeNode(targetPath);
- break;
- }
- case '^': {
- pos = t.getLastPos();
- String subPath = t.readString();
- t.read(':');
- String value;
- if (t.matches(JsopTokenizer.NULL)) {
- value = null;
- } else {
- value = t.readRawValue().trim();
- }
- String targetPath = PathUtils.concat(path, subPath);
- if (!PathUtils.isAbsolute(targetPath)) {
- throw new Exception("absolute path expected: " + targetPath + ", pos: " + pos);
- }
- String parentPath = PathUtils.getParentPath(targetPath);
- String propName = PathUtils.getName(targetPath);
- cb.setProperty(parentPath, propName, value);
- break;
- }
- case '>': {
- pos = t.getLastPos();
- String subPath = t.readString();
- String srcPath = PathUtils.concat(path, subPath);
- if (!PathUtils.isAbsolute(srcPath)) {
- throw new Exception("absolute path expected: " + srcPath + ", pos: " + pos);
- }
- t.read(':');
- pos = t.getLastPos();
- String targetPath = t.readString();
- if (!PathUtils.isAbsolute(targetPath)) {
- targetPath = PathUtils.concat(path, targetPath);
- if (!PathUtils.isAbsolute(targetPath)) {
- throw new Exception("absolute path expected: " + targetPath + ", pos: " + pos);
- }
- }
- cb.moveNode(srcPath, targetPath);
- break;
- }
- case '*': {
- pos = t.getLastPos();
- String subPath = t.readString();
- String srcPath = PathUtils.concat(path, subPath);
- if (!PathUtils.isAbsolute(srcPath)) {
- throw new Exception("absolute path expected: " + srcPath + ", pos: " + pos);
- }
- t.read(':');
- pos = t.getLastPos();
- String targetPath = t.readString();
- if (!PathUtils.isAbsolute(targetPath)) {
- targetPath = PathUtils.concat(path, targetPath);
- if (!PathUtils.isAbsolute(targetPath)) {
- throw new Exception("absolute path expected: " + targetPath + ", pos: " + pos);
- }
- }
- cb.copyNode(srcPath, targetPath);
- break;
- }
- default:
- throw new AssertionError("token type: " + t.getTokenType());
- }
- }
- Id newHead = cb.doCommit();
- if (!newHead.equals(revisionId)) {
- // non-empty commit
- gate.commit(newHead.toString());
- }
- return newHead.toString();
- } catch (Exception e) {
- throw new MicroKernelException(e);
- }
- }
-
- public long getLength(String blobId) throws MicroKernelException {
- if (rep == null) {
- throw new IllegalStateException("this instance has already been disposed");
- }
- try {
- return rep.getRevisionStore().getBlobLength(blobId);
- } catch (Exception e) {
- throw new MicroKernelException(e);
- }
- }
-
- public int read(String blobId, long pos, byte[] buff, int off, int length) throws MicroKernelException {
- if (rep == null) {
- throw new IllegalStateException("this instance has already been disposed");
- }
- try {
- return rep.getRevisionStore().getBlob(blobId, pos, buff, off, length);
- } catch (Exception e) {
- throw new MicroKernelException(e);
- }
- }
-
- public String write(InputStream in) throws MicroKernelException {
- if (rep == null) {
- throw new IllegalStateException("this instance has already been disposed");
- }
- try {
- return rep.getRevisionStore().putBlob(in);
- } catch (Exception e) {
- throw new MicroKernelException(e);
- }
- }
-
- //-------------------------------------------------------< implementation >
-
- void toJson(JsopBuilder builder, NodeState node, int depth, int offset, int count, boolean inclVirtualProps) {
- for (PropertyState property : node.getProperties()) {
- builder.key(property.getName()).encodedValue(property.getEncodedValue());
- }
- long childCount = node.getChildNodeCount();
- if (inclVirtualProps) {
- builder.key(":childNodeCount").value(childCount);
- }
- if (childCount > 0 && depth >= 0) {
- // TODO: Use an import once the conflict with .mk.model is resolved
- for (org.apache.jackrabbit.oak.model.ChildNodeEntry entry
- : node.getChildNodeEntries(offset, count)) {
- builder.key(entry.getName()).object();
- if (depth > 0) {
- toJson(builder, entry.getNode(), depth - 1, 0, -1, inclVirtualProps);
- }
- builder.endObject();
- }
- }
- }
-
- static void addNode(LinkedList list, String path, String name, JsopTokenizer t) throws Exception {
- AddNodeOperation op = new AddNodeOperation();
- op.path = path;
- op.name = name;
- list.add(op);
- if (!t.matches('}')) {
- do {
- String key = t.readString();
- t.read(':');
- if (t.matches('{')) {
- addNode(list, PathUtils.concat(path, name), key, t);
- } else {
- op.props.put(key, t.readRawValue().trim());
- }
- } while (t.matches(','));
- t.read('}');
- }
- }
-
- //--------------------------------------------------------< inner classes >
- static class AddNodeOperation {
- String path;
- String name;
- Map props = new HashMap();
- }
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/api/MicroKernel.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/api/MicroKernel.java
deleted file mode 100644
index b78bae053f1..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/api/MicroKernel.java
+++ /dev/null
@@ -1,324 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.api;
-
-import java.io.InputStream;
-
-/**
- * The MicroKernel design goals/principles:
- *
- *
manage huge trees of nodes and properties efficiently
integrated API for storing/retrieving large binaries (similar to existing DataStore API)
- *
human-readable data serialization (JSON)
- *
- *
- * The MicroKernel Data Model:
- *
- *
simple JSON-inspired data model: just nodes and properties
- *
a node consists of an unordered set of name -> item mappings. each
- * property and child node is uniquely named and a single name can only
- * refer to a property or a child node, not both at the same time.
- *
properties are represented as name/value pairs
- *
supported property types: string, number
- *
other property types (weak/hard reference, date, etc) would need to be
- * encoded/mangled in name or value
- *
no support for JCR/XML-like namespaces, "foo:bar" is just an ordinary name
- *
- */
-public interface MicroKernel {
-
- /**
- * Dispose this instance.
- */
- void dispose();
-
- //---------------------------------------------------------< REVISION ops >
-
- /**
- * Return the id of the current head revision.
- *
- * @return id of head revision
- * @throws MicroKernelException if an error occurs
- */
- String getHeadRevision() throws MicroKernelException;
-
- /**
- * Returns a chronological list of all revisions since a specific point
- * in time.
- *
- * Format:
- *
- * [ { "id" : "", "ts" : }, ... ]
- *
- *
- * @param since timestamp (ms) of earliest revision to be returned
- * @param maxEntries maximum #entries to be returned;
- * if < 0, no limit will be applied.
- * @return a chronological list of revisions in JSON format.
- * @throws MicroKernelException if an error occurs
- */
- String /* jsonArray */ getRevisions(long since, int maxEntries)
- throws MicroKernelException;
-
- /**
- * Wait for a commit to occur that is newer than the given revision number.
- *
- * This method is useful efficient polling. The method will return the current head revision
- * if it is newer than the given old revision number, or wait until the given number of
- * milliseconds passed or a new head revision is available.
- *
- * @param maxWaitMillis the maximum number of milliseconds to wait (0 if the
- * method should not wait).
- * @return the current head revision
- * @throws MicroKernelException if an error occurs
- * @throws InterruptedException if the thread was interrupted
- */
- String waitForCommit(String oldHeadRevision, long maxWaitMillis) throws MicroKernelException, InterruptedException;
-
- /**
- * Returns a revision journal, starting with fromRevisionId
- * and ending with toRevisionId.
- *
- * Format:
- *
- *
- * @param fromRevisionId first revision to be returned in journal
- * @param toRevisionId last revision to be returned in journal, if null the current head revision is assumed
- * @param filter (optional) filter criteria
- * (e.g. path, property names, etc);
- * TODO specify format and semantics
- * @return a chronological list of revisions in JSON format
- * @throws MicroKernelException if an error occurs
- */
- String /* jsonArray */ getJournal(String fromRevisionId, String toRevisionId,
- String filter)
- throws MicroKernelException;
-
- /**
- * Returns the JSON diff representation of the changes between the specified
- * revisions. The changes will be consolidated if the specified range
- * covers intermediary revisions. The revisions need not be in a specified
- * chronological order.
- *
- *
- * Format:
- *
- *
- * @param fromRevisionId a revision
- * @param toRevisionId another revision, if null the current head revision is assumed
- * @param filter (optional) filter criteria
- * (e.g. path, property names, etc);
- * TODO specify format and semantics
- * @return JSON diff representation of the changes
- * @throws MicroKernelException if an error occurs
- */
- String /* JSON diff */ diff(String fromRevisionId, String toRevisionId,
- String filter)
- throws MicroKernelException;
-
- //-------------------------------------------------------------< READ ops >
-
- /**
- * Determines whether the specified node exists.
- *
- * @param path path denoting node
- * @param revisionId revision, if null the current head revision is assumed
- * @return true if the specified node exists, otherwise false
- * @throws MicroKernelException if an error occurs
- */
- boolean nodeExists(String path, String revisionId) throws MicroKernelException;
-
- /**
- * Returns the number of child nodes of the specified node.
- *
- * This is a convenience method since this information could gathered by
- * calling getNodes(path, revisionId, 0, 0, 0) and evaluating
- * the :childNodeCount property.
- *
- *
- * @param path path denoting node
- * @param revisionId revision, if null the current head revision is assumed
- * @return the number of child nodes
- * @throws MicroKernelException if an error occurs
- */
- long getChildNodeCount(String path, String revisionId) throws MicroKernelException;
-
- /**
- * Returns the node tree rooted at the specified parent node with depth 1.
- * Depth 1 means all properties of the node are returned, including the list
- * of child nodes and their properties (including
- * :childNodeCount). Example:
- *
- * The collection of name/value pairs denoting child nodes is assumed to be
- * ordered.
- *
- * Remarks:
- *
- *
If the property :childNodeCount equals 0, then the
- * node does not have any child nodes.
- *
If the value of :childNodeCount is larger than the list
- * of returned child nodes, then the node has more child nodes than those
- * included in the tree. Large number of child nodes can be retrieved in
- * chunks using {@link #getNodes(String, String, int, long, int, String)}
- *
- * This method is a convenience method for
- * getNodes(path, revisionId, 1, 0, -1, null)
- *
- * @param path path denoting root of node tree to be retrieved
- * @param revisionId revision, if null the current head revision is assumed
- * @return node tree in JSON format
- * @throws MicroKernelException if an error occurs
- */
- String /* jsonTree */ getNodes(String path, String revisionId) throws MicroKernelException;
-
- /**
- * Returns the node tree rooted at the specified parent node with the
- * specified depth, maximum child node count and offset. The depth of the
- * returned tree is governed by the depth parameter:
- *
- *
- *
depth = 0
- *
properties, including :childNodeCount and the list
- * of child node names (as empty objects)
- *
- *
- *
depth = 1
- *
properties, child nodes and their properties (including
- * :childNodeCount)
- *
- *
- *
depth = 2
- *
[and so on...]
- *
- *
- * Offset and count only affect the returned child node list of this node.
- *
- * @param path path denoting root of node tree to be retrieved
- * @param revisionId revision, if null the current head revision is assumed
- * @param depth maximum depth of returned tree
- * @param offset start position in child node list (0 to start at the
- * beginning)
- * @param count maximum number of child nodes to retrieve (-1 for as many as
- * possible)
- * @param filter (optional) filter criteria
- * (e.g. names of properties to be included, etc);
- * TODO specify format and semantics
- * @return node tree in JSON format
- * @throws MicroKernelException if an error occurs
- */
- String /* jsonTree */ getNodes(String path, String revisionId, int depth, long offset, int count, String filter) throws MicroKernelException;
-
- //------------------------------------------------------------< WRITE ops >
-
- /**
- * Applies the specified changes on the specified target node.
- *
- * If path.length() == 0 the paths specified in the
- * jsonDiff are expected to be absolute.
- *
- * The implementation tries to merge changes if the revision id of the
- * commit is set accordingly. As an example, deleting a node is allowed if
- * the node existed in the given revision, even if it was deleted in the
- * meantime.
- *
- * @param path path denoting target node
- * @param jsonDiff changes to be applied in JSON diff format.
- * @param revisionId revision the changes are based on, if null the current head revision is assumed
- * @param message commit message
- * @return id of newly created revision
- * @throws MicroKernelException if an error occurs
- */
- String /* revisionId */ commit(String path, String jsonDiff, String revisionId, String message)
- throws MicroKernelException;
-
-
- //--------------------------------------------------< BLOB READ/WRITE ops >
-
- /**
- * Returns the length of the specified blob.
- *
- * @param blobId blob identifier
- * @return length of the specified blob
- * @throws MicroKernelException if an error occurs
- */
- long getLength(String blobId) throws MicroKernelException;
-
- /**
- * Reads up to length bytes of data from the specified blob into
- * the given array of bytes. An attempt is made to read as many as
- * length bytes, but a smaller number may be read.
- * The number of bytes actually read is returned as an integer.
- *
- * @param blobId blob identifier
- * @param pos the offset within the blob
- * @param buff the buffer into which the data is read.
- * @param off the start offset in array buff
- * at which the data is written.
- * @param length the maximum number of bytes to read
- * @return the total number of bytes read into the buffer, or
- * -1 if there is no more data because the end of
- * the blob content has been reached.
- * @throws MicroKernelException if an error occurs
- */
- int /* count */ read(String blobId, long pos, byte[] buff, int off, int length)
- throws MicroKernelException;
-
- /**
- * Stores the content of the given stream and returns an associated
- * identifier for later retrieval.
- *
- * If identical stream content has been stored previously, then the existing
- * identifier will be returned instead of storing a redundant copy.
- *
- * The stream is closed by this method.
- *
- * @param in InputStream providing the blob content
- * @return blob identifier associated with the given content
- * @throws MicroKernelException if an error occurs
- */
- String /* blobId */ write(InputStream in) throws MicroKernelException;
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/blobs/FileBlobStore.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/blobs/FileBlobStore.java
deleted file mode 100644
index 552e238d7a7..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/blobs/FileBlobStore.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.blobs;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.security.DigestInputStream;
-import java.security.MessageDigest;
-import org.apache.jackrabbit.mk.fs.FilePath;
-import org.apache.jackrabbit.mk.fs.FileUtils;
-import org.apache.jackrabbit.mk.util.ExceptionFactory;
-import org.apache.jackrabbit.mk.util.IOUtils;
-import org.apache.jackrabbit.mk.util.StringUtils;
-
-/**
- * A file blob store.
- */
-public class FileBlobStore extends AbstractBlobStore {
-
- private static final String OLD_SUFFIX = "_old";
-
- private final FilePath baseDir;
- private final byte[] buffer = new byte[16 * 1024];
- private boolean mark;
-
- public FileBlobStore(String dir) throws IOException {
- baseDir = FilePath.get(dir);
- FileUtils.createDirectories(dir);
- }
-
- @Override
- public String addBlob(String tempFilePath) {
- try {
- FilePath file = FilePath.get(tempFilePath);
- InputStream in = file.newInputStream();
- MessageDigest messageDigest = MessageDigest.getInstance(HASH_ALGORITHM);
- DigestInputStream din = new DigestInputStream(in, messageDigest);
- long length = file.size();
- try {
- while (true) {
- int len = din.read(buffer, 0, buffer.length);
- if (len < 0) {
- break;
- }
- }
- } finally {
- din.close();
- }
- ByteArrayOutputStream idStream = new ByteArrayOutputStream();
- idStream.write(TYPE_HASH);
- IOUtils.writeVarInt(idStream, 0);
- IOUtils.writeVarLong(idStream, length);
- byte[] digest = messageDigest.digest();
- FilePath f = getFile(digest, false);
- if (f.exists()) {
- file.delete();
- } else {
- FilePath parent = f.getParent();
- if (!parent.exists()) {
- FileUtils.createDirectories(parent.toString());
- }
- file.moveTo(f);
- }
- IOUtils.writeVarInt(idStream, digest.length);
- idStream.write(digest);
- byte[] id = idStream.toByteArray();
- String blobId = StringUtils.convertBytesToHex(id);
- usesBlobId(blobId);
- return blobId;
- } catch (Exception e) {
- throw ExceptionFactory.convert(e);
- }
- }
-
- @Override
- protected synchronized void storeBlock(byte[] digest, int level, byte[] data) throws IOException {
- FilePath f = getFile(digest, false);
- if (f.exists()) {
- return;
- }
- FilePath parent = f.getParent();
- if (!parent.exists()) {
- FileUtils.createDirectories(parent.toString());
- }
- FilePath temp = parent.resolve(f.getName() + ".temp");
- OutputStream out = temp.newOutputStream(false);
- out.write(data);
- out.close();
- temp.moveTo(f);
- }
-
- private FilePath getFile(byte[] digest, boolean old) {
- String id = StringUtils.convertBytesToHex(digest);
- String sub = id.substring(id.length() - 2);
- if (old) {
- sub += OLD_SUFFIX;
- }
- return baseDir.resolve(sub).resolve(id + ".dat");
- }
-
- @Override
- protected byte[] readBlockFromBackend(BlockId id) throws IOException {
- FilePath f = getFile(id.digest, false);
- if (!f.exists()) {
- FilePath old = getFile(id.digest, true);
- f.getParent().createDirectory();
- old.moveTo(f);
- f = getFile(id.digest, false);
- }
- int length = (int) Math.min(f.size(), getBlockSize());
- byte[] data = new byte[length];
- InputStream in = f.newInputStream();
- try {
- IOUtils.skipFully(in, id.pos);
- IOUtils.readFully(in, data, 0, length);
- } finally {
- in.close();
- }
- return data;
- }
-
- @Override
- public void startMark() throws Exception {
- mark = true;
- for (int i = 0; i < 256; i++) {
- String sub = StringUtils.convertBytesToHex(new byte[] { (byte) i });
- FilePath d = baseDir.resolve(sub);
- FilePath old = baseDir.resolve(sub + OLD_SUFFIX);
- if (d.exists()) {
- if (old.exists()) {
- for (FilePath p : d.newDirectoryStream()) {
- String name = p.getName();
- FilePath newName = old.resolve(name);
- p.moveTo(newName);
- }
- } else {
- d.moveTo(old);
- }
- }
- }
- markInUse();
- }
-
- @Override
- protected boolean isMarkEnabled() {
- return mark;
- }
-
- @Override
- protected void mark(BlockId id) throws IOException {
- FilePath f = getFile(id.digest, false);
- if (!f.exists()) {
- FilePath old = getFile(id.digest, true);
- f.getParent().createDirectory();
- old.moveTo(f);
- f = getFile(id.digest, false);
- }
- }
-
- @Override
- public int sweep() throws IOException {
- int count = 0;
- for (int i = 0; i < 256; i++) {
- String sub = StringUtils.convertBytesToHex(new byte[] { (byte) i });
- FilePath old = baseDir.resolve(sub + OLD_SUFFIX);
- if (old.exists()) {
- for (FilePath p : old.newDirectoryStream()) {
- String name = p.getName();
- FilePath file = old.resolve(name);
- file.delete();
- count++;
- }
- old.delete();
- }
- }
- mark = false;
- return count;
- }
-
-}
\ No newline at end of file
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/blobs/MongoBlobStore.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/blobs/MongoBlobStore.java
deleted file mode 100644
index 255c1507f82..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/blobs/MongoBlobStore.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.blobs;
-
-import java.io.IOException;
-import com.mongodb.BasicDBObject;
-import com.mongodb.DB;
-import com.mongodb.DBCollection;
-import com.mongodb.DBObject;
-import com.mongodb.Mongo;
-import com.mongodb.MongoException;
-import com.mongodb.WriteConcern;
-
-/**
- * A blob store that uses MongoDB.
- */
-public class MongoBlobStore extends AbstractBlobStore {
-
- private static final String DB = "ds";
- private static final String DATASTORE_COLLECTION = "dataStore";
- private static final String DIGEST_FIELD = "digest";
- private static final String DATA_FIELD = "data";
-
- private Mongo con;
- private DB db;
- private DBCollection dataStore;
-
- public MongoBlobStore() throws IOException {
- con = new Mongo();
- db = con.getDB(DB);
- db.setWriteConcern(WriteConcern.SAFE);
- dataStore = db.getCollection(DATASTORE_COLLECTION);
- dataStore.ensureIndex(
- new BasicDBObject(DIGEST_FIELD, 1),
- new BasicDBObject("unique", true));
- }
-
- @Override
- protected byte[] readBlockFromBackend(BlockId id) {
- BasicDBObject key = new BasicDBObject(DIGEST_FIELD, id.digest);
- DBObject dataObject = dataStore.findOne(key);
- return (byte[]) dataObject.get(DATA_FIELD);
- }
-
- @Override
- protected void storeBlock(byte[] digest, int level, byte[] data) {
- BasicDBObject dataObject = new BasicDBObject(DIGEST_FIELD, digest);
- dataObject.append(DATA_FIELD, data);
- try {
- dataStore.insert(dataObject);
- } catch (MongoException.DuplicateKey ignore) {
- // ignore
- }
- }
-
- @Override
- public void close() {
- con.close();
- }
-
- @Override
- public void startMark() throws Exception {
- // TODO
- markInUse();
- }
-
- @Override
- protected boolean isMarkEnabled() {
- // TODO
- return false;
- }
-
- @Override
- protected void mark(BlockId id) throws Exception {
- // TODO
- }
-
- @Override
- public int sweep() throws Exception {
- // TODO
- return 0;
- }
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/cluster/HotBackup.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/cluster/HotBackup.java
deleted file mode 100644
index 7105192992a..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/cluster/HotBackup.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.cluster;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.json.JsopBuilder;
-import org.apache.jackrabbit.mk.json.fast.Jsop;
-import org.apache.jackrabbit.mk.json.fast.JsopArray;
-import org.apache.jackrabbit.mk.json.fast.JsopObject;
-import org.apache.jackrabbit.mk.util.PathUtils;
-
-/**
- * This class connects two MicroKernel instances, where one
- * instance will periodically commit the changes made to the other.
- *
- * TODO do a full sync on first call
- * TODO add periodic background check
- */
-public class HotBackup {
-
- private static final String PATH_PROPERTY_LASTREV = "/:lastrev";
- private final MicroKernel source;
- private final MicroKernel target;
- private String lastRev;
-
- /**
- * Create a new instance of this class.
- *
- * @param source source microkernel where changes are read
- * @param target target microkernel where changes are committed
- */
- public HotBackup(MicroKernel source, MicroKernel target) {
- this.source = source;
- this.target = target;
-
- init();
- }
-
- private void init() {
- lastRev = getProperty(target, PATH_PROPERTY_LASTREV);
- if (lastRev == null) {
- lastRev = source.getHeadRevision();
-
- // TODO never sync'ed, so do a full copy
-
-
- setProperty(target, PATH_PROPERTY_LASTREV, lastRev);
- }
- sync();
- }
-
- /**
- * Read all changes from the source microkernel and commit them to
- * the target microkernel.
- */
- public void sync() {
- String headRev = source.getHeadRevision();
- if (lastRev != headRev) {
- JsopArray journal = (JsopArray) Jsop.parse(source.getJournal(lastRev, headRev, null));
- for (int i = 0; i < journal.size(); i++) {
- JsopObject record = (JsopObject) journal.get(i);
- String diff = (String) record.get("changes");
- String message = (String) record.get("msg");
- target.commit("", diff, target.getHeadRevision(), message);
- }
- lastRev = headRev;
- setProperty(target, PATH_PROPERTY_LASTREV, lastRev);
- }
- }
-
- private static String getProperty(MicroKernel mk, String path) {
- String parent = PathUtils.getParentPath(path);
- String name = PathUtils.getName(path);
-
- // todo use filter parameter for specifying the property?
- JsopObject props = (JsopObject) Jsop.parse(mk.getNodes(parent, mk.getHeadRevision(), -1, 0, -1, null));
- return (String) props.get(name);
- }
-
- private static void setProperty(MicroKernel mk, String path, String value) {
- String parent = PathUtils.getParentPath(path);
- String name = PathUtils.getName(path);
-
- if (value == null) {
- String diff = new JsopBuilder().tag('-').key(name).value(null).toString();
- mk.commit(parent, diff, mk.getHeadRevision(), null);
- } else {
- String diff = new JsopBuilder().tag('+').key(name).value(value).toString();
- mk.commit(parent, diff, mk.getHeadRevision(), null);
- }
- }
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FileBase.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FileBase.java
deleted file mode 100644
index 7fd5be2ca98..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FileBase.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.fs;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.MappedByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.channels.FileLock;
-import java.nio.channels.ReadableByteChannel;
-import java.nio.channels.WritableByteChannel;
-
-/**
- * The base class for file implementations.
- */
-public abstract class FileBase extends FileChannel {
-
- public void force(boolean metaData) throws IOException {
- // ignore
- }
-
- public FileLock lock(long position, long size, boolean shared) throws IOException {
- throw new UnsupportedOperationException();
- }
-
- public MappedByteBuffer map(MapMode mode, long position, long size) throws IOException {
- throw new UnsupportedOperationException();
- }
-
- public abstract long position() throws IOException;
-
- public abstract FileChannel position(long newPosition) throws IOException;
-
- public abstract int read(ByteBuffer dst) throws IOException;
-
- public int read(ByteBuffer dst, long position) throws IOException {
- throw new UnsupportedOperationException();
- }
-
- public long read(ByteBuffer[] dsts, int offset, int length) throws IOException {
- throw new UnsupportedOperationException();
- }
-
- public abstract long size() throws IOException;
-
- public long transferFrom(ReadableByteChannel src, long position, long count) throws IOException {
- throw new UnsupportedOperationException();
- }
-
- public long transferTo(long position, long count, WritableByteChannel target)
- throws IOException {
- throw new UnsupportedOperationException();
- }
-
- public abstract FileChannel truncate(long size) throws IOException;
-
- public FileLock tryLock(long position, long size, boolean shared) throws IOException {
- throw new UnsupportedOperationException();
- }
-
- public abstract int write(ByteBuffer src) throws IOException;
-
- public int write(ByteBuffer src, long position) throws IOException {
- throw new UnsupportedOperationException(); }
-
- public long write(ByteBuffer[] srcs, int offset, int length) throws IOException {
- throw new UnsupportedOperationException(); }
-
- protected void implCloseChannel() throws IOException {
- // ignore
- }
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FileCache.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FileCache.java
deleted file mode 100644
index 40728ca0472..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FileCache.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.fs;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.channels.FileLock;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicReference;
-import org.apache.jackrabbit.mk.util.SimpleLRUCache;
-
-/**
- * A file that has a simple read cache.
- */
-public class FileCache extends FileBase {
-
- private static final boolean APPEND_BUFFER = !Boolean.getBoolean("mk.disableAppendBuffer");
- private static final int APPEND_BUFFER_SIZE_INIT = 8 * 1024;
- private static final int APPEND_BUFFER_SIZE = 8 * 1024;
-
- private static final int BLOCK_SIZE = 4 * 1024;
- private final String name;
- private final Map readCache = SimpleLRUCache.newInstance(16);
- private final FileChannel base;
- private long pos, size;
-
- private AtomicReference appendBuffer;
- private int appendOperations;
- private Thread appendFlushThread;
-
- FileCache(String name, FileChannel base) throws IOException {
- this.name = name;
- this.base = base;
- this.size = base.size();
- }
-
- public long position() throws IOException {
- return pos;
- }
-
- public FileChannel position(long newPosition) throws IOException {
- this.pos = newPosition;
- return this;
- }
-
- boolean flush() throws IOException {
- if (appendBuffer == null) {
- return false;
- }
- synchronized (this) {
- ByteArrayOutputStream newBuff = new ByteArrayOutputStream(APPEND_BUFFER_SIZE_INIT);
- ByteArrayOutputStream buff = appendBuffer.getAndSet(newBuff);
- if (buff.size() > 0) {
- try {
- base.position(size - buff.size());
- base.write(ByteBuffer.wrap(buff.toByteArray()));
- } catch (IOException e) {
- close();
- throw e;
- }
- }
- }
- return true;
- }
-
- public int read(ByteBuffer dst) throws IOException {
- flush();
- long readPos = (pos / BLOCK_SIZE) * BLOCK_SIZE;
- int off = (int) (pos - readPos);
- int len = BLOCK_SIZE - off;
- ByteBuffer buff = readCache.get(readPos);
- if (buff == null) {
- base.position(readPos);
- buff = ByteBuffer.allocate(BLOCK_SIZE);
- int read = base.read(buff);
- if (read == BLOCK_SIZE) {
- readCache.put(readPos, buff);
- } else {
- if (read < 0) {
- return -1;
- }
- len = Math.min(len, read);
- }
- }
- len = Math.min(len, dst.remaining());
- System.arraycopy(buff.array(), off, dst.array(), dst.position(), len);
- dst.position(dst.position() + len);
- pos += len;
- return len;
- }
-
- public long size() throws IOException {
- return size;
- }
-
- public FileChannel truncate(long newSize) throws IOException {
- flush();
- readCache.clear();
- base.truncate(newSize);
- pos = Math.min(pos, newSize);
- size = Math.min(size, newSize);
- return this;
- }
-
- public int write(ByteBuffer src) throws IOException {
- if (readCache.size() > 0) {
- readCache.clear();
- }
- // append operations are buffered, but
- // only if there was at least one successful write operation
- // (to detect trying to write to a read-only file and such early on)
- // (in addition to that, the first few append operations are not buffered
- // to avoid starting a thread unnecessarily)
- if (APPEND_BUFFER && pos == size && ++appendOperations >= 4) {
- int len = src.remaining();
- if (len > APPEND_BUFFER_SIZE) {
- flush();
- } else {
- if (appendBuffer == null) {
- ByteArrayOutputStream buff = new ByteArrayOutputStream(APPEND_BUFFER_SIZE_INIT);
- appendBuffer = new AtomicReference(buff);
- appendFlushThread = new Thread("Flush " + name) {
- public void run() {
- try {
- do {
- Thread.sleep(500);
- if (flush()) {
- continue;
- }
- } while (!Thread.interrupted());
- } catch (Exception e) {
- // ignore
- }
- }
- };
- appendFlushThread.setDaemon(true);
- appendFlushThread.start();
- }
- ByteArrayOutputStream buff = appendBuffer.get();
- if (buff.size() > APPEND_BUFFER_SIZE) {
- flush();
- buff = appendBuffer.get();
- }
- buff.write(src.array(), src.position(), len);
- pos += len;
- size += len;
- return len;
- }
- }
- base.position(pos);
- int len = base.write(src);
- pos += len;
- size = Math.max(size, pos);
- return len;
- }
-
- protected void implCloseChannel() throws IOException {
- if (appendBuffer != null) {
- appendFlushThread.interrupt();
- try {
- appendFlushThread.join();
- } catch (InterruptedException e) {
- // ignore
- }
- flush();
- }
- base.close();
- }
-
- public void force(boolean metaData) throws IOException {
- flush();
- base.force(metaData);
- }
-
- public FileLock tryLock(long position, long size, boolean shared) throws IOException {
- flush();
- return base.tryLock(position, size, shared);
- }
-
- public String toString() {
- return "cache:" + base.toString();
- }
-
-}
\ No newline at end of file
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FileChannelInputStream.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FileChannelInputStream.java
deleted file mode 100644
index f41c856ad5b..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FileChannelInputStream.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2004-2011 H2 Group. Multiple-Licensed under the H2 License,
- * Version 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html).
- * Initial Developer: H2 Group
- */
-package org.apache.jackrabbit.mk.fs;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-
-/**
- * Allows to read from a file channel like an input stream.
- */
-public class FileChannelInputStream extends InputStream {
-
- private final FileChannel channel;
- private final byte[] buffer = { 0 };
- private final boolean closeChannel;
-
- /**
- * Create a new file object input stream from the file channel.
- *
- * @param channel the file channel
- */
- public FileChannelInputStream(FileChannel channel, boolean closeChannel) {
- this.channel = channel;
- this.closeChannel = closeChannel;
- }
-
- public int read() throws IOException {
- if (channel.position() >= channel.size()) {
- return -1;
- }
- FileUtils.readFully(channel, ByteBuffer.wrap(buffer));
- return buffer[0] & 0xff;
- }
-
- public int read(byte[] b) throws IOException {
- return read(b, 0, b.length);
- }
-
- public int read(byte[] b, int off, int len) throws IOException {
- if (channel.position() + len < channel.size()) {
- FileUtils.readFully(channel, ByteBuffer.wrap(b, off, len));
- return len;
- }
- return super.read(b, off, len);
- }
-
- public long skip(long n) throws IOException {
- n = Math.min(channel.size() - channel.position(), n);
- channel.position(channel.position() + n);
- return n;
- }
-
- public void close() throws IOException {
- if (closeChannel) {
- channel.close();
- }
- }
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FilePath.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FilePath.java
deleted file mode 100644
index 216bf3fe678..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FilePath.java
+++ /dev/null
@@ -1,322 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.fs;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.channels.FileChannel;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import org.apache.jackrabbit.mk.util.StringUtils;
-
-/**
- * A path to a file. It similar to the Java 7 java.nio.file.Path,
- * but simpler, and works with older versions of Java. It also implements the
- * relevant methods found in java.nio.file.FileSystem and
- * FileSystems
- */
-public abstract class FilePath {
-
- private static final FilePath DEFAULT = new FilePathDisk();
-
- private static Map providers;
-
- /**
- * The prefix for temporary files.
- */
- private static String tempRandom;
- private static long tempSequence;
-
- /**
- * The complete path (which may be absolute or relative, depending on the
- * file system).
- */
- protected String name;
-
- /**
- * Get the file path object for the given path.
- * This method is similar to Java 7 java.nio.file.FileSystem.getPath.
- * Windows-style '\' is replaced with '/'.
- *
- * @param path the path
- * @return the file path object
- */
- public static FilePath get(String path) {
- path = path.replace('\\', '/');
- int index = path.indexOf(':');
- if (index < 2) {
- // use the default provider if no prefix or
- // only a single character (drive name)
- return DEFAULT.getPath(path);
- }
- String scheme = path.substring(0, index);
- registerDefaultProviders();
- FilePath p = providers.get(scheme);
- if (p == null) {
- // provider not found - use the default
- p = DEFAULT;
- }
- return p.getPath(path);
- }
-
- private static void registerDefaultProviders() {
- if (providers == null) {
- Map map = Collections.synchronizedMap(new HashMap());
- for (String c : new String[] {
- "org.apache.jackrabbit.mk.fs.FilePathDisk",
- "org.apache.jackrabbit.mk.fs.FilePathCache"
- }) {
- try {
- FilePath p = (FilePath) Class.forName(c).newInstance();
- map.put(p.getScheme(), p);
- } catch (Exception e) {
- // ignore - the files may be excluded in purpose
- }
- }
- providers = map;
- }
- }
-
- /**
- * Register a file provider.
- *
- * @param provider the file provider
- */
- public static void register(FilePath provider) {
- registerDefaultProviders();
- providers.put(provider.getScheme(), provider);
- }
-
- /**
- * Unregister a file provider.
- *
- * @param provider the file provider
- */
- public static void unregister(FilePath provider) {
- registerDefaultProviders();
- providers.remove(provider.getScheme());
- }
-
- /**
- * Get the size of a file in bytes
- *
- * @return the size in bytes
- */
- public abstract long size();
-
- /**
- * Rename a file if this is allowed.
- *
- * @param newName the new fully qualified file name
- */
- public abstract void moveTo(FilePath newName) throws IOException;
-
- /**
- * Create a new file.
- *
- * @return true if creating was successful
- */
- public abstract boolean createFile();
-
- /**
- * Checks if a file exists.
- *
- * @return true if it exists
- */
- public abstract boolean exists();
-
- /**
- * Delete a file or directory if it exists.
- * Directories may only be deleted if they are empty.
- */
- public abstract void delete() throws IOException;
-
- /**
- * List the files and directories in the given directory.
- *
- * @return the list of fully qualified file names
- */
- public abstract List newDirectoryStream() throws IOException;
-
- /**
- * Normalize a file name.
- *
- * @return the normalized file name
- */
- public abstract FilePath toRealPath() throws IOException;
-
- /**
- * Get the parent directory of a file or directory.
- *
- * @return the parent directory name
- */
- public abstract FilePath getParent();
-
- /**
- * Check if it is a file or a directory.
- *
- * @return true if it is a directory
- */
- public abstract boolean isDirectory();
-
- /**
- * Check if the file name includes a path.
- *
- * @return if the file name is absolute
- */
- public abstract boolean isAbsolute();
-
- /**
- * Get the last modified date of a file
- *
- * @return the last modified date
- */
- public abstract long lastModified();
-
- /**
- * Check if the file is writable.
- *
- * @return if the file is writable
- */
- public abstract boolean canWrite();
-
- /**
- * Create a directory (all required parent directories already exist).
- */
- public abstract void createDirectory() throws IOException;
-
- /**
- * Get the file or directory name (the last element of the path).
- *
- * @return the last element of the path
- */
- public String getName() {
- int idx = Math.max(name.indexOf(':'), name.lastIndexOf('/'));
- return idx < 0 ? name : name.substring(idx + 1);
- }
-
- /**
- * Create an output stream to write into the file.
- *
- * @param append if true, the file will grow, if false, the file will be
- * truncated first
- * @return the output stream
- */
- public abstract OutputStream newOutputStream(boolean append) throws IOException;
-
- /**
- * Open a random access file object.
- *
- * @param mode the access mode. Supported are r, rw, rws, rwd
- * @return the file object
- */
- public abstract FileChannel open(String mode) throws IOException;
-
- /**
- * Create an input stream to read from the file.
- *
- * @return the input stream
- */
- public abstract InputStream newInputStream() throws IOException;
-
- /**
- * Disable the ability to write.
- *
- * @return true if the call was successful
- */
- public abstract boolean setReadOnly();
-
- /**
- * Create a new temporary file.
- *
- * @param suffix the suffix
- * @param deleteOnExit if the file should be deleted when the virtual
- * machine exists
- * @param inTempDir if the file should be stored in the temporary directory
- * @return the name of the created file
- */
- public FilePath createTempFile(String suffix, boolean deleteOnExit, boolean inTempDir) throws IOException {
- while (true) {
- FilePath p = getPath(name + getNextTempFileNamePart(false) + suffix);
- if (p.exists() || !p.createFile()) {
- // in theory, the random number could collide
- getNextTempFileNamePart(true);
- continue;
- }
- p.open("rw").close();
- return p;
- }
- }
-
- /**
- * Get the next temporary file name part (the part in the middle).
- *
- * @param newRandom if the random part of the filename should change
- * @return the file name part
- */
- protected static synchronized String getNextTempFileNamePart(boolean newRandom) {
- if (newRandom || tempRandom == null) {
- byte[] prefix = new byte[8];
- new Random().nextBytes(prefix);
- tempRandom = StringUtils.convertBytesToHex(prefix) + ".";
- }
- return tempRandom + tempSequence++;
- }
-
- /**
- * Get the string representation. The returned string can be used to
- * construct a new object.
- *
- * @return the path as a string
- */
- public String toString() {
- return name;
- }
-
- /**
- * Get the scheme (prefix) for this file provider.
- * This is similar to java.nio.file.spi.FileSystemProvider.getScheme.
- *
- * @return the scheme
- */
- public abstract String getScheme();
-
- /**
- * Convert a file to a path. This is similar to
- * java.nio.file.spi.FileSystemProvider.getPath, but may
- * return an object even if the scheme doesn't match in case of the the
- * default file provider.
- *
- * @param path the path
- * @return the file path object
- */
- public abstract FilePath getPath(String path);
-
- /**
- * Append an element to the path.
- * This is similar to java.nio.file.spi.FileSystemProvider.resolve.
- *
- * @param other the relative path (might be null)
- * @return the resolved path
- */
- public abstract FilePath resolve(String other);
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FilePathDisk.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FilePathDisk.java
deleted file mode 100644
index 2135ea01475..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FilePathDisk.java
+++ /dev/null
@@ -1,417 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.fs;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.RandomAccessFile;
-import java.net.URL;
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.channels.FileLock;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * This file system stores files on disk.
- * This is the most common file system.
- */
-public class FilePathDisk extends FilePath {
-
- private static final String CLASSPATH_PREFIX = "classpath:";
- private static final String FILE_SEPARATOR = System.getProperty("file.separator", "/");
- private static final int MAX_FILE_RETRY = 16;
-
- public FilePathDisk getPath(String path) {
- FilePathDisk p = new FilePathDisk();
- p.name = translateFileName(path);
- return p;
- }
-
- public long size() {
- return new File(name).length();
- }
-
- /**
- * Translate the file name to the native format. This will replace '\' with
- * '/' and expand the home directory ('~').
- *
- * @param fileName the file name
- * @return the native file name
- */
- protected static String translateFileName(String fileName) {
- fileName = fileName.replace('\\', '/');
- if (fileName.startsWith("file:")) {
- fileName = fileName.substring("file:".length());
- }
- return expandUserHomeDirectory(fileName);
- }
-
- /**
- * Expand '~' to the user home directory. It is only be expanded if the '~'
- * stands alone, or is followed by '/' or '\'.
- *
- * @param fileName the file name
- * @return the native file name
- */
- public static String expandUserHomeDirectory(String fileName) {
- if (fileName.startsWith("~") && (fileName.length() == 1 || fileName.startsWith("~/"))) {
- String userDir = System.getProperty("user.home", "");
- fileName = userDir + fileName.substring(1);
- }
- return fileName;
- }
-
- public void moveTo(FilePath newName) throws IOException {
- File oldFile = new File(name);
- File newFile = new File(newName.name);
- if (oldFile.getAbsolutePath().equals(newFile.getAbsolutePath())) {
- return;
- }
- if (!oldFile.exists()) {
- throw new IOException("Could not rename " +
- name + " (not found) to " + newName.name);
- }
- if (newFile.exists()) {
- throw new IOException("Could not rename " +
- name + " to " + newName + " (already exists)");
- }
- for (int i = 0; i < MAX_FILE_RETRY; i++) {
- boolean ok = oldFile.renameTo(newFile);
- if (ok) {
- return;
- }
- wait(i);
- }
- throw new IOException("Could not rename " + name + " to " + newName.name);
- }
-
- private static void wait(int i) {
- if (i == 8) {
- System.gc();
- }
- try {
- // sleep at most 256 ms
- long sleep = Math.min(256, i * i);
- Thread.sleep(sleep);
- } catch (InterruptedException e) {
- // ignore
- }
- }
-
- public boolean createFile() {
- File file = new File(name);
- for (int i = 0; i < MAX_FILE_RETRY; i++) {
- try {
- return file.createNewFile();
- } catch (IOException e) {
- // 'access denied' is really a concurrent access problem
- wait(i);
- }
- }
- return false;
- }
-
- public boolean exists() {
- return new File(name).exists();
- }
-
- public void delete() throws IOException {
- File file = new File(name);
- for (int i = 0; i < MAX_FILE_RETRY; i++) {
- boolean ok = file.delete();
- if (ok || !file.exists()) {
- return;
- }
- wait(i);
- }
- throw new IOException("Could not delete " + name);
- }
-
- public List newDirectoryStream() throws IOException {
- ArrayList list = new ArrayList();
- File f = new File(name);
- String[] files = f.list();
- if (files != null) {
- String base = f.getCanonicalPath();
- if (!base.endsWith(FILE_SEPARATOR)) {
- base += FILE_SEPARATOR;
- }
- for (int i = 0, len = files.length; i < len; i++) {
- list.add(getPath(base + files[i]));
- }
- }
- return list;
- }
-
- public boolean canWrite() {
- return canWriteInternal(new File(name));
- }
-
- public boolean setReadOnly() {
- File f = new File(name);
- return f.setReadOnly();
- }
-
- public FilePathDisk toRealPath() throws IOException {
- String fileName = new File(name).getCanonicalPath();
- return getPath(fileName);
- }
-
- public FilePath getParent() {
- String p = new File(name).getParent();
- return p == null ? null : getPath(p);
- }
-
- public boolean isDirectory() {
- return new File(name).isDirectory();
- }
-
- public boolean isAbsolute() {
- return new File(name).isAbsolute();
- }
-
- public long lastModified() {
- return new File(name).lastModified();
- }
-
- private static boolean canWriteInternal(File file) {
- try {
- if (!file.canWrite()) {
- return false;
- }
- } catch (Exception e) {
- // workaround for GAE which throws a
- // java.security.AccessControlException
- return false;
- }
- // File.canWrite() does not respect windows user permissions,
- // so we must try to open it using the mode "rw".
- // See also http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4420020
- RandomAccessFile r = null;
- try {
- r = new RandomAccessFile(file, "rw");
- return true;
- } catch (FileNotFoundException e) {
- return false;
- } finally {
- if (r != null) {
- try {
- r.close();
- } catch (IOException e) {
- // ignore
- }
- }
- }
- }
-
- public void createDirectory() throws IOException {
- File f = new File(name);
- if (f.exists()) {
- if (f.isDirectory()) {
- return;
- }
- throw new IOException("A file with this name already exists: " + name);
- }
- File dir = new File(name);
- for (int i = 0; i < MAX_FILE_RETRY; i++) {
- if ((dir.exists() && dir.isDirectory()) || dir.mkdir()) {
- return;
- }
- wait(i);
- }
- throw new IOException("Could not create " + name);
- }
-
- public OutputStream newOutputStream(boolean append) throws IOException {
- File file = new File(name);
- File parent = file.getParentFile();
- if (parent != null) {
- FileUtils.createDirectories(parent.getAbsolutePath());
- }
- FileOutputStream out = new FileOutputStream(name, append);
- return out;
- }
-
- public InputStream newInputStream() throws IOException {
- if (name.indexOf(':') > 1) {
- // if the : is in position 1, a windows file access is assumed: C:.. or D:
- if (name.startsWith(CLASSPATH_PREFIX)) {
- String fileName = name.substring(CLASSPATH_PREFIX.length());
- if (!fileName.startsWith("/")) {
- fileName = "/" + fileName;
- }
- InputStream in = getClass().getResourceAsStream(fileName);
- if (in == null) {
- Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName);
- }
- if (in == null) {
- throw new FileNotFoundException("Resource " + fileName);
- }
- return in;
- }
- // otherwise an URL is assumed
- URL url = new URL(name);
- InputStream in = url.openStream();
- return in;
- }
- FileInputStream in = new FileInputStream(name);
- return in;
- }
-
- /**
- * Call the garbage collection and run finalization. This close all files that
- * were not closed, and are no longer referenced.
- */
- static void freeMemoryAndFinalize() {
- Runtime rt = Runtime.getRuntime();
- long mem = rt.freeMemory();
- for (int i = 0; i < 16; i++) {
- rt.gc();
- long now = rt.freeMemory();
- rt.runFinalization();
- if (now == mem) {
- break;
- }
- mem = now;
- }
- }
-
- public FileChannel open(String mode) throws IOException {
- return new FileDisk(name, mode);
- }
-
- public String getScheme() {
- return "file";
- }
-
- public FilePath createTempFile(String suffix, boolean deleteOnExit, boolean inTempDir)
- throws IOException {
- String fileName = name + ".";
- String prefix = new File(fileName).getName();
- File dir;
- if (inTempDir) {
- dir = new File(System.getProperty("java.io.tmpdir", "."));
- } else {
- dir = new File(fileName).getAbsoluteFile().getParentFile();
- }
- FileUtils.createDirectories(dir.getAbsolutePath());
- while (true) {
- File f = new File(dir, prefix + getNextTempFileNamePart(false) + suffix);
- if (f.exists() || !f.createNewFile()) {
- // in theory, the random number could collide
- getNextTempFileNamePart(true);
- continue;
- }
- if (deleteOnExit) {
- try {
- f.deleteOnExit();
- } catch (Throwable e) {
- // sometimes this throws a NullPointerException
- // at java.io.DeleteOnExitHook.add(DeleteOnExitHook.java:33)
- // we can ignore it
- }
- }
- return get(f.getCanonicalPath());
- }
- }
-
- public FilePath resolve(String other) {
- return other == null ? this : getPath(name + "/" + other);
- }
-
-}
-
-/**
- * Uses java.io.RandomAccessFile to access a file.
- */
-class FileDisk extends FileBase {
-
- private final RandomAccessFile file;
- private final String name;
-
- private long pos;
-
- FileDisk(String fileName, String mode) throws FileNotFoundException {
- this.file = new RandomAccessFile(fileName, mode);
- this.name = fileName;
- }
-
- public void force(boolean metaData) throws IOException {
- file.getFD().sync();
- }
-
- public FileChannel truncate(long newLength) throws IOException {
- if (newLength < file.length()) {
- // some implementations actually only support truncate
- file.setLength(newLength);
- pos = Math.min(pos, newLength);
- }
- return this;
- }
-
- public synchronized FileLock tryLock(long position, long size, boolean shared) throws IOException {
- return file.getChannel().tryLock();
- }
-
- public void implCloseChannel() throws IOException {
- file.close();
- }
-
- public long position() throws IOException {
- return pos;
- }
-
- public long size() throws IOException {
- return file.length();
- }
-
- public int read(ByteBuffer dst) throws IOException {
- int len = file.read(dst.array(), dst.position(), dst.remaining());
- if (len > 0) {
- pos += len;
- dst.position(dst.position() + len);
- }
- return len;
- }
-
- public FileChannel position(long pos) throws IOException {
- if (this.pos != pos) {
- file.seek(pos);
- this.pos = pos;
- }
- return this;
- }
-
- public int write(ByteBuffer src) throws IOException {
- int len = src.remaining();
- file.write(src.array(), src.position(), len);
- src.position(src.position() + len);
- pos += len;
- return len;
- }
-
- public String toString() {
- return name;
- }
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FilePathWrapper.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FilePathWrapper.java
deleted file mode 100644
index 8769082f18f..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FilePathWrapper.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright 2004-2011 H2 Group. Multiple-Licensed under the H2 License,
- * Version 1.0, and under the Eclipse Public License, Version 1.0
- * (http://h2database.com/html/license.html).
- * Initial Developer: H2 Group
- */
-package org.apache.jackrabbit.mk.fs;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.channels.FileChannel;
-import java.util.List;
-import org.h2.message.DbException;
-
-/**
- * The base class for wrapping / delegating file systems such as
- * the split file system.
- */
-public abstract class FilePathWrapper extends FilePath {
-
- private FilePath base;
-
- public FilePathWrapper getPath(String path) {
- return create(path, unwrap(path));
- }
-
- /**
- * Create a wrapped path instance for the given base path.
- *
- * @param base the base path
- * @return the wrapped path
- */
- public FilePathWrapper wrap(FilePath base) {
- return base == null ? null : create(getPrefix() + base.name, base);
- }
-
- public FilePath unwrap() {
- return unwrap(name);
- }
-
- private FilePathWrapper create(String path, FilePath base) {
- try {
- FilePathWrapper p = getClass().newInstance();
- p.name = path;
- p.base = base;
- return p;
- } catch (Exception e) {
- throw DbException.convert(e);
- }
- }
-
- protected String getPrefix() {
- return getScheme() + ":";
- }
-
- /**
- * Get the base path for the given wrapped path.
- *
- * @param path the path including the scheme prefix
- * @return the base file path
- */
- protected FilePath unwrap(String path) {
- return FilePath.get(path.substring(getScheme().length() + 1));
- }
-
- protected FilePath getBase() {
- return base;
- }
-
- public boolean canWrite() {
- return base.canWrite();
- }
-
- public void createDirectory() throws IOException {
- base.createDirectory();
- }
-
- public boolean createFile() {
- return base.createFile();
- }
-
- public void delete() throws IOException {
- base.delete();
- }
-
- public boolean exists() {
- return base.exists();
- }
-
- public FilePath getParent() {
- return wrap(base.getParent());
- }
-
- public boolean isAbsolute() {
- return base.isAbsolute();
- }
-
- public boolean isDirectory() {
- return base.isDirectory();
- }
-
- public long lastModified() {
- return base.lastModified();
- }
-
- public FilePath toRealPath() throws IOException {
- return wrap(base.toRealPath());
- }
-
- public List newDirectoryStream() throws IOException {
- List list = base.newDirectoryStream();
- for (int i = 0, len = list.size(); i < len; i++) {
- list.set(i, wrap(list.get(i)));
- }
- return list;
- }
-
- public void moveTo(FilePath newName) throws IOException {
- base.moveTo(((FilePathWrapper) newName).base);
- }
-
- public InputStream newInputStream() throws IOException {
- return base.newInputStream();
- }
-
- public OutputStream newOutputStream(boolean append) throws IOException {
- return base.newOutputStream(append);
- }
-
- public FileChannel open(String mode) throws IOException {
- return base.open(mode);
- }
-
- public boolean setReadOnly() {
- return base.setReadOnly();
- }
-
- public long size() {
- return base.size();
- }
-
- public FilePath createTempFile(String suffix, boolean deleteOnExit, boolean inTempDir)
- throws IOException {
- return wrap(base.createTempFile(suffix, deleteOnExit, inTempDir));
- }
-
- public FilePath resolve(String other) {
- return other == null ? this : wrap(base.resolve(other));
- }
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FileUtils.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FileUtils.java
deleted file mode 100644
index 8cb1beb41da..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/fs/FileUtils.java
+++ /dev/null
@@ -1,370 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.fs;
-
-import java.io.EOFException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.util.ArrayList;
-import java.util.List;
-import org.apache.jackrabbit.mk.util.IOUtils;
-
-/**
- * This utility class contains utility functions that use the file system
- * abstraction.
- */
-public class FileUtils {
-
- /**
- * Checks if a file exists.
- * This method is similar to Java 7 java.nio.file.Path.exists.
- *
- * @param fileName the file name
- * @return true if it exists
- */
- public static boolean exists(String fileName) {
- return FilePath.get(fileName).exists();
- }
-
- /**
- * Create a directory (all required parent directories must already exist).
- * This method is similar to Java 7 java.nio.file.Path.createDirectory.
- *
- * @param directoryName the directory name
- */
- public static void createDirectory(String directoryName) throws IOException {
- FilePath.get(directoryName).createDirectory();
- }
-
- /**
- * Create a new file.
- * This method is similar to Java 7 java.nio.file.Path.createFile, but returns
- * false instead of throwing a exception if the file already existed.
- *
- * @param fileName the file name
- * @return true if creating was successful
- */
- public static boolean createFile(String fileName) {
- return FilePath.get(fileName).createFile();
- }
-
- /**
- * Delete a file or directory if it exists.
- * Directories may only be deleted if they are empty.
- * This method is similar to Java 7 java.nio.file.Path.deleteIfExists.
- *
- * @param path the file or directory name
- */
- public static void delete(String path) throws IOException {
- FilePath.get(path).delete();
- }
-
- /**
- * Get the canonical file or directory name.
- * This method is similar to Java 7 java.nio.file.Path.toRealPath.
- *
- * @param fileName the file name
- * @return the normalized file name
- */
- public static String toRealPath(String fileName) throws IOException {
- return FilePath.get(fileName).toRealPath().toString();
- }
-
- /**
- * Get the parent directory of a file or directory.
- * This method returns null if there is no parent.
- * This method is similar to Java 7 java.nio.file.Path.getParent.
- *
- * @param fileName the file or directory name
- * @return the parent directory name
- */
- public static String getParent(String fileName) {
- FilePath p = FilePath.get(fileName).getParent();
- return p == null ? null : p.toString();
- }
-
- /**
- * Check if the file name includes a path.
- * This method is similar to Java 7 java.nio.file.Path.isAbsolute.
- *
- * @param fileName the file name
- * @return if the file name is absolute
- */
- public static boolean isAbsolute(String fileName) {
- return FilePath.get(fileName).isAbsolute();
- }
-
- /**
- * Rename a file if this is allowed.
- * This method is similar to Java 7 java.nio.file.Path.moveTo.
- *
- * @param oldName the old fully qualified file name
- * @param newName the new fully qualified file name
- */
- public static void moveTo(String oldName, String newName) throws IOException {
- FilePath.get(oldName).moveTo(FilePath.get(newName));
- }
-
- /**
- * Get the file or directory name (the last element of the path).
- * This method is similar to Java 7 java.nio.file.Path.getName.
- *
- * @param path the directory and file name
- * @return just the file name
- */
- public static String getName(String path) {
- return FilePath.get(path).getName();
- }
-
- /**
- * List the files and directories in the given directory.
- * This method is similar to Java 7 java.nio.file.Path.newDirectoryStream.
- *
- * @param path the directory
- * @return the list of fully qualified file names
- */
- public static List newDirectoryStream(String path) throws IOException {
- List list = FilePath.get(path).newDirectoryStream();
- int len = list.size();
- List result = new ArrayList(len);
- for (int i = 0; i < len; i++) {
- result.add(list.get(i).toString());
- }
- return result;
- }
-
- /**
- * Get the last modified date of a file.
- * This method is similar to Java 7
- * java.nio.file.attribute.Attributes.readBasicFileAttributes(file).lastModified().toMillis()
- *
- * @param fileName the file name
- * @return the last modified date
- */
- public static long lastModified(String fileName) {
- return FilePath.get(fileName).lastModified();
- }
-
- /**
- * Get the size of a file in bytes
- * This method is similar to Java 7
- * java.nio.file.attribute.Attributes.readBasicFileAttributes(file).size()
- *
- * @param fileName the file name
- * @return the size in bytes
- */
- public static long size(String fileName) {
- return FilePath.get(fileName).size();
- }
-
- /**
- * Check if it is a file or a directory.
- * java.nio.file.attribute.Attributes.readBasicFileAttributes(file).isDirectory()
- *
- * @param fileName the file or directory name
- * @return true if it is a directory
- */
- public static boolean isDirectory(String fileName) {
- return FilePath.get(fileName).isDirectory();
- }
-
- /**
- * Open a random access file object.
- * This method is similar to Java 7 java.nio.channels.FileChannel.open.
- *
- * @param fileName the file name
- * @param mode the access mode. Supported are r, rw, rws, rwd
- * @return the file object
- */
- public static FileChannel open(String fileName, String mode) throws IOException {
- return FilePath.get(fileName).open(mode);
- }
-
- /**
- * Create an input stream to read from the file.
- * This method is similar to Java 7 java.nio.file.Path.newInputStream.
- *
- * @param fileName the file name
- * @return the input stream
- */
- public static InputStream newInputStream(String fileName) throws IOException {
- return FilePath.get(fileName).newInputStream();
- }
-
- /**
- * Create an output stream to write into the file.
- * This method is similar to Java 7 java.nio.file.Path.newOutputStream.
- *
- * @param fileName the file name
- * @param append if true, the file will grow, if false, the file will be
- * truncated first
- * @return the output stream
- */
- public static OutputStream newOutputStream(String fileName, boolean append) throws IOException {
- return FilePath.get(fileName).newOutputStream(append);
- }
-
- /**
- * Check if the file is writable.
- * This method is similar to Java 7
- * java.nio.file.Path.checkAccess(AccessMode.WRITE)
- *
- * @param fileName the file name
- * @return if the file is writable
- */
- public static boolean canWrite(String fileName) {
- return FilePath.get(fileName).canWrite();
- }
-
- // special methods =======================================
-
- /**
- * Disable the ability to write. The file can still be deleted afterwards.
- *
- * @param fileName the file name
- * @return true if the call was successful
- */
- public static boolean setReadOnly(String fileName) {
- return FilePath.get(fileName).setReadOnly();
- }
-
- // utility methods =======================================
-
- /**
- * Delete a directory or file and all subdirectories and files.
- *
- * @param path the path
- * @param tryOnly whether errors should be ignored
- */
- public static void deleteRecursive(String path, boolean tryOnly) throws IOException {
- if (exists(path)) {
- if (isDirectory(path)) {
- for (String s : newDirectoryStream(path)) {
- deleteRecursive(s, tryOnly);
- }
- }
- if (tryOnly) {
- tryDelete(path);
- } else {
- delete(path);
- }
- }
- }
-
- /**
- * Create the directory and all required parent directories.
- *
- * @param dir the directory name
- */
- public static void createDirectories(String dir) throws IOException {
- if (dir != null) {
- if (exists(dir)) {
- if (!isDirectory(dir)) {
- throw new IOException("Could not create directory, " +
- "because a file with the same name already exists: " + dir);
- }
- } else {
- String parent = getParent(dir);
- createDirectories(parent);
- createDirectory(dir);
- }
- }
- }
-
- /**
- * Copy a file from one directory to another, or to another file.
- *
- * @param original the original file name
- * @param copy the file name of the copy
- */
- public static void copy(String original, String copy) throws IOException {
- InputStream in = newInputStream(original);
- try {
- OutputStream out = newOutputStream(copy, false);
- try {
- IOUtils.copy(in, out);
- } finally {
- out.close();
- }
- } finally {
- in.close();
- }
- }
-
- /**
- * Try to delete a file (ignore errors).
- *
- * @param fileName the file name
- * @return true if it worked
- */
- public static boolean tryDelete(String fileName) {
- try {
- FilePath.get(fileName).delete();
- return true;
- } catch (Exception e) {
- return false;
- }
- }
-
- /**
- * Create a new temporary file.
- *
- * @param prefix the prefix of the file name (including directory name if
- * required)
- * @param suffix the suffix
- * @param deleteOnExit if the file should be deleted when the virtual
- * machine exists
- * @param inTempDir if the file should be stored in the temporary directory
- * @return the name of the created file
- */
- public static String createTempFile(String prefix, String suffix, boolean deleteOnExit, boolean inTempDir)
- throws IOException {
- return FilePath.get(prefix).createTempFile(suffix, deleteOnExit, inTempDir).toString();
- }
-
- /**
- * Fully read from the file. This will read all remaining bytes,
- * or throw an EOFException if not successful.
- *
- * @param channel the file channel
- * @param dst the byte buffer
- */
- public static void readFully(FileChannel channel, ByteBuffer dst) throws IOException {
- do {
- int r = channel.read(dst);
- if (r < 0) {
- throw new EOFException();
- }
- } while (dst.remaining() > 0);
- }
-
- /**
- * Fully write to the file. This will write all remaining bytes.
- *
- * @param channel the file channel
- * @param src the byte buffer
- */
- public static void writeFully(FileChannel channel, ByteBuffer src) throws IOException {
- do {
- channel.write(src);
- } while (src.remaining() > 0);
- }
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/json/JsonBuilder.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/json/JsonBuilder.java
deleted file mode 100644
index 8fdbe35d1f0..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/json/JsonBuilder.java
+++ /dev/null
@@ -1,445 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.jackrabbit.mk.json;
-
-import java.io.IOException;
-
-/**
- * Partially based on json-simple
- * Limitation: arrays can only have primitive members (i.e. no arrays nor objects)
- */
-public final class JsonBuilder {
- final Appendable writer;
-
- private JsonBuilder(Appendable writer) {
- this.writer = writer;
- }
-
- public static JsonObjectBuilder create(Appendable writer) throws IOException {
- return new JsonBuilder(writer).new JsonObjectBuilder(null);
- }
-
- public final class JsonObjectBuilder {
- private final JsonObjectBuilder parent;
-
- private boolean hasKeys;
-
- public JsonObjectBuilder(JsonObjectBuilder parent) throws IOException {
- this.parent = parent;
- writer.append('{');
- }
-
- public JsonObjectBuilder value(String key, String value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder valueEncoded(String key, String value) throws IOException {
- write(key, value);
- return this;
- }
-
- public JsonObjectBuilder value(String key, int value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder value(String key, long value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder value(String key, float value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder value(String key, double value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder value(String key, Number value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder value(String key, boolean value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder nil(String key) throws IOException {
- write(key, "null");
- return this;
- }
-
- public JsonObjectBuilder array(String key, String[] value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder array(String key, int[] value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder array(String key, long[] value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder array(String key, float[] value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder array(String key, double[] value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder array(String key, Number[] value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonObjectBuilder array(String key, boolean[] value) throws IOException {
- write(key, encode(value));
- return this;
- }
-
- public JsonArrayBuilder array(String key) throws IOException {
- writeKey(key);
- return new JsonArrayBuilder(this);
- }
-
- public JsonObjectBuilder object(String key) throws IOException {
- writeKey(key);
- return new JsonObjectBuilder(this);
- }
-
- public JsonObjectBuilder build() throws IOException {
- writer.append('}');
- return parent;
- }
-
- //------------------------------------------< private >---
-
- private void optionalComma() throws IOException {
- if (hasKeys) {
- writer.append(',');
- } else {
- hasKeys = true;
- }
- }
-
- private void writeKey(String key) throws IOException {
- optionalComma();
- writer.append(quote(escape(key)));
- writer.append(':');
- }
-
- private void write(String key, String value) throws IOException {
- writeKey(key);
- writer.append(value);
- }
-
- }
-
- public final class JsonArrayBuilder {
- private final JsonObjectBuilder parent;
-
- private boolean hasValues;
-
- public JsonArrayBuilder(JsonObjectBuilder parent) throws IOException {
- writer.append('[');
- this.parent = parent;
- }
-
- public JsonArrayBuilder value(String value) throws IOException {
- optionalComma();
- writer.append(encode(value));
- return this;
- }
-
- public JsonArrayBuilder value(int value) throws IOException {
- optionalComma();
- writer.append(encode(value));
- return this;
- }
-
- public JsonArrayBuilder value(long value) throws IOException {
- optionalComma();
- writer.append(encode(value));
- return this;
- }
-
- public JsonArrayBuilder value(float value) throws IOException {
- optionalComma();
- writer.append(encode(value));
- return this;
- }
-
- public JsonArrayBuilder value(double value) throws IOException {
- optionalComma();
- writer.append(encode(value));
- return this;
- }
-
- public JsonArrayBuilder value(Number value) throws IOException {
- optionalComma();
- writer.append(encode(value));
- return this;
- }
-
- public JsonArrayBuilder value(boolean value) throws IOException {
- optionalComma();
- writer.append(encode(value));
- return this;
- }
-
- public JsonArrayBuilder nil() throws IOException {
- optionalComma();
- writer.append("null");
- return this;
- }
-
- public JsonObjectBuilder build() throws IOException {
- writer.append(']');
- return parent;
- }
-
- //------------------------------------------< private >---
-
- private void optionalComma() throws IOException {
- if (hasValues) {
- writer.append(',');
- } else {
- hasValues = true;
- }
- }
- }
-
- /**
- * Escape quotes, \, /, \r, \n, \b, \f, \t and other control characters (U+0000 through U+001F).
- */
- public static String escape(String string) {
- if (string == null) {
- return null;
- }
-
- StringBuilder sb = new StringBuilder();
- for (int i = 0; i < string.length(); i++) {
- char ch = string.charAt(i);
- switch (ch) {
- case '"':
- sb.append("\\\"");
- break;
- case '\\':
- sb.append("\\\\");
- break;
- case '\b':
- sb.append("\\b");
- break;
- case '\f':
- sb.append("\\f");
- break;
- case '\n':
- sb.append("\\n");
- break;
- case '\r':
- sb.append("\\r");
- break;
- case '\t':
- sb.append("\\t");
- break;
- default:
- //Reference: http://www.unicode.org/versions/Unicode5.1.0/
- if (ch >= '\u0000' && ch <= '\u001F' ||
- ch >= '\u007F' && ch <= '\u009F' ||
- ch >= '\u2000' && ch <= '\u20FF') {
-
- String ss = Integer.toHexString(ch);
- sb.append("\\u");
- for (int k = 0; k < 4 - ss.length(); k++) {
- sb.append('0');
- }
- sb.append(ss.toUpperCase());
- } else {
- sb.append(ch);
- }
- }
- }
-
- return sb.toString();
- }
-
- public static String quote(String string) {
- return '"' + string + '"';
- }
-
- public static String encode(String value) {
- return quote(escape(value));
- }
-
- public static String encode(int value) {
- return Integer.toString(value);
- }
-
- public static String encode(long value) {
- return Long.toString(value);
- }
-
- public static String encode(float value) {
- // TODO silently losing data, should probably throw an exception instead
- return Float.isInfinite(value) || Float.isNaN(value)
- ? "null"
- : Float.toString(value);
- }
-
- public static String encode(double value) {
- // TODO silently losing data, should probably throw an exception instead
- return Double.isInfinite(value) || Double.isNaN(value)
- ? "null"
- : Double.toString(value);
- }
-
- public static String encode(Number value) {
- return value.toString();
- }
-
- public static String encode(boolean value) {
- return Boolean.toString(value);
- }
-
- public static String encode(String[] values) {
- if (values.length == 0) {
- return "[]";
- }
-
- StringBuilder sb = new StringBuilder();
- sb.append('[');
- for (String value : values) {
- sb.append(encode(value));
- sb.append(',');
- }
- sb.deleteCharAt(sb.length() - 1);
- sb.append(']');
- return sb.toString();
- }
-
- public static String encode(int[] values) {
- if (values.length == 0) {
- return "[]";
- }
-
- StringBuilder sb = new StringBuilder();
- sb.append('[');
- for (int value : values) {
- sb.append(encode(value));
- sb.append(',');
- }
- sb.deleteCharAt(sb.length() - 1);
- sb.append(']');
- return sb.toString();
- }
-
- public static String encode(long[] values) {
- if (values.length == 0) {
- return "[]";
- }
-
- StringBuilder sb = new StringBuilder();
- sb.append('[');
- for (long value : values) {
- sb.append(encode(value));
- sb.append(',');
- }
- sb.deleteCharAt(sb.length() - 1);
- sb.append(']');
- return sb.toString();
- }
-
- public static String encode(float[] values) {
- if (values.length == 0) {
- return "[]";
- }
-
- StringBuilder sb = new StringBuilder();
- sb.append('[');
- for (float value : values) {
- sb.append(encode(value));
- sb.append(',');
- }
- sb.deleteCharAt(sb.length() - 1);
- sb.append(']');
- return sb.toString();
- }
-
- public static String encode(double[] values) {
- if (values.length == 0) {
- return "[]";
- }
-
- StringBuilder sb = new StringBuilder();
- sb.append('[');
- for (double value : values) {
- sb.append(encode(value));
- sb.append(',');
- }
- sb.deleteCharAt(sb.length() - 1);
- sb.append(']');
- return sb.toString();
- }
-
- public static String encode(Number[] values) {
- if (values.length == 0) {
- return "[]";
- }
-
- StringBuilder sb = new StringBuilder();
- sb.append('[');
- for (Number value : values) {
- sb.append(encode(value));
- sb.append(',');
- }
- sb.deleteCharAt(sb.length() - 1);
- sb.append(']');
- return sb.toString();
- }
-
- public static String encode(boolean[] values) {
- if (values.length == 0) {
- return "[]";
- }
-
- StringBuilder sb = new StringBuilder();
- sb.append('[');
- for (boolean value : values) {
- sb.append(encode(value));
- sb.append(',');
- }
- sb.deleteCharAt(sb.length() - 1);
- sb.append(']');
- return sb.toString();
- }
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/model/CommitBuilder.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/model/CommitBuilder.java
deleted file mode 100644
index a109d5676b4..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/model/CommitBuilder.java
+++ /dev/null
@@ -1,494 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.model;
-
-import org.apache.jackrabbit.mk.store.NotFoundException;
-import org.apache.jackrabbit.mk.store.RevisionStore;
-import org.apache.jackrabbit.mk.util.PathUtils;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- *
- */
-public class CommitBuilder {
-
- private Id baseRevId;
-
- private final String msg;
-
- private final RevisionStore store;
-
- // key is a path
- private final Map staged = new HashMap();
- // change log
- private final List changeLog = new ArrayList();
-
- public CommitBuilder(Id baseRevId, String msg, RevisionStore store) throws Exception {
- this.baseRevId = baseRevId;
- this.msg = msg;
- this.store = store;
- }
-
- public void addNode(String parentNodePath, String nodeName) throws Exception {
- addNode(parentNodePath, nodeName, Collections.emptyMap());
- }
-
- public void addNode(String parentNodePath, String nodeName, Map properties) throws Exception {
- MutableNode modParent = getOrCreateStagedNode(parentNodePath);
- if (modParent.getChildNodeEntry(nodeName) != null) {
- throw new Exception("there's already a child node with name '" + nodeName + "'");
- }
- MutableNode newChild = new MutableNode(store);
- newChild.getProperties().putAll(properties);
-
- // id will be computed on commit
- modParent.add(new ChildNodeEntry(nodeName, null));
- String newPath = PathUtils.concat(parentNodePath, nodeName);
- staged.put(newPath, newChild);
- // update change log
- changeLog.add(new AddNode(parentNodePath, nodeName, properties));
- }
-
- public void removeNode(String nodePath) throws NotFoundException, Exception {
- String parentPath = PathUtils.getParentPath(nodePath);
- String nodeName = PathUtils.getName(nodePath);
-
- MutableNode parent = getOrCreateStagedNode(parentPath);
- if (parent.remove(nodeName) == null) {
- throw new NotFoundException(nodePath);
- }
-
- // update staging area
- removeStagedNodes(nodePath);
-
- // update change log
- changeLog.add(new RemoveNode(nodePath));
- }
-
- public void moveNode(String srcPath, String destPath) throws NotFoundException, Exception {
- if (PathUtils.isAncestor(srcPath, destPath)) {
- throw new Exception("target path cannot be descendant of source path: " + destPath);
- }
-
- String srcParentPath = PathUtils.getParentPath(srcPath);
- String srcNodeName = PathUtils.getName(srcPath);
-
- String destParentPath = PathUtils.getParentPath(destPath);
- String destNodeName = PathUtils.getName(destPath);
-
- MutableNode srcParent = getOrCreateStagedNode(srcParentPath);
- if (srcParentPath.equals(destParentPath)) {
- if (srcParent.getChildNodeEntry(destNodeName) != null) {
- throw new Exception("node already exists at move destination path: " + destPath);
- }
- if (srcParent.rename(srcNodeName, destNodeName) == null) {
- throw new NotFoundException(srcPath);
- }
- } else {
- ChildNodeEntry srcCNE = srcParent.remove(srcNodeName);
- if (srcCNE == null) {
- throw new NotFoundException(srcPath);
- }
-
- MutableNode destParent = getOrCreateStagedNode(destParentPath);
- if (destParent.getChildNodeEntry(destNodeName) != null) {
- throw new Exception("node already exists at move destination path: " + destPath);
- }
- destParent.add(new ChildNodeEntry(destNodeName, srcCNE.getId()));
- }
-
- // update staging area
- moveStagedNodes(srcPath, destPath);
-
- // update change log
- changeLog.add(new MoveNode(srcPath, destPath));
- }
-
- public void copyNode(String srcPath, String destPath) throws NotFoundException, Exception {
- String srcParentPath = PathUtils.getParentPath(srcPath);
- String srcNodeName = PathUtils.getName(srcPath);
-
- String destParentPath = PathUtils.getParentPath(destPath);
- String destNodeName = PathUtils.getName(destPath);
-
- MutableNode srcParent = getOrCreateStagedNode(srcParentPath);
- ChildNodeEntry srcCNE = srcParent.getChildNodeEntry(srcNodeName);
- if (srcCNE == null) {
- throw new NotFoundException(srcPath);
- }
-
- MutableNode destParent = getOrCreateStagedNode(destParentPath);
- destParent.add(new ChildNodeEntry(destNodeName, srcCNE.getId()));
-
- // update change log
- changeLog.add(new CopyNode(srcPath, destPath));
- }
-
- public void setProperty(String nodePath, String propName, String propValue) throws Exception {
- MutableNode node = getOrCreateStagedNode(nodePath);
-
- Map properties = node.getProperties();
- if (propValue == null) {
- properties.remove(propName);
- } else {
- properties.put(propName, propValue);
- }
-
- // update change log
- changeLog.add(new SetProperty(nodePath, propName, propValue));
- }
-
- public void setProperties(String nodePath, Map properties) throws Exception {
- MutableNode node = getOrCreateStagedNode(nodePath);
-
- node.getProperties().clear();
- node.getProperties().putAll(properties);
-
- // update change log
- changeLog.add(new SetProperties(nodePath, properties));
- }
-
- public Id /* new revId */ doCommit() throws Exception {
- if (staged.isEmpty()) {
- // nothing to commit
- return baseRevId;
- }
-
- Id currentHead = store.getHeadCommitId();
- if (!currentHead.equals(baseRevId)) {
- // todo gracefully handle certain conflicts (e.g. changes on moved sub-trees, competing deletes etc)
- // update base revision to new head
- baseRevId = currentHead;
- // clear staging area
- staged.clear();
- // replay change log on new base revision
- // copy log in order to avoid concurrent modifications
- List log = new ArrayList(changeLog);
- for (Change change : log) {
- change.apply();
- }
- }
-
- Id rootNodeId = persistStagedNodes();
-
- Id newRevId;
- store.lockHead();
- try {
- currentHead = store.getHeadCommitId();
- if (!currentHead.equals(baseRevId)) {
- StoredNode baseRoot = store.getRootNode(baseRevId);
- StoredNode theirRoot = store.getRootNode(currentHead);
- StoredNode ourRoot = store.getNode(rootNodeId);
-
- rootNodeId = mergeTree(baseRoot, ourRoot, theirRoot);
-
- baseRevId = currentHead;
- }
-
- if (store.getCommit(currentHead).getRootNodeId().equals(rootNodeId)) {
- // the commit didn't cause any changes,
- // no need to create new commit object/update head revision
- return currentHead;
- }
- MutableCommit newCommit = new MutableCommit();
- newCommit.setParentId(baseRevId);
- newCommit.setCommitTS(System.currentTimeMillis());
- newCommit.setMsg(msg);
- newCommit.setRootNodeId(rootNodeId);
- newRevId = store.putCommit(newCommit);
-
- store.setHeadCommitId(newRevId);
- } finally {
- store.unlockHead();
- }
-
- // reset instance in order to be reusable
- staged.clear();
- changeLog.clear();
-
- return newRevId;
- }
-
- MutableNode getOrCreateStagedNode(String nodePath) throws Exception {
- MutableNode node = staged.get(nodePath);
- if (node == null) {
- MutableNode parent = staged.get("/");
- if (parent == null) {
- parent = new MutableNode(store.getRootNode(baseRevId), store);
- staged.put("/", parent);
- }
- node = parent;
- String names[] = PathUtils.split(nodePath);
- for (int i = names.length - 1; i >= 0; i--) {
- String path = PathUtils.getAncestorPath(nodePath, i);
- node = staged.get(path);
- if (node == null) {
- // not yet staged, resolve id using staged parent
- // to allow for staged move operations
- ChildNodeEntry cne = parent.getChildNodeEntry(names[names.length - i - 1]);
- if (cne == null) {
- throw new NotFoundException(nodePath);
- }
- node = new MutableNode(store.getNode(cne.getId()), store);
- staged.put(path, node);
- }
- parent = node;
- }
- }
- return node;
- }
-
- void moveStagedNodes(String srcPath, String destPath) throws Exception {
- MutableNode node = staged.get(srcPath);
- if (node != null) {
- staged.remove(srcPath);
- staged.put(destPath, node);
- for (Iterator it = node.getChildNodeNames(0, -1); it.hasNext(); ) {
- String childName = it.next();
- moveStagedNodes(PathUtils.concat(srcPath, childName), PathUtils.concat(destPath, childName));
- }
- }
- }
-
- void removeStagedNodes(String nodePath) throws Exception {
- MutableNode node = staged.get(nodePath);
- if (node != null) {
- staged.remove(nodePath);
- for (Iterator it = node.getChildNodeNames(0, -1); it.hasNext(); ) {
- String childName = it.next();
- removeStagedNodes(PathUtils.concat(nodePath, childName));
- }
- }
- }
-
- Id /* new id of root node */ persistStagedNodes() throws Exception {
- // sort paths in in depth-descending order
- ArrayList orderedPaths = new ArrayList(staged.keySet());
- Collections.sort(orderedPaths, new Comparator() {
- public int compare(String path1, String path2) {
- // paths should be ordered by depth, descending
- int result = getDepth(path2) - getDepth(path1);
- return (result != 0) ? result : 1;
- }
-
- int getDepth(String path) {
- return PathUtils.getDepth(path);
- }
- });
- // iterate over staged entries in depth-descending order
- Id rootNodeId = null;
- for (String path : orderedPaths) {
- // persist node
- Id id = store.putNode(staged.get(path));
- if (PathUtils.denotesRoot(path)) {
- rootNodeId = id;
- } else {
- staged.get(PathUtils.getParentPath(path)).add(new ChildNodeEntry(PathUtils.getName(path), id));
- }
- }
- if (rootNodeId == null) {
- throw new Exception("internal error: inconsistent staging area content");
- }
- return rootNodeId;
- }
-
- /**
- * Performs a three-way merge of the trees rooted at ourRoot,
- * theirRoot, using the tree at baseRoot as reference.
- *
- * @param baseRoot
- * @param ourRoot
- * @param theirRoot
- * @return id of merged root node
- * @throws Exception
- */
- Id /* id of merged root node */ mergeTree(StoredNode baseRoot, StoredNode ourRoot, StoredNode theirRoot) throws Exception {
- // as we're going to use the staging area for the merge process,
- // we need to clear it first
- staged.clear();
-
- // recursively merge 'our' changes with 'their' changes...
- mergeNode(baseRoot, ourRoot, theirRoot, "/");
-
- return persistStagedNodes();
- }
-
- void mergeNode(StoredNode baseNode, StoredNode ourNode, StoredNode theirNode, String path) throws Exception {
- NodeDelta theirChanges = new NodeDelta(
- store, store.getNodeState(baseNode), store.getNodeState(theirNode));
- NodeDelta ourChanges = new NodeDelta(
- store, store.getNodeState(baseNode), store.getNodeState(ourNode));
-
- // merge non-conflicting changes
- MutableNode mergedNode = new MutableNode(theirNode, store);
- staged.put(path, mergedNode);
-
- mergedNode.getProperties().putAll(ourChanges.getAddedProperties());
- mergedNode.getProperties().putAll(ourChanges.getChangedProperties());
- for (String name : ourChanges.getRemovedProperties().keySet()) {
- mergedNode.getProperties().remove(name);
- }
-
- for (Map.Entry entry : ourChanges.getAddedChildNodes ().entrySet()) {
- mergedNode.add(new ChildNodeEntry(entry.getKey(), entry.getValue()));
- }
- for (Map.Entry entry : ourChanges.getChangedChildNodes ().entrySet()) {
- mergedNode.add(new ChildNodeEntry(entry.getKey(), entry.getValue()));
- }
- for (String name : ourChanges.getRemovedChildNodes().keySet()) {
- mergedNode.remove(name);
- }
-
- List conflicts = theirChanges.listConflicts(ourChanges);
- // resolve/report merge conflicts
- for (NodeDelta.Conflict conflict : conflicts) {
- String conflictName = conflict.getName();
- String conflictPath = PathUtils.concat(path, conflictName);
- switch (conflict.getType()) {
- case PROPERTY_VALUE_CONFLICT:
- throw new Exception(
- "concurrent modification of property " + conflictPath
- + " with conflicting values: \""
- + ourNode.getProperties().get(conflictName)
- + "\", \""
- + theirNode.getProperties().get(conflictName));
-
- case NODE_CONTENT_CONFLICT: {
- if (ourChanges.getChangedChildNodes().containsKey(conflictName)) {
- // modified subtrees
- StoredNode baseChild = store.getNode(baseNode.getChildNodeEntry(conflictName).getId());
- StoredNode ourChild = store.getNode(ourNode.getChildNodeEntry(conflictName).getId());
- StoredNode theirChild = store.getNode(theirNode.getChildNodeEntry(conflictName).getId());
- // merge the dirty subtrees recursively
- mergeNode(baseChild, ourChild, theirChild, PathUtils.concat(path, conflictName));
- } else {
- // todo handle/merge colliding node creation
- throw new Exception("colliding concurrent node creation: " + conflictPath);
- }
- break;
- }
-
- case REMOVED_DIRTY_PROPERTY_CONFLICT:
- mergedNode.getProperties().remove(conflictName);
- break;
-
- case REMOVED_DIRTY_NODE_CONFLICT:
- mergedNode.remove(conflictName);
- break;
- }
-
- }
- }
-
- //--------------------------------------------------------< inner classes >
- abstract class Change {
- abstract void apply() throws Exception;
- }
-
- class AddNode extends Change {
- String parentNodePath;
- String nodeName;
- Map properties;
-
- AddNode(String parentNodePath, String nodeName, Map properties) {
- this.parentNodePath = parentNodePath;
- this.nodeName = nodeName;
- this.properties = properties;
- }
-
- void apply() throws Exception {
- addNode(parentNodePath, nodeName, properties);
- }
- }
-
- class RemoveNode extends Change {
- String nodePath;
-
- RemoveNode(String nodePath) {
- this.nodePath = nodePath;
- }
-
- void apply() throws Exception {
- removeNode(nodePath);
- }
- }
-
- class MoveNode extends Change {
- String srcPath;
- String destPath;
-
- MoveNode(String srcPath, String destPath) {
- this.srcPath = srcPath;
- this.destPath = destPath;
- }
-
- void apply() throws Exception {
- moveNode(srcPath, destPath);
- }
- }
-
- class CopyNode extends Change {
- String srcPath;
- String destPath;
-
- CopyNode(String srcPath, String destPath) {
- this.srcPath = srcPath;
- this.destPath = destPath;
- }
-
- void apply() throws Exception {
- copyNode(srcPath, destPath);
- }
- }
-
- class SetProperty extends Change {
- String nodePath;
- String propName;
- String propValue;
-
- SetProperty(String nodePath, String propName, String propValue) {
- this.nodePath = nodePath;
- this.propName = propName;
- this.propValue = propValue;
- }
-
- void apply() throws Exception {
- setProperty(nodePath, propName, propValue);
- }
- }
-
- class SetProperties extends Change {
- String nodePath;
- Map properties;
-
- SetProperties(String nodePath, Map properties) {
- this.nodePath = nodePath;
- this.properties = properties;
- }
-
- void apply() throws Exception {
- setProperties(nodePath, properties);
- }
- }
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/model/NodeDiffHandler.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/model/NodeDiffHandler.java
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/model/NodeStateDiff.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/model/NodeStateDiff.java
deleted file mode 100644
index 6f58c5f11fc..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/model/NodeStateDiff.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.model;
-
-import java.util.HashSet;
-import java.util.Set;
-
-import org.apache.jackrabbit.oak.model.ChildNodeEntry;
-import org.apache.jackrabbit.oak.model.NodeState;
-import org.apache.jackrabbit.oak.model.PropertyState;
-
-/**
- * Utility base class for comparing two {@link NodeState} instances. The
- * {@link #compare(NodeState, NodeState)} method will go through all
- * properties and child nodes of the two states, calling the relevant
- * added, changed or deleted methods where appropriate. Differences in
- * the ordering of properties or child nodes do not affect the comparison.
- */
-public class NodeStateDiff {
-
- /**
- * Called by {@link #compare(NodeState, NodeState)} for all added
- * properties. The default implementation does nothing.
- *
- * @param after property state after the change
- */
- public void propertyAdded(PropertyState after) {
- }
-
- /**
- * Called by {@link #compare(NodeState, NodeState)} for all changed
- * properties. The names of the given two property states are guaranteed
- * to be the same. The default implementation does nothing.
- *
- * @param before property state before the change
- * @param after property state after the change
- */
- public void propertyChanged(PropertyState before, PropertyState after) {
- }
-
- /**
- * Called by {@link #compare(NodeState, NodeState)} for all deleted
- * properties. The default implementation does nothing.
- *
- * @param before property state before the change
- */
- public void propertyDeleted(PropertyState before) {
- }
-
- /**
- * Called by {@link #compare(NodeState, NodeState)} for all added
- * child nodes. The default implementation does nothing.
- *
- * @param name name of the added child node
- * @param after child node state after the change
- */
- public void childNodeAdded(String name, NodeState after) {
- }
-
- /**
- * Called by {@link #compare(NodeState, NodeState)} for all changed
- * child nodes. The default implementation does nothing.
- *
- * @param name name of the changed child node
- * @param before child node state before the change
- * @param after child node state after the change
- */
- public void childNodeChanged(String name, NodeState before, NodeState after) {
- }
-
- /**
- * Called by {@link #compare(NodeState, NodeState)} for all deleted
- * child nodes. The default implementation does nothing.
- *
- * @param name name of the deleted child node
- * @param before child node state before the change
- */
- public void childNodeDeleted(String name, NodeState before) {
- }
-
- /**
- * Compares the given two node states. Any found differences are
- * reported by calling the relevant added, changed or deleted methods.
- *
- * @param before node state before changes
- * @param after node state after changes
- */
- public void compare(NodeState before, NodeState after) {
- compareProperties(before, after);
- compareChildNodes(before, after);
- }
-
- /**
- * Compares the properties of the given two node states.
- *
- * @param before node state before changes
- * @param after node state after changes
- */
- private void compareProperties(NodeState before, NodeState after) {
- Set beforeProperties = new HashSet();
-
- for (PropertyState beforeProperty : before.getProperties()) {
- String name = beforeProperty.getName();
- PropertyState afterProperty = after.getProperty(name);
- if (afterProperty == null) {
- propertyDeleted(beforeProperty);
- } else {
- beforeProperties.add(name);
- if (!beforeProperty.equals(afterProperty)) {
- propertyChanged(beforeProperty, afterProperty);
- }
- }
- }
-
- for (PropertyState afterProperty : after.getProperties()) {
- if (!beforeProperties.contains(afterProperty.getName())) {
- propertyAdded(afterProperty);
- }
- }
- }
-
- /**
- * Compares the child nodes of the given two node states.
- *
- * @param before node state before changes
- * @param after node state after changes
- */
- private void compareChildNodes(NodeState before, NodeState after) {
- Set beforeChildNodes = new HashSet();
-
- for (ChildNodeEntry beforeCNE : before.getChildNodeEntries(0, -1)) {
- String name = beforeCNE.getName();
- NodeState beforeChild = beforeCNE.getNode();
- NodeState afterChild = after.getChildNode(name);
- if (afterChild == null) {
- childNodeDeleted(name, beforeChild);
- } else {
- beforeChildNodes.add(name);
- if (!beforeChild.equals(afterChild)) {
- childNodeChanged(name, beforeChild, afterChild);
- }
- }
- }
-
- for (ChildNodeEntry afterChild : after.getChildNodeEntries(0, -1)) {
- String name = afterChild.getName();
- if (!beforeChildNodes.contains(name)) {
- childNodeAdded(name, afterChild.getNode());
- }
- }
- }
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/store/CopyingGC.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/store/CopyingGC.java
deleted file mode 100644
index 7e451158901..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/store/CopyingGC.java
+++ /dev/null
@@ -1,301 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.store;
-
-import java.io.Closeable;
-import java.io.InputStream;
-import java.util.Comparator;
-import java.util.Iterator;
-import java.util.TreeSet;
-
-import org.apache.jackrabbit.mk.model.ChildNodeEntriesMap;
-import org.apache.jackrabbit.mk.model.ChildNodeEntry;
-import org.apache.jackrabbit.mk.model.Id;
-import org.apache.jackrabbit.mk.model.MutableCommit;
-import org.apache.jackrabbit.mk.model.MutableNode;
-import org.apache.jackrabbit.mk.model.StoredCommit;
-import org.apache.jackrabbit.mk.model.StoredNode;
-import org.apache.jackrabbit.mk.util.IOUtils;
-import org.apache.jackrabbit.oak.model.NodeState;
-
-/**
- * Revision garbage collector that copies reachable revisions from a "from" revision
- * store to a "to" revision store. It assumes that both stores share the same blob
- * store.
- *
- * In the current design, a revision is reachable, if it is either the head revision
- * or requested during the GC cycle.
- */
-public class CopyingGC implements RevisionStore, Closeable {
-
- /**
- * From store.
- */
- private RevisionStore rsFrom;
-
- /**
- * To store.
- */
- private RevisionStore rsTo;
-
- /**
- * Flag indicating whether a GC cycle is running.
- */
- private volatile boolean running;
-
- /**
- * First commit id of "to" store.
- */
- private Id firstCommitId;
-
- /**
- * Map of commits that have been accessed while a GC cycle is running; these
- * need to be "re-linked" with a preceding, possibly not adjacent parent
- * commit before saving them back to the "to" revision store.
- */
- private final TreeSet commits = new TreeSet(
- new Comparator() {
- public int compare(MutableCommit o1, MutableCommit o2) {
- return o1.getId().compareTo(o2.getId());
- }
- });
-
- /**
- * Create a new instance of this class.
- *
- * @param rsFrom from store
- * @param rsTo to store
- */
- public CopyingGC(RevisionStore rsFrom, RevisionStore rsTo) {
- this.rsFrom = rsFrom;
- this.rsTo = rsTo;
- }
-
- /**
- * Start GC cycle.
- *
- * @throws Exception if an error occurs
- */
- public void start() throws Exception {
- commits.clear();
- firstCommitId = rsTo.getHeadCommitId();
-
- // Copy the head commit
- MutableCommit commitTo = copy(rsFrom.getHeadCommit());
- commitTo.setParentId(rsTo.getHeadCommitId());
- Id revId = rsTo.putCommit(commitTo);
- rsTo.setHeadCommitId(revId);
-
- // Add this as sentinel
- commits.add(commitTo);
-
- running = true;
- }
-
- /**
- * Stop GC cycle.
- */
- public void stop() throws Exception {
- running = false;
-
- if (commits.size() > 1) {
- Id parentId = firstCommitId;
- for (MutableCommit commit : commits) {
- commit.setParentId(parentId);
- rsTo.putCommit(commit);
- parentId = commit.getId();
- }
- }
- // TODO: swap rsFrom/rsTo and reset them
- rsFrom = rsTo;
- rsTo = null;
- }
-
- public void close() {
- if (rsFrom instanceof Closeable) {
- IOUtils.closeQuietly((Closeable) rsFrom);
- }
- if (rsTo instanceof Closeable) {
- IOUtils.closeQuietly((Closeable) rsTo);
- }
- }
-
- /**
- * Copy a commit and all the nodes belonging to it, starting at the root node.
- *
- * @param commit commit to copy
- * @return commit in the "to" store, not yet persisted
- * @throws Exception if an error occurs
- */
- private MutableCommit copy(StoredCommit commit) throws Exception {
- StoredNode nodeFrom = rsFrom.getNode(commit.getRootNodeId());
- copy(nodeFrom);
-
- return new MutableCommit(commit);
- }
-
- /**
- * Copy a node and all its descendants into a target store
- * @param node source node
- * @throws Exception if an error occurs
- */
- private void copy(StoredNode node) throws Exception {
- try {
- rsTo.getNode(node.getId());
- return;
- } catch (NotFoundException e) {
- // ignore, better add a has() method
- }
- rsTo.putNode(new MutableNode(node, rsTo));
-
- Iterator iter = node.getChildNodeEntries(0, -1);
- while (iter.hasNext()) {
- ChildNodeEntry c = iter.next();
- copy(rsFrom.getNode(c.getId()));
- }
- }
-
- // ---------------------------------------------------------- RevisionStore
-
- public NodeState getNodeState(StoredNode node) {
- return new StoredNodeAsState(node, this);
- }
-
- public Id getId(NodeState node) {
- return ((StoredNodeAsState) node).getId();
- }
-
- public StoredNode getNode(Id id) throws NotFoundException, Exception {
- if (running) {
- try {
- return rsTo.getNode(id);
- } catch (NotFoundException e) {
- // ignore, better add a has() method
- }
- }
- return rsFrom.getNode(id);
- }
-
- public StoredCommit getCommit(Id id) throws NotFoundException,
- Exception {
-
- if (running) {
- try {
- return rsTo.getCommit(id);
- } catch (NotFoundException e) {
- // ignore, better add a has() method
- }
- }
- return rsFrom.getCommit(id);
- }
-
- public ChildNodeEntriesMap getCNEMap(Id id) throws NotFoundException,
- Exception {
-
- if (running) {
- try {
- return rsTo.getCNEMap(id);
- } catch (NotFoundException e) {
- // ignore, better add a has() method
- }
- }
- return rsFrom.getCNEMap(id);
- }
-
- public StoredNode getRootNode(Id commitId) throws NotFoundException,
- Exception {
-
- if (running) {
- try {
- return rsTo.getRootNode(commitId);
- } catch (NotFoundException e) {
- // ignore, better add a has() method
- }
- }
- // Copy this commit
- StoredCommit commit = rsFrom.getCommit(commitId);
- if (running) {
- commits.add(copy(commit));
- }
- return rsFrom.getNode(commit.getRootNodeId());
- }
-
- public StoredCommit getHeadCommit() throws Exception {
- return running ? rsTo.getHeadCommit() : rsFrom.getHeadCommit();
- }
-
- public Id getHeadCommitId() throws Exception {
- return running ? rsTo.getHeadCommitId() : rsFrom.getHeadCommitId();
- }
-
- public Id putNode(MutableNode node) throws Exception {
- return running ? rsTo.putNode(node) : rsFrom.putNode(node);
- }
-
- public Id putCommit(MutableCommit commit) throws Exception {
- return running ? rsTo.putCommit(commit) : rsFrom.putCommit(commit);
- }
-
- public Id putCNEMap(ChildNodeEntriesMap map) throws Exception {
- return running ? rsTo.putCNEMap(map) : rsFrom.putCNEMap(map);
- }
-
- // TODO: potentially dangerous, if lock & unlock interfere with GC start
- public void lockHead() {
- if (running) {
- rsTo.lockHead();
- } else {
- rsFrom.lockHead();
- }
- }
-
- public void setHeadCommitId(Id commitId) throws Exception {
- if (running) {
- rsTo.setHeadCommitId(commitId);
- } else {
- rsFrom.setHeadCommitId(commitId);
- }
- }
-
- // TODO: potentially dangerous, if lock & unlock interfere with GC start
- public void unlockHead() {
- if (running) {
- rsTo.unlockHead();
- } else {
- rsFrom.unlockHead();
- }
- }
-
- public int getBlob(String blobId, long pos, byte[] buff, int off, int length)
- throws NotFoundException, Exception {
-
- // Assuming that from and to store use the same BlobStore instance
- return rsTo.getBlob(blobId, pos, buff, off, length);
- }
-
- public long getBlobLength(String blobId) throws NotFoundException,
- Exception {
-
- // Assuming that from and to store use the same BlobStore instance
- return rsTo.getBlobLength(blobId);
- }
-
- public String putBlob(InputStream in) throws Exception {
- // Assuming that from and to store use the same BlobStore instance
- return rsTo.putBlob(in);
- }
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/store/DefaultRevisionStore.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/store/DefaultRevisionStore.java
deleted file mode 100644
index 57a7dceb854..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/store/DefaultRevisionStore.java
+++ /dev/null
@@ -1,320 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.store;
-
-import org.apache.jackrabbit.mk.blobs.BlobStore;
-import org.apache.jackrabbit.mk.blobs.FileBlobStore;
-import org.apache.jackrabbit.mk.model.ChildNodeEntriesMap;
-import org.apache.jackrabbit.mk.model.Id;
-import org.apache.jackrabbit.mk.model.MutableCommit;
-import org.apache.jackrabbit.mk.model.MutableNode;
-import org.apache.jackrabbit.mk.model.StoredCommit;
-import org.apache.jackrabbit.mk.model.StoredNode;
-import org.apache.jackrabbit.mk.store.persistence.H2Persistence;
-import org.apache.jackrabbit.mk.store.persistence.Persistence;
-import org.apache.jackrabbit.mk.util.SimpleLRUCache;
-import org.apache.jackrabbit.oak.model.NodeState;
-
-import java.io.Closeable;
-import java.io.File;
-import java.io.InputStream;
-import java.util.Collections;
-import java.util.Map;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-/**
- * Default revision store implementation, passing calls to a Persistence
- * and a BlobStore, respectively and providing caching.
- */
-public class DefaultRevisionStore implements RevisionStore, Closeable {
-
- public static final String CACHE_SIZE = "mk.cacheSize";
- public static final int DEFAULT_CACHE_SIZE = 10000;
-
- private boolean initialized;
-
- private Id head;
- private long headCounter;
- private final ReentrantReadWriteLock headLock = new ReentrantReadWriteLock();
- private Persistence pm;
- private BlobStore blobStore;
- private boolean blobStoreNeedsClose;
-
- private Map cache;
-
- public void initialize(File homeDir) throws Exception {
- if (initialized) {
- throw new IllegalStateException("already initialized");
- }
-
- cache = Collections.synchronizedMap(SimpleLRUCache.newInstance(determineInitialCacheSize()));
-
- pm = new H2Persistence();
- //pm = new InMemPersistence();
- //pm = new MongoPersistence();
- //pm = new BDbPersistence();
- //pm = new FSPersistence();
- pm.initialize(homeDir);
-
- if (pm instanceof BlobStore) {
- blobStore = (BlobStore) pm;
- } else {
- blobStore = new FileBlobStore(new File(homeDir, "blobs").getCanonicalPath());
- blobStoreNeedsClose = true;
- }
-
- // make sure we've got a HEAD commit
- head = pm.readHead();
- if (head == null || head.getBytes().length == 0) {
- // assume virgin repository
- byte[] rawHead = longToBytes(++headCounter);
- head = new Id(rawHead);
-
- Id rootNodeId = pm.writeNode(new MutableNode(this));
- MutableCommit initialCommit = new MutableCommit();
- initialCommit.setCommitTS(System.currentTimeMillis());
- initialCommit.setRootNodeId(rootNodeId);
- pm.writeCommit(head, initialCommit);
- pm.writeHead(head);
- } else {
- headCounter = Long.parseLong(head.toString(), 16);
- }
-
- initialized = true;
- }
-
- public void close() {
- verifyInitialized();
-
- cache.clear();
-
- if (blobStoreNeedsClose) {
- blobStore.close();
- }
- pm.close();
-
- initialized = false;
- }
-
- protected void verifyInitialized() {
- if (!initialized) {
- throw new IllegalStateException("not initialized");
- }
- }
-
- protected int determineInitialCacheSize() {
- String val = System.getProperty(CACHE_SIZE);
- return (val != null) ? Integer.parseInt(val) : DEFAULT_CACHE_SIZE;
- }
-
- /**
- * Convert a long value into a fixed-size byte array of size 8.
- *
- * @param value value
- * @return byte array
- */
- private static byte[] longToBytes(long value) {
- byte[] result = new byte[8];
-
- for (int i = result.length - 1; i >= 0 && value != 0; i--) {
- result[i] = (byte) (value & 0xff);
- value >>>= 8;
- }
- return result;
- }
-
- //--------------------------------------------------------< RevisionStore >
-
- public Id putNode(MutableNode node) throws Exception {
- verifyInitialized();
-
- PersistHook callback = null;
- if (node instanceof PersistHook) {
- callback = (PersistHook) node;
- callback.prePersist(this);
- }
-
- Id id = pm.writeNode(node);
-
- if (callback != null) {
- callback.postPersist(this);
- }
-
- cache.put(id, new StoredNode(id, node, this));
-
- return id;
- }
-
- public Id putCNEMap(ChildNodeEntriesMap map) throws Exception {
- verifyInitialized();
-
- PersistHook callback = null;
- if (map instanceof PersistHook) {
- callback = (PersistHook) map;
- callback.prePersist(this);
- }
-
- Id id = pm.writeCNEMap(map);
-
- if (callback != null) {
- callback.postPersist(this);
- }
-
- cache.put(id, map);
-
- return id;
- }
-
- public Id putCommit(MutableCommit commit) throws Exception {
- verifyInitialized();
-
- PersistHook callback = null;
- if (commit instanceof PersistHook) {
- callback = (PersistHook) commit;
- callback.prePersist(this);
- }
-
- Id id = commit.getId();
- if (id == null) {
- id = new Id(longToBytes(++headCounter));
- }
- pm.writeCommit(id, commit);
-
- if (callback != null) {
- callback.postPersist(this);
- }
- cache.put(id, new StoredCommit(id, commit));
- return id;
- }
-
- public void setHeadCommitId(Id id) throws Exception {
- verifyInitialized();
-
- headLock.writeLock().lock();
- try {
- pm.writeHead(id);
- head = id;
-
- long headCounter = Long.parseLong(id.toString(), 16);
- if (headCounter > this.headCounter) {
- this.headCounter = headCounter;
- }
- } finally {
- headLock.writeLock().unlock();
- }
- }
-
- public void lockHead() {
- headLock.writeLock().lock();
- }
-
- public void unlockHead() {
- headLock.writeLock().unlock();
- }
-
- public String putBlob(InputStream in) throws Exception {
- verifyInitialized();
-
- return blobStore.writeBlob(in);
- }
-
- //-----------------------------------------------------< RevisionProvider >
-
- public NodeState getNodeState(StoredNode node) {
- return new StoredNodeAsState(node, this);
- }
-
- public Id getId(NodeState node) {
- return ((StoredNodeAsState) node).getId();
- }
-
- public StoredNode getNode(Id id) throws NotFoundException, Exception {
- verifyInitialized();
-
- StoredNode node = (StoredNode) cache.get(id);
- if (node != null) {
- return node;
- }
-
- Binding nodeBinding = pm.readNodeBinding(id);
- node = StoredNode.deserialize(id, this, nodeBinding);
-
- cache.put(id, node);
-
- return node;
- }
-
- public ChildNodeEntriesMap getCNEMap(Id id) throws NotFoundException, Exception {
- verifyInitialized();
-
- ChildNodeEntriesMap map = (ChildNodeEntriesMap) cache.get(id);
- if (map != null) {
- return map;
- }
-
- map = pm.readCNEMap(id);
-
- cache.put(id, map);
-
- return map;
- }
-
- public StoredCommit getCommit(Id id) throws NotFoundException, Exception {
- verifyInitialized();
-
- StoredCommit commit = (StoredCommit) cache.get(id);
- if (commit != null) {
- return commit;
- }
-
- commit = pm.readCommit(id);
- cache.put(id, commit);
-
- return commit;
- }
-
- public StoredNode getRootNode(Id commitId) throws NotFoundException, Exception {
- return getNode(getCommit(commitId).getRootNodeId());
- }
-
- public StoredCommit getHeadCommit() throws Exception {
- return getCommit(getHeadCommitId());
- }
-
- public Id getHeadCommitId() throws Exception {
- verifyInitialized();
-
- headLock.readLock().lock();
- try {
- return head;
- } finally {
- headLock.readLock().unlock();
- }
- }
-
- public int getBlob(String blobId, long pos, byte[] buff, int off, int length) throws NotFoundException, Exception {
- verifyInitialized();
-
- return blobStore.readBlob(blobId, pos, buff, off, length);
- }
-
- public long getBlobLength(String blobId) throws NotFoundException, Exception {
- verifyInitialized();
-
- return blobStore.getBlobLength(blobId);
- }
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/store/persistence/BDbPersistence.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/store/persistence/BDbPersistence.java
deleted file mode 100644
index 9ead947c0bf..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/store/persistence/BDbPersistence.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.store.persistence;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-
-import org.apache.jackrabbit.mk.model.ChildNodeEntriesMap;
-import org.apache.jackrabbit.mk.model.Commit;
-import org.apache.jackrabbit.mk.model.Id;
-import org.apache.jackrabbit.mk.model.Node;
-import org.apache.jackrabbit.mk.model.StoredCommit;
-import org.apache.jackrabbit.mk.store.BinaryBinding;
-import org.apache.jackrabbit.mk.store.Binding;
-import org.apache.jackrabbit.mk.store.IdFactory;
-import org.apache.jackrabbit.mk.store.NotFoundException;
-
-import com.sleepycat.je.Database;
-import com.sleepycat.je.DatabaseConfig;
-import com.sleepycat.je.DatabaseEntry;
-import com.sleepycat.je.Durability;
-import com.sleepycat.je.Environment;
-import com.sleepycat.je.EnvironmentConfig;
-import com.sleepycat.je.EnvironmentMutableConfig;
-import com.sleepycat.je.LockMode;
-import com.sleepycat.je.OperationStatus;
-
-/**
- *
- */
-public class BDbPersistence implements Persistence {
-
- private final static byte[] HEAD_ID = new byte[]{0};
- private Environment dbEnv;
- private Database db;
- private Database head;
-
- // TODO: make this configurable
- private IdFactory idFactory = IdFactory.getDigestFactory();
-
- public void initialize(File homeDir) throws Exception {
- File dbDir = new File(homeDir, "db");
- if (!dbDir.exists()) {
- dbDir.mkdirs();
- }
-
- EnvironmentConfig envConfig = new EnvironmentConfig();
- //envConfig.setTransactional(true);
- envConfig.setAllowCreate(true);
- dbEnv = new Environment(dbDir, envConfig);
-
- EnvironmentMutableConfig envMutableConfig = new EnvironmentMutableConfig();
- //envMutableConfig.setDurability(Durability.COMMIT_SYNC);
- //envMutableConfig.setDurability(Durability.COMMIT_NO_SYNC);
- envMutableConfig.setDurability(Durability.COMMIT_WRITE_NO_SYNC);
- dbEnv.setMutableConfig(envMutableConfig);
-
- DatabaseConfig dbConfig = new DatabaseConfig();
- dbConfig.setAllowCreate(true);
- //dbConfig.setDeferredWrite(true);
- db = dbEnv.openDatabase(null, "revs", dbConfig);
-
- head = dbEnv.openDatabase(null, "head", dbConfig);
-
- // TODO FIXME workaround in case we're not closed properly
- Runtime.getRuntime().addShutdownHook(new Thread() {
- public void run() {
- try { close(); } catch (Throwable ignore) {}
- }
- });
- }
-
- public void close() {
- try {
- if (db.getConfig().getDeferredWrite()) {
- db.sync();
- }
- db.close();
- head.close();
- dbEnv.close();
-
- db = null;
- dbEnv = null;
- } catch (Throwable t) {
- t.printStackTrace();
- }
- }
-
- public Id readHead() throws Exception {
- DatabaseEntry key = new DatabaseEntry(HEAD_ID);
- DatabaseEntry data = new DatabaseEntry();
-
- if (head.get(null, key, data, LockMode.DEFAULT) == OperationStatus.SUCCESS) {
- return new Id(data.getData());
- } else {
- return null;
- }
- }
-
- public void writeHead(Id id) throws Exception {
- DatabaseEntry key = new DatabaseEntry(HEAD_ID);
- DatabaseEntry data = new DatabaseEntry(id.getBytes());
-
- head.put(null, key, data);
- }
-
- public Binding readNodeBinding(Id id) throws NotFoundException, Exception {
- DatabaseEntry key = new DatabaseEntry(id.getBytes());
- DatabaseEntry data = new DatabaseEntry();
-
- if (db.get(null, key, data, LockMode.DEFAULT) == OperationStatus.SUCCESS) {
- ByteArrayInputStream in = new ByteArrayInputStream(data.getData());
- return new BinaryBinding(in);
- } else {
- throw new NotFoundException(id.toString());
- }
- }
-
- public Id writeNode(Node node) throws Exception {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- node.serialize(new BinaryBinding(out));
- byte[] bytes = out.toByteArray();
- Id id = new Id(idFactory.createContentId(bytes));
- persist(id.getBytes(), bytes);
- return id;
- }
-
- public StoredCommit readCommit(Id id) throws NotFoundException, Exception {
- DatabaseEntry key = new DatabaseEntry(id.getBytes());
- DatabaseEntry data = new DatabaseEntry();
-
- if (db.get(null, key, data, LockMode.DEFAULT) == OperationStatus.SUCCESS) {
- ByteArrayInputStream in = new ByteArrayInputStream(data.getData());
- return StoredCommit.deserialize(id, new BinaryBinding(in));
- } else {
- throw new NotFoundException(id.toString());
- }
- }
-
- public void writeCommit(Id id, Commit commit) throws Exception {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- commit.serialize(new BinaryBinding(out));
- byte[] bytes = out.toByteArray();
- persist(id.getBytes(), bytes);
- }
-
- public ChildNodeEntriesMap readCNEMap(Id id) throws NotFoundException, Exception {
- DatabaseEntry key = new DatabaseEntry(id.getBytes());
- DatabaseEntry data = new DatabaseEntry();
-
- if (db.get(null, key, data, LockMode.DEFAULT) == OperationStatus.SUCCESS) {
- ByteArrayInputStream in = new ByteArrayInputStream(data.getData());
- return ChildNodeEntriesMap.deserialize(new BinaryBinding(in));
- } else {
- throw new NotFoundException(id.toString());
- }
- }
-
- public Id writeCNEMap(ChildNodeEntriesMap map) throws Exception {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- map.serialize(new BinaryBinding(out));
- byte[] bytes = out.toByteArray();
- Id id = new Id(idFactory.createContentId(bytes));
- persist(id.getBytes(), bytes);
- return id;
- }
-
- //-------------------------------------------------------< implementation >
-
- protected void persist(byte[] rawId, byte[] bytes) throws Exception {
- DatabaseEntry key = new DatabaseEntry(rawId);
- DatabaseEntry data = new DatabaseEntry(bytes);
-
- db.put(null, key, data);
- }
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/store/persistence/FSPersistence.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/store/persistence/FSPersistence.java
deleted file mode 100644
index dd0fb42a211..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/store/persistence/FSPersistence.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.store.persistence;
-
-import java.io.BufferedInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-
-import org.apache.jackrabbit.mk.model.ChildNodeEntriesMap;
-import org.apache.jackrabbit.mk.model.Commit;
-import org.apache.jackrabbit.mk.model.Id;
-import org.apache.jackrabbit.mk.model.Node;
-import org.apache.jackrabbit.mk.model.StoredCommit;
-import org.apache.jackrabbit.mk.store.BinaryBinding;
-import org.apache.jackrabbit.mk.store.Binding;
-import org.apache.jackrabbit.mk.store.IdFactory;
-import org.apache.jackrabbit.mk.store.NotFoundException;
-import org.apache.jackrabbit.mk.util.IOUtils;
-
-/**
- *
- */
-public class FSPersistence implements Persistence {
-
- private File dataDir;
- private File head;
-
- // TODO: make this configurable
- private IdFactory idFactory = IdFactory.getDigestFactory();
-
- public void initialize(File homeDir) throws Exception {
- dataDir = new File(homeDir, "data");
- if (!dataDir.exists()) {
- dataDir.mkdirs();
- }
- head = new File(homeDir, "HEAD");
- if (!head.exists()) {
- writeHead(null);
- }
- }
-
- public void close() {
- }
-
- public Id readHead() throws Exception {
- FileInputStream in = new FileInputStream(head);
- try {
- String s = IOUtils.readString(in);
- return s.equals("") ? null : Id.fromString(s);
- } finally {
- in.close();
- }
- }
-
- public void writeHead(Id id) throws Exception {
- FileOutputStream out = new FileOutputStream(head);
- try {
- IOUtils.writeString(out, id == null ? "" : id.toString());
- } finally {
- out.close();
- }
- }
-
- public Binding readNodeBinding(Id id) throws NotFoundException, Exception {
- File f = getFile(id);
- if (f.exists()) {
- BufferedInputStream in = new BufferedInputStream(new FileInputStream(f));
- try {
- return new BinaryBinding(in);
- } finally {
- in.close();
- }
- } else {
- throw new NotFoundException(id.toString());
- }
- }
-
- public Id writeNode(Node node) throws Exception {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- node.serialize(new BinaryBinding(out));
- byte[] bytes = out.toByteArray();
- Id id = new Id(idFactory.createContentId(bytes));
- writeFile(id, bytes);
- return id;
- }
-
- public StoredCommit readCommit(Id id) throws NotFoundException, Exception {
- File f = getFile(id);
- if (f.exists()) {
- BufferedInputStream in = new BufferedInputStream(new FileInputStream(f));
- try {
- return StoredCommit.deserialize(id, new BinaryBinding(in));
- } finally {
- in.close();
- }
- } else {
- throw new NotFoundException(id.toString());
- }
- }
-
- public void writeCommit(Id id, Commit commit) throws Exception {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- commit.serialize(new BinaryBinding(out));
- byte[] bytes = out.toByteArray();
- writeFile(id, bytes);
- }
-
- public ChildNodeEntriesMap readCNEMap(Id id) throws NotFoundException, Exception {
- File f = getFile(id);
- if (f.exists()) {
- BufferedInputStream in = new BufferedInputStream(new FileInputStream(f));
- try {
- return ChildNodeEntriesMap.deserialize(new BinaryBinding(in));
- } finally {
- in.close();
- }
- } else {
- throw new NotFoundException(id.toString());
- }
- }
-
- public Id writeCNEMap(ChildNodeEntriesMap map) throws Exception {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- map.serialize(new BinaryBinding(out));
- byte[] bytes = out.toByteArray();
- Id id = new Id(idFactory.createContentId(bytes));
- writeFile(id, bytes);
- return id;
- }
-
- //-------------------------------------------------------< implementation >
-
- private File getFile(Id id) {
- String sId = id.toString();
- StringBuilder buf = new StringBuilder(sId.substring(0, 2));
- buf.append('/');
- buf.append(sId.substring(2));
- return new File(dataDir, buf.toString());
- }
-
- private void writeFile(Id id, byte[] data) throws Exception {
- File tmp = File.createTempFile("tmp", null, dataDir);
-
- try {
- FileOutputStream fos = new FileOutputStream(tmp);
-
- try {
- fos.write(data);
- } finally {
- //fos.getChannel().force(true);
- fos.close();
- }
-
- File dst = getFile(id);
- if (dst.exists()) {
- // already exists
- return;
- }
- // move tmp file
- tmp.setReadOnly();
- if (tmp.renameTo(dst)) {
- return;
- }
- // make sure parent dir exists and try again
- dst.getParentFile().mkdir();
- if (tmp.renameTo(dst)) {
- return;
- }
- throw new Exception("failed to create " + dst);
- } finally {
- tmp.delete();
- }
- }
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/store/persistence/InMemPersistence.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/store/persistence/InMemPersistence.java
deleted file mode 100644
index 66a43f6fd6e..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/store/persistence/InMemPersistence.java
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.store.persistence;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.InputStream;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.jackrabbit.mk.blobs.BlobStore;
-import org.apache.jackrabbit.mk.blobs.MemoryBlobStore;
-import org.apache.jackrabbit.mk.model.ChildNodeEntriesMap;
-import org.apache.jackrabbit.mk.model.Commit;
-import org.apache.jackrabbit.mk.model.Id;
-import org.apache.jackrabbit.mk.model.Node;
-import org.apache.jackrabbit.mk.model.StoredCommit;
-import org.apache.jackrabbit.mk.store.BinaryBinding;
-import org.apache.jackrabbit.mk.store.Binding;
-import org.apache.jackrabbit.mk.store.IdFactory;
-import org.apache.jackrabbit.mk.store.NotFoundException;
-
-/**
- *
- */
-public class InMemPersistence implements Persistence, BlobStore {
-
- private final Map nodes = Collections.synchronizedMap(new HashMap());
- private final Map commits = Collections.synchronizedMap(new HashMap());
- private final Map cneMaps = Collections.synchronizedMap(new HashMap());
- private final BlobStore blobs = new MemoryBlobStore();
-
- private Id head;
-
- // TODO: make this configurable
- private IdFactory idFactory = IdFactory.getDigestFactory();
-
- public void initialize(File homeDir) throws Exception {
- head = null;
- }
-
- public void close() {
- }
-
- public Id readHead() throws Exception {
- return head;
- }
-
- public void writeHead(Id id) throws Exception {
- head = id;
- }
-
- public Binding readNodeBinding(Id id) throws NotFoundException, Exception {
- byte[] bytes = nodes.get(id);
- if (bytes != null) {
- return new BinaryBinding(new ByteArrayInputStream(bytes));
- } else {
- throw new NotFoundException(id.toString());
- }
- }
-
- public Id writeNode(Node node) throws Exception {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- node.serialize(new BinaryBinding(out));
- byte[] bytes = out.toByteArray();
- Id id = new Id(idFactory.createContentId(bytes));
-
- if (!nodes.containsKey(id)) {
- nodes.put(id, bytes);
- }
-
- return id;
- }
-
- public StoredCommit readCommit(Id id) throws NotFoundException, Exception {
- StoredCommit commit = commits.get(id);
- if (commit != null) {
- return commit;
- } else {
- throw new NotFoundException(id.toString());
- }
- }
-
- public void writeCommit(Id id, Commit commit) throws Exception {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- commit.serialize(new BinaryBinding(out));
- byte[] bytes = out.toByteArray();
-
- if (!commits.containsKey(id)) {
- commits.put(id, StoredCommit.deserialize(id, new BinaryBinding(new ByteArrayInputStream(bytes))));
- }
- }
-
- public ChildNodeEntriesMap readCNEMap(Id id) throws NotFoundException, Exception {
- ChildNodeEntriesMap map = cneMaps.get(id);
- if (map != null) {
- return map;
- } else {
- throw new NotFoundException(id.toString());
- }
- }
-
- public Id writeCNEMap(ChildNodeEntriesMap map) throws Exception {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- map.serialize(new BinaryBinding(out));
- byte[] bytes = out.toByteArray();
- Id id = new Id(idFactory.createContentId(bytes));
-
- if (!cneMaps.containsKey(id)) {
- cneMaps.put(id, ChildNodeEntriesMap.deserialize(new BinaryBinding(new ByteArrayInputStream(bytes))));
- }
-
- return id;
- }
-
- //------------------------------------------------------------< BlobStore >
-
- public String addBlob(String tempFilePath) throws Exception {
- return blobs.addBlob(tempFilePath);
- }
-
- public String writeBlob(InputStream in) throws Exception {
- return blobs.writeBlob(in);
- }
-
- public int readBlob(String blobId, long pos, byte[] buff, int off, int length) throws NotFoundException, Exception {
- return blobs.readBlob(blobId, pos, buff, off, length);
- }
-
- public long getBlobLength(String blobId) throws Exception {
- return blobs.getBlobLength(blobId);
- }
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/store/persistence/MongoPersistence.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/store/persistence/MongoPersistence.java
deleted file mode 100644
index fcba0930a2e..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/store/persistence/MongoPersistence.java
+++ /dev/null
@@ -1,478 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.store.persistence;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.InputStream;
-import java.util.Iterator;
-
-import org.apache.jackrabbit.mk.blobs.BlobStore;
-import org.apache.jackrabbit.mk.fs.FilePath;
-import org.apache.jackrabbit.mk.model.ChildNodeEntriesMap;
-import org.apache.jackrabbit.mk.model.Commit;
-import org.apache.jackrabbit.mk.model.Id;
-import org.apache.jackrabbit.mk.model.Node;
-import org.apache.jackrabbit.mk.model.StoredCommit;
-import org.apache.jackrabbit.mk.store.BinaryBinding;
-import org.apache.jackrabbit.mk.store.Binding;
-import org.apache.jackrabbit.mk.store.IdFactory;
-import org.apache.jackrabbit.mk.store.NotFoundException;
-import org.apache.jackrabbit.mk.util.ExceptionFactory;
-import org.apache.jackrabbit.mk.util.IOUtils;
-import org.apache.jackrabbit.mk.util.StringUtils;
-import org.bson.types.ObjectId;
-
-import com.mongodb.BasicDBObject;
-import com.mongodb.DB;
-import com.mongodb.DBCollection;
-import com.mongodb.DBObject;
-import com.mongodb.Mongo;
-import com.mongodb.MongoException;
-import com.mongodb.WriteConcern;
-import com.mongodb.gridfs.GridFS;
-import com.mongodb.gridfs.GridFSDBFile;
-import com.mongodb.gridfs.GridFSInputFile;
-
-/**
- *
- */
-public class MongoPersistence implements Persistence, BlobStore {
-
- private static final boolean BINARY_FORMAT = false;
-
- private static final String HEAD_COLLECTION = "head";
- private static final String NODES_COLLECTION = "nodes";
- private static final String COMMITS_COLLECTION = "commits";
- private static final String CNEMAPS_COLLECTION = "cneMaps";
- private static final String ID_FIELD = ":id";
- private static final String DATA_FIELD = ":data";
-
- private Mongo con;
- private DB db;
- private DBCollection nodes;
- private DBCollection commits;
- private DBCollection cneMaps;
- private GridFS fs;
-
- // TODO: make this configurable
- private IdFactory idFactory = IdFactory.getDigestFactory();
-
- public void initialize(File homeDir) throws Exception {
- con = new Mongo();
- //con = new Mongo("localhost", 27017);
-
- db = con.getDB("mk");
- db.setWriteConcern(WriteConcern.SAFE);
-
- if (!db.collectionExists(HEAD_COLLECTION)) {
- // capped collection of size 1
- db.createCollection(HEAD_COLLECTION, new BasicDBObject("capped", true).append("size", 256).append("max", 1));
- }
-
- nodes = db.getCollection(NODES_COLLECTION);
- nodes.ensureIndex(
- new BasicDBObject(ID_FIELD, 1),
- new BasicDBObject("unique", true));
-
- commits = db.getCollection(COMMITS_COLLECTION);
- commits.ensureIndex(
- new BasicDBObject(ID_FIELD, 1),
- new BasicDBObject("unique", true));
-
- cneMaps = db.getCollection(CNEMAPS_COLLECTION);
- cneMaps.ensureIndex(
- new BasicDBObject(ID_FIELD, 1),
- new BasicDBObject("unique", true));
-
- fs = new GridFS(db);
- }
-
- public void close() {
- con.close();
- con = null;
- db = null;
- }
-
- public Id readHead() throws Exception {
- DBObject entry = db.getCollection(HEAD_COLLECTION).findOne();
- if (entry == null) {
- return null;
- }
- return new Id((byte[]) entry.get(ID_FIELD));
- }
-
- public void writeHead(Id id) throws Exception {
- // capped collection of size 1
- db.getCollection(HEAD_COLLECTION).insert(new BasicDBObject(ID_FIELD, id.getBytes()));
- }
-
- public Binding readNodeBinding(Id id) throws NotFoundException, Exception {
- BasicDBObject key = new BasicDBObject();
- if (BINARY_FORMAT) {
- key.put(ID_FIELD, id.getBytes());
- } else {
- key.put(ID_FIELD, id.toString());
- }
- final BasicDBObject nodeObject = (BasicDBObject) nodes.findOne(key);
- if (nodeObject != null) {
- if (BINARY_FORMAT) {
- byte[] bytes = (byte[]) nodeObject.get(DATA_FIELD);
- return new BinaryBinding(new ByteArrayInputStream(bytes));
- } else {
- return new DBObjectBinding(nodeObject);
- }
- } else {
- throw new NotFoundException(id.toString());
- }
- }
-
- public Id writeNode(Node node) throws Exception {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- node.serialize(new BinaryBinding(out));
- byte[] bytes = out.toByteArray();
- Id id = new Id(idFactory.createContentId(bytes));
-
- BasicDBObject nodeObject;
- if (BINARY_FORMAT) {
- nodeObject = new BasicDBObject(ID_FIELD, id.getBytes()).append(DATA_FIELD, bytes);
- } else {
- nodeObject = new BasicDBObject(ID_FIELD, id.toString());
- node.serialize(new DBObjectBinding(nodeObject));
- }
- try {
- nodes.insert(nodeObject);
- } catch (MongoException.DuplicateKey ignore) {
- // fall through
- }
-
- return id;
- }
-
- public StoredCommit readCommit(Id id) throws NotFoundException, Exception {
- BasicDBObject key = new BasicDBObject();
-
- if (BINARY_FORMAT) {
- key.put(ID_FIELD, id.getBytes());
- } else {
- key.put(ID_FIELD, id.toString());
- }
- BasicDBObject commitObject = (BasicDBObject) commits.findOne(key);
- if (commitObject != null) {
- if (BINARY_FORMAT) {
- byte[] bytes = (byte[]) commitObject.get(DATA_FIELD);
- return StoredCommit.deserialize(id, new BinaryBinding(new ByteArrayInputStream(bytes)));
- } else {
- return StoredCommit.deserialize(id, new DBObjectBinding(commitObject));
- }
- } else {
- throw new NotFoundException(id.toString());
- }
- }
-
- public void writeCommit(Id id, Commit commit) throws Exception {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- commit.serialize(new BinaryBinding(out));
- byte[] bytes = out.toByteArray();
-
- BasicDBObject commitObject;
- if (BINARY_FORMAT) {
- commitObject = new BasicDBObject(ID_FIELD, id.getBytes()).append(DATA_FIELD, bytes);
- } else {
- commitObject = new BasicDBObject(ID_FIELD, id.toString());
- commit.serialize(new DBObjectBinding(commitObject));
- }
- try {
- commits.insert(commitObject);
- } catch (MongoException.DuplicateKey ignore) {
- // fall through
- }
- }
-
- public ChildNodeEntriesMap readCNEMap(Id id) throws NotFoundException, Exception {
- BasicDBObject key = new BasicDBObject();
- if (BINARY_FORMAT) {
- key.put(ID_FIELD, id.getBytes());
- } else {
- key.put(ID_FIELD, id.toString());
- }
- BasicDBObject mapObject = (BasicDBObject) cneMaps.findOne(key);
- if (mapObject != null) {
- if (BINARY_FORMAT) {
- byte[] bytes = (byte[]) mapObject.get(DATA_FIELD);
- return ChildNodeEntriesMap.deserialize(new BinaryBinding(new ByteArrayInputStream(bytes)));
- } else {
- return ChildNodeEntriesMap.deserialize(new DBObjectBinding(mapObject));
- }
- } else {
- throw new NotFoundException(id.toString());
- }
- }
-
- public Id writeCNEMap(ChildNodeEntriesMap map) throws Exception {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- map.serialize(new BinaryBinding(out));
- byte[] bytes = out.toByteArray();
- Id id = new Id(idFactory.createContentId(bytes));
-
- BasicDBObject mapObject;
- if (BINARY_FORMAT) {
- mapObject = new BasicDBObject(ID_FIELD, id.getBytes()).append(DATA_FIELD, bytes);
- } else {
- mapObject = new BasicDBObject(ID_FIELD, id.toString());
- map.serialize(new DBObjectBinding(mapObject));
- }
- try {
- cneMaps.insert(mapObject);
- } catch (MongoException.DuplicateKey ignore) {
- // fall through
- }
-
- return id;
- }
-
- //------------------------------------------------------------< BlobStore >
-
- public String addBlob(String tempFilePath) throws Exception {
- try {
- FilePath file = FilePath.get(tempFilePath);
- try {
- InputStream in = file.newInputStream();
- return writeBlob(in);
- } finally {
- file.delete();
- }
- } catch (Exception e) {
- throw ExceptionFactory.convert(e);
- }
- }
-
- public String writeBlob(InputStream in) throws Exception {
- GridFSInputFile f = fs.createFile(in, true);
- //f.save(0x20000); // save in 128k chunks
- f.save();
-
- return f.getId().toString();
- }
-
- public int readBlob(String blobId, long pos, byte[] buff, int off,
- int length) throws Exception {
-
- GridFSDBFile f = fs.findOne(new ObjectId(blobId));
- if (f == null) {
- throw new NotFoundException(blobId);
- }
- // todo provide a more efficient implementation
- InputStream in = f.getInputStream();
- try {
- in.skip(pos);
- return in.read(buff, off, length);
- } finally {
- IOUtils.closeQuietly(in);
- }
- }
-
- public long getBlobLength(String blobId) throws Exception {
- GridFSDBFile f = fs.findOne(new ObjectId(blobId));
- if (f == null) {
- throw new NotFoundException(blobId);
- }
-
- return f.getLength();
- }
-
- //-------------------------------------------------------< implementation >
-
- protected final static String ENCODED_DOT = "_x46_";
- protected final static String ENCODED_DOLLAR_SIGN = "_x36_";
-
- /**
- * see http://www.mongodb.org/display/DOCS/Legal+Key+Names
- *
- * @param name
- * @return
- */
- protected static String encodeName(String name) {
- StringBuilder buf = null;
- for (int i = 0; i < name.length(); i++) {
- if (i == 0 && name.charAt(i) == '$') {
- // mongodb field names must not start with '$'
- buf = new StringBuilder();
- buf.append(ENCODED_DOLLAR_SIGN);
- } else if (name.charAt(i) == '.') {
- // . is a reserved char for mongodb field names
- if (buf == null) {
- buf = new StringBuilder(name.substring(0, i));
- }
- buf.append(ENCODED_DOT);
- } else {
- if (buf != null) {
- buf.append(name.charAt(i));
- }
- }
- }
-
- return buf == null ? name : buf.toString();
- }
-
- protected static String decodeName(String name) {
- StringBuilder buf = null;
-
- int lastPos = 0;
- if (name.startsWith(ENCODED_DOLLAR_SIGN)) {
- buf = new StringBuilder("$");
- lastPos = ENCODED_DOLLAR_SIGN.length();
- }
-
- int pos;
- while ((pos = name.indexOf(ENCODED_DOT, lastPos)) != -1) {
- if (buf == null) {
- buf = new StringBuilder();
- }
- buf.append(name.substring(lastPos, pos));
- buf.append('.');
- lastPos = pos + ENCODED_DOT.length();
- }
-
- if (buf != null) {
- buf.append(name.substring(lastPos));
- return buf.toString();
- } else {
- return name;
- }
- }
-
- //--------------------------------------------------------< inner classes >
-
- protected class DBObjectBinding implements Binding {
-
- BasicDBObject obj;
-
- protected DBObjectBinding(BasicDBObject obj) {
- this.obj = obj;
- }
-
- @Override
- public void write(String key, String value) throws Exception {
- obj.append(encodeName(key), value);
- }
-
- @Override
- public void write(String key, byte[] value) throws Exception {
- obj.append(encodeName(key), StringUtils.convertBytesToHex(value));
- }
-
- @Override
- public void write(String key, long value) throws Exception {
- obj.append(encodeName(key), value);
- }
-
- @Override
- public void write(String key, int value) throws Exception {
- obj.append(encodeName(key), value);
- }
-
- @Override
- public void writeMap(String key, int count, StringEntryIterator iterator) throws Exception {
- BasicDBObject childObj = new BasicDBObject();
- while (iterator.hasNext()) {
- StringEntry entry = iterator.next();
- childObj.append(encodeName(entry.getKey()), entry.getValue());
- }
- obj.append(encodeName(key), childObj);
- }
-
- @Override
- public void writeMap(String key, int count, BytesEntryIterator iterator) throws Exception {
- BasicDBObject childObj = new BasicDBObject();
- while (iterator.hasNext()) {
- BytesEntry entry = iterator.next();
- childObj.append(encodeName(entry.getKey()), StringUtils.convertBytesToHex(entry.getValue()));
- }
- obj.append(encodeName(key), childObj);
- }
-
- @Override
- public String readStringValue(String key) throws Exception {
- return obj.getString(encodeName(key));
- }
-
- @Override
- public byte[] readBytesValue(String key) throws Exception {
- return StringUtils.convertHexToBytes(obj.getString(encodeName(key)));
- }
-
- @Override
- public long readLongValue(String key) throws Exception {
- return obj.getLong(encodeName(key));
- }
-
- @Override
- public int readIntValue(String key) throws Exception {
- return obj.getInt(encodeName(key));
- }
-
- @Override
- public StringEntryIterator readStringMap(String key) throws Exception {
- final BasicDBObject childObj = (BasicDBObject) obj.get(encodeName(key));
- final Iterator it = childObj.keySet().iterator();
- return new StringEntryIterator() {
- @Override
- public boolean hasNext() {
- return it.hasNext();
- }
-
- @Override
- public StringEntry next() {
- String key = it.next();
- return new StringEntry(decodeName(key), childObj.getString(key));
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException();
- }
- };
- }
-
- @Override
- public BytesEntryIterator readBytesMap(String key) throws Exception {
- final BasicDBObject childObj = (BasicDBObject) obj.get(encodeName(key));
- final Iterator it = childObj.keySet().iterator();
- return new BytesEntryIterator() {
- @Override
- public boolean hasNext() {
- return it.hasNext();
- }
-
- @Override
- public BytesEntry next() {
- String key = it.next();
- return new BytesEntry(
- decodeName(key),
- StringUtils.convertHexToBytes(childObj.getString(key)));
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException();
- }
- };
- }
- }
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/util/BloomFilterUtils.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/util/BloomFilterUtils.java
deleted file mode 100644
index 8d00ea3abfa..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/util/BloomFilterUtils.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.util;
-
-/**
- * Bloom filter utilities.
- */
-public class BloomFilterUtils {
-
- /**
- * The multiply and shift constants for the supplemental hash function.
- */
- private static final int MUL = 2153, SHIFT = 19;
-
- /**
- * The number of bits needed per stored element.
- * Using the formula m = - (n * ln(p)) / (ln(2)^2) as described in
- * http://en.wikipedia.org/wiki/Bloom_filter
- * (simplified, as we used a fixed K: 2).
- */
- private static final double BIT_FACTOR = -Math.log(0.02) / Math.pow(Math.log(2), 2);
-
- /**
- * Create a bloom filter array for the given number of elements.
- *
- * @param count the number of entries
- * @param maxBytes the maximum number of bytes
- * @return the empty bloom filter
- */
- public static byte[] createFilter(int elementCount, int maxBytes) {
- int bits = (int) (elementCount * BIT_FACTOR) + 7;
- return new byte[Math.min(maxBytes, bits / 8)];
- }
-
- /**
- * Add the key.
- *
- * @param bloom the bloom filter
- * @param key the key
- */
- public static void add(byte[] bloom, Object key) {
- int len = bloom.length;
- if (len > 0) {
- int h1 = hash(key.hashCode()), h2 = hash(h1);
- bloom[(h1 >>> 3) % len] |= 1 << (h1 & 7);
- bloom[(h2 >>> 3) % len] |= 1 << (h2 & 7);
- }
- }
-
- /**
- * Check whether the given key is probably in the set. This method never
- * returns false if the key is in the set, but possibly returns true even if
- * it isn't.
- *
- * @param bloom the bloom filter
- * @param key the key
- * @return true if the given key is probably in the set
- */
- public static boolean probablyContains(byte[] bloom, Object key) {
- int len = bloom.length;
- if (len == 0) {
- return true;
- }
- int h1 = hash(key.hashCode()), h2 = hash(h1);
- int x = bloom[(h1 >>> 3) % len] & (1 << (h1 & 7));
- if (x != 0) {
- x = bloom[(h2 >>> 3) % len] & (1 << (h2 & 7));
- }
- return x != 0;
- }
-
- /**
- * Get the hash value for the given key. The returned hash value is
- * stretched so that it should work well even for relatively bad hashCode
- * implementations.
- *
- * @param key the key
- * @return the hash value
- */
- private static int hash(int oldHash) {
- return oldHash ^ ((oldHash * MUL) >> SHIFT);
- }
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/util/MemorySockets.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/util/MemorySockets.java
deleted file mode 100644
index 5db58237d48..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/util/MemorySockets.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.util;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InterruptedIOException;
-import java.io.OutputStream;
-import java.io.PipedInputStream;
-import java.io.PipedOutputStream;
-import java.net.InetAddress;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.net.UnknownHostException;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import javax.net.ServerSocketFactory;
-import javax.net.SocketFactory;
-
-/**
- * Memory sockets.
- */
-public abstract class MemorySockets {
-
- /** Sockets queue */
- static final BlockingQueue QUEUE = new LinkedBlockingQueue();
-
- /** Sentinel socket, used to signal a closed queue */
- static final Socket SENTINEL = new Socket();
-
- /**
- * Return the server socket factory.
- *
- * @return server socket factory
- */
- public static ServerSocketFactory getServerSocketFactory() {
- return new ServerSocketFactory() {
- @Override
- public ServerSocket createServerSocket() throws IOException {
- return new ServerSocket() {
- /** Closed flag */
- private final AtomicBoolean closed = new AtomicBoolean();
-
- @Override
- public Socket accept() throws IOException {
- if (closed.get()) {
- throw new IOException("closed");
- }
- try {
- Socket socket = QUEUE.take();
- if (socket == SENTINEL) {
- throw new IOException("closed");
- }
- return socket;
- } catch (InterruptedException e) {
- throw new InterruptedIOException();
- }
- }
-
- @Override
- public void close() throws IOException {
- if (closed.compareAndSet(false, true)) {
- QUEUE.add(SENTINEL);
- }
- }
- };
- }
-
- @Override
- public ServerSocket createServerSocket(int port) throws IOException {
- return createServerSocket();
- }
-
- @Override
- public ServerSocket createServerSocket(int port, int backlog)
- throws IOException {
-
- return createServerSocket();
- }
-
- @Override
- public ServerSocket createServerSocket(int port, int backlog,
- InetAddress ifAddress) throws IOException {
-
- return createServerSocket();
- }
- };
- }
-
- /**
- * Return the socket factory.
- *
- * @return socket factory
- */
- public static SocketFactory getSocketFactory() {
- return new SocketFactory() {
- @Override
- public Socket createSocket() throws IOException {
- PipedSocket socket = new PipedSocket();
- QUEUE.add(new PipedSocket(socket));
- return socket;
- }
-
- @Override
- public Socket createSocket(InetAddress host, int port) throws IOException {
- return createSocket();
- }
-
- @Override
- public Socket createSocket(String host, int port) throws IOException,
- UnknownHostException {
-
- return createSocket();
- }
-
- @Override
- public Socket createSocket(String host, int port, InetAddress localHost,
- int localPort) throws IOException, UnknownHostException {
-
- return createSocket();
- }
-
- @Override
- public Socket createSocket(InetAddress address, int port,
- InetAddress localAddress, int localPort) throws IOException {
-
- return createSocket();
- }
- };
- };
-
- /**
- * Socket implementation, using pipes to exchange information between a
- * pair of sockets.
- */
- static class PipedSocket extends Socket {
-
- /** Input stream */
- protected final PipedInputStream in;
-
- /** Output stream */
- protected final PipedOutputStream out;
-
- /**
- * Used to initialize the socket on the client side.
- */
- PipedSocket() {
- in = new PipedInputStream(8192);
- out = new PipedOutputStream();
- }
-
- /**
- * Used to initialize the socket on the server side.
- */
- PipedSocket(PipedSocket client) throws IOException {
- in = new PipedInputStream(client.out);
- out = new PipedOutputStream(client.in);
- }
-
- @Override
- public InputStream getInputStream() throws IOException {
- return in;
- }
-
- @Override
- public OutputStream getOutputStream() throws IOException {
- return out;
- }
- }
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/util/Sync.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/util/Sync.java
deleted file mode 100644
index d5fc9ac5d14..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/util/Sync.java
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.util;
-
-import java.util.Iterator;
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.simple.NodeImpl;
-
-/**
- * Traverse the nodes in two repositories / revisions / nodes in order to
- * synchronize them or list the differences.
- *
- * If the target is not set, the tool can be used to list or backup the content,
- * for (data store) garbage collection, or similar.
- */
-public class Sync {
-
- private MicroKernel sourceMk, targetMk;
- private String sourceRev, targetRev;
- private String sourcePath, targetPath = "/";
- private boolean useContentHashOptimization;
- private int childNodesPerBatch = 100;
-
- private Handler handler;
-
- /**
- * Set the source (required).
- *
- * @param mk the source
- * @param rev the revision
- * @param path the path
- */
- public void setSource(MicroKernel mk, String rev, String path) {
- sourceMk = mk;
- sourceRev = rev;
- sourcePath = path;
- }
-
- /**
- * Set the target (optional). If not set, the tool assumes no nodes exist on
- * the target.
- *
- * @param mk the target
- * @param rev the revision
- * @param path the path
- */
-
- public void setTarget(MicroKernel mk, String rev, String path) {
- targetMk = mk;
- targetRev = rev;
- targetPath = path;
- }
-
- /**
- * Whether to use the content hash optimization if available.
- *
- * @return true if the optimization should be used
- */
- public boolean getUseContentHashOptimization() {
- return useContentHashOptimization;
- }
-
- /**
- * Use the content hash optimization if available.
- *
- * @param useContentHashOptimization the new value
- */
- public void setUseContentHashOptimization(boolean useContentHashOptimization) {
- this.useContentHashOptimization = useContentHashOptimization;
- }
-
- /**
- * Get the number of child nodes to request in one call.
- *
- * @return the number of child nodes to request
- */
- public int getChildNodesPerBatch() {
- return childNodesPerBatch;
- }
-
- /**
- * Set the number of child nodes to request in one call.
- *
- * @param childNodesPerBatch the number of child nodes to request
- */
- public void setChildNodesPerBatch(int childNodesPerBatch) {
- this.childNodesPerBatch = childNodesPerBatch;
- }
-
- public void run(Handler handler) {
- this.handler = handler;
- visit("");
- }
-
- public void visit(String relPath) {
- String source = PathUtils.concat(sourcePath, relPath);
- String target = PathUtils.concat(targetPath, relPath);
- NodeImpl s = null, t = null;
- if (sourceMk.nodeExists(source, sourceRev)) {
- s = NodeImpl.parse(sourceMk.getNodes(source, sourceRev, 0, 0, childNodesPerBatch, null));
- }
- if (targetMk != null && targetMk.nodeExists(target, targetRev)) {
- t = NodeImpl.parse(targetMk.getNodes(target, targetRev, 0, 0, childNodesPerBatch, null));
- }
- if (s == null || t == null) {
- if (s == t) {
- // both don't exist - ok
- return;
- } else if (s == null) {
- handler.removeNode(target);
- return;
- } else {
- if (!PathUtils.denotesRoot(target)) {
- handler.addNode(target);
- }
- }
- }
- // properties
- for (int i = 0; i < s.getPropertyCount(); i++) {
- String name = s.getProperty(i);
- String sourceValue = s.getPropertyValue(i);
- String targetValue = t != null && t.hasProperty(name) ? t.getProperty(name) : null;
- if (!sourceValue.equals(targetValue)) {
- handler.setProperty(target, name, sourceValue);
- }
- }
- if (t != null) {
- for (int i = 0; i < t.getPropertyCount(); i++) {
- String name = t.getProperty(i);
- // if it exists in the source, it's already updated
- if (!s.hasProperty(name)) {
- handler.setProperty(target, name, null);
- }
- }
- }
- // child nodes
- Iterator<String> it = s.getTotalChildNodeCount() > s.getChildNodeCount() ?
- getAllChildNodeNames(sourceMk, source, sourceRev, childNodesPerBatch) :
- s.getChildNodeNames(Integer.MAX_VALUE);
- while (it.hasNext()) {
- String name = it.next();
- visit(PathUtils.concat(relPath, name));
- }
- if (t != null) {
- it = t.getTotalChildNodeCount() > t.getChildNodeCount() ?
- getAllChildNodeNames(targetMk, target, targetRev, childNodesPerBatch) :
- t.getChildNodeNames(Integer.MAX_VALUE);
- while (it.hasNext()) {
- String name = it.next();
- if (s.exists(name)) {
- // if it exists in the source, it's already updated
- } else if (s.getTotalChildNodeCount() > s.getChildNodeCount() &&
- sourceMk.nodeExists(PathUtils.concat(source, name), sourceRev)) {
- // if it exists in the source, it's already updated
- // (in this case, there are many child nodes)
- } else {
- visit(PathUtils.concat(relPath, name));
- }
- }
- }
- }
-
- /**
- * Get a child node name iterator that batches node names. This work
- * efficiently for small and big child node lists.
- *
- * @param mk the implementation
- * @param path the path
- * @param rev the revision
- * @param batchSize the batch size
- * @return a child node name iterator
- */
- public static Iterator<String> getAllChildNodeNames(final MicroKernel mk, final String path, final String rev, final int batchSize) {
- return new Iterator<String>() {
-
- private long offset;
- private Iterator<String> current;
-
- {
- nextBatch();
- }
-
- private void nextBatch() {
- NodeImpl n = NodeImpl.parse(mk.getNodes(path, rev, 0, offset, batchSize, null));
- current = n.getChildNodeNames(Integer.MAX_VALUE);
- offset += batchSize;
- }
-
- @Override
- public boolean hasNext() {
- if (!current.hasNext()) {
- nextBatch();
- }
- return current.hasNext();
- }
-
- @Override
- public String next() {
- if (!current.hasNext()) {
- nextBatch();
- }
- return current.next();
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException();
- }
- };
- }
-
- /**
- * The sync handler.
- */
- public interface Handler {
-
- /**
- * The given node needs to be added to the target.
- *
- * @param path the path
- */
- void addNode(String path);
-
- /**
- * The given node needs to be removed from the target.
- *
- * @param path the path
- */
- void removeNode(String target);
-
- /**
- * The given property needs to be set on the target.
- *
- * @param path the path
- * @param property the property name
- * @param value the new value, or null to remove it
- */
- void setProperty(String target, String property, String value);
-
- }
-
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/mk/util/SynchronizedVerifier.java b/oak-core/src/main/java/org/apache/jackrabbit/mk/util/SynchronizedVerifier.java
deleted file mode 100644
index 96333146532..00000000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/mk/util/SynchronizedVerifier.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.util;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.IdentityHashMap;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-/**
- * A utility class that allows to verify access to a resource is synchronized.
- */
-public class SynchronizedVerifier {
-
- private static volatile boolean enabled;
- private static final Map<Class<?>, AtomicBoolean> DETECT =
- Collections.synchronizedMap(new HashMap<Class<?>, AtomicBoolean>());
- private static final Map