Index: src/main/java/org/apache/jackrabbit/core/cache/CacheManager.java =================================================================== --- src/main/java/org/apache/jackrabbit/core/cache/CacheManager.java (revision 1630634) +++ src/main/java/org/apache/jackrabbit/core/cache/CacheManager.java (working copy) @@ -17,9 +17,12 @@ package org.apache.jackrabbit.core.cache; import java.util.ArrayList; +import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.WeakHashMap; +import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -38,19 +41,19 @@ public class CacheManager implements CacheAccessListener { /** The logger instance. */ - private static Logger log = LoggerFactory.getLogger(CacheManager.class); + private static Logger logger = LoggerFactory.getLogger(CacheManager.class); /** The default maximum amount of memory to distribute across the caches. */ - private static final long DEFAULT_MAX_MEMORY = 16 * 1024 * 1024; + private static final long DEFAULT_MAX_MEMORY = 16 * FileUtils.ONE_MB; /** The default minimum size of a cache. */ - private static final long DEFAULT_MIN_MEMORY_PER_CACHE = 128 * 1024; + private static final long DEFAULT_MIN_MEMORY_PER_CACHE = 128 * FileUtils.ONE_KB; /** The default maximum memory per cache. */ - private static final long DEFAULT_MAX_MEMORY_PER_CACHE = 4 * 1024 * 1024; + private static final long DEFAULT_MAX_MEMORY_PER_CACHE = 4 * FileUtils.ONE_MB; /** The set of caches (weakly referenced). */ - private WeakHashMap caches = new WeakHashMap(); + private final Map caches = Collections.synchronizedMap(new WeakHashMap()); /** The default minimum resize interval (in ms). */ private static final int DEFAULT_MIN_RESIZE_INTERVAL = 1000; @@ -156,18 +159,16 @@ * Log info about the caches. */ private void logCacheStats() { - if (log.isDebugEnabled()) { - long now = System.currentTimeMillis(); + if (logger.isDebugEnabled()) { + final long now = System.currentTimeMillis(); if (now < nextLogStats) { return; } // JCR-3194 avoid ConcurrentModificationException - List list = new ArrayList(); - synchronized (caches) { - list.addAll(caches.keySet()); - } + final List list = new ArrayList(); + list.addAll(caches.keySet()); for (Cache cache : list) { - log.debug(cache.getCacheInfoAsString()); + logger.debug(cache.getCacheInfoAsString()); } nextLogStats = now + minLogStatsInterval; } @@ -177,22 +178,20 @@ * Re-calculate the maximum memory for each cache, and set the new limits. 
*/ private void resizeAll() { - if (log.isTraceEnabled()) { - log.trace("resizeAll size=" + caches.size()); + if (logger.isTraceEnabled()) { + logger.trace("resizeAll size=" + caches.size()); } // get strong references // entries in a weak hash map may disappear any time // so can't use size() / keySet() directly // only using the iterator guarantees that we don't get null references - List list = new ArrayList(); - synchronized (caches) { - list.addAll(caches.keySet()); - } + final List list = new ArrayList(); + list.addAll(caches.keySet()); if (list.size() == 0) { // nothing to do return; } - CacheInfo[] infos = new CacheInfo[list.size()]; + final CacheInfo[] infos = new CacheInfo[list.size()]; for (int i = 0; i < list.size(); i++) { infos[i] = new CacheInfo((Cache) list.get(i)); } @@ -199,7 +198,7 @@ // calculate the total access count and memory used long totalAccessCount = 0; long totalMemoryUsed = 0; - for (CacheInfo info : infos) { + for (final CacheInfo info : infos) { totalAccessCount += info.getAccessCount(); totalMemoryUsed += info.getMemoryUsed(); } @@ -213,7 +212,7 @@ double memoryPerUsed = (double) maxMemory / 2. / Math.max(1., (double) totalMemoryUsed); int fullCacheCount = 0; - for (CacheInfo info : infos) { + for (final CacheInfo info : infos) { long mem = (long) (memoryPerAccess * info.getAccessCount()); mem += (long) (memoryPerUsed * info.getMemoryUsed()); mem = Math.min(mem, maxMemoryPerCache); @@ -228,12 +227,12 @@ } // calculate the unused memory long unusedMemory = maxMemory; - for (CacheInfo info : infos) { + for (final CacheInfo info : infos) { unusedMemory -= info.getMemory(); } // distribute the remaining memory evenly across the full caches if (unusedMemory > 0 && fullCacheCount > 0) { - for (CacheInfo info : infos) { + for (final CacheInfo info : infos) { if (info.wasFull()) { info.setMemory(info.getMemory() + unusedMemory / fullCacheCount); @@ -241,10 +240,10 @@ } } // set the new limit - for (CacheInfo info : infos) { - Cache cache = info.getCache(); - if (log.isTraceEnabled()) { - log.trace(cache + " now:" + cache.getMaxMemorySize() + " used:" + for (final CacheInfo info : infos) { + final Cache cache = info.getCache(); + if (logger.isTraceEnabled()) { + logger.trace(cache + " now:" + cache.getMaxMemorySize() + " used:" + info.getMemoryUsed() + " access:" + info.getAccessCount() + " new:" + info.getMemory()); } @@ -258,10 +257,8 @@ * * @param cache the cache to add */ - public void add(Cache cache) { - synchronized (caches) { - caches.put(cache, null); - } + public void add(final Cache cache) { + caches.put(cache, null); } /** @@ -272,10 +269,8 @@ * @param cache * the cache to remove */ - public void remove(Cache cache) { - synchronized (caches) { - caches.remove(cache); - } + public void remove(final Cache cache) { + caches.remove(cache); } /** @@ -292,7 +287,7 @@ private boolean wasFull; - CacheInfo(Cache cache) { + CacheInfo(final Cache cache) { this.cache = cache; // copy the data as this runs in a different thread // the exact values are not important, but it is important that the @@ -334,7 +329,7 @@ } - public void disposeCache(Cache cache) { + public void disposeCache(final Cache cache) { remove(cache); } Index: src/main/java/org/apache/jackrabbit/core/config/RepositoryConfig.java =================================================================== --- src/main/java/org/apache/jackrabbit/core/config/RepositoryConfig.java (revision 1630634) +++ src/main/java/org/apache/jackrabbit/core/config/RepositoryConfig.java (working copy) @@ -808,6 +808,7 @@ 
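// Illustrative sketch only, not part of the patch: the CacheManager hunks above replace the
// explicit synchronized(caches) blocks with Collections.synchronizedMap(new WeakHashMap()),
// yet logCacheStats() and resizeAll() still copy caches.keySet() via addAll(). Per the
// Collections.synchronizedMap contract, iteration (which addAll performs internally) must be
// manually synchronized on the wrapper. A minimal sketch of the safe copy pattern:
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;

public class SynchronizedWeakMapCopy {

    private final Map<Object, Object> caches =
            Collections.synchronizedMap(new WeakHashMap<Object, Object>());

    public List<Object> snapshotKeys() {
        final List<Object> copy = new ArrayList<Object>();
        synchronized (caches) {           // hold the wrapper's monitor while iterating
            copy.addAll(caches.keySet()); // addAll() iterates the key set under the lock
        }
        return copy;
    }
}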
             String s = writer.getBuffer().toString();
             configWriter.write(s);
+            configWriter.flush();
             configContent.append(s);
         }
     } catch (IOException e) {
Index: src/main/java/org/apache/jackrabbit/core/journal/InstanceRevision.java
===================================================================
--- src/main/java/org/apache/jackrabbit/core/journal/InstanceRevision.java (revision 1630634)
+++ src/main/java/org/apache/jackrabbit/core/journal/InstanceRevision.java (working copy)
@@ -16,10 +16,12 @@
  */
 package org.apache.jackrabbit.core.journal;
 
+import java.io.Closeable;
+
 /**
  *
  */
-public interface InstanceRevision {
+public interface InstanceRevision extends Closeable {
 
     /**
      * Return current instance revision.
Index: src/main/java/org/apache/jackrabbit/core/journal/Journal.java
===================================================================
--- src/main/java/org/apache/jackrabbit/core/journal/Journal.java (revision 1630634)
+++ src/main/java/org/apache/jackrabbit/core/journal/Journal.java (working copy)
@@ -16,12 +16,14 @@
  */
 package org.apache.jackrabbit.core.journal;
 
+import java.io.Closeable;
+
 import org.apache.jackrabbit.spi.commons.namespace.NamespaceResolver;
 
 /**
  * Generic journal interface.
  */
-public interface Journal {
+public interface Journal extends Closeable {
 
     /**
      * Initialize journal.
Index: src/main/java/org/apache/jackrabbit/core/persistence/bundle/AbstractBundlePersistenceManager.java
===================================================================
--- src/main/java/org/apache/jackrabbit/core/persistence/bundle/AbstractBundlePersistenceManager.java (revision 1630634)
+++ src/main/java/org/apache/jackrabbit/core/persistence/bundle/AbstractBundlePersistenceManager.java (working copy)
@@ -30,6 +30,7 @@
 import javax.jcr.PropertyType;
 import javax.jcr.RepositoryException;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.jackrabbit.api.stats.RepositoryStatistics;
 import org.apache.jackrabbit.core.cache.Cache;
 import org.apache.jackrabbit.core.cache.CacheAccessListener;
@@ -152,7 +153,7 @@
     protected PMContext context;
 
     /** default size of the bundle cache */
-    private long bundleCacheSize = 8 * 1024 * 1024;
+    private long bundleCacheSize = 8L * FileUtils.ONE_MB;
 
     /** Counter of read operations. */
     private AtomicLong readCounter;
@@ -183,17 +184,24 @@
      * @return the size of the bundle cache in megabytes.
      */
     public String getBundleCacheSize() {
-        return String.valueOf(bundleCacheSize / (1024 * 1024));
+        return String.valueOf(bundleCacheSize / FileUtils.ONE_MB);
     }
 
     /**
      * Sets the size of the bundle cache in megabytes.
-     * the default is 8.
+     * the default is 8; the maximum is 1048576 (1 TB).
      *
      * @param bundleCacheSize the bundle cache size in megabytes.
      */
-    public void setBundleCacheSize(String bundleCacheSize) {
-        this.bundleCacheSize = Long.parseLong(bundleCacheSize) * 1024 * 1024;
+    public void setBundleCacheSize(final String bundleCacheSize) {
+        final long cacheSize = Long.parseLong(bundleCacheSize);
+
+        if (cacheSize > (1024L * 1024L))
+        {
+            throw new IllegalArgumentException("Bundle cache size too large (max 1 TB): " + cacheSize);
+        }
+
+        this.bundleCacheSize = cacheSize * FileUtils.ONE_MB;
     }
 
     /**
@@ -204,21 +212,20 @@
      * @param id the id of the node
      * @return the buffer with the appended data.
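// Illustrative sketch only, not part of the patch: once InstanceRevision and Journal extend
// java.io.Closeable (see the hunks above), callers can release them with try-with-resources.
// DummyRevision is a made-up stand-in, not a Jackrabbit class.
import java.io.Closeable;
import java.io.IOException;

public class CloseableRevisionDemo {

    static final class DummyRevision implements Closeable {
        private long revision;

        long get() {
            return revision;
        }

        void set(final long value) {
            this.revision = value;
        }

        @Override
        public void close() throws IOException {
            // release file handles, connections, etc.
        }
    }

    public static void main(String[] args) throws IOException {
        try (DummyRevision rev = new DummyRevision()) {
            rev.set(42L);
            System.out.println("current revision: " + rev.get());
        } // close() runs automatically here, even on exceptions
    }
}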
*/ - protected StringBuffer buildNodeFolderPath(StringBuffer buf, NodeId id) { + protected StringBuilder buildNodeFolderPath(StringBuilder buf, final NodeId id) { if (buf == null) { - buf = new StringBuffer(); + buf = new StringBuilder(); } - char[] chars = id.toString().toCharArray(); + final char[] chars = id.toString().toCharArray(); int cnt = 0; - for (int i = 0; i < chars.length; i++) { - if (chars[i] == '-') { + for (char c : chars) { + if (c == '-') { continue; } - //if (cnt > 0 && cnt % 4 == 0) { if (cnt == 2 || cnt == 4) { buf.append(FileSystem.SEPARATOR_CHAR); } - buf.append(chars[i]); + buf.append(c); cnt++; } return buf; @@ -232,9 +239,9 @@ * @param id the id of the property * @return the buffer with the appended data. */ - protected StringBuffer buildPropFilePath(StringBuffer buf, PropertyId id) { + protected StringBuilder buildPropFilePath(StringBuilder buf, final PropertyId id) { if (buf == null) { - buf = new StringBuffer(); + buf = new StringBuilder(); } buildNodeFolderPath(buf, id.getParentId()); buf.append(FileSystem.SEPARATOR); @@ -253,10 +260,10 @@ * @param i the index of the property value * @return the buffer with the appended data. */ - protected StringBuffer buildBlobFilePath(StringBuffer buf, PropertyId id, - int i) { + protected StringBuilder buildBlobFilePath(StringBuilder buf, final PropertyId id, + final int i) { if (buf == null) { - buf = new StringBuffer(); + buf = new StringBuilder(); } buildPropFilePath(buf, id); buf.append('.'); @@ -272,9 +279,9 @@ * @param id the id of the node * @return the buffer with the appended data. */ - protected StringBuffer buildNodeFilePath(StringBuffer buf, NodeId id) { + protected StringBuilder buildNodeFilePath(StringBuilder buf, final NodeId id) { if (buf == null) { - buf = new StringBuffer(); + buf = new StringBuilder(); } buildNodeFolderPath(buf, id); buf.append(FileSystem.SEPARATOR); @@ -290,10 +297,9 @@ * @param id the id of the node * @return the buffer with the appended data. */ - protected StringBuffer buildNodeReferencesFilePath( - StringBuffer buf, NodeId id) { + protected StringBuilder buildNodeReferencesFilePath(StringBuilder buf, final NodeId id) { if (buf == null) { - buf = new StringBuffer(); + buf = new StringBuilder(); } buildNodeFolderPath(buf, id); buf.append(FileSystem.SEPARATOR); @@ -307,21 +313,21 @@ * @throws IllegalStateException if an error occurs. */ public StringIndex getNsIndex() { + if (nsIndex != null) + { + return nsIndex; + } try { - if (nsIndex == null) { - // load name and ns index - FileSystemResource nsFile = new FileSystemResource(context.getFileSystem(), RES_NS_INDEX); - if (nsFile.exists()) { - nsIndex = new FileBasedIndex(nsFile); - } else { - nsIndex = (StringIndex) context.getNamespaceRegistry(); - } + // load name and ns index + final FileSystemResource nsFile = new FileSystemResource(context.getFileSystem(), RES_NS_INDEX); + if (nsFile.exists()) { + nsIndex = new FileBasedIndex(nsFile); + } else { + nsIndex = (StringIndex) context.getNamespaceRegistry(); } return nsIndex; - } catch (Exception e) { - IllegalStateException e2 = new IllegalStateException("Unable to create nsIndex."); - e2.initCause(e); - throw e2; + } catch (final Exception e) { + throw new IllegalStateException("Unable to create nsIndex.", e); } } @@ -331,16 +337,15 @@ * @throws IllegalStateException if an error occurs. 
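// Illustrative sketch only, mirroring buildNodeFolderPath() above: node ids are fanned out
// into a two-level folder structure built from the first two and the next two characters of
// the UUID, with dashes stripped. Standalone version using '/' instead of
// FileSystem.SEPARATOR_CHAR:
public class NodeFolderPathDemo {

    static StringBuilder buildNodeFolderPath(StringBuilder buf, final String uuid) {
        if (buf == null) {
            buf = new StringBuilder();
        }
        int cnt = 0;
        for (final char c : uuid.toCharArray()) {
            if (c == '-') {
                continue; // dashes are not part of the path
            }
            if (cnt == 2 || cnt == 4) {
                buf.append('/');
            }
            buf.append(c);
            cnt++;
        }
        return buf;
    }

    public static void main(String[] args) {
        // prints ca/fe/babecafebabecafebabecafebabe
        System.out.println(buildNodeFolderPath(null, "cafebabe-cafe-babe-cafe-babecafebabe"));
    }
}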
*/ public StringIndex getNameIndex() { + if (nameIndex != null) + { + return nameIndex; + } try { - if (nameIndex == null) { - nameIndex = new FileBasedIndex(new FileSystemResource( - context.getFileSystem(), RES_NAME_INDEX)); - } + nameIndex = new FileBasedIndex(new FileSystemResource(context.getFileSystem(), RES_NAME_INDEX)); return nameIndex; - } catch (Exception e) { - IllegalStateException e2 = new IllegalStateException("Unable to create nameIndex."); - e2.initCause(e); - throw e2; + } catch (final Exception e) { + throw new IllegalStateException("Unable to create nameIndex.", e); } } @@ -350,13 +355,13 @@ * {@inheritDoc} */ public synchronized void onExternalUpdate(ChangeLog changes) { - for (ItemState state : changes.modifiedStates()) { + for (final ItemState state : changes.modifiedStates()) { bundles.remove(getBundleId(state)); } - for (ItemState state : changes.deletedStates()) { + for (final ItemState state : changes.deletedStates()) { bundles.remove(getBundleId(state)); } - for (ItemState state : changes.addedStates()) { + for (final ItemState state : changes.addedStates()) { // There may have been a cache miss entry bundles.remove(getBundleId(state)); } @@ -365,18 +370,17 @@ private NodeId getBundleId(ItemState state) { if (state.isNode()) { return (NodeId) state.getId(); - } else { - return state.getParentId(); } + return state.getParentId(); } //------------------------------------------< IterablePersistenceManager >-- @Override - public Map getAllNodeInfos(NodeId after, int maxCount) + public Map getAllNodeInfos(final NodeId after, final int maxCount) throws ItemStateException, RepositoryException { - Map infos = new LinkedHashMap(); - for (NodeId nodeId : getAllNodeIds(after, maxCount)) { + final Map infos = new LinkedHashMap(); + for (final NodeId nodeId : getAllNodeIds(after, maxCount)) { infos.put(nodeId, new NodeInfo(loadBundle(nodeId))); } return infos; @@ -539,18 +543,17 @@ * * Loads the state via the appropriate NodePropBundle. */ - public boolean exists(PropertyId id) throws ItemStateException { - NodePropBundle bundle = getBundle(id.getParentId()); + public boolean exists(final PropertyId id) throws ItemStateException { + final NodePropBundle bundle = getBundle(id.getParentId()); if (bundle != null) { - Name name = id.getName(); + final Name name = id.getName(); return bundle.hasProperty(name) || JCR_PRIMARYTYPE.equals(name) || (JCR_UUID.equals(name) && bundle.isReferenceable()) || (JCR_MIXINTYPES.equals(name) && !bundle.getMixinTypeNames().isEmpty()); - } else { - return false; } + return false; } /** @@ -558,7 +561,7 @@ * * Checks the existence via the appropriate NodePropBundle. 
*/ - public boolean exists(NodeId id) throws ItemStateException { + public boolean exists(final NodeId id) throws ItemStateException { // anticipating a load followed by a exists return getBundle(id) != null; } @@ -566,7 +569,7 @@ /** * {@inheritDoc} */ - public NodeState createNew(NodeId id) { + public NodeState createNew(final NodeId id) { return new NodeState(id, null, null, NodeState.STATUS_NEW, false); } @@ -573,7 +576,7 @@ /** * {@inheritDoc} */ - public PropertyState createNew(PropertyId id) { + public PropertyState createNew(final PropertyId id) { return new PropertyState(id, PropertyState.STATUS_NEW, false); } @@ -588,14 +591,11 @@ */ public synchronized void store(ChangeLog changeLog) throws ItemStateException { - boolean success = false; try { storeInternal(changeLog); - success = true; - } finally { - if (!success) { - bundles.clear(); - } + } catch (final Exception e) { + bundles.clear(); + throw e; } } @@ -605,13 +605,12 @@ * @param changeLog the changelog to store * @throws ItemStateException on failure */ - private void storeInternal(ChangeLog changeLog) + private void storeInternal(final ChangeLog changeLog) throws ItemStateException { - // delete bundles - HashSet deleted = new HashSet(); - for (ItemState state : changeLog.deletedStates()) { + final HashSet deleted = new HashSet(); + for (final ItemState state : changeLog.deletedStates()) { if (state.isNode()) { - NodePropBundle bundle = getBundle((NodeId) state.getId()); + final NodePropBundle bundle = getBundle((NodeId) state.getId()); if (bundle == null) { throw new NoSuchItemStateException(state.getId().toString()); } @@ -628,9 +627,9 @@ } } // gather modified node states - for (ItemState state : changeLog.modifiedStates()) { + for (final ItemState state : changeLog.modifiedStates()) { if (state.isNode()) { - NodeId nodeId = (NodeId) state.getId(); + final NodeId nodeId = (NodeId) state.getId(); NodePropBundle bundle = modified.get(nodeId); if (bundle == null) { bundle = getBundle(nodeId); @@ -641,7 +640,7 @@ } bundle.update((NodeState) state); } else { - PropertyId id = (PropertyId) state.getId(); + final PropertyId id = (PropertyId) state.getId(); // skip redundant primaryType, mixinTypes and uuid properties if (id.getName().equals(JCR_PRIMARYTYPE) || id.getName().equals(JCR_MIXINTYPES) @@ -648,7 +647,7 @@ || id.getName().equals(JCR_UUID)) { continue; } - NodeId nodeId = id.getParentId(); + final NodeId nodeId = id.getParentId(); NodePropBundle bundle = modified.get(nodeId); if (bundle == null) { bundle = getBundle(nodeId); @@ -661,16 +660,16 @@ } } // add removed properties - for (ItemState state : changeLog.deletedStates()) { + for (final ItemState state : changeLog.deletedStates()) { if (state.isNode()) { // check consistency - NodeId parentId = state.getParentId(); + final NodeId parentId = state.getParentId(); if (!modified.containsKey(parentId) && !deleted.contains(parentId)) { log.warn("Deleted node state's parent is not modified or deleted: " + parentId + "/" + state.getId()); } } else { - PropertyId id = (PropertyId) state.getId(); - NodeId nodeId = id.getParentId(); + final PropertyId id = (PropertyId) state.getId(); + final NodeId nodeId = id.getParentId(); if (!deleted.contains(nodeId)) { NodePropBundle bundle = modified.get(nodeId); if (bundle == null) { @@ -687,9 +686,9 @@ } } // add added properties - for (ItemState state : changeLog.addedStates()) { + for (final ItemState state : changeLog.addedStates()) { if (!state.isNode()) { - PropertyId id = (PropertyId) state.getId(); + final PropertyId id = 
(PropertyId) state.getId(); // skip primaryType pr mixinTypes properties if (id.getName().equals(JCR_PRIMARYTYPE) || id.getName().equals(JCR_MIXINTYPES) @@ -696,7 +695,7 @@ || id.getName().equals(JCR_UUID)) { continue; } - NodeId nodeId = id.getParentId(); + final NodeId nodeId = id.getParentId(); NodePropBundle bundle = modified.get(nodeId); if (bundle == null) { // should actually not happen @@ -713,7 +712,7 @@ // now store all modified bundles long updateSize = 0; - for (NodePropBundle bundle : modified.values()) { + for (final NodePropBundle bundle : modified.values()) { putBundle(bundle); updateSize += bundle.getSize(); } @@ -720,7 +719,7 @@ changeLog.setUpdateSize(updateSize); // store the refs - for (NodeReferences refs : changeLog.modifiedRefs()) { + for (final NodeReferences refs : changeLog.modifiedRefs()) { if (refs.hasReferences()) { store(refs); } else { @@ -739,8 +738,8 @@ * * @throws ItemStateException if an error occurs. */ - private NodePropBundle getBundle(NodeId id) throws ItemStateException { - NodePropBundle bundle = bundles.get(id); + private NodePropBundle getBundle(final NodeId id) throws ItemStateException { + final NodePropBundle bundle = bundles.get(id); readCounter.incrementAndGet(); if (bundle == MISSING) { return null; @@ -762,11 +761,11 @@ * @return * @throws ItemStateException */ - private NodePropBundle getBundleCacheMiss(NodeId id) + private NodePropBundle getBundleCacheMiss(final NodeId id) throws ItemStateException { - long time = System.nanoTime(); + final long time = System.nanoTime(); log.debug("Loading bundle {}", id); - NodePropBundle bundle = loadBundle(id); + final NodePropBundle bundle = loadBundle(id); cacheMissDuration.addAndGet(System.nanoTime() - time); cacheMissCounter.incrementAndGet(); if (bundle != null) { @@ -784,7 +783,7 @@ * @param bundle the bundle to delete * @throws ItemStateException if an error occurs */ - private void deleteBundle(NodePropBundle bundle) throws ItemStateException { + private void deleteBundle(final NodePropBundle bundle) throws ItemStateException { destroyBundle(bundle); bundle.removeAllProperties(getBlobStore()); bundles.put(bundle.getId(), MISSING, MISSING_SIZE_ESTIMATE); @@ -796,8 +795,8 @@ * @param bundle the bundle to store * @throws ItemStateException if an error occurs */ - private void putBundle(NodePropBundle bundle) throws ItemStateException { - long time = System.nanoTime(); + private void putBundle(final NodePropBundle bundle) throws ItemStateException { + final long time = System.nanoTime(); log.debug("Storing bundle {}", bundle.getId()); storeBundle(bundle); if (auditLogger.isDebugEnabled()) { @@ -818,7 +817,7 @@ /** * {@inheritDoc} */ - public void checkConsistency(String[] uuids, boolean recursive, boolean fix) { + public void checkConsistency(final String[] uuids, boolean recursive, boolean fix) { try { ConsistencyCheckerImpl checker = new ConsistencyCheckerImpl(this, null, null, eventChannel); checker.check(uuids, recursive); @@ -831,7 +830,7 @@ } } - public void setEventChannel(UpdateEventChannel eventChannel) { + public void setEventChannel(final UpdateEventChannel eventChannel) { this.eventChannel = eventChannel; } @@ -855,11 +854,11 @@ * * @param id the id of the bundle. 
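// Illustrative sketch only, not part of the patch: the MISSING sentinel used by getBundle()
// and deleteBundle() above is a negative-cache pattern -- a bundle known to be absent or
// deleted is remembered so repeated lookups skip the persistence layer. Generic form:
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class NegativeCacheDemo {

    private static final String MISSING = "<missing>"; // sentinel, never returned to callers

    private final Map<String, String> cache = new ConcurrentHashMap<String, String>();

    String get(final String key) {
        final String value = cache.get(key);
        if (value == MISSING) {          // identity check against the sentinel
            return null;                 // known to be absent, no backend call
        }
        if (value != null) {
            return value;
        }
        final String loaded = loadFromBackend(key);
        cache.put(key, loaded != null ? loaded : MISSING);
        return loaded;
    }

    void delete(final String key) {
        cache.put(key, MISSING);         // remember the deletion
    }

    private String loadFromBackend(final String key) {
        return null; // stand-in for loadBundle(id); always a miss in this sketch
    }
}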
*/ - protected void evictBundle(NodeId id) { + protected void evictBundle(final NodeId id) { bundles.remove(id); } - public void cacheAccessed(long accessCount) { + public void cacheAccessed(final long accessCount) { logCacheStats(); cacheAccessCounter.addAndGet(accessCount); cacheSizeCounter.set(bundles.getMemoryUsed()); @@ -867,7 +866,7 @@ private void logCacheStats() { if (log.isInfoEnabled()) { - long now = System.currentTimeMillis(); + final long now = System.currentTimeMillis(); if (now < nextLogStats) { return; } Index: src/main/java/org/apache/jackrabbit/core/persistence/bundle/BundleFsPersistenceManager.java =================================================================== --- src/main/java/org/apache/jackrabbit/core/persistence/bundle/BundleFsPersistenceManager.java (revision 1630634) +++ src/main/java/org/apache/jackrabbit/core/persistence/bundle/BundleFsPersistenceManager.java (working copy) @@ -58,7 +58,7 @@ public class BundleFsPersistenceManager extends AbstractBundlePersistenceManager { /** the default logger */ - private static Logger log = LoggerFactory.getLogger(BundleFsPersistenceManager.class); + private static Logger logger = LoggerFactory.getLogger(BundleFsPersistenceManager.class); /** flag indicating if this manager was initialized */ protected boolean initialized; @@ -231,21 +231,21 @@ /** * {@inheritDoc} */ - protected NodePropBundle loadBundle(NodeId id) throws ItemStateException { + protected NodePropBundle loadBundle(final NodeId id) throws ItemStateException { try { - String path = buildNodeFilePath(null, id).toString(); + final String path = buildNodeFilePath(null, id).toString(); if (!itemFs.exists(path)) { return null; } - InputStream in = itemFs.getInputStream(path); + final InputStream in = itemFs.getInputStream(path); try { return binding.readBundle(in, id); } finally { IOUtils.closeQuietly(in); } - } catch (Exception e) { + } catch (final Exception e) { String msg = "failed to read bundle: " + id + ": " + e; - log.error(msg); + logger.error(msg); throw new ItemStateException(msg, e); } } @@ -258,9 +258,9 @@ * @param id the id of the node * @return the buffer with the appended data. */ - protected StringBuffer buildNodeFilePath(StringBuffer buf, NodeId id) { + protected StringBuilder buildNodeFilePath(StringBuilder buf, final NodeId id) { if (buf == null) { - buf = new StringBuffer(); + buf = new StringBuilder(); } buildNodeFolderPath(buf, id); buf.append('.'); @@ -276,10 +276,9 @@ * @param id the id of the node * @return the buffer with the appended data. 
*/ - protected StringBuffer buildNodeReferencesFilePath( - StringBuffer buf, NodeId id) { + protected StringBuilder buildNodeReferencesFilePath(StringBuilder buf, final NodeId id) { if (buf == null) { - buf = new StringBuffer(); + buf = new StringBuilder(); } buildNodeFolderPath(buf, id); buf.append('.'); @@ -290,42 +289,43 @@ /** * {@inheritDoc} */ - protected synchronized void storeBundle(NodePropBundle bundle) throws ItemStateException { + protected synchronized void storeBundle(final NodePropBundle bundle) throws ItemStateException { + OutputStream out = null; try { - StringBuffer buf = buildNodeFolderPath(null, bundle.getId()); - buf.append('.'); - buf.append(NODEFILENAME); - String fileName = buf.toString(); - String dir = fileName.substring(0, fileName.lastIndexOf(FileSystem.SEPARATOR_CHAR)); + final StringBuilder buf = buildNodeFolderPath(null, bundle.getId()); + buf.append('.').append(NODEFILENAME); + final String fileName = buf.toString(); + final String dir = fileName.substring(0, fileName.lastIndexOf(FileSystem.SEPARATOR_CHAR)); if (!itemFs.exists(dir)) { itemFs.createFolder(dir); } - OutputStream out = itemFs.getOutputStream(fileName); - try { - binding.writeBundle(out, bundle); - } finally { - out.close(); - } - } catch (Exception e) { + out = itemFs.getOutputStream(fileName); + binding.writeBundle(out, bundle); + out.flush(); + } catch (final Exception e) { String msg = "failed to write bundle: " + bundle.getId(); - BundleFsPersistenceManager.log.error(msg, e); + logger.error(msg, e); throw new ItemStateException(msg, e); } + finally + { + IOUtils.closeQuietly(out); + } } /** * {@inheritDoc} */ - protected synchronized void destroyBundle(NodePropBundle bundle) throws ItemStateException { + protected synchronized void destroyBundle(final NodePropBundle bundle) throws ItemStateException { try { - StringBuffer buf = buildNodeFilePath(null, bundle.getId()); + final StringBuilder buf = buildNodeFilePath(null, bundle.getId()); itemFs.deleteFile(buf.toString()); - } catch (Exception e) { + } catch (final Exception e) { if (e instanceof NoSuchItemStateException) { throw (NoSuchItemStateException) e; } - String msg = "failed to delete bundle: " + bundle.getId(); - BundleFsPersistenceManager.log.error(msg, e); + final String msg = "failed to delete bundle: " + bundle.getId(); + logger.error(msg, e); throw new ItemStateException(msg, e); } } @@ -333,7 +333,7 @@ /** * {@inheritDoc} */ - public synchronized NodeReferences loadReferencesTo(NodeId targetId) + public synchronized NodeReferences loadReferencesTo(final NodeId targetId) throws NoSuchItemStateException, ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); @@ -340,20 +340,20 @@ } InputStream in = null; try { - String path = buildNodeReferencesFilePath(null, targetId).toString(); + final String path = buildNodeReferencesFilePath(null, targetId).toString(); if (!itemFs.exists(path)) { // special case throw new NoSuchItemStateException(targetId.toString()); } in = itemFs.getInputStream(path); - NodeReferences refs = new NodeReferences(targetId); + final NodeReferences refs = new NodeReferences(targetId); Serializer.deserialize(refs, in); return refs; - } catch (NoSuchItemStateException e) { + } catch (final NoSuchItemStateException e) { throw e; - } catch (Exception e) { - String msg = "failed to read references: " + targetId; - BundleFsPersistenceManager.log.error(msg, e); + } catch (final Exception e) { + final String msg = "failed to read references: " + targetId; + logger.error(msg, e); 
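// Illustrative sketch only, not part of the patch: the storeBundle() hunk above flushes
// inside the try block and closes with IOUtils.closeQuietly() in finally. closeQuietly()
// swallows IOExceptions, so without the explicit flush a buffered-write failure could go
// unnoticed; flushing first makes it surface in the catch block. Minimal form of the pattern:
import java.io.IOException;
import java.io.OutputStream;

import org.apache.commons.io.IOUtils;

public class FlushBeforeQuietCloseDemo {

    static void write(final OutputStream out, final byte[] data) throws IOException {
        try {
            out.write(data);
            out.flush();               // any buffered-write failure is reported here
        } finally {
            IOUtils.closeQuietly(out); // close errors are deliberately ignored
        }
    }
}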
throw new ItemStateException(msg, e); } finally { IOUtils.closeQuietly(in); @@ -363,46 +363,50 @@ /** * {@inheritDoc} */ - public synchronized void store(NodeReferences refs) + public synchronized void store(final NodeReferences refs) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } + OutputStream out = null; try { - StringBuffer buf = buildNodeFolderPath(null, refs.getTargetId()); - buf.append('.'); - buf.append(NODEREFSFILENAME); - String fileName = buf.toString(); - String dir = fileName.substring(0, fileName.lastIndexOf(FileSystem.SEPARATOR_CHAR)); + final StringBuilder buf = buildNodeFolderPath(null, refs.getTargetId()); + buf.append('.').append(NODEREFSFILENAME); + final String fileName = buf.toString(); + final String dir = fileName.substring(0, fileName.lastIndexOf(FileSystem.SEPARATOR_CHAR)); if (!itemFs.exists(dir)) { itemFs.createFolder(dir); } - OutputStream out = itemFs.getOutputStream(fileName); + out = itemFs.getOutputStream(fileName); Serializer.serialize(refs, out); - out.close(); - } catch (Exception e) { - String msg = "failed to write " + refs; - BundleFsPersistenceManager.log.error(msg, e); + out.flush(); + } catch (final Exception e) { + final String msg = "failed to write " + refs; + logger.error(msg, e); throw new ItemStateException(msg, e); } + finally + { + IOUtils.closeQuietly(out); + } } /** * {@inheritDoc} */ - public synchronized void destroy(NodeReferences refs) throws ItemStateException { + public synchronized void destroy(final NodeReferences refs) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } try { - StringBuffer buf = buildNodeReferencesFilePath(null, refs.getTargetId()); + final StringBuilder buf = buildNodeReferencesFilePath(null, refs.getTargetId()); itemFs.deleteFile(buf.toString()); - } catch (Exception e) { + } catch (final Exception e) { if (e instanceof NoSuchItemStateException) { throw (NoSuchItemStateException) e; } String msg = "failed to delete " + refs; - BundleFsPersistenceManager.log.error(msg, e); + BundleFsPersistenceManager.logger.error(msg, e); throw new ItemStateException(msg, e); } } @@ -410,16 +414,16 @@ /** * {@inheritDoc} */ - public synchronized boolean existsReferencesTo(NodeId targetId) throws ItemStateException { + public synchronized boolean existsReferencesTo(final NodeId targetId) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } try { - StringBuffer buf = buildNodeReferencesFilePath(null, targetId); + final StringBuilder buf = buildNodeReferencesFilePath(null, targetId); return itemFs.exists(buf.toString()); - } catch (Exception e) { - String msg = "failed to check existence of node references: " + targetId; - BundleFsPersistenceManager.log.error(msg, e); + } catch (final Exception e) { + final String msg = "failed to check existence of node references: " + targetId; + logger.error(msg, e); throw new ItemStateException(msg, e); } } @@ -429,14 +433,13 @@ * @param message * @param se */ - protected void logException(String message, SQLException se) { + protected void logException(final String message, final SQLException se) { if (message != null) { - BundleFsPersistenceManager.log.error(message); + logger.error(message); } - BundleFsPersistenceManager.log.error(" Reason: " + se.getMessage()); - BundleFsPersistenceManager.log.error( - " State/Code: " + se.getSQLState() + "/" + se.getErrorCode()); - BundleFsPersistenceManager.log.debug(" dump:", se); + logger.error(" 
Reason: " + se.getMessage()); + logger.error(" State/Code: " + se.getSQLState() + "/" + se.getErrorCode()); + logger.debug(" dump:", se); } /** @@ -466,7 +469,7 @@ this.fs = fs; } - public String createId(PropertyId id, int index) { + public String createId(final PropertyId id, final int index) { return buildBlobFilePath(null, id, index).toString(); } @@ -483,15 +486,15 @@ /** * {@inheritDoc} */ - public List getAllNodeIds(NodeId bigger, int maxCount) + public List getAllNodeIds(final NodeId bigger, final int maxCount) throws ItemStateException { - ArrayList list = new ArrayList(); + final ArrayList list = new ArrayList(); try { getListRecursive(list, "", bigger == null ? null : bigger, maxCount); return list; - } catch (FileSystemException e) { + } catch (final FileSystemException e) { String msg = "failed to read node list: " + bigger + ": " + e; - log.error(msg); + logger.error(msg); throw new ItemStateException(msg, e); } } @@ -499,19 +502,21 @@ /** * {@inheritDoc} */ - protected NodeId getIdFromFileName(String fileName) { - StringBuffer buff = new StringBuffer(35); + protected NodeId getIdFromFileName(final String fileName) { if (!fileName.endsWith("." + NODEFILENAME)) { return null; } + + final StringBuilder buff = new StringBuilder(35); + for (int i = 0; i < fileName.length(); i++) { - char c = fileName.charAt(i); + final char c = fileName.charAt(i); if (c == '.') { break; } if (c != '/') { buff.append(c); - int len = buff.length(); + final int len = buff.length(); if (len == 8 || len == 13 || len == 18 || len == 23) { buff.append('-'); } @@ -526,11 +531,10 @@ if (maxCount > 0 && list.size() >= maxCount) { return; } - String[] files = itemFs.listFiles(path); + final String[] files = itemFs.listFiles(path); Arrays.sort(files); - for (int i = 0; i < files.length; i++) { - String f = files[i]; - NodeId n = getIdFromFileName(path + FileSystem.SEPARATOR + f); + for (final String f : files) { + final NodeId n = getIdFromFileName(path + FileSystem.SEPARATOR + f); if (n == null) { continue; } @@ -542,10 +546,10 @@ return; } } - String[] dirs = itemFs.listFolders(path); + final String[] dirs = itemFs.listFolders(path); Arrays.sort(dirs); - for (int i = 0; i < dirs.length; i++) { - getListRecursive(list, path + FileSystem.SEPARATOR + dirs[i], + for (final String dir : dirs) { + getListRecursive(list, path + FileSystem.SEPARATOR + dir, bigger, maxCount); } } Index: src/main/java/org/apache/jackrabbit/core/persistence/pool/BundleDbPersistenceManager.java =================================================================== --- src/main/java/org/apache/jackrabbit/core/persistence/pool/BundleDbPersistenceManager.java (revision 1630634) +++ src/main/java/org/apache/jackrabbit/core/persistence/pool/BundleDbPersistenceManager.java (working copy) @@ -16,10 +16,8 @@ */ package org.apache.jackrabbit.core.persistence.pool; -import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; -import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.sql.ResultSet; @@ -34,6 +32,8 @@ import javax.sql.DataSource; import org.apache.commons.io.IOUtils; +import org.apache.commons.io.input.ClosedInputStream; +import org.apache.commons.io.input.ProxyInputStream; import org.apache.jackrabbit.core.fs.FileSystem; import org.apache.jackrabbit.core.fs.FileSystemResource; import org.apache.jackrabbit.core.fs.local.LocalFileSystem; @@ -87,7 +87,9 @@ extends AbstractBundlePersistenceManager implements DatabaseAware { /** the default logger */ - 
private static Logger log = LoggerFactory.getLogger(BundleDbPersistenceManager.class); + private static Logger logger = LoggerFactory.getLogger(BundleDbPersistenceManager.class); + + private static final InputStream NULL_STREAM = new ClosedInputStream(); /** storage model modifier: binary keys */ public static final int SM_BINARY_KEYS = 1; @@ -482,16 +484,16 @@ super.store(changeLog); conHelper.endBatch(true); return; - } catch (SQLException e) { + } catch (final SQLException e) { // Either startBatch or stopBatch threw it: either way the // transaction was not persisted and no action needs to be taken. lastException = new ItemStateException(e.getMessage(), e); - } catch (ItemStateException e) { + } catch (final ItemStateException e) { // store call threw it: we need to cancel the transaction lastException = e; try { conHelper.endBatch(false); - } catch (SQLException e2) { + } catch (final SQLException e2) { DbUtility.logException("rollback failed", e2); } @@ -500,9 +502,9 @@ assert !isIntegrityConstraintViolation(e.getCause()); } failures++; - log.error("Failed to persist ChangeLog (stacktrace on DEBUG log level), blockOnConnectionLoss = " + logger.error("Failed to persist ChangeLog (stacktrace on DEBUG log level), blockOnConnectionLoss = " + blockOnConnectionLoss + ": " + lastException); - log.debug("Failed to persist ChangeLog", lastException); + logger.debug("Failed to persist ChangeLog", lastException); if (blockOnConnectionLoss || failures <= 1) { // if we're going to try again try { Thread.sleep(100); @@ -509,7 +511,7 @@ } catch (InterruptedException e1) { Thread.currentThread().interrupt(); sleepInterrupted = true; - log.error("Interrupted: canceling retry of ChangeLog storage"); + logger.error("Interrupted: canceling retry of ChangeLog storage"); } } } @@ -516,13 +518,12 @@ throw lastException; } - private boolean isIntegrityConstraintViolation(Throwable t) { + private boolean isIntegrityConstraintViolation(final Throwable t) { if (t instanceof SQLException) { - String state = ((SQLException) t).getSQLState(); + final String state = ((SQLException) t).getSQLState(); return state != null && state.startsWith("23"); - } else { - return false; } + return false; } /** @@ -567,13 +568,12 @@ private DataSource getDataSource() throws Exception { if (getDataSourceName() == null || "".equals(getDataSourceName())) { return connectionFactory.getDataSource(getDriver(), getUrl(), getUser(), getPassword()); - } else { - String dbType = connectionFactory.getDataBaseType(dataSourceName); - if (BundleDbPersistenceManager.class.getResourceAsStream(dbType + ".ddl") != null) { - setDatabaseType(dbType); - } - return connectionFactory.getDataSource(dataSourceName); } + final String dbType = connectionFactory.getDataBaseType(dataSourceName); + if (BundleDbPersistenceManager.class.getResourceAsStream(dbType + ".ddl") != null) { + setDatabaseType(dbType); + } + return connectionFactory.getDataSource(dataSourceName); } /** @@ -597,7 +597,7 @@ * @return a new {@link CheckSchemaOperation} instance */ protected CheckSchemaOperation createCheckSchemaOperation() { - InputStream in = + final InputStream in = AbstractBundlePersistenceManager.class.getResourceAsStream( databaseType + ".ddl"); return new CheckSchemaOperation(conHelper, in, schemaObjectPrefix + "BUNDLE").addVariableReplacement( @@ -620,9 +620,8 @@ protected CloseableBLOBStore createBlobStore() throws Exception { if (useLocalFsBlobStore()) { return createLocalFSBlobStore(context); - } else { - return createDBBlobStore(context); } + return 
createDBBlobStore(context); } /** @@ -631,22 +630,21 @@ * @throws IllegalStateException if an error occurs. */ public StringIndex getNameIndex() { + if (nameIndex != null) + { + return nameIndex; + } try { - if (nameIndex == null) { - FileSystemResource res = new FileSystemResource(context.getFileSystem(), RES_NAME_INDEX); - if (res.exists()) { - nameIndex = super.getNameIndex(); - } else { - // create db nameindex - nameIndex = createDbNameIndex(); - } + final FileSystemResource res = new FileSystemResource(context.getFileSystem(), RES_NAME_INDEX); + if (res.exists()) { + nameIndex = super.getNameIndex(); + } else { + // create db nameindex + nameIndex = createDbNameIndex(); } return nameIndex; - } catch (Exception e) { - IllegalStateException exception = - new IllegalStateException("Unable to create nsIndex"); - exception.initCause(e); - throw exception; + } catch (final Exception e) { + throw new IllegalStateException("Unable to create nsIndex", e); } } @@ -675,13 +673,13 @@ * @return a blob store * @throws Exception if an error occurs. */ - protected CloseableBLOBStore createLocalFSBlobStore(PMContext context) + protected CloseableBLOBStore createLocalFSBlobStore(final PMContext context) throws Exception { /** * store blob's in local file system in a sub directory * of the workspace home directory */ - LocalFileSystem blobFS = new LocalFileSystem(); + final LocalFileSystem blobFS = new LocalFileSystem(); blobFS.setRoot(new File(context.getHomeDir(), "blobs")); blobFS.init(); return new FSBlobStore(blobFS); @@ -732,10 +730,9 @@ protected Object[] getKey(NodeId id) { if (getStorageModel() == SM_BINARY_KEYS) { return new Object[] { id.getRawBytes() }; - } else { - return new Object[] { - id.getMostSignificantBits(), id.getLeastSignificantBits() }; } + return new Object[] { + id.getMostSignificantBits(), id.getLeastSignificantBits() }; } /** @@ -747,10 +744,10 @@ * @param before whether the other parameter should be before the uuid parameter * @return an Object array that represents the parameters */ - protected Object[] createParams(NodeId id, Object p, boolean before) { + protected Object[] createParams(final NodeId id, final Object p, final boolean before) { // Create the key - List key = new ArrayList(); + final List key = new ArrayList(2); if (getStorageModel() == SM_BINARY_KEYS) { key.add(id.getRawBytes()); } else { @@ -759,7 +756,7 @@ } // Create the parameters - List params = new ArrayList(); + final List params = new ArrayList(3); if (before) { params.add(p); params.addAll(key); @@ -774,7 +771,7 @@ /** * {@inheritDoc} */ - public synchronized List getAllNodeIds(NodeId bigger, int maxCount) + public synchronized List getAllNodeIds(final NodeId bigger, int maxCount) throws ItemStateException, RepositoryException { ResultSet rs = null; try { @@ -794,14 +791,14 @@ maxCount += 10; } rs = conHelper.exec(sql, keys, false, maxCount); - ArrayList result = new ArrayList(); + final ArrayList result = new ArrayList(); while ((maxCount == 0 || result.size() < maxCount) && rs.next()) { - NodeId current; + final NodeId current; if (getStorageModel() == SM_BINARY_KEYS) { current = new NodeId(rs.getBytes(1)); } else { - long high = rs.getLong(1); - long low = rs.getLong(2); + final long high = rs.getLong(1); + final long low = rs.getLong(2); current = new NodeId(high, low); if (lowId != null) { // skip the keys that are smaller or equal (see above, maxCount += 10) @@ -814,9 +811,9 @@ result.add(current); } return result; - } catch (SQLException e) { - String msg = "getAllNodeIds failed."; - 
log.error(msg, e); + } catch (final SQLException e) { + final String msg = "getAllNodeIds failed."; + logger.error(msg, e); throw new ItemStateException(msg, e); } finally { DbUtility.close(rs); @@ -846,14 +843,14 @@ maxCount += 10; } rs = conHelper.exec(sql, keys, false, maxCount); - Map result = new LinkedHashMap(maxCount); + final Map result = new LinkedHashMap(maxCount); while ((maxCount == 0 || result.size() < maxCount) && rs.next()) { NodeId current; if (getStorageModel() == SM_BINARY_KEYS) { current = new NodeId(rs.getBytes(1)); } else { - long high = rs.getLong(1); - long low = rs.getLong(2); + final long high = rs.getLong(1); + final long low = rs.getLong(2); current = new NodeId(high, low); } if (getStorageModel() == SM_LONGLONG_KEYS && lowId != null) { @@ -867,9 +864,9 @@ result.put(nodeInfo.getId(), nodeInfo); } return result; - } catch (SQLException e) { + } catch (final SQLException e) { String msg = "getAllNodeIds failed."; - log.error(msg, e); + logger.error(msg, e); throw new ItemStateException(msg, e); } finally { DbUtility.close(rs); @@ -880,27 +877,21 @@ * {@inheritDoc} */ @Override - protected NodePropBundle loadBundle(NodeId id) throws ItemStateException { + protected NodePropBundle loadBundle(final NodeId id) throws ItemStateException { + ResultSet rs = null; try { - ResultSet rs = - conHelper.exec(bundleSelectSQL, getKey(id), false, 0); - try { - if (rs != null && rs.next()) { - return readBundle(id, rs, 1); - } else { - return null; - } - } finally { - if (rs != null) { - rs.close(); - } - } - } catch (SQLException e) { + rs = conHelper.exec(bundleSelectSQL, getKey(id), false, 0); + return (rs.next()) ? readBundle(id, rs, 1) : null; + } catch (final SQLException e) { String msg = "failed to read bundle (stacktrace on DEBUG log level): " + id + ": " + e; - log.error(msg); - log.debug("failed to read bundle: " + id, e); + logger.error(msg); + logger.debug("failed to read bundle: " + id, e); throw new ItemStateException(msg, e); } + finally + { + DbUtility.close(rs); + } } /** @@ -914,53 +905,49 @@ * @return parsed bundle * @throws SQLException if the bundle can not be read or parsed */ - private NodePropBundle readBundle(NodeId id, ResultSet rs, int column) + private NodePropBundle readBundle(final NodeId id, final ResultSet rs, final int column) throws SQLException { + InputStream in = null; try { - InputStream in; if (rs.getMetaData().getColumnType(column) == Types.BLOB) { in = rs.getBlob(column).getBinaryStream(); } else { in = rs.getBinaryStream(column); } - try { - return binding.readBundle(in, id); - } finally { - in.close(); - } - } catch (IOException e) { - SQLException exception = - new SQLException("Failed to parse bundle " + id); - exception.initCause(e); - throw exception; + return binding.readBundle(in, id); + } catch (final IOException e) { + throw new SQLException("Failed to parse bundle " + id, e); } + finally + { + IOUtils.closeQuietly(in); + } } /** * {@inheritDoc} */ - protected synchronized void storeBundle(NodePropBundle bundle) throws ItemStateException { + protected synchronized void storeBundle(final NodePropBundle bundle) throws ItemStateException { try { - ByteArrayOutputStream out = - new ByteArrayOutputStream(INITIAL_BUFFER_SIZE); + final ByteArrayOutputStream out = new ByteArrayOutputStream(INITIAL_BUFFER_SIZE); binding.writeBundle(out, bundle); - String sql = bundle.isNew() ? bundleInsertSQL : bundleUpdateSQL; - Object[] params = createParams(bundle.getId(), out.toByteArray(), true); + final String sql = bundle.isNew() ? 
bundleInsertSQL : bundleUpdateSQL; + final Object[] params = createParams(bundle.getId(), out.toByteArray(), true); conHelper.update(sql, params); - } catch (Exception e) { - String msg; + } catch (final Exception e) { + String msg = "Failed to write bundle: "; if (isIntegrityConstraintViolation(e)) { // we should never get an integrity constraint violation here // other PMs may not be able to detect this and end up with // corrupted data - msg = "FATAL error while writing the bundle: " + bundle.getId(); - } else { - msg = "failed to write bundle: " + bundle.getId(); + msg = "FATAL error while writing the bundle: "; } - log.error(msg, e); + msg += bundle.getId(); + + logger.error(msg, e); throw new ItemStateException(msg, e); } } @@ -968,15 +955,15 @@ /** * {@inheritDoc} */ - protected synchronized void destroyBundle(NodePropBundle bundle) throws ItemStateException { + protected synchronized void destroyBundle(final NodePropBundle bundle) throws ItemStateException { try { conHelper.update(bundleDeleteSQL, getKey(bundle.getId())); - } catch (Exception e) { + } catch (final Exception e) { if (e instanceof NoSuchItemStateException) { throw (NoSuchItemStateException) e; } - String msg = "failed to delete bundle: " + bundle.getId(); - log.error(msg, e); + final String msg = "failed to delete bundle: " + bundle.getId(); + logger.error(msg, e); throw new ItemStateException(msg, e); } } @@ -984,7 +971,7 @@ /** * {@inheritDoc} */ - public synchronized NodeReferences loadReferencesTo(NodeId targetId) + public synchronized NodeReferences loadReferencesTo(final NodeId targetId) throws NoSuchItemStateException, ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); @@ -999,16 +986,16 @@ } in = rs.getBinaryStream(1); - NodeReferences refs = new NodeReferences(targetId); + final NodeReferences refs = new NodeReferences(targetId); Serializer.deserialize(refs, in); return refs; - } catch (Exception e) { + } catch (final Exception e) { if (e instanceof NoSuchItemStateException) { throw (NoSuchItemStateException) e; } - String msg = "failed to read references: " + targetId; - log.error(msg, e); + final String msg = "failed to read references: " + targetId; + logger.error(msg, e); throw new ItemStateException(msg, e); } finally { IOUtils.closeQuietly(in); @@ -1025,29 +1012,25 @@ * shared statement. If the method would not be synchronized, the shared * statement must be synchronized. */ - public synchronized void store(NodeReferences refs) throws ItemStateException { + public synchronized void store(final NodeReferences refs) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } // check if insert or update - boolean update = existsReferencesTo(refs.getTargetId()); - String sql = (update) ? nodeReferenceUpdateSQL : nodeReferenceInsertSQL; + final boolean update = existsReferencesTo(refs.getTargetId()); + final String sql = (update) ? 
nodeReferenceUpdateSQL : nodeReferenceInsertSQL; try { - ByteArrayOutputStream out = - new ByteArrayOutputStream(INITIAL_BUFFER_SIZE); + final ByteArrayOutputStream out = new ByteArrayOutputStream(INITIAL_BUFFER_SIZE); // serialize references Serializer.serialize(refs, out); - Object[] params = createParams(refs.getTargetId(), out.toByteArray(), true); + final Object[] params = createParams(refs.getTargetId(), out.toByteArray(), true); conHelper.exec(sql, params); - - // there's no need to close a ByteArrayOutputStream - //out.close(); - } catch (Exception e) { - String msg = "failed to write " + refs; - log.error(msg, e); + } catch (final Exception e) { + final String msg = "failed to write " + refs; + logger.error(msg, e); throw new ItemStateException(msg, e); } } @@ -1055,7 +1038,7 @@ /** * {@inheritDoc} */ - public synchronized void destroy(NodeReferences refs) throws ItemStateException { + public synchronized void destroy(final NodeReferences refs) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } @@ -1062,12 +1045,12 @@ try { conHelper.exec(nodeReferenceDeleteSQL, getKey(refs.getTargetId())); - } catch (Exception e) { + } catch (final Exception e) { if (e instanceof NoSuchItemStateException) { throw (NoSuchItemStateException) e; } - String msg = "failed to delete " + refs; - log.error(msg, e); + final String msg = "failed to delete " + refs; + logger.error(msg, e); throw new ItemStateException(msg, e); } } @@ -1075,7 +1058,7 @@ /** * {@inheritDoc} */ - public synchronized boolean existsReferencesTo(NodeId targetId) throws ItemStateException { + public synchronized boolean existsReferencesTo(final NodeId targetId) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } @@ -1086,10 +1069,9 @@ // a reference exists if the result has at least one entry return rs.next(); - } catch (Exception e) { - String msg = "failed to check existence of node references: " - + targetId; - log.error(msg, e); + } catch (final Exception e) { + final String msg = "failed to check existence of node references: " + targetId; + logger.error(msg, e); throw new ItemStateException(msg, e); } finally { DbUtility.close(rs); @@ -1175,12 +1157,12 @@ private FileSystem fs; - public FSBlobStore(FileSystem fs) { + public FSBlobStore(final FileSystem fs) { super(fs); this.fs = fs; } - public String createId(PropertyId id, int index) { + public String createId(final PropertyId id, final int index) { return buildBlobFilePath(null, id, index).toString(); } @@ -1231,9 +1213,8 @@ /** * {@inheritDoc} */ - public InputStream get(String blobId) throws Exception { + public InputStream get(final String blobId) throws Exception { ResultSet rs = null; - boolean close = true; try { rs = conHelper.exec(blobSelectSQL, new Object[]{blobId}, false, 0); if (!rs.next()) { @@ -1240,31 +1221,29 @@ throw new Exception("no such BLOB: " + blobId); } - InputStream in = rs.getBinaryStream(1); + final InputStream in = rs.getBinaryStream(1); if (in == null) { // some databases treat zero-length values as NULL; // return empty InputStream in such a case - return new ByteArrayInputStream(new byte[0]); + return NULL_STREAM; } // return an InputStream wrapper in order to close the ResultSet when the stream is closed - close = false; - final ResultSet rs2 = rs; - return new FilterInputStream(in) { + final ResultSet wrappedResultSet = rs; + rs = null; + return new ProxyInputStream(in) { public void close() throws IOException { try { in.close(); } finally { 
// now it's safe to close ResultSet - DbUtility.close(rs2); + DbUtility.close(wrappedResultSet); } } }; } finally { - if (close) { - DbUtility.close(rs); - } + DbUtility.close(rs); } } @@ -1271,7 +1250,7 @@ /** * {@inheritDoc} */ - public synchronized void put(String blobId, InputStream in, long size) + public synchronized void put(final String blobId, final InputStream in, final long size) throws Exception { ResultSet rs = null; boolean exists; @@ -1282,8 +1261,8 @@ } finally { DbUtility.close(rs); } - String sql = (exists) ? blobUpdateSQL : blobInsertSQL; - Object[] params = new Object[]{new StreamWrapper(in, size), blobId}; + final String sql = (exists) ? blobUpdateSQL : blobInsertSQL; + final Object[] params = new Object[]{new StreamWrapper(in, size), blobId}; conHelper.exec(sql, params); } @@ -1290,7 +1269,7 @@ /** * {@inheritDoc} */ - public synchronized boolean remove(String blobId) throws Exception { + public synchronized boolean remove(final String blobId) throws Exception { return conHelper.update(blobDeleteSQL, new Object[]{blobId}) == 1; } Index: src/main/java/org/apache/jackrabbit/core/RepositoryImpl.java =================================================================== --- src/main/java/org/apache/jackrabbit/core/RepositoryImpl.java (revision 1630634) +++ src/main/java/org/apache/jackrabbit/core/RepositoryImpl.java (working copy) @@ -124,7 +124,7 @@ * A RepositoryImpl ... */ public class RepositoryImpl extends AbstractRepository - implements javax.jcr.Repository, JackrabbitRepository, SessionListener, WorkspaceListener { + implements JackrabbitRepository, SessionListener, WorkspaceListener { private static Logger log = LoggerFactory.getLogger(RepositoryImpl.class); @@ -236,7 +236,7 @@ private final CacheManager cacheMgr = new CacheManager(); /** - * Chanel for posting create workspace messages. + * Channel for posting create workspace messages. */ private WorkspaceEventChannel createWorkspaceEventChannel; @@ -248,7 +248,7 @@ * instance running on the given configuration * or another error occurs. */ - protected RepositoryImpl(RepositoryConfig repConfig) throws RepositoryException { + protected RepositoryImpl(final RepositoryConfig repConfig) throws RepositoryException { // Acquire a lock on the repository home repLock = repConfig.getRepositoryLockMechanism(); repLock.init(repConfig.getHomeDir()); @@ -568,12 +568,12 @@ */ private NodeId loadRootNodeId() throws RepositoryException { try { - FileSystemResource uuidFile = new FileSystemResource( + final FileSystemResource uuidFile = new FileSystemResource( context.getFileSystem(), "/meta/rootUUID"); if (uuidFile.exists()) { // Load uuid of the repository's root node. It is stored in // text format (36 characters) for better readability. - InputStream in = uuidFile.getInputStream(); + final InputStream in = uuidFile.getInputStream(); try { return NodeId.valueOf(IOUtils.toString(in, "US-ASCII")); } finally { @@ -585,9 +585,10 @@ // hard-coded uuid makes it easier to copy/move entire // workspaces from one repository instance to another. 
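// Illustrative sketch only, not part of the patch: the DbBlobStore.get() hunk earlier in this
// file hands a BLOB stream to the caller while the ResultSet is still open, and relies on an
// anonymous ProxyInputStream subclass to release the ResultSet when the caller closes the
// stream. closeResultSetQuietly() is a hypothetical stand-in for DbUtility.close().
import java.io.IOException;
import java.io.InputStream;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.commons.io.input.ProxyInputStream;

public class ResultSetBackedStream {

    static InputStream wrap(final InputStream blobStream, final ResultSet rs) {
        return new ProxyInputStream(blobStream) {
            @Override
            public void close() throws IOException {
                try {
                    super.close();              // closes the underlying BLOB stream
                } finally {
                    closeResultSetQuietly(rs);  // now it is safe to release the ResultSet
                }
            }
        };
    }

    private static void closeResultSetQuietly(final ResultSet rs) {
        try {
            if (rs != null) {
                rs.close();
            }
        } catch (final SQLException ignored) {
            // best-effort cleanup
        }
    }
}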
uuidFile.makeParentDirs(); - OutputStream out = uuidFile.getOutputStream(); + final OutputStream out = uuidFile.getOutputStream(); try { out.write(ROOT_NODE_ID.toString().getBytes("US-ASCII")); + out.flush(); return ROOT_NODE_ID; } finally { IOUtils.closeQuietly(out); @@ -1318,22 +1319,24 @@ * @throws RepositoryException if the properties can not be loaded */ protected Properties getCustomRepositoryDescriptors() throws RepositoryException { - InputStream in = RepositoryImpl.class.getResourceAsStream(PROPERTIES_RESOURCE); - if (in != null) { - try { - Properties props = new Properties(); - props.load(in); - return props; - } catch (IOException e) { - String msg = "Failed to load customized repository properties: " + e.toString(); - log.error(msg); - throw new RepositoryException(msg, e); - } finally { - IOUtils.closeQuietly(in); - } - } else { + final InputStream in = RepositoryImpl.class.getResourceAsStream(PROPERTIES_RESOURCE); + + if (in == null) + { return null; } + + try { + final Properties props = new Properties(); + props.load(in); + return props; + } catch (final IOException e) { + String msg = "Failed to load customized repository properties: " + e.toString(); + log.error(msg); + throw new RepositoryException(msg, e); + } finally { + IOUtils.closeQuietly(in); + } } protected void setDescriptor(String desc, String value) { @@ -2129,9 +2132,9 @@ */ log.debug("initializing SearchManager..."); - long t0 = System.currentTimeMillis(); + final long t0 = System.currentTimeMillis(); // register SearchManager as event listener - SearchManager searchMgr = getSearchManager(); + final SearchManager searchMgr = getSearchManager(); if (searchMgr != null) { wsp.getObservationManager().addEventListener(searchMgr, Event.NODE_ADDED | Event.NODE_REMOVED @@ -2382,9 +2385,9 @@ } } // get names of workspaces - Set wspNames; + final Set wspNames = new HashSet(0); synchronized (wspInfos) { - wspNames = new HashSet(wspInfos.keySet()); + wspNames.addAll(wspInfos.keySet()); } // remove default workspace (will never be shutdown when idle) wspNames.remove(repConfig.getDefaultWorkspaceName()); Index: src/main/java/org/apache/jackrabbit/core/security/JackrabbitSecurityManager.java =================================================================== --- src/main/java/org/apache/jackrabbit/core/security/JackrabbitSecurityManager.java (revision 1630634) +++ src/main/java/org/apache/jackrabbit/core/security/JackrabbitSecurityManager.java (working copy) @@ -16,6 +16,8 @@ */ package org.apache.jackrabbit.core.security; +import java.io.Closeable; + import org.apache.jackrabbit.api.security.principal.PrincipalManager; import org.apache.jackrabbit.api.security.user.UserManager; import org.apache.jackrabbit.core.security.authentication.AuthContext; @@ -29,7 +31,7 @@ /** * JackrabbitSecurityManager... 
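// Illustrative sketch only, not part of the patch: the getCustomRepositoryDescriptors() hunk
// above returns early when the classpath resource is missing and closes the stream in finally.
// The same shape written with try-with-resources:
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class ResourcePropertiesDemo {

    static Properties load(final String resource) throws IOException {
        final InputStream in = ResourcePropertiesDemo.class.getResourceAsStream(resource);
        if (in == null) {
            return null; // no customized properties on the classpath
        }
        try (InputStream stream = in) {
            final Properties props = new Properties();
            props.load(stream);
            return props;
        }
    }
}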
*/ -public interface JackrabbitSecurityManager { +public interface JackrabbitSecurityManager extends Closeable{ void init(Repository repository, Session systemSession) throws RepositoryException; Index: src/main/java/org/apache/jackrabbit/core/value/BLOBInTempFile.java =================================================================== --- src/main/java/org/apache/jackrabbit/core/value/BLOBInTempFile.java (revision 1630634) +++ src/main/java/org/apache/jackrabbit/core/value/BLOBInTempFile.java (working copy) @@ -52,25 +52,22 @@ * @param temp * @throws RepositoryException */ - private BLOBInTempFile(InputStream in, boolean temp) throws RepositoryException { - this.temp = temp; + private BLOBInTempFile(final InputStream in, final boolean temp) throws RepositoryException { + + final TransientFileFactory fileFactory = TransientFileFactory.getInstance(); OutputStream out = null; + try { - TransientFileFactory fileFactory = TransientFileFactory.getInstance(); - file = fileFactory.createTransientFile("bin", null, null); + this.file = fileFactory.createTransientFile("bin", null, null); out = new FileOutputStream(file); - length = IOUtils.copyLarge(in, out); - } catch (IOException e) { + + this.temp = temp; + this.length = IOUtils.copyLarge(in, out); + } catch (final IOException e) { throw new RepositoryException("Error creating temporary file", e); } finally { + IOUtils.closeQuietly(out); IOUtils.closeQuietly(in); - if (out != null) { - try { - out.close(); - } catch (IOException e) { - throw new RepositoryException("Error creating temporary file", e); - } - } } } @@ -80,7 +77,7 @@ * @param file * @param temp */ - private BLOBInTempFile(File file, boolean temp) { + private BLOBInTempFile(final File file, final boolean temp) { this.file = file; this.length = file.length(); this.temp = temp; @@ -92,12 +89,12 @@ * @param in the stream * @param temp */ - static BLOBFileValue getInstance(InputStream in, boolean temp) throws RepositoryException { + static BLOBFileValue getInstance(final InputStream in, final boolean temp) throws RepositoryException { + final BLOBInTempFile tempFile = new BLOBInTempFile(in, temp); if (temp) { - return new RefCountingBLOBFileValue(new BLOBInTempFile(in, temp)); - } else { - return new BLOBInTempFile(in, temp); + return new RefCountingBLOBFileValue(tempFile); } + return tempFile; } /** @@ -105,11 +102,12 @@ * * @param file the file */ - static BLOBInTempFile getInstance(File file, boolean temp) { + static BLOBInTempFile getInstance(final File file, final boolean temp) { return new BLOBInTempFile(file, temp); } - void delete(boolean pruneEmptyParentDirs) { + void delete(final boolean pruneEmptyParentDirs) + { file.delete(); length = -1; file = null; @@ -124,9 +122,8 @@ BLOBFileValue copy() throws RepositoryException { if (temp) { return BLOBInTempFile.getInstance(getStream(), temp); - } else { - return BLOBInTempFile.getInstance(file, temp); } + return BLOBInTempFile.getInstance(file, temp); } public long getSize() { @@ -136,7 +133,7 @@ public InputStream getStream() throws IllegalStateException, RepositoryException { try { return new LazyFileInputStream(file); - } catch (FileNotFoundException fnfe) { + } catch (final FileNotFoundException fnfe) { throw new RepositoryException("file backing binary value not found", fnfe); } } @@ -167,8 +164,8 @@ return 0; } - public int read(byte[] b, long position) throws IOException, RepositoryException { - RandomAccessFile raf = new RandomAccessFile(file, "r"); + public int read(final byte[] b, final long position) throws IOException, 
RepositoryException { + final RandomAccessFile raf = new RandomAccessFile(this.file, "r"); try { raf.seek(position); return raf.read(b);
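// Illustrative sketch only, not part of the patch: the positional read that
// BLOBInTempFile.read() above performs with an explicit RandomAccessFile, written with
// try-with-resources so the file handle is released even if seek() or read() throws.
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;

public class PositionalReadDemo {

    static int readAt(final File file, final byte[] buffer, final long position) throws IOException {
        try (RandomAccessFile raf = new RandomAccessFile(file, "r")) {
            raf.seek(position);
            return raf.read(buffer); // returns -1 at end of file
        }
    }
}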