diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java
index 7bcbe05..c0fb6bf 100755
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java
@@ -294,14 +294,21 @@ public class RDBDocumentStore implements DocumentStore {
 
     @Override
     public List createOrUpdate(Collection collection, List updateOps) {
-        List result = null;
+        Map results = new LinkedHashMap();
         Map operationsToCover = new LinkedHashMap();
+        Set duplicates = new HashSet();
 
         for (UpdateOp updateOp : updateOps) {
             UpdateUtils.assertUnconditional(updateOp);
-            UpdateOp clone = updateOp.copy();
-            addUpdateCounters(clone);
-            operationsToCover.put(clone.getId(), clone);
+            if (operationsToCover.containsKey(updateOp.getId())) {
+                duplicates.add(updateOp);
+                results.put(updateOp, null);
+            } else {
+                UpdateOp clone = updateOp.copy();
+                addUpdateCounters(clone);
+                operationsToCover.put(clone.getId(), clone);
+                results.put(clone, null);
+            }
         }
 
         Map oldDocs = new HashMap();
@@ -327,28 +334,23 @@
             }
 
             for (List partition : partition(newArrayList(operationsToCover.values()), CHUNKSIZE)) {
-                Set successfulUpdates = bulkUpdate(collection, partition, oldDocs, upsert);
-                operationsToCover.keySet().removeAll(successfulUpdates);
+                Map successfulUpdates = bulkUpdate(collection, partition, oldDocs, upsert);
+                results.putAll(successfulUpdates);
+                operationsToCover.values().removeAll(successfulUpdates.keySet());
             }
         }
 
         // if there are some changes left, we'll apply them one after another
         for (UpdateOp updateOp : updateOps) {
-            if (operationsToCover.remove(updateOp.getId()) != null) {
-                // work on the original update operation
-                T oldDoc = createOrUpdate(collection, updateOp.copy());
-                if (oldDoc != null) {
-                    oldDocs.put(oldDoc.getId(), oldDoc);
-                }
+            UpdateOp conflictedOp = operationsToCover.remove(updateOp.getId());
+            if (conflictedOp != null) {
+                results.put(conflictedOp, createOrUpdate(collection, updateOp));
+            } else if (duplicates.contains(updateOp)) {
+                results.put(updateOp, createOrUpdate(collection, updateOp));
             }
         }
 
-        result = new ArrayList(updateOps.size());
-        for (UpdateOp op : updateOps) {
-            result.add(oldDocs.get(op.getId()));
-        }
-
-        return result;
+        return new ArrayList(results.values());
     }
 
     private Map readDocumentCached(Collection collection, Set keys) {
@@ -401,7 +403,7 @@
         return result;
     }
 
-    private Set bulkUpdate(Collection collection, List updates, Map oldDocs, boolean upsert) {
+    private Map bulkUpdate(Collection collection, List updates, Map oldDocs, boolean upsert) {
         Set missingDocs = new HashSet();
         for (UpdateOp op : updates) {
             if (!oldDocs.containsKey(op.getId())) {
@@ -451,7 +453,13 @@
                 }
             }
 
-            return successfulUpdates;
+            Map result = new HashMap();
+            for (UpdateOp op : updates) {
+                if (successfulUpdates.contains(op.getId())) {
+                    result.put(op, oldDocs.get(op.getId()));
+                }
+            }
+            return result;
         } catch (SQLException ex) {
             this.ch.rollbackConnection(connection);
             throw new DocumentStoreException(ex);
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BulkCreateOrUpdateTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BulkCreateOrUpdateTest.java
index c630fb7..8039c3d 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BulkCreateOrUpdateTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BulkCreateOrUpdateTest.java
@@ -153,4 +153,41 @@ public class BulkCreateOrUpdateTest extends AbstractDocumentStoreTest {
             assertEquals("The document hasn't been updated", 200l, newDoc.get("prop"));
         }
     }
+
+    /**
+     * This method adds a few updateOperations modifying the same document.
+     */
+    @Test
+    public void testUpdateSameDocument() {
+        final int amount = 5;
+        List updates = new ArrayList(amount);
+        String id = this.getClass().getName() + ".testUpdateSameDocument";
+        removeMe.add(id);
+
+        for (int i = 0; i < amount; i++) {
+            UpdateOp up = new UpdateOp(id, true);
+            up.set("_id", id);
+            up.set("update_id", i);
+            up.set("prop_" + i, 100);
+            updates.add(up);
+        }
+
+        List docs = ds.createOrUpdate(Collection.NODES, updates);
+        assertEquals(amount, docs.size());
+        assertNull("The old value should be null for the first update", docs.get(0));
+        for (int i = 1; i < amount; i++) {
+            if (docs.get(i).getModCount() != null) {
+                assertEquals("The mod count is not correct", Long.valueOf(i), docs.get(i).getModCount());
+            }
+            assertEquals("The old value is not correct", Long.valueOf(i-1), docs.get(i).get("update_id"));
+        }
+
+        NodeDocument newDoc = ds.find(Collection.NODES, id);
+        if (newDoc.getModCount() != null) {
+            assertEquals("The final mod count is not correct", Long.valueOf(5), newDoc.getModCount());
+        }
+        for (int i = 0; i < amount; i++) {
+            assertEquals("The value is not correct", 100l, newDoc.get("prop_" + i));
+        }
+    }
 }