diff --git oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tooling/CheckInvalidRepositoryTest.java oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tooling/CheckInvalidRepositoryTest.java new file mode 100644 index 0000000..ed2ed8e --- /dev/null +++ oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tooling/CheckInvalidRepositoryTest.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.jackrabbit.oak.segment.file.tooling; + +import java.io.File; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.apache.jackrabbit.oak.segment.tool.Check; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Test; + +import com.google.common.collect.Lists; + +/** + * Tests for {@link CheckCommand} assuming an invalid repository. 
+ */ +public class CheckInvalidRepositoryTest extends CheckRepositoryTestBase { + + @Before + public void setup() throws Exception { + super.setup(); + super.addInvalidRevision(); + } + + @Ignore + @Test + public void testInvalidRevision() { + StringWriter strOut = new StringWriter(); + StringWriter strErr = new StringWriter(); + + PrintWriter outWriter = new PrintWriter(strOut, true); + PrintWriter errWriter = new PrintWriter(strErr, true); + + Set<String> filterPaths = new LinkedHashSet<>(); + filterPaths.add("/"); + + Check.builder() + .withPath(new File(temporaryFolder.getRoot().getAbsolutePath())) + .withJournal("journal.log") + .withDebugInterval(Long.MAX_VALUE) + .withCheckBinaries(false) + .withFilterPaths(filterPaths) + .withOutWriter(outWriter) + .withErrWriter(errWriter) + .build() + .run(); + + outWriter.close(); + errWriter.close(); + + assertExpectedOutput(strOut.toString(), Lists.newArrayList("Broken revision", + "Checked 7 nodes and 15 properties", "Found latest good revision", "Searched through 2 revisions")); + assertExpectedOutput(strErr.toString(), Lists.newArrayList("Error while traversing /z")); + } +} diff --git oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tooling/CheckRepositoryTestBase.java oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tooling/CheckRepositoryTestBase.java new file mode 100644 index 0000000..d6cbd80 --- /dev/null +++ oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tooling/CheckRepositoryTestBase.java @@ -0,0 +1,181 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.jackrabbit.oak.segment.file.tooling; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.charset.Charset; +import java.util.List; +import java.util.Random; + +import org.apache.jackrabbit.oak.api.CommitFailedException; +import org.apache.jackrabbit.oak.segment.RecordType; +import org.apache.jackrabbit.oak.segment.SegmentNodeState; +import org.apache.jackrabbit.oak.segment.SegmentNodeStore; +import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders; +import org.apache.jackrabbit.oak.segment.file.FileStore; +import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder; +import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException; +import org.apache.jackrabbit.oak.spi.commit.CommitInfo; +import org.apache.jackrabbit.oak.spi.commit.EmptyHook; +import org.apache.jackrabbit.oak.spi.state.NodeBuilder; +import org.apache.jackrabbit.oak.spi.state.NodeState; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Rule; +import org.junit.rules.TemporaryFolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class CheckRepositoryTestBase { + private static final int HEADER_SIZE = 512; + + private static final int MAX_SEGMENT_SIZE = 262144; + + private static final Logger log = LoggerFactory.getLogger(CheckRepositoryTestBase.class); + + @Rule + public final TemporaryFolder temporaryFolder = new TemporaryFolder(new 
File("target")); + + @Before + public void setup() throws Exception { + addValidRevision(); + } + + protected void addValidRevision() throws InvalidFileStoreVersionException, IOException, CommitFailedException { + FileStore fileStore = FileStoreBuilder.fileStoreBuilder(temporaryFolder.getRoot()).withMaxFileSize(256) + .withSegmentCacheSize(64).build(); + + SegmentNodeStore nodeStore = SegmentNodeStoreBuilders.builder(fileStore).build(); + NodeBuilder builder = nodeStore.getRoot().builder(); + + addChildWithBlobProperties(nodeStore, builder, "a", 5); + addChildWithBlobProperties(nodeStore, builder, "b", 10); + addChildWithBlobProperties(nodeStore, builder, "c", 15); + + addChildWithProperties(nodeStore, builder, "d", 5); + addChildWithProperties(nodeStore, builder, "e", 5); + addChildWithProperties(nodeStore, builder, "f", 5); + + nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY); + fileStore.close(); + } + + protected void addInvalidRevision() throws InvalidFileStoreVersionException, IOException, CommitFailedException { + FileStore fileStore = FileStoreBuilder.fileStoreBuilder(temporaryFolder.getRoot()).withMaxFileSize(256) + .withSegmentCacheSize(64).build(); + + SegmentNodeStore nodeStore = SegmentNodeStoreBuilders.builder(fileStore).build(); + NodeBuilder builder = nodeStore.getRoot().builder(); + + addChildWithBlobProperties(nodeStore, builder, "z", 5); + + NodeState after = nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY); + + // get record number to corrupt (NODE record for "z") + SegmentNodeState child = (SegmentNodeState) after.getChildNode("z"); + int recordNumber = child.getRecordId().getRecordNumber(); + fileStore.close(); + + //since the filestore was closed after writing the first revision, we're always dealing with the 2nd tar file + RandomAccessFile file = new RandomAccessFile(new File(temporaryFolder.getRoot(),"data00001a.tar"), "rw"); + + // read segment header + ByteBuffer header = ByteBuffer.allocate(HEADER_SIZE); + 
file.readFully(header.array()); + + // read segment size from header + byte[] segmentSizeBytes = new byte[11]; + System.arraycopy(header.array(), 124, segmentSizeBytes, 0, 11); + int size = Integer.parseInt(new String(segmentSizeBytes, Charset.forName("UTF-8")), 8); + System.out.println("SegmentSize from header: " + size); + + // read actual segment + ByteBuffer segmentBytes = ByteBuffer.allocate(size); + file.readFully(segmentBytes.array()); + + int segmentRefs = segmentBytes.getInt(14); + System.out.println("Segment refs: " + segmentRefs); + + int recNo = segmentBytes.getInt(18); + System.out.println("Record numbers: " + recNo); + + // read the header for our record + int skip = 32 + segmentRefs * 16 + recordNumber * 9; + int number = segmentBytes.getInt(skip); + byte type = segmentBytes.get(skip + 4); + int offset = segmentBytes.getInt(skip + 4 + 1); + + Assert.assertEquals(recordNumber, number); + Assert.assertEquals(RecordType.NODE.ordinal(), type); + + // read the offset of previous record to derive length of our record + int prevSkip = 32 + segmentRefs * 16 + (recordNumber - 1) * 9; + int prevOffset = segmentBytes.getInt(prevSkip + 4 + 1); + + int length = prevOffset - offset; + + int realOffset = size - (MAX_SEGMENT_SIZE - offset); + + // write random bytes inside the NODE record to corrupt it + Random r = new Random(10); + byte[] bogusData = new byte[length]; + r.nextBytes(bogusData); + file.seek(HEADER_SIZE + realOffset); + file.write(bogusData); + + file.close(); + } + + protected static void assertExpectedOutput(String message, List<String> assertMessages) { + log.info("Assert message: {}", assertMessages); + log.info("Message logged: {}", message); + + for (String msg : assertMessages) { + Assert.assertTrue(message.contains(msg)); + } + } + + protected static void addChildWithBlobProperties(SegmentNodeStore nodeStore, NodeBuilder builder, String childName, + int propCount) throws IOException { + NodeBuilder child = builder.child(childName); + for (int i = 0; i 
< propCount; i++) { + child.setProperty(childName + i, nodeStore.createBlob(randomStream(i, 2000))); + } + } + + protected static void addChildWithProperties(SegmentNodeStore nodeStore, NodeBuilder builder, String childName, + int propCount) throws IOException { + NodeBuilder child = builder.child(childName); + for (int i = 0; i < propCount; i++) { + child.setProperty(childName + i, childName + i); + } + } + + protected static InputStream randomStream(int seed, int size) { + Random r = new Random(seed); + byte[] data = new byte[size]; + r.nextBytes(data); + return new ByteArrayInputStream(data); + } +} diff --git oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tooling/CheckValidRepositoryTest.java oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tooling/CheckValidRepositoryTest.java index 3e04bb4..a751a2b 100644 --- oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tooling/CheckValidRepositoryTest.java +++ oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/file/tooling/CheckValidRepositoryTest.java @@ -18,66 +18,22 @@ */ package org.apache.jackrabbit.oak.segment.file.tooling; -import java.io.ByteArrayInputStream; import java.io.File; -import java.io.IOException; -import java.io.InputStream; import java.io.PrintWriter; import java.io.StringWriter; import java.util.LinkedHashSet; -import java.util.List; -import java.util.Random; import java.util.Set; -import org.apache.jackrabbit.oak.segment.SegmentNodeStore; -import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders; -import org.apache.jackrabbit.oak.segment.file.FileStore; -import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder; import org.apache.jackrabbit.oak.segment.tool.Check; -import org.apache.jackrabbit.oak.spi.commit.CommitInfo; -import org.apache.jackrabbit.oak.spi.commit.EmptyHook; -import org.apache.jackrabbit.oak.spi.state.NodeBuilder; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; 
import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; /** - * Tests for {@link CheckCommand} + * Tests for {@link CheckCommand} assuming a valid repository. */ -public class CheckValidRepositoryTest { - private static final Logger log = LoggerFactory.getLogger(CheckValidRepositoryTest.class); - - @Rule - public final TemporaryFolder temporaryFolder = new TemporaryFolder(new File("target")); +public class CheckValidRepositoryTest extends CheckRepositoryTestBase { - @Before - public void setup() throws Exception { - FileStore fileStore = FileStoreBuilder.fileStoreBuilder(temporaryFolder.getRoot()) - .withMaxFileSize(256) - .withSegmentCacheSize(64) - .build(); - - SegmentNodeStore nodeStore = SegmentNodeStoreBuilders.builder(fileStore).build(); - NodeBuilder builder = nodeStore.getRoot().builder(); - - addChildWithBlobProperties(nodeStore, builder, "a", 5); - addChildWithBlobProperties(nodeStore, builder, "b", 10); - addChildWithBlobProperties(nodeStore, builder, "c", 15); - - addChildWithProperties(nodeStore, builder, "d", 5); - addChildWithProperties(nodeStore, builder, "e", 5); - addChildWithProperties(nodeStore, builder, "f", 5); - - nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY); - fileStore.close(); - } - @Test public void testSuccessfulFullCheckWithBinaryTraversal() throws Exception { StringWriter strOut = new StringWriter(); @@ -266,37 +222,4 @@ public class CheckValidRepositoryTest { assertExpectedOutput(strOut.toString(), Lists.newArrayList("Broken revision", "Checked 2 nodes and 10 properties", "No good revision found")); assertExpectedOutput(strErr.toString(), Lists.newArrayList("Invalid path: /g")); } - - private static void assertExpectedOutput(String message, List assertMessages) { - log.info("Assert message: {}", assertMessages); - log.info("Message logged: {}", message); - - - for (String msg : assertMessages) { - 
Assert.assertTrue(message.contains(msg)); - } - } - - private static void addChildWithBlobProperties(SegmentNodeStore nodeStore, NodeBuilder builder, String childName, - int propCount) throws IOException { - NodeBuilder child = builder.child(childName); - for (int i = 0; i < propCount; i++) { - child.setProperty(childName + i, nodeStore.createBlob(randomStream(i, 2000))); - } - } - - private static void addChildWithProperties(SegmentNodeStore nodeStore, NodeBuilder builder, String childName, - int propCount) throws IOException { - NodeBuilder child = builder.child(childName); - for (int i = 0; i < propCount; i++) { - child.setProperty(childName + i, childName + i); - } - } - - private static InputStream randomStream(int seed, int size) { - Random r = new Random(seed); - byte[] data = new byte[size]; - r.nextBytes(data); - return new ByteArrayInputStream(data); - } }