diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterOutputStream.java
index 6d46fa8..0abe7a2 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterOutputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterOutputStream.java
@@ -22,7 +22,6 @@ import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
 import org.apache.hadoop.hbase.io.util.StreamUtils;
-import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
@@ -41,27 +40,51 @@
 public class ByteBufferWriterOutputStream extends OutputStream
     implements ByteBufferWriter {
 
-  private static final int TEMP_BUF_LENGTH = 4 * 1024;
+  private static final int DEFAULT_BUFFER_SIZE = 4096;
+
   private final OutputStream os;
-  private byte[] tempBuf = null;
+  private final int bufSize;
+  private byte[] buf;
 
   public ByteBufferWriterOutputStream(OutputStream os) {
+    this(os, DEFAULT_BUFFER_SIZE);
+  }
+
+  public ByteBufferWriterOutputStream(OutputStream os, int size) {
     this.os = os;
+    this.bufSize = size;
+    this.buf = null;
   }
 
+  /**
+   * Writes len bytes from the specified ByteBuffer starting at offset off to
+   * this OutputStream. If b is null, a NullPointerException is thrown. If off
+   * is negative or larger than the ByteBuffer then an IllegalArgumentException
+   * is thrown. If len is greater than the length of the ByteBuffer, then a
+   * BufferUnderflowException is thrown. This method does not change the
+   * position of the ByteBuffer.
+   *
+   * @param b the ByteBuffer
+   * @param off the start offset in the data
+   * @param len the number of bytes to write
+   * @throws IOException
+   *           if an I/O error occurs. In particular, an IOException is thrown
+   *           if the output stream is closed.
+   */
   @Override
   public void write(ByteBuffer b, int off, int len) throws IOException {
-    byte[] buf = null;
-    if (len > TEMP_BUF_LENGTH) {
-      buf = new byte[len];
-    } else {
-      if (this.tempBuf == null) {
-        this.tempBuf = new byte[TEMP_BUF_LENGTH];
-      }
-      buf = this.tempBuf;
+    ByteBuffer c = (ByteBuffer) b.duplicate().position(off);
+    // Lazily load in the event that this version of 'write' is not invoked
+    if (this.buf == null) {
+      this.buf = new byte[this.bufSize];
+    }
+    int totalCopied = 0;
+    while (totalCopied < len) {
+      int bytesToCopy = Math.min((len - totalCopied), this.bufSize);
+      c.get(buf, 0, bytesToCopy);
+      os.write(buf, 0, bytesToCopy);
+      totalCopied += bytesToCopy;
     }
-    ByteBufferUtils.copyFromBufferToArray(buf, b, off, 0, len);
-    this.os.write(buf, 0, len);
   }
 
   @Override
@@ -75,7 +98,7 @@
   }
 
   @Override
-  public void write(byte b[], int off, int len) throws IOException {
+  public void write(byte[] b, int off, int len) throws IOException {
     this.os.write(b, off, len);
   }
 
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferWriterOutputStream.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferWriterOutputStream.java
new file mode 100644
index 0000000..62fcb1a
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferWriterOutputStream.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io;
+
+import java.io.IOException;
+import java.nio.BufferUnderflowException;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.Random;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.IOTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ IOTests.class, SmallTests.class })
+public class TestByteBufferWriterOutputStream {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+      HBaseClassTestRule.forClass(TestByteBufferWriterOutputStream.class);
+
+  private static final Random RANDOM = new Random(31L);
+
+  // Default buffer size is 4Kb = pick number that does not fall on a boundary
+  private static final int TEST_BUF_SIZE = 16512;
+
+  private static byte[] testBuf;
+
+  @BeforeClass
+  public static void setup() {
+    testBuf = new byte[TEST_BUF_SIZE];
+    RANDOM.nextBytes(testBuf);
+  }
+
+  @Test
+  public void testWriteByteBuffer() throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    ByteBufferWriterOutputStream os = new ByteBufferWriterOutputStream(baos);
+
+    ByteBuffer bb = ByteBuffer.wrap(testBuf);
+    os.write(bb, 0, bb.remaining());
+    os.close();
+
+    Assert.assertArrayEquals(testBuf, baos.toByteArray());
+    Assert.assertEquals(0, bb.position());
+  }
+
+  @Test
+  public void testWriteByteBufferSlice() throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    ByteBufferWriterOutputStream os = new ByteBufferWriterOutputStream(baos);
+
+    byte[] expected = Arrays.copyOfRange(testBuf, 1, 6);
+
+    ByteBuffer bb = ByteBuffer.wrap(testBuf);
+    os.write(bb, 1, 5);
+    os.close();
+
+    Assert.assertArrayEquals(expected, baos.toByteArray());
+    Assert.assertEquals(0, bb.position());
+  }
+
+  @Test(expected = NullPointerException.class)
+  @SuppressWarnings("resource")
+  public void testWriteByteBufferNull() throws IOException {
+    new ByteBufferWriterOutputStream(null).write((ByteBuffer)null, 0, 0);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  @SuppressWarnings("resource")
+  public void testWriteByteBufferArrayIndexOutOfBoundsNegative() throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    ByteBufferWriterOutputStream os = new ByteBufferWriterOutputStream(baos);
+
+    ByteBuffer bb = ByteBuffer.wrap(testBuf);
+    os.write(bb, -1, bb.remaining());
+  }
+
+  @Test(expected = BufferUnderflowException.class)
+  @SuppressWarnings("resource")
+  public void testWriteByteBufferArrayIndexOutOfBoundsBeyond() throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    ByteBufferWriterOutputStream os = new ByteBufferWriterOutputStream(baos);
+
+    ByteBuffer bb = ByteBuffer.wrap(testBuf);
+    os.write(bb, 0, bb.remaining() + 1);
+  }
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
index 6dce132..5a03e6c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
@@ -175,7 +175,9 @@
     in.mark(pblen);
     byte [] pbuf = new byte[pblen];
     int read = in.read(pbuf);
-    if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);
+    if (read != pblen) {
+      throw new IOException("read=" + read + ", wanted=" + pblen);
+    }
     // WATCHOUT! Return in middle of function!!!
     if (ProtobufUtil.isPBMagicPrefix(pbuf)) return convert(FSProtos.Reference.parseFrom(in));
     // Else presume Writables. Need to reset the stream since it didn't start w/ pb.