 .../hadoop/hbase/io/TagCompressionContext.java     | 32 +-------
 .../apache/hadoop/hbase/io/util/Dictionary.java    | 52 +++++++++++++
 .../hbase/regionserver/wal/SecureWALCellCodec.java | 38 +++++----
 .../hbase/regionserver/wal/WALCellCodec.java       | 90 ++++++++++++++--------
 .../wal/TestWALCellCodecWithCompression.java       | 40 ++++++++--
 .../hadoop/hbase/wal/TestWALReaderOnSecureWAL.java | 31 ++++++--
 6 files changed, 194 insertions(+), 89 deletions(-)

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java
index 278dfc4..fea2f0c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java
@@ -70,7 +70,7 @@ public class TagCompressionContext {
     while (pos < endOffset) {
       int tagLen = Bytes.readAsInt(in, pos, Tag.TAG_LENGTH_SIZE);
       pos += Tag.TAG_LENGTH_SIZE;
-      write(in, pos, tagLen, out);
+      Dictionary.write(out, in, pos, tagLen, tagDict);
       pos += tagLen;
     }
   }
@@ -94,7 +94,7 @@ public class TagCompressionContext {
     while (pos < endOffset) {
       int tagLen = ByteBufferUtils.readAsInt(in, pos, Tag.TAG_LENGTH_SIZE);
       pos += Tag.TAG_LENGTH_SIZE;
-      write(in, pos, tagLen, out);
+      Dictionary.write(out, in, pos, tagLen, tagDict);
       pos += tagLen;
     }
   }
@@ -185,32 +185,4 @@ public class TagCompressionContext {
       dest.put(tagBuf);
     }
   }
-
-  private void write(byte[] data, int offset, int length, OutputStream out) throws IOException {
-    short dictIdx = Dictionary.NOT_IN_DICTIONARY;
-    if (tagDict != null) {
-      dictIdx = tagDict.findEntry(data, offset, length);
-    }
-    if (dictIdx == Dictionary.NOT_IN_DICTIONARY) {
-      out.write(Dictionary.NOT_IN_DICTIONARY);
-      StreamUtils.writeRawVInt32(out, length);
-      out.write(data, offset, length);
-    } else {
-      StreamUtils.writeShort(out, dictIdx);
-    }
-  }
-
-  private void write(ByteBuffer data, int offset, int length, OutputStream out) throws IOException {
-    short dictIdx = Dictionary.NOT_IN_DICTIONARY;
-    if (tagDict != null) {
-      dictIdx = tagDict.findEntry(data, offset, length);
-    }
-    if (dictIdx == Dictionary.NOT_IN_DICTIONARY) {
-      out.write(Dictionary.NOT_IN_DICTIONARY);
-      StreamUtils.writeRawVInt32(out, length);
-      ByteBufferUtils.copyBufferToStream(out, data, offset, length);
-    } else {
-      StreamUtils.writeShort(out, dictIdx);
-    }
-  }
 }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/Dictionary.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/Dictionary.java
index 54677da..ca1af40 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/Dictionary.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/Dictionary.java
@@ -18,9 +18,12 @@ package org.apache.hadoop.hbase.io.util;
 
+import java.io.IOException;
+import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.io.ByteBufferWriter;
 
 /**
  * Dictionary interface
@@ -80,4 +83,53 @@ public interface Dictionary {
   * Flushes the dictionary, empties all values.
   */
  void clear();
+
+  /**
+   * Helper method to write a byte[] to the OutputStream, compressed with the given dictionary
+   * where possible.
+   * @param out the OutputStream to which the data needs to be written
+   * @param data the data to be written, as a byte[]
+   * @param offset the offset into the data
+   * @param length the number of bytes to write
+   * @param dict the dictionary used to compress the data
+   * @throws IOException
+   */
+  public static void write(OutputStream out, byte[] data, int offset, int length, Dictionary dict)
+      throws IOException {
+    short dictIdx = Dictionary.NOT_IN_DICTIONARY;
+    if (dict != null) {
+      dictIdx = dict.findEntry(data, offset, length);
+    }
+    if (dictIdx == Dictionary.NOT_IN_DICTIONARY) {
+      out.write(Dictionary.NOT_IN_DICTIONARY);
+      StreamUtils.writeRawVInt32(out, length);
+      out.write(data, offset, length);
+    } else {
+      StreamUtils.writeShort(out, dictIdx);
+    }
+  }
+
+  /**
+   * Helper method to write a ByteBuffer to the OutputStream, compressed with the given dictionary
+   * where possible.
+   * @param out the OutputStream to which the data needs to be written
+   * @param data the data to be written, as a ByteBuffer
+   * @param offset the offset into the data
+   * @param length the number of bytes to write
+   * @param dict the dictionary used to compress the data
+   * @throws IOException
+   */
+  public static void write(OutputStream out, ByteBuffer data, int offset, int length,
+      Dictionary dict) throws IOException {
+    short dictIdx = Dictionary.NOT_IN_DICTIONARY;
+    if (dict != null) {
+      dictIdx = dict.findEntry(data, offset, length);
+    }
+    if (dictIdx == Dictionary.NOT_IN_DICTIONARY) {
+      assert out instanceof ByteBufferWriter;
+      out.write(Dictionary.NOT_IN_DICTIONARY);
+      StreamUtils.writeRawVInt32(out, length);
+      ((ByteBufferWriter) out).write(data, offset, length);
+    } else {
+      StreamUtils.writeShort(out, dictIdx);
+    }
+  }
 }
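For context (not part of this patch): the two helpers above emit either a two-byte dictionary index, or a Dictionary.NOT_IN_DICTIONARY marker byte followed by a vint length and the raw bytes, in which case the reader is expected to add the entry to its own dictionary so later occurrences can be resolved by index. The matching read path lives in WALCellCodec's CompressedKvDecoder; the sketch below is a simplified, hypothetical rendering of it (the method name readCompressed and the error message are illustrative only):

// Simplified read-side counterpart to Dictionary.write(); illustrative, not part of the patch.
private static byte[] readCompressed(InputStream in, Dictionary dict) throws IOException {
  byte status = (byte) in.read();
  if (status == Dictionary.NOT_IN_DICTIONARY) {
    // Raw form: vint length followed by the bytes; remember them for later lookups.
    int length = StreamUtils.readRawVarint32(in);
    byte[] arr = new byte[length];
    IOUtils.readFully(in, arr, 0, length);
    dict.addEntry(arr, 0, length);
    return arr;
  }
  // Compressed form: this byte plus the next one form the index written by StreamUtils.writeShort().
  short dictIdx = StreamUtils.toShort(status, (byte) in.read());
  byte[] entry = dict.getEntry(dictIdx);
  if (entry == null) {
    throw new IOException("Missing dictionary entry for index " + dictIdx);
  }
  return entry;
}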
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java
index 603496f..35efb16 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.regionserver.wal;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -27,10 +28,14 @@ import java.security.SecureRandom;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.ByteBufferedCell;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags;
+import org.apache.hadoop.hbase.io.ByteBufferWriterDataOutputStream;
+import org.apache.hadoop.hbase.io.ByteBufferWriterOutputStream;
 import org.apache.hadoop.hbase.io.crypto.Decryptor;
 import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.crypto.Encryptor;
@@ -195,29 +200,32 @@ public class SecureWALCellCodec extends WALCellCodec {
       ByteArrayOutputStream baos = new ByteArrayOutputStream();
       OutputStream cout = encryptor.createEncryptionStream(baos);
-
+      ByteBufferWriterDataOutputStream dos = new ByteBufferWriterDataOutputStream(cout);
       int tlen = cell.getTagsLength();
       // Write the KeyValue infrastructure as VInts.
-      StreamUtils.writeRawVInt32(cout, KeyValueUtil.keyLength(cell));
-      StreamUtils.writeRawVInt32(cout, cell.getValueLength());
+      StreamUtils.writeRawVInt32(dos, KeyValueUtil.keyLength(cell));
+      StreamUtils.writeRawVInt32(dos, cell.getValueLength());
       // To support tags
-      StreamUtils.writeRawVInt32(cout, tlen);
+      StreamUtils.writeRawVInt32(dos, tlen);
       // Write row, qualifier, and family
-      StreamUtils.writeRawVInt32(cout, cell.getRowLength());
-      cout.write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
-      StreamUtils.writeRawVInt32(cout, cell.getFamilyLength());
-      cout.write(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
-      StreamUtils.writeRawVInt32(cout, cell.getQualifierLength());
-      cout.write(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
+      short rowLength = cell.getRowLength();
+      StreamUtils.writeRawVInt32(dos, rowLength);
+      CellUtil.writeRow(dos, cell, rowLength);
+      byte familyLength = cell.getFamilyLength();
+      StreamUtils.writeRawVInt32(dos, familyLength);
+      CellUtil.writeFamily(dos, cell, familyLength);
+      int qualifierLength = cell.getQualifierLength();
+      StreamUtils.writeRawVInt32(dos, qualifierLength);
+      CellUtil.writeQualifier(dos, cell, qualifierLength);
       // Write the rest ie. ts, type, value and tags parts
-      StreamUtils.writeLong(cout, cell.getTimestamp());
-      cout.write(cell.getTypeByte());
-      cout.write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
+      StreamUtils.writeLong(dos, cell.getTimestamp());
+      dos.write(cell.getTypeByte());
+      CellUtil.writeValue(dos, cell, cell.getValueLength());
       if (tlen > 0) {
-        cout.write(cell.getTagsArray(), cell.getTagsOffset(), tlen);
+        CellUtil.writeTags(dos, cell, tlen);
       }
-      cout.close();
+      dos.close();
       StreamUtils.writeRawVInt32(out, baos.size());
       baos.writeTo(out);
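A note on the CellUtil.writeRow/writeFamily/writeQualifier/writeValue/writeTags calls introduced above: they are paired with ByteBufferWriterDataOutputStream so that a ByteBuffer-backed cell can be written straight from its backing buffer rather than through the getRowArray()-style accessors, which an offheap cell cannot serve cheaply. A rough sketch of the dispatch these helpers are assumed to perform (the actual implementation lives in CellUtil and may differ in detail; this is not part of the patch):

// Assumed shape of CellUtil.writeRow, shown only to explain why the encryption path now
// routes through a ByteBufferWriter-capable stream.
public static void writeRow(OutputStream out, Cell cell, short rlength) throws IOException {
  if (cell instanceof ByteBufferedCell) {
    // Read directly from the cell's backing ByteBuffer instead of asking for a byte[] view.
    ByteBufferUtils.copyBufferToStream(out, ((ByteBufferedCell) cell).getRowByteBuffer(),
        ((ByteBufferedCell) cell).getRowPosition(), rlength);
  } else {
    out.write(cell.getRowArray(), cell.getRowOffset(), rlength);
  }
}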
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
index 1a18087..fcb207c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
@@ -21,9 +21,12 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.ByteBuffer;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.ByteBufferedCell;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
@@ -35,6 +38,7 @@ import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags;
 import org.apache.hadoop.hbase.io.ByteBuffInputStream;
 import org.apache.hadoop.hbase.io.ByteBufferWriter;
 import org.apache.hadoop.hbase.io.ByteBufferWriterOutputStream;
+import org.apache.hadoop.hbase.io.TagCompressionContext;
 import org.apache.hadoop.hbase.io.util.Dictionary;
 import org.apache.hadoop.hbase.io.util.StreamUtils;
 import org.apache.hadoop.hbase.nio.ByteBuff;
@@ -201,49 +205,69 @@ public class WALCellCodec implements Codec {
 
     @Override
     public void write(Cell cell) throws IOException {
-      // We first write the KeyValue infrastructure as VInts.
-      StreamUtils.writeRawVInt32(out, KeyValueUtil.keyLength(cell));
-      StreamUtils.writeRawVInt32(out, cell.getValueLength());
-      // To support tags
-      int tagsLength = cell.getTagsLength();
-      StreamUtils.writeRawVInt32(out, tagsLength);
-
-      // Write row, qualifier, and family; use dictionary
-      // compression as they're likely to have duplicates.
-      write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), compression.rowDict);
-      write(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
-          compression.familyDict);
-      write(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
-          compression.qualifierDict);
+      compressCellForWal(out, cell, compression.rowDict, compression.familyDict,
+          compression.qualifierDict, compression.tagCompressionContext);
+    }
+  }
+
+  private static void compressCellForWal(OutputStream out, Cell cell, Dictionary rowDict,
+      Dictionary famDict, Dictionary colDict, TagCompressionContext tagCompressionContext)
+      throws IOException {
+    // We first write the KeyValue infrastructure as VInts.
+    StreamUtils.writeRawVInt32(out, KeyValueUtil.keyLength(cell));
+    StreamUtils.writeRawVInt32(out, cell.getValueLength());
+    // To support tags
+    int tagsLength = cell.getTagsLength();
+    StreamUtils.writeRawVInt32(out, tagsLength);
+
+    // Write row, qualifier, and family; use dictionary
+    // compression as they're likely to have duplicates.
+    if (cell instanceof ByteBufferedCell) {
+      assert out instanceof ByteBufferWriter;
+      Dictionary.write(out, ((ByteBufferedCell) cell).getRowByteBuffer(),
+          ((ByteBufferedCell) cell).getRowPosition(), cell.getRowLength(), rowDict);
+      Dictionary.write(out, ((ByteBufferedCell) cell).getFamilyByteBuffer(),
+          ((ByteBufferedCell) cell).getFamilyPosition(), cell.getFamilyLength(), famDict);
+      Dictionary.write(out, ((ByteBufferedCell) cell).getQualifierByteBuffer(),
+          ((ByteBufferedCell) cell).getQualifierPosition(), cell.getQualifierLength(), colDict);
       // Write timestamp, type and value as uncompressed.
       StreamUtils.writeLong(out, cell.getTimestamp());
       out.write(cell.getTypeByte());
-      out.write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
+      ((ByteBufferWriter) out).write(((ByteBufferedCell) cell).getValueByteBuffer(),
+          ((ByteBufferedCell) cell).getValuePosition(), cell.getValueLength());
       if (tagsLength > 0) {
-        if (compression.tagCompressionContext != null) {
+        if (tagCompressionContext != null) {
           // Write tags using Dictionary compression
-          compression.tagCompressionContext.compressTags(out, cell.getTagsArray(),
-              cell.getTagsOffset(), tagsLength);
+          tagCompressionContext.compressTags(out, ((ByteBufferedCell) cell).getTagsByteBuffer(),
+              ((ByteBufferedCell) cell).getTagsPosition(), tagsLength);
         } else {
           // Tag compression is disabled within the WAL compression. Just write the tags bytes as
           // it is.
-          out.write(cell.getTagsArray(), cell.getTagsOffset(), tagsLength);
+          ((ByteBufferWriter) out).write(((ByteBufferedCell) cell).getTagsByteBuffer(),
+              ((ByteBufferedCell) cell).getTagsPosition(), tagsLength);
         }
       }
-    }
+    } else {
+      Dictionary.write(out, cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), rowDict);
+      Dictionary.write(out, cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(), famDict);
+      Dictionary.write(out, cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
+          colDict);
 
-    private void write(byte[] data, int offset, int length, Dictionary dict) throws IOException {
-      short dictIdx = Dictionary.NOT_IN_DICTIONARY;
-      if (dict != null) {
-        dictIdx = dict.findEntry(data, offset, length);
-      }
-      if (dictIdx == Dictionary.NOT_IN_DICTIONARY) {
-        out.write(Dictionary.NOT_IN_DICTIONARY);
-        StreamUtils.writeRawVInt32(out, length);
-        out.write(data, offset, length);
-      } else {
-        StreamUtils.writeShort(out, dictIdx);
+      // Write timestamp, type and value as uncompressed.
+      StreamUtils.writeLong(out, cell.getTimestamp());
+      out.write(cell.getTypeByte());
+      out.write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
+      if (tagsLength > 0) {
+        if (tagCompressionContext != null) {
+          // Write tags using Dictionary compression
+          tagCompressionContext.compressTags(out, cell.getTagsArray(), cell.getTagsOffset(),
+              tagsLength);
+        } else {
+          // Tag compression is disabled within the WAL compression. Just write the tags bytes as
+          // it is.
+          out.write(cell.getTagsArray(), cell.getTagsOffset(), tagsLength);
+        }
       }
     }
   }
@@ -364,9 +388,9 @@ public class WALCellCodec implements Codec {
 
   @Override
   public Encoder getEncoder(OutputStream os) {
+    os = (os instanceof ByteBufferWriter) ? os
+        : new ByteBufferWriterOutputStream(os);
     if (compression == null) {
-      os = (os instanceof ByteBufferWriter) ? os
-          : new ByteBufferWriterOutputStream(os);
       return new EnsureKvEncoder(os);
     }
     return new CompressedKvEncoder(os, compression);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
index e834ac8..ba5bfa3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
@@ -23,12 +23,14 @@ import static org.junit.Assert.assertEquals;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.InputStream;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.OffheapKeyValue;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.ArrayBackedTag;
@@ -46,24 +48,35 @@ public class TestWALCellCodecWithCompression {
 
   @Test
   public void testEncodeDecodeKVsWithTags() throws Exception {
-    doTest(false);
+    doTest(false, false);
   }
 
   @Test
   public void testEncodeDecodeKVsWithTagsWithTagsCompression() throws Exception {
-    doTest(true);
+    doTest(true, false);
   }
 
-  private void doTest(boolean compressTags) throws Exception {
+  @Test
+  public void testEncodeDecodeOffKVsWithTagsWithTagsCompression() throws Exception {
+    doTest(true, true);
+  }
+
+  private void doTest(boolean compressTags, boolean offheapKV) throws Exception {
     Configuration conf = new Configuration(false);
     conf.setBoolean(CompressionContext.ENABLE_WAL_TAGS_COMPRESSION, compressTags);
     WALCellCodec codec = new WALCellCodec(conf, new CompressionContext(LRUDictionary.class,
        false, compressTags));
     ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
     Encoder encoder = codec.getEncoder(bos);
-    encoder.write(createKV(1));
-    encoder.write(createKV(0));
-    encoder.write(createKV(2));
+    if (offheapKV) {
+      encoder.write(createOffheapKV(1));
+      encoder.write(createOffheapKV(0));
+      encoder.write(createOffheapKV(2));
+    } else {
+      encoder.write(createKV(1));
+      encoder.write(createKV(0));
+      encoder.write(createKV(2));
+    }
 
     InputStream is = new ByteArrayInputStream(bos.toByteArray());
     Decoder decoder = codec.getDecoder(is);
@@ -95,4 +108,19 @@ public class TestWALCellCodecWithCompression {
     }
     return new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, tags);
   }
+
+  private OffheapKeyValue createOffheapKV(int noOfTags) {
+    byte[] row = Bytes.toBytes("myRow");
+    byte[] cf = Bytes.toBytes("myCF");
+    byte[] q = Bytes.toBytes("myQualifier");
+    byte[] value = Bytes.toBytes("myValue");
+    List<Tag> tags = new ArrayList<Tag>(noOfTags);
+    for (int i = 1; i <= noOfTags; i++) {
+      tags.add(new ArrayBackedTag((byte) i, Bytes.toBytes("tagValue" + i)));
+    }
+    KeyValue kv = new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, tags);
+    ByteBuffer dbb = ByteBuffer.allocateDirect(kv.getBuffer().length);
+    dbb.put(kv.getBuffer());
+    return new OffheapKeyValue(dbb, 0, kv.getBuffer().length);
+  }
 }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
index 3e060ab..0562fd9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
@@ -20,7 +20,9 @@ package org.apache.hadoop.hbase.wal;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.NavigableMap;
 import java.util.TreeMap;
 
@@ -38,6 +40,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.OffheapKeyValue;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
@@ -90,7 +93,8 @@ public class TestWALReaderOnSecureWAL {
     FSUtils.setRootDir(conf, TEST_UTIL.getDataTestDir());
   }
 
-  private Path writeWAL(final WALFactory wals, final String tblName) throws IOException {
+  @SuppressWarnings("deprecation")
+  private Path writeWAL(final WALFactory wals, final String tblName, boolean offheap) throws IOException {
     Configuration conf = TEST_UTIL.getConfiguration();
     String clsName = conf.get(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
     conf.setClass(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, SecureWALCellCodec.class,
@@ -116,7 +120,15 @@ public class TestWALReaderOnSecureWAL {
         wals.getWAL(regioninfo.getEncodedNameAsBytes(), regioninfo.getTable().getNamespace());
     for (int i = 0; i < total; i++) {
       WALEdit kvs = new WALEdit();
-      kvs.add(new KeyValue(row, family, Bytes.toBytes(i), value));
+      KeyValue kv = new KeyValue(row, family, Bytes.toBytes(i), value);
+      if (offheap) {
+        ByteBuffer bb = ByteBuffer.allocateDirect(kv.getBuffer().length);
+        bb.put(kv.getBuffer());
+        OffheapKeyValue offheapKV = new OffheapKeyValue(bb, 0, kv.getLength());
+        kvs.add(offheapKV);
+      } else {
+        kvs.add(kv);
+      }
       wal.append(regioninfo, new WALKey(regioninfo.getEncodedNameAsBytes(), tableName,
           System.currentTimeMillis(), mvcc, scopes), kvs, true);
     }
@@ -132,7 +144,16 @@ public class TestWALReaderOnSecureWAL {
   }
 
   @Test()
-  public void testWALReaderOnSecureWAL() throws Exception {
+  public void testWALReaderOnSecureWALWithKeyValues() throws Exception {
+    testSecureWALInternal(false);
+  }
+
+  @Test()
+  public void testWALReaderOnSecureWALWithOffheapKeyValues() throws Exception {
+    testSecureWALInternal(true);
+  }
+
+  private void testSecureWALInternal(boolean offheap) throws IOException, FileNotFoundException {
     Configuration conf = TEST_UTIL.getConfiguration();
     conf.setClass("hbase.regionserver.hlog.reader.impl", ProtobufLogReader.class,
       WAL.Reader.class);
@@ -143,7 +164,7 @@
     conf.setBoolean(WAL_ENCRYPTION, true);
     FileSystem fs = TEST_UTIL.getTestFileSystem();
     final WALFactory wals = new WALFactory(conf, null, currentTest.getMethodName());
-    Path walPath = writeWAL(wals, currentTest.getMethodName());
+    Path walPath = writeWAL(wals, currentTest.getMethodName(), offheap);
 
     // Insure edits are not plaintext
     long length = fs.getFileStatus(walPath).getLen();
@@ -188,7 +209,7 @@
     conf.setBoolean(WAL_ENCRYPTION, false);
     FileSystem fs = TEST_UTIL.getTestFileSystem();
     final WALFactory wals = new WALFactory(conf, null, currentTest.getMethodName());
-    Path walPath = writeWAL(wals, currentTest.getMethodName());
+    Path walPath = writeWAL(wals, currentTest.getMethodName(), false);
 
     // Ensure edits are plaintext
     long length = fs.getFileStatus(walPath).getLen();
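
Taken together, the changes above mean the WAL cell encoders no longer assume on-heap, array-backed cells. A rough usage sketch of the round trip, mirroring TestWALCellCodecWithCompression (illustrative only; offheapKV stands for any ByteBuffer-backed cell, such as the one built by createOffheapKV above):

// Illustrative round trip through the codec with an offheap cell; not part of the patch.
Configuration conf = new Configuration(false);
conf.setBoolean(CompressionContext.ENABLE_WAL_TAGS_COMPRESSION, true);
WALCellCodec codec = new WALCellCodec(conf,
    new CompressionContext(LRUDictionary.class, false, true));

ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
Encoder encoder = codec.getEncoder(bos);   // non-ByteBufferWriter streams get wrapped
encoder.write(offheapKV);                  // ByteBufferedCell path: key parts read from the backing buffer
encoder.flush();

Decoder decoder = codec.getDecoder(new ByteArrayInputStream(bos.toByteArray()));
while (decoder.advance()) {
  Cell cell = decoder.current();           // decoded cells come back as on-heap KeyValues
  // verify row/family/qualifier/value/tags here, as the test does
}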