Index: hbase-chang/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestHBASE17375.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- hbase-chang/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestHBASE17375.java	(date 1482824974000)
+++ hbase-chang/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestHBASE17375.java	(date 1482824974000)
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hbase.io.encoding;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec;
+import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+import org.apache.hadoop.hbase.nio.SingleByteBuff;
+import org.apache.hadoop.hbase.testclassification.IOTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.DataOutputStream;
+import java.nio.ByteBuffer;
+import java.util.concurrent.ConcurrentSkipListSet;
+
+@Category({IOTests.class, SmallTests.class})
+public class TestHBASE17375 {
+  private static final Log LOG = LogFactory.getLog(TestHBASE17375.class);
+  private static final byte[] CF_BYTES = Bytes.toBytes("x");
+  private static final String[] rows = {"A", "Aaeeee", "Abc", "Abde"};
+  private static final String[] qualifiers = {"a", "v"};
+  private static final byte[] seekQualifier = Bytes.toBytes("v");
+
+  private static ConcurrentSkipListSet<Cell> generateTestData() {
+    ConcurrentSkipListSet<Cell> kvset = new ConcurrentSkipListSet<>(CellComparator.COMPARATOR);
+
+    // rows
+    byte[][] rowBytes = new byte[rows.length][];
+    for (int i = 0; i < rows.length; i++) {
+      rowBytes[i] = Bytes.toBytes(rows[i]);
+    }
+
+    // qualifiers
+    byte[][] qualifierBytes = new byte[qualifiers.length][];
+    for (int i = 0; i < qualifiers.length; i++) {
+      qualifierBytes[i] = Bytes.toBytes(qualifiers[i]);
+    }
+
+    // value
+    byte[] value = new byte[] { 1 };
+    // one KeyValue per (row, qualifier) pair
+    for (int i = 0; i < rowBytes.length; i++) {
+      for (int j = 0; j < qualifierBytes.length; j++) {
+        KeyValue kv = new KeyValue(rowBytes[i], CF_BYTES, qualifierBytes[j],
+            System.currentTimeMillis(), value);
+        kvset.add(kv);
+      }
+    }
+    return kvset;
+  }
+
+  private static void encodeData(ConcurrentSkipListSet<Cell> kvset, PrefixTreeCodec encoder,
+      HFileBlockEncodingContext blkEncodingCtx, DataOutputStream userDataStream) throws Exception {
+    encoder.startBlockEncoding(blkEncodingCtx, userDataStream);
+    for (Cell kv : kvset) {
+      encoder.encode(kv, blkEncodingCtx, userDataStream);
+    }
+    encoder.endBlockEncoding(blkEncodingCtx, userDataStream, null);
+  }
+
+  private static Cell createSeekCell(String rowStr) {
+    return KeyValueUtil.createFirstOnRow(Bytes.toBytes(rowStr));
+  }
+
+  private static Cell getKeyForNextColumn(Cell kv) {
+    return KeyValueUtil.createLastOnRow(
+        kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(),
+        kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength(),
+        kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength());
+  }
+
+  private static DataBlockEncoder.EncodedSeeker createEncodedSeekerForTest() throws Exception {
+    PrefixTreeCodec encoder = new PrefixTreeCodec();
+    ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
+    DataOutputStream userDataStream = new DataOutputStream(baosInMemory);
+
+    HFileContext meta = new HFileContextBuilder()
+        .withHBaseCheckSum(false)
+        .withIncludesMvcc(false)
+        .withIncludesTags(false)
+        .withCompression(Compression.Algorithm.NONE)
+        .build();
+    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
+        DataBlockEncoding.PREFIX_TREE, new byte[0], meta);
+
+    encodeData(generateTestData(), encoder, blkEncodingCtx, userDataStream);
+    DataBlockEncoder.EncodedSeeker encodeSeeker = encoder.createSeeker(CellComparator.COMPARATOR,
+        encoder.newDataBlockDecodingContext(meta));
+    byte[] onDiskBytes = baosInMemory.toByteArray();
+    // skip the block-encoding id written at the head of the stream
+    ByteBuffer encodedData = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE,
+        onDiskBytes.length - DataBlockEncoding.ID_SIZE);
+    encodeSeeker.setCurrentBuffer(new SingleByteBuff(encodedData));
+    return encodeSeeker;
+  }
+
+  /**
+   * Seek into the block, advance to the cell with qualifier "v", then seek to
+   * the last-on-row key of that cell; the seeker must not end up before it.
+   */
+  @Test
+  public void testSeek() throws Exception {
+    DataBlockEncoder.EncodedSeeker encodeSeeker = createEncodedSeekerForTest();
+    Cell seekCell = createSeekCell("Aa");
+
+    encodeSeeker.seekToKeyInBlock(seekCell, false);
+    do {
+      Cell cell = encodeSeeker.getCell();
+      if (CellComparator.COMPARATOR.compareRows(cell, seekCell) < 0) {
+        continue;
+      }
+      LOG.info(encodeSeeker.getKey());
+      if (Bytes.compareTo(cell.getQualifierArray(), cell.getQualifierOffset(),
+          cell.getQualifierLength(), seekQualifier, 0, seekQualifier.length) == 0) {
+        break;
+      }
+    } while (encodeSeeker.next());
+
+    Cell x = getKeyForNextColumn(encodeSeeker.getCell());
+    encodeSeeker.seekToKeyInBlock(x, false);
+
+    LOG.info(encodeSeeker.getCell());
+
+    Assert.assertTrue(CellComparator.COMPARATOR.compareRows(encodeSeeker.getCell(), x) >= 0);
+  }
+}
Index: hbase-chang/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayReversibleScanner.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- hbase-chang/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayReversibleScanner.java	(date 1482818271000)
+++ hbase-chang/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayReversibleScanner.java	(date 1482824974000)
@@ -114,7 +114,7 @@
         return false;
       }
     }
-    if (currentRowNode.hasOccurrences()) {// escape clause
+    if (currentRowNode.hasOccurrences() && !currentRowNode.isNub()) {// escape clause
       currentRowNode.resetFanIndex();
       return true;// found some values
     }