diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java index 14b2ada..e245309 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java @@ -1347,6 +1347,9 @@ public class ThriftServerRunner implements Runnable { scan.setFilter( parseFilter.parseFilterString(tScan.getFilterString())); } + if (tScan.isSetReversed()) { + scan.setReversed(tScan.isReversed()); + } return addScanner(table.getScanner(scan), tScan.sortColumns); } catch (IOException e) { LOG.warn(e.getMessage(), e); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java index 2eefad4..bd5bd52 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java @@ -10061,7 +10061,7 @@ public class Hbase { struct.success = new HashMap(2*_map42.size); for (int _i43 = 0; _i43 < _map42.size; ++_i43) { - ByteBuffer _key44; // required + ByteBuffer _key44; // optional ColumnDescriptor _val45; // required _key44 = iprot.readBinary(); _val45 = new ColumnDescriptor(); @@ -10167,7 +10167,7 @@ public class Hbase { struct.success = new HashMap(2*_map48.size); for (int _i49 = 0; _i49 < _map48.size; ++_i49) { - ByteBuffer _key50; // required + ByteBuffer _key50; // optional ColumnDescriptor _val51; // required _key50 = iprot.readBinary(); _val51 = new ColumnDescriptor(); @@ -13642,7 +13642,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map68.size); for (int _i69 = 0; _i69 < _map68.size; ++_i69) { - ByteBuffer _key70; // required + ByteBuffer _key70; // optional ByteBuffer _val71; // required _key70 = iprot.readBinary(); _val71 = iprot.readBinary(); @@ -13772,7 +13772,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map74.size); for (int _i75 = 0; _i75 < _map74.size; ++_i75) { - ByteBuffer _key76; // required + ByteBuffer _key76; // optional ByteBuffer _val77; // required _key76 = iprot.readBinary(); _val77 = iprot.readBinary(); @@ -15083,7 +15083,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map86.size); for (int _i87 = 0; _i87 < _map86.size; ++_i87) { - ByteBuffer _key88; // required + ByteBuffer _key88; // optional ByteBuffer _val89; // required _key88 = iprot.readBinary(); _val89 = iprot.readBinary(); @@ -15226,7 +15226,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map92.size); for (int _i93 = 0; _i93 < _map92.size; ++_i93) { - ByteBuffer _key94; // required + ByteBuffer _key94; // optional ByteBuffer _val95; // required _key94 = iprot.readBinary(); _val95 = iprot.readBinary(); @@ -16635,7 +16635,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map104.size); for (int _i105 = 0; _i105 < _map104.size; ++_i105) { - ByteBuffer _key106; // required + ByteBuffer _key106; // optional ByteBuffer _val107; // required _key106 = iprot.readBinary(); _val107 = iprot.readBinary(); @@ -16791,7 +16791,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map110.size); for (int _i111 = 0; _i111 < _map110.size; ++_i111) { - ByteBuffer _key112; // required + ByteBuffer _key112; // optional ByteBuffer _val113; // required _key112 = iprot.readBinary(); _val113 = iprot.readBinary(); @@ -17888,7 +17888,7 @@ public class Hbase 
{ struct.attributes = new HashMap(2*_map122.size); for (int _i123 = 0; _i123 < _map122.size; ++_i123) { - ByteBuffer _key124; // required + ByteBuffer _key124; // optional ByteBuffer _val125; // required _key124 = iprot.readBinary(); _val125 = iprot.readBinary(); @@ -18003,7 +18003,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map128.size); for (int _i129 = 0; _i129 < _map128.size; ++_i129) { - ByteBuffer _key130; // required + ByteBuffer _key130; // optional ByteBuffer _val131; // required _key130 = iprot.readBinary(); _val131 = iprot.readBinary(); @@ -19232,7 +19232,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map143.size); for (int _i144 = 0; _i144 < _map143.size; ++_i144) { - ByteBuffer _key145; // required + ByteBuffer _key145; // optional ByteBuffer _val146; // required _key145 = iprot.readBinary(); _val146 = iprot.readBinary(); @@ -19384,7 +19384,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map154.size); for (int _i155 = 0; _i155 < _map154.size; ++_i155) { - ByteBuffer _key156; // required + ByteBuffer _key156; // optional ByteBuffer _val157; // required _key156 = iprot.readBinary(); _val157 = iprot.readBinary(); @@ -20583,7 +20583,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map166.size); for (int _i167 = 0; _i167 < _map166.size; ++_i167) { - ByteBuffer _key168; // required + ByteBuffer _key168; // optional ByteBuffer _val169; // required _key168 = iprot.readBinary(); _val169 = iprot.readBinary(); @@ -20711,7 +20711,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map172.size); for (int _i173 = 0; _i173 < _map172.size; ++_i173) { - ByteBuffer _key174; // required + ByteBuffer _key174; // optional ByteBuffer _val175; // required _key174 = iprot.readBinary(); _val175 = iprot.readBinary(); @@ -22030,7 +22030,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map187.size); for (int _i188 = 0; _i188 < _map187.size; ++_i188) { - ByteBuffer _key189; // required + ByteBuffer _key189; // optional ByteBuffer _val190; // required _key189 = iprot.readBinary(); _val190 = iprot.readBinary(); @@ -22195,7 +22195,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map198.size); for (int _i199 = 0; _i199 < _map198.size; ++_i199) { - ByteBuffer _key200; // required + ByteBuffer _key200; // optional ByteBuffer _val201; // required _key200 = iprot.readBinary(); _val201 = iprot.readBinary(); @@ -23312,7 +23312,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map213.size); for (int _i214 = 0; _i214 < _map213.size; ++_i214) { - ByteBuffer _key215; // required + ByteBuffer _key215; // optional ByteBuffer _val216; // required _key215 = iprot.readBinary(); _val216 = iprot.readBinary(); @@ -23449,7 +23449,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map224.size); for (int _i225 = 0; _i225 < _map224.size; ++_i225) { - ByteBuffer _key226; // required + ByteBuffer _key226; // optional ByteBuffer _val227; // required _key226 = iprot.readBinary(); _val227 = iprot.readBinary(); @@ -24698,7 +24698,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map242.size); for (int _i243 = 0; _i243 < _map242.size; ++_i243) { - ByteBuffer _key244; // required + ByteBuffer _key244; // optional ByteBuffer _val245; // required _key244 = iprot.readBinary(); _val245 = iprot.readBinary(); @@ -24872,7 +24872,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map258.size); for (int _i259 = 0; _i259 < _map258.size; ++_i259) { - ByteBuffer _key260; // required + ByteBuffer _key260; // optional 
ByteBuffer _val261; // required _key260 = iprot.readBinary(); _val261 = iprot.readBinary(); @@ -26091,7 +26091,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map273.size); for (int _i274 = 0; _i274 < _map273.size; ++_i274) { - ByteBuffer _key275; // required + ByteBuffer _key275; // optional ByteBuffer _val276; // required _key275 = iprot.readBinary(); _val276 = iprot.readBinary(); @@ -26241,7 +26241,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map284.size); for (int _i285 = 0; _i285 < _map284.size; ++_i285) { - ByteBuffer _key286; // required + ByteBuffer _key286; // optional ByteBuffer _val287; // required _key286 = iprot.readBinary(); _val287 = iprot.readBinary(); @@ -27580,7 +27580,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map302.size); for (int _i303 = 0; _i303 < _map302.size; ++_i303) { - ByteBuffer _key304; // required + ByteBuffer _key304; // optional ByteBuffer _val305; // required _key304 = iprot.readBinary(); _val305 = iprot.readBinary(); @@ -27767,7 +27767,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map318.size); for (int _i319 = 0; _i319 < _map318.size; ++_i319) { - ByteBuffer _key320; // required + ByteBuffer _key320; // optional ByteBuffer _val321; // required _key320 = iprot.readBinary(); _val321 = iprot.readBinary(); @@ -28997,7 +28997,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map333.size); for (int _i334 = 0; _i334 < _map333.size; ++_i334) { - ByteBuffer _key335; // required + ByteBuffer _key335; // optional ByteBuffer _val336; // required _key335 = iprot.readBinary(); _val336 = iprot.readBinary(); @@ -29150,7 +29150,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map344.size); for (int _i345 = 0; _i345 < _map344.size; ++_i345) { - ByteBuffer _key346; // required + ByteBuffer _key346; // optional ByteBuffer _val347; // required _key346 = iprot.readBinary(); _val347 = iprot.readBinary(); @@ -30430,7 +30430,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map351.size); for (int _i352 = 0; _i352 < _map351.size; ++_i352) { - ByteBuffer _key353; // required + ByteBuffer _key353; // optional ByteBuffer _val354; // required _key353 = iprot.readBinary(); _val354 = iprot.readBinary(); @@ -30596,7 +30596,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map362.size); for (int _i363 = 0; _i363 < _map362.size; ++_i363) { - ByteBuffer _key364; // required + ByteBuffer _key364; // optional ByteBuffer _val365; // required _key364 = iprot.readBinary(); _val365 = iprot.readBinary(); @@ -31662,7 +31662,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map369.size); for (int _i370 = 0; _i370 < _map369.size; ++_i370) { - ByteBuffer _key371; // required + ByteBuffer _key371; // optional ByteBuffer _val372; // required _key371 = iprot.readBinary(); _val372 = iprot.readBinary(); @@ -31800,7 +31800,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map380.size); for (int _i381 = 0; _i381 < _map380.size; ++_i381) { - ByteBuffer _key382; // required + ByteBuffer _key382; // optional ByteBuffer _val383; // required _key382 = iprot.readBinary(); _val383 = iprot.readBinary(); @@ -32968,7 +32968,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map387.size); for (int _i388 = 0; _i388 < _map387.size; ++_i388) { - ByteBuffer _key389; // required + ByteBuffer _key389; // optional ByteBuffer _val390; // required _key389 = iprot.readBinary(); _val390 = iprot.readBinary(); @@ -33119,7 +33119,7 @@ public class Hbase { struct.attributes = new 
HashMap(2*_map398.size); for (int _i399 = 0; _i399 < _map398.size; ++_i399) { - ByteBuffer _key400; // required + ByteBuffer _key400; // optional ByteBuffer _val401; // required _key400 = iprot.readBinary(); _val401 = iprot.readBinary(); @@ -35603,7 +35603,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map402.size); for (int _i403 = 0; _i403 < _map402.size; ++_i403) { - ByteBuffer _key404; // required + ByteBuffer _key404; // optional ByteBuffer _val405; // required _key404 = iprot.readBinary(); _val405 = iprot.readBinary(); @@ -35733,7 +35733,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map408.size); for (int _i409 = 0; _i409 < _map408.size; ++_i409) { - ByteBuffer _key410; // required + ByteBuffer _key410; // optional ByteBuffer _val411; // required _key410 = iprot.readBinary(); _val411 = iprot.readBinary(); @@ -36885,7 +36885,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map412.size); for (int _i413 = 0; _i413 < _map412.size; ++_i413) { - ByteBuffer _key414; // required + ByteBuffer _key414; // optional ByteBuffer _val415; // required _key414 = iprot.readBinary(); _val415 = iprot.readBinary(); @@ -37028,7 +37028,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map418.size); for (int _i419 = 0; _i419 < _map418.size; ++_i419) { - ByteBuffer _key420; // required + ByteBuffer _key420; // optional ByteBuffer _val421; // required _key420 = iprot.readBinary(); _val421 = iprot.readBinary(); @@ -37966,7 +37966,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map422.size); for (int _i423 = 0; _i423 < _map422.size; ++_i423) { - ByteBuffer _key424; // required + ByteBuffer _key424; // optional ByteBuffer _val425; // required _key424 = iprot.readBinary(); _val425 = iprot.readBinary(); @@ -38081,7 +38081,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map428.size); for (int _i429 = 0; _i429 < _map428.size; ++_i429) { - ByteBuffer _key430; // required + ByteBuffer _key430; // optional ByteBuffer _val431; // required _key430 = iprot.readBinary(); _val431 = iprot.readBinary(); @@ -40652,7 +40652,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map440.size); for (int _i441 = 0; _i441 < _map440.size; ++_i441) { - ByteBuffer _key442; // required + ByteBuffer _key442; // optional ByteBuffer _val443; // required _key442 = iprot.readBinary(); _val443 = iprot.readBinary(); @@ -40780,7 +40780,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map446.size); for (int _i447 = 0; _i447 < _map446.size; ++_i447) { - ByteBuffer _key448; // required + ByteBuffer _key448; // optional ByteBuffer _val449; // required _key448 = iprot.readBinary(); _val449 = iprot.readBinary(); @@ -41712,7 +41712,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map450.size); for (int _i451 = 0; _i451 < _map450.size; ++_i451) { - ByteBuffer _key452; // required + ByteBuffer _key452; // optional ByteBuffer _val453; // required _key452 = iprot.readBinary(); _val453 = iprot.readBinary(); @@ -41828,7 +41828,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map456.size); for (int _i457 = 0; _i457 < _map456.size; ++_i457) { - ByteBuffer _key458; // required + ByteBuffer _key458; // optional ByteBuffer _val459; // required _key458 = iprot.readBinary(); _val459 = iprot.readBinary(); @@ -43015,7 +43015,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map463.size); for (int _i464 = 0; _i464 < _map463.size; ++_i464) { - ByteBuffer _key465; // required + ByteBuffer _key465; // optional ByteBuffer _val466; // required 
_key465 = iprot.readBinary(); _val466 = iprot.readBinary(); @@ -43167,7 +43167,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map474.size); for (int _i475 = 0; _i475 < _map474.size; ++_i475) { - ByteBuffer _key476; // required + ByteBuffer _key476; // optional ByteBuffer _val477; // required _key476 = iprot.readBinary(); _val477 = iprot.readBinary(); @@ -44470,7 +44470,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map481.size); for (int _i482 = 0; _i482 < _map481.size; ++_i482) { - ByteBuffer _key483; // required + ByteBuffer _key483; // optional ByteBuffer _val484; // required _key483 = iprot.readBinary(); _val484 = iprot.readBinary(); @@ -44637,7 +44637,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map492.size); for (int _i493 = 0; _i493 < _map492.size; ++_i493) { - ByteBuffer _key494; // required + ByteBuffer _key494; // optional ByteBuffer _val495; // required _key494 = iprot.readBinary(); _val495 = iprot.readBinary(); @@ -45812,7 +45812,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map499.size); for (int _i500 = 0; _i500 < _map499.size; ++_i500) { - ByteBuffer _key501; // required + ByteBuffer _key501; // optional ByteBuffer _val502; // required _key501 = iprot.readBinary(); _val502 = iprot.readBinary(); @@ -45964,7 +45964,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map510.size); for (int _i511 = 0; _i511 < _map510.size; ++_i511) { - ByteBuffer _key512; // required + ByteBuffer _key512; // optional ByteBuffer _val513; // required _key512 = iprot.readBinary(); _val513 = iprot.readBinary(); @@ -47253,7 +47253,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map517.size); for (int _i518 = 0; _i518 < _map517.size; ++_i518) { - ByteBuffer _key519; // required + ByteBuffer _key519; // optional ByteBuffer _val520; // required _key519 = iprot.readBinary(); _val520 = iprot.readBinary(); @@ -47418,7 +47418,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map528.size); for (int _i529 = 0; _i529 < _map528.size; ++_i529) { - ByteBuffer _key530; // required + ByteBuffer _key530; // optional ByteBuffer _val531; // required _key530 = iprot.readBinary(); _val531 = iprot.readBinary(); @@ -48823,7 +48823,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map535.size); for (int _i536 = 0; _i536 < _map535.size; ++_i536) { - ByteBuffer _key537; // required + ByteBuffer _key537; // optional ByteBuffer _val538; // required _key537 = iprot.readBinary(); _val538 = iprot.readBinary(); @@ -49003,7 +49003,7 @@ public class Hbase { struct.attributes = new HashMap(2*_map546.size); for (int _i547 = 0; _i547 < _map546.size; ++_i547) { - ByteBuffer _key548; // required + ByteBuffer _key548; // optional ByteBuffer _val549; // required _key548 = iprot.readBinary(); _val549 = iprot.readBinary(); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java index 3bb244d..66757d6 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java @@ -562,7 +562,7 @@ public class TRowResult implements org.apache.thrift.TBase(2*_map8.size); for (int _i9 = 0; _i9 < _map8.size; ++_i9) { - ByteBuffer _key10; // required + ByteBuffer _key10; // optional TCell _val11; // required _key10 = iprot.readBinary(); _val11 = new TCell(); @@ -710,7 +710,7 @@ public class TRowResult implements 
org.apache.thrift.TBase(2*_map19.size); for (int _i20 = 0; _i20 < _map19.size; ++_i20) { - ByteBuffer _key21; // required + ByteBuffer _key21; // optional TCell _val22; // required _key21 = iprot.readBinary(); _val22 = new TCell(); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java index 7e6a144..5f5cdc7 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java @@ -45,6 +45,7 @@ public class TScan implements org.apache.thrift.TBase, jav private static final org.apache.thrift.protocol.TField FILTER_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("filterString", org.apache.thrift.protocol.TType.STRING, (short)6); private static final org.apache.thrift.protocol.TField BATCH_SIZE_FIELD_DESC = new org.apache.thrift.protocol.TField("batchSize", org.apache.thrift.protocol.TType.I32, (short)7); private static final org.apache.thrift.protocol.TField SORT_COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("sortColumns", org.apache.thrift.protocol.TType.BOOL, (short)8); + private static final org.apache.thrift.protocol.TField REVERSED_FIELD_DESC = new org.apache.thrift.protocol.TField("reversed", org.apache.thrift.protocol.TType.BOOL, (short)9); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { @@ -60,6 +61,7 @@ public class TScan implements org.apache.thrift.TBase, jav public ByteBuffer filterString; // optional public int batchSize; // optional public boolean sortColumns; // optional + public boolean reversed; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ public enum _Fields implements org.apache.thrift.TFieldIdEnum { @@ -70,7 +72,8 @@ public class TScan implements org.apache.thrift.TBase, jav CACHING((short)5, "caching"), FILTER_STRING((short)6, "filterString"), BATCH_SIZE((short)7, "batchSize"), - SORT_COLUMNS((short)8, "sortColumns"); + SORT_COLUMNS((short)8, "sortColumns"), + REVERSED((short)9, "reversed"); private static final Map byName = new HashMap(); @@ -101,6 +104,8 @@ public class TScan implements org.apache.thrift.TBase, jav return BATCH_SIZE; case 8: // SORT_COLUMNS return SORT_COLUMNS; + case 9: // REVERSED + return REVERSED; default: return null; } @@ -145,8 +150,9 @@ public class TScan implements org.apache.thrift.TBase, jav private static final int __CACHING_ISSET_ID = 1; private static final int __BATCHSIZE_ISSET_ID = 2; private static final int __SORTCOLUMNS_ISSET_ID = 3; + private static final int __REVERSED_ISSET_ID = 4; private byte __isset_bitfield = 0; - private _Fields optionals[] = {_Fields.START_ROW,_Fields.STOP_ROW,_Fields.TIMESTAMP,_Fields.COLUMNS,_Fields.CACHING,_Fields.FILTER_STRING,_Fields.BATCH_SIZE,_Fields.SORT_COLUMNS}; + private _Fields optionals[] = {_Fields.START_ROW,_Fields.STOP_ROW,_Fields.TIMESTAMP,_Fields.COLUMNS,_Fields.CACHING,_Fields.FILTER_STRING,_Fields.BATCH_SIZE,_Fields.SORT_COLUMNS,_Fields.REVERSED}; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); @@ -167,6 +173,8 @@ public class TScan implements org.apache.thrift.TBase, jav new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))); tmpMap.put(_Fields.SORT_COLUMNS, new org.apache.thrift.meta_data.FieldMetaData("sortColumns", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); + tmpMap.put(_Fields.REVERSED, new org.apache.thrift.meta_data.FieldMetaData("reversed", org.apache.thrift.TFieldRequirementType.OPTIONAL, + new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TScan.class, metaDataMap); } @@ -199,6 +207,7 @@ public class TScan implements org.apache.thrift.TBase, jav } this.batchSize = other.batchSize; this.sortColumns = other.sortColumns; + this.reversed = other.reversed; } public TScan deepCopy() { @@ -219,6 +228,8 @@ public class TScan implements org.apache.thrift.TBase, jav this.batchSize = 0; setSortColumnsIsSet(false); this.sortColumns = false; + setReversedIsSet(false); + this.reversed = false; } public byte[] getStartRow() { @@ -454,6 +465,29 @@ public class TScan implements org.apache.thrift.TBase, jav __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SORTCOLUMNS_ISSET_ID, value); } + public boolean isReversed() { + return this.reversed; + } + + public TScan setReversed(boolean reversed) { + this.reversed = reversed; + setReversedIsSet(true); + return this; + } + + public void unsetReversed() { + __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __REVERSED_ISSET_ID); + } + + /** Returns true if field reversed is set (has been assigned a value) and false otherwise */ + public boolean isSetReversed() { + return EncodingUtils.testBit(__isset_bitfield, __REVERSED_ISSET_ID); + } + + public void setReversedIsSet(boolean value) { + __isset_bitfield 
= EncodingUtils.setBit(__isset_bitfield, __REVERSED_ISSET_ID, value); + } + public void setFieldValue(_Fields field, Object value) { switch (field) { case START_ROW: @@ -520,6 +554,14 @@ public class TScan implements org.apache.thrift.TBase, jav } break; + case REVERSED: + if (value == null) { + unsetReversed(); + } else { + setReversed((Boolean)value); + } + break; + } } @@ -549,6 +591,9 @@ public class TScan implements org.apache.thrift.TBase, jav case SORT_COLUMNS: return Boolean.valueOf(isSortColumns()); + case REVERSED: + return Boolean.valueOf(isReversed()); + } throw new IllegalStateException(); } @@ -576,6 +621,8 @@ public class TScan implements org.apache.thrift.TBase, jav return isSetBatchSize(); case SORT_COLUMNS: return isSetSortColumns(); + case REVERSED: + return isSetReversed(); } throw new IllegalStateException(); } @@ -665,6 +712,15 @@ public class TScan implements org.apache.thrift.TBase, jav return false; } + boolean this_present_reversed = true && this.isSetReversed(); + boolean that_present_reversed = true && that.isSetReversed(); + if (this_present_reversed || that_present_reversed) { + if (!(this_present_reversed && that_present_reversed)) + return false; + if (this.reversed != that.reversed) + return false; + } + return true; } @@ -712,6 +768,11 @@ public class TScan implements org.apache.thrift.TBase, jav if (present_sortColumns) builder.append(sortColumns); + boolean present_reversed = true && (isSetReversed()); + builder.append(present_reversed); + if (present_reversed) + builder.append(reversed); + return builder.toHashCode(); } @@ -803,6 +864,16 @@ public class TScan implements org.apache.thrift.TBase, jav return lastComparison; } } + lastComparison = Boolean.valueOf(isSetReversed()).compareTo(typedOther.isSetReversed()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetReversed()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.reversed, typedOther.reversed); + if (lastComparison != 0) { + return lastComparison; + } + } return 0; } @@ -886,6 +957,12 @@ public class TScan implements org.apache.thrift.TBase, jav sb.append(this.sortColumns); first = false; } + if (isSetReversed()) { + if (!first) sb.append(", "); + sb.append("reversed:"); + sb.append(this.reversed); + first = false; + } sb.append(")"); return sb.toString(); } @@ -1005,6 +1082,14 @@ public class TScan implements org.apache.thrift.TBase, jav org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; + case 9: // REVERSED + if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) { + struct.reversed = iprot.readBool(); + struct.setReversedIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } @@ -1075,6 +1160,11 @@ public class TScan implements org.apache.thrift.TBase, jav oprot.writeBool(struct.sortColumns); oprot.writeFieldEnd(); } + if (struct.isSetReversed()) { + oprot.writeFieldBegin(REVERSED_FIELD_DESC); + oprot.writeBool(struct.reversed); + oprot.writeFieldEnd(); + } oprot.writeFieldStop(); oprot.writeStructEnd(); } @@ -1117,7 +1207,10 @@ public class TScan implements org.apache.thrift.TBase, jav if (struct.isSetSortColumns()) { optionals.set(7); } - oprot.writeBitSet(optionals, 8); + if (struct.isSetReversed()) { + optionals.set(8); + } + oprot.writeBitSet(optionals, 9); if (struct.isSetStartRow()) { oprot.writeBinary(struct.startRow); } @@ -1148,12 +1241,15 @@ public class TScan implements 
org.apache.thrift.TBase, jav if (struct.isSetSortColumns()) { oprot.writeBool(struct.sortColumns); } + if (struct.isSetReversed()) { + oprot.writeBool(struct.reversed); + } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, TScan struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; - BitSet incoming = iprot.readBitSet(8); + BitSet incoming = iprot.readBitSet(9); if (incoming.get(0)) { struct.startRow = iprot.readBinary(); struct.setStartRowIsSet(true); @@ -1195,6 +1291,10 @@ public class TScan implements org.apache.thrift.TBase, jav struct.sortColumns = iprot.readBool(); struct.setSortColumnsIsSet(true); } + if (incoming.get(8)) { + struct.reversed = iprot.readBool(); + struct.setReversedIsSet(true); + } } } diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java index 3dc0eda..2fb17d5 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java @@ -421,6 +421,10 @@ public class ThriftUtilities { out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels())); } + if (in.isSetReversed()) { + out.setReversed(in.isReversed()); + } + return out; } diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAppend.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAppend.java index 49d0e75..262cb82 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAppend.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAppend.java @@ -770,7 +770,7 @@ public class TAppend implements org.apache.thrift.TBase(2*_map91.size); for (int _i92 = 0; _i92 < _map91.size; ++_i92) { - ByteBuffer _key93; // required + ByteBuffer _key93; // optional ByteBuffer _val94; // required _key93 = iprot.readBinary(); _val94 = iprot.readBinary(); @@ -939,7 +939,7 @@ public class TAppend implements org.apache.thrift.TBase(2*_map102.size); for (int _i103 = 0; _i103 < _map102.size; ++_i103) { - ByteBuffer _key104; // required + ByteBuffer _key104; // optional ByteBuffer _val105; // required _key104 = iprot.readBinary(); _val105 = iprot.readBinary(); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java index a4d7777..abbad1c 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java @@ -906,7 +906,7 @@ public class TDelete implements org.apache.thrift.TBase(2*_map55.size); for (int _i56 = 0; _i56 < _map55.size; ++_i56) { - ByteBuffer _key57; // required + ByteBuffer _key57; // optional ByteBuffer _val58; // required _key57 = iprot.readBinary(); _val58 = iprot.readBinary(); @@ -1094,7 +1094,7 @@ public class TDelete implements org.apache.thrift.TBase(2*_map66.size); for (int _i67 = 0; _i67 < _map66.size; ++_i67) { - ByteBuffer _key68; // required + ByteBuffer _key68; // optional ByteBuffer _val69; // required _key68 = iprot.readBinary(); _val69 = iprot.readBinary(); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java index 7e93341..8e242b2 100644 --- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java @@ -1051,7 +1051,7 @@ public class TGet implements org.apache.thrift.TBase, java.i struct.attributes = new HashMap(2*_map19.size); for (int _i20 = 0; _i20 < _map19.size; ++_i20) { - ByteBuffer _key21; // required + ByteBuffer _key21; // optional ByteBuffer _val22; // required _key21 = iprot.readBinary(); _val22 = iprot.readBinary(); @@ -1273,7 +1273,7 @@ public class TGet implements org.apache.thrift.TBase, java.i struct.attributes = new HashMap(2*_map30.size); for (int _i31 = 0; _i31 < _map30.size; ++_i31) { - ByteBuffer _key32; // required + ByteBuffer _key32; // optional ByteBuffer _val33; // required _key32 = iprot.readBinary(); _val33 = iprot.readBinary(); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java index 358a9fc..90345aa 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java @@ -777,7 +777,7 @@ public class TIncrement implements org.apache.thrift.TBase(2*_map73.size); for (int _i74 = 0; _i74 < _map73.size; ++_i74) { - ByteBuffer _key75; // required + ByteBuffer _key75; // optional ByteBuffer _val76; // required _key75 = iprot.readBinary(); _val76 = iprot.readBinary(); @@ -946,7 +946,7 @@ public class TIncrement implements org.apache.thrift.TBase(2*_map84.size); for (int _i85 = 0; _i85 < _map84.size; ++_i85) { - ByteBuffer _key86; // required + ByteBuffer _key86; // optional ByteBuffer _val87; // required _key86 = iprot.readBinary(); _val87 = iprot.readBinary(); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java index c9a122c..62ddffb 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java @@ -871,7 +871,7 @@ public class TPut implements org.apache.thrift.TBase, java.i struct.attributes = new HashMap(2*_map37.size); for (int _i38 = 0; _i38 < _map37.size; ++_i38) { - ByteBuffer _key39; // required + ByteBuffer _key39; // optional ByteBuffer _val40; // required _key39 = iprot.readBinary(); _val40 = iprot.readBinary(); @@ -1055,7 +1055,7 @@ public class TPut implements org.apache.thrift.TBase, java.i struct.attributes = new HashMap(2*_map48.size); for (int _i49 = 0; _i49 < _map48.size; ++_i49) { - ByteBuffer _key50; // required + ByteBuffer _key50; // optional ByteBuffer _val51; // required _key50 = iprot.readBinary(); _val51 = iprot.readBinary(); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java index 542d068..0486b6b 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java @@ -48,6 +48,7 @@ public class TScan implements org.apache.thrift.TBase, jav private static final org.apache.thrift.protocol.TField BATCH_SIZE_FIELD_DESC = new org.apache.thrift.protocol.TField("batchSize", org.apache.thrift.protocol.TType.I32, (short)8); private static final 
org.apache.thrift.protocol.TField ATTRIBUTES_FIELD_DESC = new org.apache.thrift.protocol.TField("attributes", org.apache.thrift.protocol.TType.MAP, (short)9); private static final org.apache.thrift.protocol.TField AUTHORIZATIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("authorizations", org.apache.thrift.protocol.TType.STRUCT, (short)10); + private static final org.apache.thrift.protocol.TField REVERSED_FIELD_DESC = new org.apache.thrift.protocol.TField("reversed", org.apache.thrift.protocol.TType.BOOL, (short)11); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { @@ -65,6 +66,7 @@ public class TScan implements org.apache.thrift.TBase, jav public int batchSize; // optional public Map attributes; // optional public TAuthorization authorizations; // optional + public boolean reversed; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { @@ -77,7 +79,8 @@ public class TScan implements org.apache.thrift.TBase, jav FILTER_STRING((short)7, "filterString"), BATCH_SIZE((short)8, "batchSize"), ATTRIBUTES((short)9, "attributes"), - AUTHORIZATIONS((short)10, "authorizations"); + AUTHORIZATIONS((short)10, "authorizations"), + REVERSED((short)11, "reversed"); private static final Map byName = new HashMap(); @@ -112,6 +115,8 @@ public class TScan implements org.apache.thrift.TBase, jav return ATTRIBUTES; case 10: // AUTHORIZATIONS return AUTHORIZATIONS; + case 11: // REVERSED + return REVERSED; default: return null; } @@ -155,8 +160,9 @@ public class TScan implements org.apache.thrift.TBase, jav private static final int __CACHING_ISSET_ID = 0; private static final int __MAXVERSIONS_ISSET_ID = 1; private static final int __BATCHSIZE_ISSET_ID = 2; + private static final int __REVERSED_ISSET_ID = 3; private byte __isset_bitfield = 0; - private _Fields optionals[] = {_Fields.START_ROW,_Fields.STOP_ROW,_Fields.COLUMNS,_Fields.CACHING,_Fields.MAX_VERSIONS,_Fields.TIME_RANGE,_Fields.FILTER_STRING,_Fields.BATCH_SIZE,_Fields.ATTRIBUTES,_Fields.AUTHORIZATIONS}; + private _Fields optionals[] = {_Fields.START_ROW,_Fields.STOP_ROW,_Fields.COLUMNS,_Fields.CACHING,_Fields.MAX_VERSIONS,_Fields.TIME_RANGE,_Fields.FILTER_STRING,_Fields.BATCH_SIZE,_Fields.ATTRIBUTES,_Fields.AUTHORIZATIONS,_Fields.REVERSED}; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); @@ -183,6 +189,8 @@ public class TScan implements org.apache.thrift.TBase, jav new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)))); tmpMap.put(_Fields.AUTHORIZATIONS, new org.apache.thrift.meta_data.FieldMetaData("authorizations", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TAuthorization.class))); + tmpMap.put(_Fields.REVERSED, new org.apache.thrift.meta_data.FieldMetaData("reversed", org.apache.thrift.TFieldRequirementType.OPTIONAL, + new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TScan.class, metaDataMap); } @@ -242,6 +250,7 @@ public class TScan implements org.apache.thrift.TBase, jav if 
(other.isSetAuthorizations()) { this.authorizations = new TAuthorization(other.authorizations); } + this.reversed = other.reversed; } public TScan deepCopy() { @@ -263,6 +272,8 @@ public class TScan implements org.apache.thrift.TBase, jav this.batchSize = 0; this.attributes = null; this.authorizations = null; + setReversedIsSet(false); + this.reversed = false; } public byte[] getStartRow() { @@ -558,6 +569,29 @@ public class TScan implements org.apache.thrift.TBase, jav } } + public boolean isReversed() { + return this.reversed; + } + + public TScan setReversed(boolean reversed) { + this.reversed = reversed; + setReversedIsSet(true); + return this; + } + + public void unsetReversed() { + __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __REVERSED_ISSET_ID); + } + + /** Returns true if field reversed is set (has been assigned a value) and false otherwise */ + public boolean isSetReversed() { + return EncodingUtils.testBit(__isset_bitfield, __REVERSED_ISSET_ID); + } + + public void setReversedIsSet(boolean value) { + __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __REVERSED_ISSET_ID, value); + } + public void setFieldValue(_Fields field, Object value) { switch (field) { case START_ROW: @@ -640,6 +674,14 @@ public class TScan implements org.apache.thrift.TBase, jav } break; + case REVERSED: + if (value == null) { + unsetReversed(); + } else { + setReversed((Boolean)value); + } + break; + } } @@ -675,6 +717,9 @@ public class TScan implements org.apache.thrift.TBase, jav case AUTHORIZATIONS: return getAuthorizations(); + case REVERSED: + return Boolean.valueOf(isReversed()); + } throw new IllegalStateException(); } @@ -706,6 +751,8 @@ public class TScan implements org.apache.thrift.TBase, jav return isSetAttributes(); case AUTHORIZATIONS: return isSetAuthorizations(); + case REVERSED: + return isSetReversed(); } throw new IllegalStateException(); } @@ -813,6 +860,15 @@ public class TScan implements org.apache.thrift.TBase, jav return false; } + boolean this_present_reversed = true && this.isSetReversed(); + boolean that_present_reversed = true && that.isSetReversed(); + if (this_present_reversed || that_present_reversed) { + if (!(this_present_reversed && that_present_reversed)) + return false; + if (this.reversed != that.reversed) + return false; + } + return true; } @@ -870,6 +926,11 @@ public class TScan implements org.apache.thrift.TBase, jav if (present_authorizations) builder.append(authorizations); + boolean present_reversed = true && (isSetReversed()); + builder.append(present_reversed); + if (present_reversed) + builder.append(reversed); + return builder.toHashCode(); } @@ -981,6 +1042,16 @@ public class TScan implements org.apache.thrift.TBase, jav return lastComparison; } } + lastComparison = Boolean.valueOf(isSetReversed()).compareTo(typedOther.isSetReversed()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetReversed()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.reversed, typedOther.reversed); + if (lastComparison != 0) { + return lastComparison; + } + } return 0; } @@ -1088,6 +1159,12 @@ public class TScan implements org.apache.thrift.TBase, jav } first = false; } + if (isSetReversed()) { + if (!first) sb.append(", "); + sb.append("reversed:"); + sb.append(this.reversed); + first = false; + } sb.append(")"); return sb.toString(); } @@ -1222,7 +1299,7 @@ public class TScan implements org.apache.thrift.TBase, jav struct.attributes = new HashMap(2*_map109.size); for (int _i110 = 0; _i110 < _map109.size; ++_i110) { - 
ByteBuffer _key111; // required + ByteBuffer _key111; // optional ByteBuffer _val112; // required _key111 = iprot.readBinary(); _val112 = iprot.readBinary(); @@ -1244,6 +1321,14 @@ public class TScan implements org.apache.thrift.TBase, jav org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; + case 11: // REVERSED + if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) { + struct.reversed = iprot.readBool(); + struct.setReversedIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } @@ -1338,6 +1423,11 @@ public class TScan implements org.apache.thrift.TBase, jav oprot.writeFieldEnd(); } } + if (struct.isSetReversed()) { + oprot.writeFieldBegin(REVERSED_FIELD_DESC); + oprot.writeBool(struct.reversed); + oprot.writeFieldEnd(); + } oprot.writeFieldStop(); oprot.writeStructEnd(); } @@ -1386,7 +1476,10 @@ public class TScan implements org.apache.thrift.TBase, jav if (struct.isSetAuthorizations()) { optionals.set(9); } - oprot.writeBitSet(optionals, 10); + if (struct.isSetReversed()) { + optionals.set(10); + } + oprot.writeBitSet(optionals, 11); if (struct.isSetStartRow()) { oprot.writeBinary(struct.startRow); } @@ -1430,12 +1523,15 @@ public class TScan implements org.apache.thrift.TBase, jav if (struct.isSetAuthorizations()) { struct.authorizations.write(oprot); } + if (struct.isSetReversed()) { + oprot.writeBool(struct.reversed); + } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, TScan struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; - BitSet incoming = iprot.readBitSet(10); + BitSet incoming = iprot.readBitSet(11); if (incoming.get(0)) { struct.startRow = iprot.readBinary(); struct.setStartRowIsSet(true); @@ -1485,7 +1581,7 @@ public class TScan implements org.apache.thrift.TBase, jav struct.attributes = new HashMap(2*_map120.size); for (int _i121 = 0; _i121 < _map120.size; ++_i121) { - ByteBuffer _key122; // required + ByteBuffer _key122; // optional ByteBuffer _val123; // required _key122 = iprot.readBinary(); _val123 = iprot.readBinary(); @@ -1499,6 +1595,10 @@ public class TScan implements org.apache.thrift.TBase, jav struct.authorizations.read(iprot); struct.setAuthorizationsIsSet(true); } + if (incoming.get(10)) { + struct.reversed = iprot.readBool(); + struct.setReversedIsSet(true); + } } } diff --git a/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift/Hbase.thrift b/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift/Hbase.thrift index 9fe6468..f2d264a 100644 --- a/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift/Hbase.thrift +++ b/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift/Hbase.thrift @@ -149,7 +149,8 @@ struct TScan { 5:optional i32 caching, 6:optional Text filterString, 7:optional i32 batchSize, - 8:optional bool sortColumns + 8:optional bool sortColumns, + 9:optional bool reversed } // diff --git a/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift b/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift index 4a8ac1b..7c6d6a2 100644 --- a/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift +++ b/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift @@ -223,6 +223,7 @@ struct TScan { 8: optional i32 batchSize, 9: optional map attributes 10: optional TAuthorization authorizations + 11: 
optional bool reversed } /** diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java index 43cf37f..f5f61a6 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java @@ -556,6 +556,17 @@ public class TestThriftServer { smallerColumn = currentColumn; } + TScan reversedScan = new TScan(); + reversedScan.setReversed(true); + reversedScan.setStartRow(rowBname); + reversedScan.setStopRow(rowAname); + + int scanner8 = handler.scannerOpenWithScan(tableAname , reversedScan, null); + List results = handler.scannerGet(scanner8); + handler.scannerClose(scanner8); + assertEquals(results.size(), 1); + assertEquals(ByteBuffer.wrap(results.get(0).getRow()), rowBname); + // Teardown handler.disableTable(tableAname); handler.deleteTable(tableAname); diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java index db05c8d..5227df4 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java @@ -583,6 +583,55 @@ public class TestThriftHBaseServiceHandler { } @Test + public void testReverseScan() throws Exception { + ThriftHBaseServiceHandler handler = createHandler(); + ByteBuffer table = wrap(tableAname); + + // insert data + TColumnValue columnValue = new TColumnValue(wrap(familyAname), wrap(qualifierAname), + wrap(valueAname)); + List columnValues = new ArrayList(); + columnValues.add(columnValue); + for (int i = 0; i < 10; i++) { + TPut put = new TPut(wrap(("testReverseScan" + i).getBytes()), columnValues); + handler.put(table, put); + } + + // create reverse scan instance + TScan scan = new TScan(); + scan.setReversed(true); + List columns = new ArrayList(); + TColumn column = new TColumn(); + column.setFamily(familyAname); + column.setQualifier(qualifierAname); + columns.add(column); + scan.setColumns(columns); + scan.setStartRow("testReverseScan\uffff".getBytes()); + scan.setStopRow("testReverseScan".getBytes()); + + // get scanner and rows + int scanId = handler.openScanner(table, scan); + List results = handler.getScannerRows(scanId, 10); + assertEquals(10, results.size()); + for (int i = 0; i < 10; i++) { + // check if the rows are returned and in order + assertArrayEquals(("testReverseScan" + (9 - i)).getBytes(), results.get(i).getRow()); + } + + // check that we are at the end of the scan + results = handler.getScannerRows(scanId, 10); + assertEquals(0, results.size()); + + // close scanner and check that it was indeed closed + handler.closeScanner(scanId); + try { + handler.getScannerRows(scanId, 10); + fail("Scanner id should be invalid"); + } catch (TIllegalArgument e) { + } + } + + @Test public void testScanWithFilter() throws Exception { ThriftHBaseServiceHandler handler = createHandler(); ByteBuffer table = wrap(tableAname); @@ -761,6 +810,20 @@ public class TestThriftHBaseServiceHandler { assertArrayEquals(("testGetScannerResults" + pad(i, (byte) 2)).getBytes(), results.get(i) .getRow()); } + + // reverse scan + scan = new TScan(); + scan.setColumns(columns); + scan.setReversed(true); + scan.setStartRow("testGetScannerResults20".getBytes()); + 
scan.setStopRow("testGetScannerResults".getBytes()); + results = handler.getScannerResults(table, scan, 20); + assertEquals(20, results.size()); + for (int i = 0; i < 20; i++) { + // check if the rows are returned and in order + assertArrayEquals(("testGetScannerResults" + pad(19 - i, (byte) 2)).getBytes(), results.get(i) + .getRow()); + } } @Test