From c4d00f2b8e6eae985bb17063085cf60c22332830 Mon Sep 17 00:00:00 2001 From: Jan Hentschel Date: Thu, 28 Dec 2017 11:14:22 +0100 Subject: [PATCH] HBASE-13300 Fixed casing in set/getTimeStamp for Mutations --- .../org/apache/hadoop/hbase/MetaTableAccessor.java | 22 +++++++++++----------- .../java/org/apache/hadoop/hbase/client/Get.java | 20 ++++++++++++++++---- .../org/apache/hadoop/hbase/client/Mutation.java | 14 +++++++++++++- .../java/org/apache/hadoop/hbase/client/Scan.java | 22 ++++++++++++++++++++-- .../hadoop/hbase/filter/TimestampsFilter.java | 4 ++-- .../apache/hadoop/hbase/protobuf/ProtobufUtil.java | 2 +- .../hadoop/hbase/shaded/protobuf/ProtobufUtil.java | 2 +- .../apache/hadoop/hbase/client/TestMutation.java | 6 +++--- .../apache/hadoop/hbase/client/TestOperation.java | 10 +++++----- .../hbase/shaded/protobuf/TestProtobufUtil.java | 6 +++--- .../client/example/MultiThreadedClientExample.java | 4 ++-- .../hadoop/hbase/mapreduce/TableInputFormat.java | 2 +- .../hadoop/hbase/rest/client/RemoteHTable.java | 4 ++-- .../hadoop/hbase/rest/client/TestRemoteTable.java | 2 +- .../hadoop/hbase/master/TableNamespaceManager.java | 2 +- .../hbase/master/assignment/RegionStateStore.java | 4 ++-- .../apache/hadoop/hbase/regionserver/HRegion.java | 2 +- .../hbase/security/access/AccessControlLists.java | 2 +- .../hbase/security/access/AccessController.java | 2 +- .../DefaultVisibilityLabelServiceImpl.java | 6 +++--- .../org/apache/hadoop/hbase/HBaseTestCase.java | 2 +- .../org/apache/hadoop/hbase/TestMultiVersions.java | 6 +++--- .../hadoop/hbase/client/TestFromClientSide.java | 8 ++++---- .../hbase/client/TestMultiRespectsLimits.java | 2 +- .../hadoop/hbase/protobuf/TestProtobufUtil.java | 6 +++--- .../ExpAsStringVisibilityLabelServiceImpl.java | 2 +- .../spark/datasources/HBaseTableScanRDD.scala | 4 ++-- .../hadoop/hbase/thrift/ThriftServerRunner.java | 6 +++--- .../hadoop/hbase/thrift2/ThriftUtilities.java | 6 +++--- 29 files changed, 111 insertions(+), 69 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java index c53e2acb66..c11dd51aba 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java @@ -1357,7 +1357,7 @@ public class MetaTableAccessor { .setRow(put.getRow()) .setFamily(HConstants.REPLICATION_BARRIER_FAMILY) .setQualifier(seqBytes) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Type.Put) .setValue(seqBytes) .build()) @@ -1365,7 +1365,7 @@ public class MetaTableAccessor { .setRow(put.getRow()) .setFamily(HConstants.REPLICATION_META_FAMILY) .setQualifier(tableNameCq) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Cell.Type.Put) .setValue(tableName) .build()); @@ -1379,7 +1379,7 @@ public class MetaTableAccessor { .setRow(put.getRow()) .setFamily(HConstants.REPLICATION_META_FAMILY) .setQualifier(daughterNameCq) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Type.Put) .setValue(value) .build()); @@ -1392,7 +1392,7 @@ public class MetaTableAccessor { .setRow(put.getRow()) .setFamily(HConstants.REPLICATION_META_FAMILY) .setQualifier(parentNameCq) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Type.Put) .setValue(value) .build()); @@ -1409,7 +1409,7 @@ public class MetaTableAccessor { .setRow(put.getRow()) 
.setFamily(HConstants.CATALOG_FAMILY) .setQualifier(HConstants.SPLITA_QUALIFIER) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Type.Put) .setValue(RegionInfo.toByteArray(splitA)) .build()); @@ -1419,7 +1419,7 @@ public class MetaTableAccessor { .setRow(put.getRow()) .setFamily(HConstants.CATALOG_FAMILY) .setQualifier(HConstants.SPLITB_QUALIFIER) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Type.Put) .setValue(RegionInfo.toByteArray(splitB)) .build()); @@ -1554,7 +1554,7 @@ public class MetaTableAccessor { .setRow(put.getRow()) .setFamily(HConstants.CATALOG_FAMILY) .setQualifier(getRegionStateColumn()) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Cell.Type.Put) .setValue(Bytes.toBytes(state.name())) .build()); @@ -1666,7 +1666,7 @@ public class MetaTableAccessor { .setRow(putOfMerged.getRow()) .setFamily(HConstants.CATALOG_FAMILY) .setQualifier(HConstants.MERGEA_QUALIFIER) - .setTimestamp(putOfMerged.getTimeStamp()) + .setTimestamp(putOfMerged.getTimestamp()) .setType(Type.Put) .setValue(RegionInfo.toByteArray(regionA)) .build()) @@ -1674,7 +1674,7 @@ public class MetaTableAccessor { .setRow(putOfMerged.getRow()) .setFamily(HConstants.CATALOG_FAMILY) .setQualifier(HConstants.MERGEB_QUALIFIER) - .setTimestamp(putOfMerged.getTimeStamp()) + .setTimestamp(putOfMerged.getTimestamp()) .setType(Type.Put) .setValue(RegionInfo.toByteArray(regionB)) .build()); @@ -1917,7 +1917,7 @@ public class MetaTableAccessor { .setRow(put.getRow()) .setFamily(HConstants.REPLICATION_POSITION_FAMILY) .setQualifier(Bytes.toBytes(peerId)) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Cell.Type.Put) .setValue(Bytes.toBytes(Math.abs(entry.getValue()))) .build()); @@ -2051,7 +2051,7 @@ public class MetaTableAccessor { .setRow(p.getRow()) .setFamily(getCatalogFamily()) .setQualifier(HConstants.REGIONINFO_QUALIFIER) - .setTimestamp(p.getTimeStamp()) + .setTimestamp(p.getTimestamp()) .setType(Type.Put) .setValue(RegionInfo.toByteArray(hri)) .build()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java index 80b8a221d3..a9cfc7f986 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java @@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.util.Bytes; * execute {@link #setTimeRange(long, long) setTimeRange}. *

* To only retrieve columns with a specific timestamp, execute - * {@link #setTimeStamp(long) setTimestamp}. + * {@link #setTimestamp(long) setTimestamp}. *

* To limit the number of versions of each column to be returned, execute * {@link #setMaxVersions(int) setMaxVersions}. @@ -232,16 +232,28 @@ public class Get extends Query * Get versions of columns with the specified timestamp. * @param timestamp version timestamp * @return this for invocation chaining + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. + * Use {@link #setTimestamp(long)} instead */ - public Get setTimeStamp(long timestamp) - throws IOException { + @Deprecated + public Get setTimeStamp(long timestamp) throws IOException { + return this.setTimestamp(timestamp); + } + + /** + * Get versions of columns with the specified timestamp. + * @param timestamp version timestamp + * @return this for invocation chaining + */ + public Get setTimestamp(long timestamp) { try { - tr = new TimeRange(timestamp, timestamp+1); + tr = new TimeRange(timestamp, timestamp + 1); } catch(Exception e) { // This should never happen, unless integer overflow or something extremely wrong... LOG.error("TimeRange failed, likely caused by integer overflow. ", e); throw e; } + return this; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java index 8a2957467c..b10d3875a0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java @@ -113,7 +113,7 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C protected Mutation(Mutation clone) { super(clone); this.row = clone.getRow(); - this.ts = clone.getTimeStamp(); + this.ts = clone.getTimestamp(); this.familyMap = clone.getFamilyCellMap().entrySet().stream() .collect(Collectors.toMap(e -> e.getKey(), e -> new ArrayList<>(e.getValue()), (k, v) -> { @@ -339,8 +339,20 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C /** * Method for retrieving the timestamp * @return timestamp + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. + * Use {@link #getTimestamp()} instead */ + @Deprecated public long getTimeStamp() { + return this.getTimestamp(); + } + + /** + * Method for retrieving the timestamp. + * + * @return timestamp + */ + public long getTimestamp() { return this.ts; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java index 7139b26da9..41ddb03832 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java @@ -59,7 +59,7 @@ import org.apache.hadoop.hbase.util.Bytes; * To only retrieve columns within a specific range of version timestamps, call * {@link #setTimeRange(long, long) setTimeRange}. *

- * To only retrieve columns with a specific timestamp, call {@link #setTimeStamp(long) setTimestamp} + * To only retrieve columns with a specific timestamp, call {@link #setTimestamp(long) setTimestamp} * . *

* To limit the number of versions of each column to be returned, call {@link #setMaxVersions(int) @@ -376,16 +376,34 @@ public class Scan extends Query { * @see #setMaxVersions() * @see #setMaxVersions(int) * @return this + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. + * Use {@link #setTimestamp(long)} instead */ + @Deprecated public Scan setTimeStamp(long timestamp) throws IOException { + return this.setTimestamp(timestamp); + } + + /** + * Get versions of columns with the specified timestamp. Note, default maximum + * versions to return is 1. If your time range spans more than one version + * and you want all versions returned, up the number of versions beyond the + * defaut. + * @param timestamp version timestamp + * @see #setMaxVersions() + * @see #setMaxVersions(int) + * @return this + */ + public Scan setTimestamp(long timestamp) { try { - tr = new TimeRange(timestamp, timestamp+1); + tr = new TimeRange(timestamp, timestamp + 1); } catch(Exception e) { // This should never happen, unless integer overflow or something extremely wrong... LOG.error("TimeRange failed, likely caused by integer overflow. ", e); throw e; } + return this; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java index e0ec83a30a..465b3945d4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java @@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE *

* Note: Use of this filter overrides any time range/time stamp * options specified using {@link org.apache.hadoop.hbase.client.Get#setTimeRange(long, long)}, - * {@link org.apache.hadoop.hbase.client.Scan#setTimeRange(long, long)}, {@link org.apache.hadoop.hbase.client.Get#setTimeStamp(long)}, - * or {@link org.apache.hadoop.hbase.client.Scan#setTimeStamp(long)}. + * {@link org.apache.hadoop.hbase.client.Scan#setTimeRange(long, long)}, {@link org.apache.hadoop.hbase.client.Get#setTimestamp(long)}, + * or {@link org.apache.hadoop.hbase.client.Scan#setTimestamp(long)}. */ @InterfaceAudience.Public public class TimestampsFilter extends FilterBase { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 2152c7c8a3..7e3cdda90f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -1269,7 +1269,7 @@ public final class ProtobufUtil { builder.setRow(ByteStringer.wrap(mutation.getRow())); builder.setMutateType(type); builder.setDurability(toDurability(mutation.getDurability())); - builder.setTimestamp(mutation.getTimeStamp()); + builder.setTimestamp(mutation.getTimestamp()); Map attributes = mutation.getAttributesMap(); if (!attributes.isEmpty()) { NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java index 8d7c95689d..c917cbc9e3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java @@ -1432,7 +1432,7 @@ public final class ProtobufUtil { builder.setRow(UnsafeByteOperations.unsafeWrap(mutation.getRow())); builder.setMutateType(type); builder.setDurability(toDurability(mutation.getDurability())); - builder.setTimestamp(mutation.getTimeStamp()); + builder.setTimestamp(mutation.getTimestamp()); Map attributes = mutation.getAttributesMap(); if (!attributes.isEmpty()) { NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder(); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMutation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMutation.java index 9b8144db86..0f24eda3ae 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMutation.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMutation.java @@ -153,7 +153,7 @@ public class TestMutation { byte[] cloneValue = clone.getAttributesMap().get(name); assertTrue(Bytes.equals(originValue, cloneValue)); } - Assert.assertEquals(origin.getTimeStamp(), clone.getTimeStamp()); + Assert.assertEquals(origin.getTimestamp(), clone.getTimestamp()); Assert.assertEquals(origin.getPriority(), clone.getPriority()); if (origin instanceof Append) { assertEquals(((Append)origin).getTimeRange(), ((Append)clone).getTimeRange()); @@ -201,7 +201,7 @@ public class TestMutation { .setRow(row) .setFamily(family) .setQualifier(qualifier0) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Type.Put) .setValue(value0) .build()) @@ -223,7 +223,7 @@ public class TestMutation { assertTrue(cell0.getValueArray() == value0); // Verify timestamp - assertTrue(cell0.getTimestamp() == 
put.getTimeStamp()); + assertTrue(cell0.getTimestamp() == put.getTimestamp()); // Verify the cell of family:qualifier1 Cell cell1 = put.get(family, qualifier1).get(0); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java index cf40a69093..47ed35c118 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java @@ -363,14 +363,14 @@ public class TestOperation { Put p = new Put(ROW); List c = p.get(FAMILY, QUALIFIER); Assert.assertEquals(0, c.size()); - Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimeStamp()); + Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimestamp()); p.addColumn(FAMILY, ByteBuffer.wrap(QUALIFIER), 1984L, ByteBuffer.wrap(VALUE)); c = p.get(FAMILY, QUALIFIER); Assert.assertEquals(1, c.size()); Assert.assertEquals(1984L, c.get(0).getTimestamp()); Assert.assertArrayEquals(VALUE, CellUtil.cloneValue(c.get(0))); - Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimeStamp()); + Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimestamp()); Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); p = new Put(ROW); @@ -379,7 +379,7 @@ public class TestOperation { Assert.assertEquals(1, c.size()); Assert.assertEquals(2013L, c.get(0).getTimestamp()); Assert.assertArrayEquals(new byte[]{}, CellUtil.cloneValue(c.get(0))); - Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimeStamp()); + Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimestamp()); Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); p = new Put(ByteBuffer.wrap(ROW)); @@ -389,7 +389,7 @@ public class TestOperation { Assert.assertEquals(2001L, c.get(0).getTimestamp()); Assert.assertArrayEquals(new byte[]{}, CellUtil.cloneValue(c.get(0))); Assert.assertArrayEquals(ROW, CellUtil.cloneRow(c.get(0))); - Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimeStamp()); + Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimestamp()); Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); p = new Put(ByteBuffer.wrap(ROW), 1970L); @@ -399,7 +399,7 @@ public class TestOperation { Assert.assertEquals(2001L, c.get(0).getTimestamp()); Assert.assertArrayEquals(new byte[]{}, CellUtil.cloneValue(c.get(0))); Assert.assertArrayEquals(ROW, CellUtil.cloneRow(c.get(0))); - Assert.assertEquals(1970L, p.getTimeStamp()); + Assert.assertEquals(1970L, p.getTimestamp()); Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java index 91b02e6fad..839665f0ef 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java @@ -190,7 +190,7 @@ public class TestProtobufUtil { // put value always use the default timestamp if no // value level timestamp specified, // add the timestamp to the original mutate - long timestamp = put.getTimeStamp(); + long timestamp = put.getTimestamp(); for (ColumnValue.Builder column: mutateBuilder.getColumnValueBuilderList()) { for (QualifierValue.Builder qualifier: 
@@ -296,7 +296,7 @@ public class TestProtobufUtil { mutateBuilder.setDurability(MutationProto.Durability.USE_DEFAULT); Increment increment = ProtobufUtil.toIncrement(proto, null); - mutateBuilder.setTimestamp(increment.getTimeStamp()); + mutateBuilder.setTimestamp(increment.getTimestamp()); assertEquals(mutateBuilder.build(), ProtobufUtil.toMutation(MutationType.INCREMENT, increment)); } @@ -336,7 +336,7 @@ public class TestProtobufUtil { // append always use the latest timestamp, // reset the timestamp to the original mutate - mutateBuilder.setTimestamp(append.getTimeStamp()); + mutateBuilder.setTimestamp(append.getTimestamp()); assertEquals(mutateBuilder.build(), ProtobufUtil.toMutation(MutationType.APPEND, append)); } diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java index 7401f0b661..b0c2968e43 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java @@ -226,7 +226,7 @@ public class MultiThreadedClientExample extends Configured implements Tool { .setRow(rk) .setFamily(FAMILY) .setQualifier(QUAL) - .setTimestamp(p.getTimeStamp()) + .setTimestamp(p.getTimestamp()) .setType(Cell.Type.Put) .setValue(value) .build()); @@ -263,7 +263,7 @@ public class MultiThreadedClientExample extends Configured implements Tool { .setRow(rk) .setFamily(FAMILY) .setQualifier(QUAL) - .setTimestamp(p.getTimeStamp()) + .setTimestamp(p.getTimestamp()) .setType(Type.Put) .setValue(value) .build()); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java index 480c6118b0..3eb7d699bd 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java @@ -164,7 +164,7 @@ implements Configurable { } if (conf.get(SCAN_TIMESTAMP) != null) { - scan.setTimeStamp(Long.parseLong(conf.get(SCAN_TIMESTAMP))); + scan.setTimestamp(Long.parseLong(conf.get(SCAN_TIMESTAMP))); } if (conf.get(SCAN_TIMERANGE_START) != null && conf.get(SCAN_TIMERANGE_END) != null) { diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java index bb48243ad7..8991d3795d 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java @@ -201,7 +201,7 @@ public class RemoteHTable implements Table { protected CellSetModel buildModelFromPut(Put put) { RowModel row = new RowModel(put.getRow()); - long ts = put.getTimeStamp(); + long ts = put.getTimestamp(); for (List cells: put.getFamilyCellMap().values()) { for (Cell cell: cells) { row.addCell(new CellModel(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell), @@ -468,7 +468,7 @@ public class RemoteHTable implements Table { @Override public void delete(Delete delete) throws IOException { String spec = buildRowSpec(delete.getRow(), delete.getFamilyCellMap(), - delete.getTimeStamp(), delete.getTimeStamp(), 1); + delete.getTimestamp(), delete.getTimestamp(), 1); for (int i = 0; i < maxRetries; i++) { Response 
response = client.delete(spec); int code = response.getCode(); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java index 1fa17ca5ba..a6ce61b5d2 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java @@ -219,7 +219,7 @@ public class TestRemoteTable { get = new Get(ROW_2); get.addFamily(COLUMN_1); get.addFamily(COLUMN_2); - get.setTimeStamp(TS_1); + get.setTimestamp(TS_1); result = remoteTable.get(get); value1 = result.getValue(COLUMN_1, QUALIFIER_1); value2 = result.getValue(COLUMN_2, QUALIFIER_2); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java index c018383d75..469499f7d5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java @@ -157,7 +157,7 @@ public class TableNamespaceManager { .setRow(row) .setFamily(TableDescriptorBuilder.NAMESPACE_FAMILY_INFO_BYTES) .setQualifier(TableDescriptorBuilder.NAMESPACE_COL_DESC_BYTES) - .setTimestamp(p.getTimeStamp()) + .setTimestamp(p.getTimestamp()) .setType(Cell.Type.Put) .setValue(ProtobufUtil.toProtoNamespaceDescriptor(ns).toByteArray()) .build()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java index d201e0f05b..f72f2ad601 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java @@ -184,7 +184,7 @@ public class RegionStateStore { .setRow(put.getRow()) .setFamily(HConstants.CATALOG_FAMILY) .setQualifier(getServerNameColumn(replicaId)) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Cell.Type.Put) .setValue(Bytes.toBytes(regionLocation.getServerName())) .build()); @@ -194,7 +194,7 @@ public class RegionStateStore { .setRow(put.getRow()) .setFamily(HConstants.CATALOG_FAMILY) .setQualifier(getStateColumn(replicaId)) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Cell.Type.Put) .setValue(Bytes.toBytes(state.name())) .build()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index b9a5a2ba36..122e78269d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -2797,7 +2797,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi if(delete.getFamilyCellMap().isEmpty()){ for(byte [] family : this.htableDescriptor.getColumnFamilyNames()){ // Don't eat the timestamp - delete.addFamily(family, delete.getTimeStamp()); + delete.addFamily(family, delete.getTimestamp()); } } else { for(byte [] family : delete.getFamilyCellMap().keySet()) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java index 1a192ea2d5..9b2ee4e56d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java @@ -172,7 +172,7 @@ public class AccessControlLists { .setRow(p.getRow()) .setFamily(ACL_LIST_FAMILY) .setQualifier(key) - .setTimestamp(p.getTimeStamp()) + .setTimestamp(p.getTimestamp()) .setType(Type.Put) .setValue(value) .build()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index 0bb61c921f..d530f3b39d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -1761,7 +1761,7 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor, } AuthResult authResult = null; if (checkCoveringPermission(user, opType, c.getEnvironment(), m.getRow(), - m.getFamilyCellMap(), m.getTimeStamp(), Action.WRITE)) { + m.getFamilyCellMap(), m.getTimestamp(), Action.WRITE)) { authResult = AuthResult.allow(opType.toString(), "Covering cell set", user, Action.WRITE, table, m.getFamilyCellMap()); } else { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java index 104c33f9e9..519502e5ae 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java @@ -217,7 +217,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService .setRow(row) .setFamily(LABELS_TABLE_FAMILY) .setQualifier(LABEL_QUALIFIER) - .setTimestamp(p.getTimeStamp()) + .setTimestamp(p.getTimestamp()) .setType(Type.Put) .setValue(Bytes.toBytes(SYSTEM_LABEL)) .build()); @@ -245,7 +245,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService .setRow(row) .setFamily(LABELS_TABLE_FAMILY) .setQualifier(LABEL_QUALIFIER) - .setTimestamp(p.getTimeStamp()) + .setTimestamp(p.getTimestamp()) .setType(Type.Put) .setValue(label) .setTags(TagUtil.fromList(Arrays.asList(LABELS_TABLE_TAGS))) @@ -285,7 +285,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService .setRow(row) .setFamily(LABELS_TABLE_FAMILY) .setQualifier(user) - .setTimestamp(p.getTimeStamp()) + .setTimestamp(p.getTimestamp()) .setType(Cell.Type.Put) .setValue(DUMMY_VALUE) .setTags(TagUtil.fromList(Arrays.asList(LABELS_TABLE_TAGS))) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java index 92581b8fb6..ec294e1c43 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java @@ -365,7 +365,7 @@ public abstract class HBaseTestCase extends TestCase { final byte [] value) throws IOException { Get get = new Get(row); - get.setTimeStamp(timestamp); + get.setTimestamp(timestamp); Result res = region.get(get); NavigableMap>> map = res.getMap(); diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java index 1a0215e5a9..44c43bd51b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java @@ -232,7 +232,7 @@ public class TestMultiVersions { for (int j = 0; j < timestamp.length; j++) { Get get = new Get(rows[i]); get.addFamily(HConstants.CATALOG_FAMILY); - get.setTimeStamp(timestamp[j]); + get.setTimestamp(timestamp[j]); Result result = table.get(get); int cellCount = 0; for(@SuppressWarnings("unused")Cell kv : result.listCells()) { @@ -280,7 +280,7 @@ public class TestMultiVersions { count = 0; scan = new Scan(); - scan.setTimeStamp(1000L); + scan.setTimestamp(1000L); scan.addFamily(HConstants.CATALOG_FAMILY); s = table.getScanner(scan); @@ -316,7 +316,7 @@ public class TestMultiVersions { count = 0; scan = new Scan(); - scan.setTimeStamp(100L); + scan.setTimestamp(100L); scan.addFamily(HConstants.CATALOG_FAMILY); s = table.getScanner(scan); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java index e48667e971..d2b84cd709 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java @@ -2744,7 +2744,7 @@ public class TestFromClientSide { throws Exception { Get get = new Get(row); get.addColumn(family, qualifier); - get.setTimeStamp(stamp); + get.setTimestamp(stamp); get.setMaxVersions(Integer.MAX_VALUE); Result result = ht.get(get); assertSingleResult(result, row, family, qualifier, stamp, value); @@ -2755,7 +2755,7 @@ public class TestFromClientSide { throws Exception { Get get = new Get(row); get.addColumn(family, qualifier); - get.setTimeStamp(stamp); + get.setTimestamp(stamp); get.setMaxVersions(Integer.MAX_VALUE); Result result = ht.get(get); assertEmptyResult(result); @@ -2766,7 +2766,7 @@ public class TestFromClientSide { throws Exception { Scan scan = new Scan(row); scan.addColumn(family, qualifier); - scan.setTimeStamp(stamp); + scan.setTimestamp(stamp); scan.setMaxVersions(Integer.MAX_VALUE); Result result = getSingleScanResult(ht, scan); assertSingleResult(result, row, family, qualifier, stamp, value); @@ -2777,7 +2777,7 @@ public class TestFromClientSide { throws Exception { Scan scan = new Scan(row); scan.addColumn(family, qualifier); - scan.setTimeStamp(stamp); + scan.setTimestamp(stamp); scan.setMaxVersions(Integer.MAX_VALUE); Result result = getSingleScanResult(ht, scan); assertNullResult(result); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java index 43f2c499df..1950425631 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java @@ -156,7 +156,7 @@ public class TestMultiRespectsLimits { .setRow(row) .setFamily(FAMILY) .setQualifier(col) - .setTimestamp(p.getTimeStamp()) + .setTimestamp(p.getTimestamp()) .setType(Cell.Type.Put) .setValue(value) .build()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java index 52ae0ab321..7c80075e25 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java @@ -141,7 +141,7 @@ public class TestProtobufUtil { // append always use the latest timestamp, // reset the timestamp to the original mutate - mutateBuilder.setTimestamp(append.getTimeStamp()); + mutateBuilder.setTimestamp(append.getTimestamp()); assertEquals(mutateBuilder.build(), ProtobufUtil.toMutation(MutationType.APPEND, append)); } @@ -224,7 +224,7 @@ public class TestProtobufUtil { mutateBuilder.setDurability(MutationProto.Durability.USE_DEFAULT); Increment increment = ProtobufUtil.toIncrement(proto, null); - mutateBuilder.setTimestamp(increment.getTimeStamp()); + mutateBuilder.setTimestamp(increment.getTimestamp()); assertEquals(mutateBuilder.build(), ProtobufUtil.toMutation(MutationType.INCREMENT, increment)); } @@ -264,7 +264,7 @@ public class TestProtobufUtil { // put value always use the default timestamp if no // value level timestamp specified, // add the timestamp to the original mutate - long timestamp = put.getTimeStamp(); + long timestamp = put.getTimestamp(); for (ColumnValue.Builder column: mutateBuilder.getColumnValueBuilderList()) { for (QualifierValue.Builder qualifier: diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java index 8be934fa93..58e8075ed4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java @@ -110,7 +110,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer .setRow(p.getRow()) .setFamily(LABELS_TABLE_FAMILY) .setQualifier(auth) - .setTimestamp(p.getTimeStamp()) + .setTimestamp(p.getTimestamp()) .setType(Cell.Type.Put) .setValue(DUMMY_VALUE) .build()); diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala index 1ca1b454f1..8b3dafc4f9 100644 --- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala +++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/datasources/HBaseTableScanRDD.scala @@ -243,8 +243,8 @@ class HBaseTableScanRDD(relation: HBaseRelation, private def handleTimeSemantics(query: Query): Unit = { // Set timestamp related values if present (query, relation.timestamp, relation.minTimestamp, relation.maxTimestamp) match { - case (q: Scan, Some(ts), None, None) => q.setTimeStamp(ts) - case (q: Get, Some(ts), None, None) => q.setTimeStamp(ts) + case (q: Scan, Some(ts), None, None) => q.setTimestamp(ts) + case (q: Get, Some(ts), None, None) => q.setTimestamp(ts) case (q:Scan, None, Some(minStamp), Some(maxStamp)) => q.setTimeRange(minStamp, maxStamp) case (q:Get, None, Some(minStamp), Some(maxStamp)) => q.setTimeRange(minStamp, maxStamp) diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java index daad446273..754302b778 100644 --- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java @@ -1352,7 +1352,7 @@ public class ThriftServerRunner implements Runnable { .setRow(put.getRow()) .setFamily(famAndQf[0]) .setQualifier(famAndQf[1]) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Type.Put) .setValue(m.value != null ? getBytes(m.value) : HConstants.EMPTY_BYTE_ARRAY) @@ -1420,7 +1420,7 @@ public class ThriftServerRunner implements Runnable { .setRow(put.getRow()) .setFamily(famAndQf[0]) .setQualifier(famAndQf[1]) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Type.Put) .setValue(m.value != null ? getBytes(m.value) : HConstants.EMPTY_BYTE_ARRAY) @@ -1903,7 +1903,7 @@ public class ThriftServerRunner implements Runnable { .setRow(put.getRow()) .setFamily(famAndQf[0]) .setQualifier(famAndQf[1]) - .setTimestamp(put.getTimeStamp()) + .setTimestamp(put.getTimestamp()) .setType(Type.Put) .setValue(mput.value != null ? getBytes(mput.value) : HConstants.EMPTY_BYTE_ARRAY) diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java index 8ab5a01358..24aff87656 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java @@ -96,7 +96,7 @@ public class ThriftUtilities { // Timestamp overwrites time range if both are set if (in.isSetTimestamp()) { - out.setTimeStamp(in.getTimestamp()); + out.setTimestamp(in.getTimestamp()); } else if (in.isSetTimeRange()) { out.setTimeRange(in.getTimeRange().getMinStamp(), in.getTimeRange().getMaxStamp()); } @@ -234,7 +234,7 @@ public class ThriftUtilities { .setRow(out.getRow()) .setFamily(columnValue.getFamily()) .setQualifier(columnValue.getQualifier()) - .setTimestamp(out.getTimeStamp()) + .setTimestamp(out.getTimestamp()) .setType(Cell.Type.Put) .setValue(columnValue.getValue()) .build()); @@ -362,7 +362,7 @@ public class ThriftUtilities { TDelete out = new TDelete(ByteBuffer.wrap(in.getRow())); List columns = new ArrayList<>(in.getFamilyCellMap().entrySet().size()); - long rowTimestamp = in.getTimeStamp(); + long rowTimestamp = in.getTimestamp(); if (rowTimestamp != HConstants.LATEST_TIMESTAMP) { out.setTimestamp(rowTimestamp); } -- 2.14.3 (Apple Git-98)
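
Editor's note (not part of the patch): the change above deprecates the inconsistently cased setTimeStamp/getTimeStamp accessors on Get, Scan, and Mutation in favor of camelCase setTimestamp/getTimestamp. The following minimal sketch shows how client code migrates to the renamed methods; the class name, row key, and timestamp value are illustrative only, and it assumes an HBase 2.x client API matching this patch on the classpath.

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class TimestampNamingExample {
  public static void main(String[] args) {
    long ts = 1514457262000L; // illustrative version timestamp (ms)

    // The old setTimeStamp/getTimeStamp methods still compile but are @Deprecated
    // after this patch; the camelCase variants below are the replacements.

    Get get = new Get(Bytes.toBytes("row-1"));
    get.setTimestamp(ts);                  // was: get.setTimeStamp(ts)

    Scan scan = new Scan();
    scan.setTimestamp(ts);                 // was: scan.setTimeStamp(ts)

    Put put = new Put(Bytes.toBytes("row-1"), ts);
    long putTs = put.getTimestamp();       // was: put.getTimeStamp()
    System.out.println("Put timestamp: " + putTs);
  }
}

The deprecated overloads simply delegate to the new methods, so existing callers keep working until the old names are removed in HBase 3.0.0.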