From 5f53fc55d945a55c98ad644e222848ec54a1e38d Mon Sep 17 00:00:00 2001 From: Mike Drob Date: Fri, 13 Jul 2018 09:40:51 -0500 Subject: [PATCH] HBASE-20884 Replace Base64 with j.u.Base64 Signed-off-by: Andrew Purtell Signed-off-by: tedyu --- .../org/apache/hadoop/hbase/client/TestGet.java | 8 +-- .../java/org/apache/hadoop/hbase/util/Base64.java | 8 +-- .../apache/hadoop/hbase/thrift/HttpDoAsClient.java | 5 +- .../hadoop/hbase/rest/ScannerInstanceResource.java | 10 ++-- .../hadoop/hbase/rest/model/ScannerModel.java | 65 +++++++++++----------- .../hadoop/hbase/rest/model/TestModelBase.java | 3 +- .../apache/hadoop/hbase/mapreduce/ImportTsv.java | 4 +- .../mapreduce/SimpleTotalOrderPartitioner.java | 10 ++-- .../hadoop/hbase/mapreduce/TableMapReduceUtil.java | 5 +- .../hadoop/hbase/mapreduce/TextSortReducer.java | 4 +- .../hadoop/hbase/mapreduce/TsvImporterMapper.java | 4 +- .../hbase/mapreduce/TsvImporterTextMapper.java | 10 ++-- .../hadoop/hbase/thrift/ThriftHttpServlet.java | 6 +- 13 files changed, 74 insertions(+), 68 deletions(-) diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java index fdb07d0d11..bfa3b16c79 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java @@ -29,6 +29,7 @@ import java.io.FileOutputStream; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.util.Arrays; +import java.util.Base64; import java.util.List; import java.util.Set; @@ -43,7 +44,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.security.access.Permission; import org.apache.hadoop.hbase.security.visibility.Authorizations; -import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; import org.junit.Test; @@ -210,9 +210,9 @@ public class TestGet { assertFalse("Should be deleted: " + jarFile.getPath(), jarFile.exists()); ClientProtos.Get getProto1 = - ClientProtos.Get.parseFrom(Base64.decode(PB_GET)); + ClientProtos.Get.parseFrom(Base64.getDecoder().decode(PB_GET)); ClientProtos.Get getProto2 = - ClientProtos.Get.parseFrom(Base64.decode(PB_GET_WITH_FILTER_LIST)); + ClientProtos.Get.parseFrom(Base64.getDecoder().decode(PB_GET_WITH_FILTER_LIST)); try { ProtobufUtil.toGet(getProto1); fail("Should not be able to load the filter class"); @@ -229,7 +229,7 @@ public class TestGet { instanceof DeserializationException); } FileOutputStream fos = new FileOutputStream(jarFile); - fos.write(Base64.decode(MOCK_FILTER_JAR)); + fos.write(Base64.getDecoder().decode(MOCK_FILTER_JAR)); fos.close(); Get get1 = ProtobufUtil.toGet(getProto1); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java index 9fd84e682b..03193b611b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java @@ -118,7 +118,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; *
* version: 2.2.1 */ -@InterfaceAudience.Public +@InterfaceAudience.Private @InterfaceStability.Stable public class Base64 { @@ -1290,7 +1290,7 @@ public class Base64 { * @see Base64 * @since 1.3 */ - @InterfaceAudience.Public + @InterfaceAudience.Private @InterfaceStability.Stable public static class Base64InputStream extends FilterInputStream { private boolean encode; // Encoding or decoding @@ -1492,7 +1492,7 @@ public class Base64 { * @see Base64 * @since 1.3 */ - @InterfaceAudience.Public + @InterfaceAudience.Private @InterfaceStability.Stable public static class Base64OutputStream extends FilterOutputStream { private boolean encode; @@ -1538,7 +1538,7 @@ public class Base64 { * @see Base64#DONT_BREAK_LINES * @since 1.3 */ - @InterfaceAudience.Public + @InterfaceAudience.Private @InterfaceStability.Stable public Base64OutputStream(OutputStream out, int options) { super(out); diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java index df18fed853..4000f862f3 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java @@ -26,6 +26,7 @@ import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; +import java.util.Base64; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -42,7 +43,7 @@ import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; import org.apache.hadoop.hbase.thrift.generated.Hbase; import org.apache.hadoop.hbase.thrift.generated.TCell; import org.apache.hadoop.hbase.thrift.generated.TRowResult; -import org.apache.hadoop.hbase.util.Base64; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.protocol.TProtocol; import org.apache.thrift.transport.THttpClient; @@ -219,7 +220,7 @@ public class HttpDoAsClient { final byte[] outToken = context.initSecContext(new byte[0], 0, 0); StringBuffer outputBuffer = new StringBuffer(); outputBuffer.append("Negotiate "); - outputBuffer.append(Base64.encodeBytes(outToken).replace("\n", "")); + outputBuffer.append(Bytes.toString(Base64.getEncoder().encode(outToken))); System.out.print("Ticket is: " + outputBuffer); return outputBuffer.toString(); } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java index 49a2ef1f2f..cd98a8ada2 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.rest; import java.io.IOException; +import java.util.Base64; import javax.ws.rs.DELETE; import javax.ws.rs.GET; @@ -41,7 +42,6 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.rest.model.CellModel; import org.apache.hadoop.hbase.rest.model.CellSetModel; import org.apache.hadoop.hbase.rest.model.RowModel; -import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Private @@ -172,10 +172,10 @@ public class ScannerInstanceResource extends ResourceBase { } ResponseBuilder response = Response.ok(CellUtil.cloneValue(value)); 
response.cacheControl(cacheControl); - response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value))); - response.header("X-Column", - Base64.encodeBytes( - KeyValue.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value)))); + response.header("X-Row", Bytes.toString(Base64.getEncoder().encode( + CellUtil.cloneRow(value)))); + response.header("X-Column", Bytes.toString(Base64.getEncoder().encode( + KeyValue.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))))); response.header("X-Timestamp", value.getTimestamp()); servlet.getMetrics().incrementSucessfulGetRequests(1); return response.build(); diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java index 7512d3e746..e6807bde2f 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java @@ -24,6 +24,7 @@ import java.io.Serializable; import java.io.StringReader; import java.io.StringWriter; import java.util.ArrayList; +import java.util.Base64; import java.util.List; import java.util.Map; import java.util.NavigableSet; @@ -73,7 +74,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner; import org.apache.hadoop.hbase.security.visibility.Authorizations; -import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; @@ -148,10 +148,10 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { switch (type) { case BinaryComparator: case BinaryPrefixComparator: - this.value = Base64.encodeBytes(comparator.getValue()); + this.value = Bytes.toString(Base64.getEncoder().encode(comparator.getValue())); break; case BitComparator: - this.value = Base64.encodeBytes(comparator.getValue()); + this.value = Bytes.toString(Base64.getEncoder().encode(comparator.getValue())); this.op = ((BitComparator)comparator).getOperator().toString(); break; case NullComparator: @@ -169,13 +169,13 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { ByteArrayComparable comparator; switch (ComparatorType.valueOf(type)) { case BinaryComparator: - comparator = new BinaryComparator(Base64.decode(value)); + comparator = new BinaryComparator(Base64.getDecoder().decode(value)); break; case BinaryPrefixComparator: - comparator = new BinaryPrefixComparator(Base64.decode(value)); + comparator = new BinaryPrefixComparator(Base64.getDecoder().decode(value)); break; case BitComparator: - comparator = new BitComparator(Base64.decode(value), + comparator = new BitComparator(Base64.getDecoder().decode(value), BitComparator.BitwiseOp.valueOf(op)); break; case NullComparator: @@ -259,20 +259,22 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { this.offset = ((ColumnPaginationFilter)filter).getOffset(); break; case ColumnPrefixFilter: - this.value = Base64.encodeBytes(((ColumnPrefixFilter)filter).getPrefix()); + byte[] src = ((ColumnPrefixFilter)filter).getPrefix(); + this.value = Bytes.toString(Base64.getEncoder().encode(src)); break; case ColumnRangeFilter: - this.minColumn = Base64.encodeBytes(((ColumnRangeFilter)filter).getMinColumn()); - this.minColumnInclusive = ((ColumnRangeFilter)filter).getMinColumnInclusive(); - this.maxColumn 
= Base64.encodeBytes(((ColumnRangeFilter)filter).getMaxColumn()); - this.maxColumnInclusive = ((ColumnRangeFilter)filter).getMaxColumnInclusive(); + ColumnRangeFilter crf = (ColumnRangeFilter)filter; + this.minColumn = Bytes.toString(Base64.getEncoder().encode(crf.getMinColumn())); + this.minColumnInclusive = crf.getMinColumnInclusive(); + this.maxColumn = Bytes.toString(Base64.getEncoder().encode(crf.getMaxColumn())); + this.maxColumnInclusive = crf.getMaxColumnInclusive(); break; case DependentColumnFilter: { DependentColumnFilter dcf = (DependentColumnFilter)filter; - this.family = Base64.encodeBytes(dcf.getFamily()); + this.family = Bytes.toString(Base64.getEncoder().encode(dcf.getFamily())); byte[] qualifier = dcf.getQualifier(); if (qualifier != null) { - this.qualifier = Base64.encodeBytes(qualifier); + this.qualifier = Bytes.toString(Base64.getEncoder().encode(qualifier)); } this.op = dcf.getOperator().toString(); this.comparator = new ByteArrayComparableModel(dcf.getComparator()); @@ -289,13 +291,13 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { case KeyOnlyFilter: break; case InclusiveStopFilter: - this.value = - Base64.encodeBytes(((InclusiveStopFilter)filter).getStopRowKey()); + this.value = Bytes.toString(Base64.getEncoder().encode( + ((InclusiveStopFilter)filter).getStopRowKey())); break; case MultipleColumnPrefixFilter: this.prefixes = new ArrayList(); for (byte[] prefix: ((MultipleColumnPrefixFilter)filter).getPrefix()) { - this.prefixes.add(Base64.encodeBytes(prefix)); + this.prefixes.add(Bytes.toString(Base64.getEncoder().encode(prefix))); } break; case MultiRowRangeFilter: @@ -309,7 +311,8 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { this.value = Long.toString(((PageFilter)filter).getPageSize()); break; case PrefixFilter: - this.value = Base64.encodeBytes(((PrefixFilter)filter).getPrefix()); + this.value = Bytes.toString(Base64.getEncoder().encode( + ((PrefixFilter)filter).getPrefix())); break; case FamilyFilter: case QualifierFilter: @@ -326,10 +329,10 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { case SingleColumnValueExcludeFilter: case SingleColumnValueFilter: { SingleColumnValueFilter scvf = (SingleColumnValueFilter) filter; - this.family = Base64.encodeBytes(scvf.getFamily()); + this.family = Bytes.toString(Base64.getEncoder().encode(scvf.getFamily())); byte[] qualifier = scvf.getQualifier(); if (qualifier != null) { - this.qualifier = Base64.encodeBytes(qualifier); + this.qualifier = Bytes.toString(Base64.getEncoder().encode(qualifier)); } this.op = scvf.getOperator().toString(); this.comparator = @@ -368,16 +371,16 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { filter = new ColumnPaginationFilter(limit, offset); break; case ColumnPrefixFilter: - filter = new ColumnPrefixFilter(Base64.decode(value)); + filter = new ColumnPrefixFilter(Base64.getDecoder().decode(value)); break; case ColumnRangeFilter: - filter = new ColumnRangeFilter(Base64.decode(minColumn), - minColumnInclusive, Base64.decode(maxColumn), + filter = new ColumnRangeFilter(Base64.getDecoder().decode(minColumn), + minColumnInclusive, Base64.getDecoder().decode(maxColumn), maxColumnInclusive); break; case DependentColumnFilter: - filter = new DependentColumnFilter(Base64.decode(family), - qualifier != null ? Base64.decode(qualifier) : null, + filter = new DependentColumnFilter(Base64.getDecoder().decode(family), + qualifier != null ? 
Base64.getDecoder().decode(qualifier) : null, dropDependentColumn, CompareOp.valueOf(op), comparator.build()); break; case FamilyFilter: @@ -394,7 +397,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { filter = new FirstKeyOnlyFilter(); break; case InclusiveStopFilter: - filter = new InclusiveStopFilter(Base64.decode(value)); + filter = new InclusiveStopFilter(Base64.getDecoder().decode(value)); break; case KeyOnlyFilter: filter = new KeyOnlyFilter(); @@ -402,7 +405,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { case MultipleColumnPrefixFilter: { byte[][] values = new byte[prefixes.size()][]; for (int i = 0; i < prefixes.size(); i++) { - values[i] = Base64.decode(prefixes.get(i)); + values[i] = Base64.getDecoder().decode(prefixes.get(i)); } filter = new MultipleColumnPrefixFilter(values); } break; @@ -417,7 +420,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { filter = new PageFilter(Long.parseLong(value)); break; case PrefixFilter: - filter = new PrefixFilter(Base64.decode(value)); + filter = new PrefixFilter(Base64.getDecoder().decode(value)); break; case QualifierFilter: filter = new QualifierFilter(CompareOp.valueOf(op), comparator.build()); @@ -429,8 +432,8 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { filter = new RowFilter(CompareOp.valueOf(op), comparator.build()); break; case SingleColumnValueFilter: - filter = new SingleColumnValueFilter(Base64.decode(family), - qualifier != null ? Base64.decode(qualifier) : null, + filter = new SingleColumnValueFilter(Base64.getDecoder().decode(family), + qualifier != null ? Base64.getDecoder().decode(qualifier) : null, CompareOp.valueOf(op), comparator.build()); if (ifMissing != null) { ((SingleColumnValueFilter)filter).setFilterIfMissing(ifMissing); @@ -440,8 +443,8 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { } break; case SingleColumnValueExcludeFilter: - filter = new SingleColumnValueExcludeFilter(Base64.decode(family), - qualifier != null ? Base64.decode(qualifier) : null, + filter = new SingleColumnValueExcludeFilter(Base64.getDecoder().decode(family), + qualifier != null ? 
Base64.getDecoder().decode(qualifier) : null, CompareOp.valueOf(op), comparator.build()); if (ifMissing != null) { ((SingleColumnValueExcludeFilter)filter).setFilterIfMissing(ifMissing); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java index ad3a667a22..c558dbc629 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java @@ -34,6 +34,7 @@ import javax.xml.bind.JAXBException; import java.io.IOException; import java.io.StringReader; import java.io.StringWriter; +import java.util.Base64; @Category(SmallTests.class) public abstract class TestModelBase extends TestCase { @@ -94,7 +95,7 @@ public abstract class TestModelBase extends TestCase { Exception { return (T)clazz.getMethod("getObjectFromMessage", byte[].class).invoke( clazz.getDeclaredConstructor().newInstance(), - Base64.decode(AS_PB)); + Base64.getDecoder().decode(AS_PB)); } protected abstract void checkModel(T model); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java index 323e1e7b87..ac821f1357 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java @@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.RegionLocator; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; -import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.io.Text; @@ -63,6 +62,7 @@ import org.apache.hadoop.util.ToolRunner; import java.io.File; import java.io.IOException; import java.util.ArrayList; +import java.util.Base64; import java.util.HashSet; import java.util.Set; @@ -472,7 +472,7 @@ public class ImportTsv extends Configured implements Tool { String actualSeparator = conf.get(SEPARATOR_CONF_KEY); if (actualSeparator != null) { conf.set(SEPARATOR_CONF_KEY, - Base64.encodeBytes(actualSeparator.getBytes())); + Bytes.toString(Base64.getEncoder().encode(actualSeparator.getBytes()))); } // See if a non-default Mapper was set diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java index 5370477548..ba1ba1d0f6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java @@ -18,14 +18,16 @@ */ package org.apache.hadoop.hbase.mapreduce; +import java.util.Base64; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; + import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; -import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.mapreduce.Partitioner; @@ -65,11 +67,11 @@ implements Configurable { private int lastReduces 
= -1; public static void setStartKey(Configuration conf, byte[] startKey) { - conf.set(START_BASE64, Base64.encodeBytes(startKey)); + conf.set(START_BASE64, Bytes.toString(Base64.getEncoder().encode(startKey))); } public static void setEndKey(Configuration conf, byte[] endKey) { - conf.set(END_BASE64, Base64.encodeBytes(endKey)); + conf.set(END_BASE64, Bytes.toString(Base64.getEncoder().encode(endKey))); } @SuppressWarnings("deprecation") @@ -86,7 +88,7 @@ implements Configurable { String base64Key, String deprecatedKey) { String encoded = conf.get(base64Key); if (encoded != null) { - return Base64.decode(encoded); + return Base64.getDecoder().decode(encoded); } String oldStyleVal = conf.get(deprecatedKey); if (oldStyleVal == null) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java index b4210361f5..c803668b2a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java @@ -48,7 +48,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.security.token.TokenUtil; -import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.zookeeper.ZKConfig; import org.apache.hadoop.io.Writable; @@ -594,7 +593,7 @@ public class TableMapReduceUtil { */ public static String convertScanToString(Scan scan) throws IOException { ClientProtos.Scan proto = ProtobufUtil.toScan(scan); - return Base64.encodeBytes(proto.toByteArray()); + return Bytes.toString(Base64.getEncoder().encode(proto.toByteArray())); } /** @@ -605,7 +604,7 @@ public class TableMapReduceUtil { * @throws IOException When reading the scan instance fails. 
*/ public static Scan convertStringToScan(String base64) throws IOException { - byte [] decoded = Base64.decode(base64); + byte [] decoded = Base64.getDecoder().decode(base64); ClientProtos.Scan scan; try { scan = ClientProtos.Scan.parseFrom(decoded); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java index 56b58c4cbc..57c6e520aa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; import java.util.ArrayList; +import java.util.Base64; import java.util.Iterator; import java.util.List; import java.util.Set; @@ -34,7 +35,6 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.security.visibility.InvalidLabelException; -import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Counter; @@ -121,7 +121,7 @@ public class TextSortReducer extends if (separator == null) { separator = ImportTsv.DEFAULT_SEPARATOR; } else { - separator = new String(Base64.decode(separator)); + separator = Bytes.toString(Base64.getDecoder().decode(separator)); } // Should never get 0 as we are setting this to a valid value in job configuration. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java index e61894336e..8eb12144ae 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; import java.util.ArrayList; +import java.util.Base64; import java.util.List; import org.apache.hadoop.conf.Configuration; @@ -33,7 +34,6 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser.BadTsvLineException; import org.apache.hadoop.hbase.security.visibility.CellVisibility; import org.apache.hadoop.hbase.security.visibility.InvalidLabelException; -import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; @@ -124,7 +124,7 @@ extends Mapper if (separator == null) { separator = ImportTsv.DEFAULT_SEPARATOR; } else { - separator = new String(Base64.decode(separator)); + separator = new String(Base64.getDecoder().decode(separator)); } // Should never get 0 as we are setting this to a valid value in job // configuration. 
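
The ImportTsv, SimpleTotalOrderPartitioner, TableMapReduceUtil, TextSortReducer and TsvImporterMapper hunks above all rely on the same pattern: arbitrary bytes (a field separator, a start/end row key, a serialized Scan) are Base64-encoded into a Hadoop Configuration string on the job side and decoded back on the task side. Below is a minimal illustrative sketch of that round trip with java.util.Base64; it is not taken from the HBase code, the configuration key name is made up for the example, and it assumes hadoop-common on the classpath.

import java.nio.charset.StandardCharsets;
import java.util.Base64;

import org.apache.hadoop.conf.Configuration;

public class ConfRoundTripSketch {
  // Hypothetical key, used only for this illustration; the real code goes through
  // constants such as ImportTsv.SEPARATOR_CONF_KEY.
  private static final String SEPARATOR_B64_KEY = "example.importtsv.separator.b64";

  // Job side: store the raw separator bytes as a Base64 string.
  static void setSeparator(Configuration conf, byte[] separator) {
    conf.set(SEPARATOR_B64_KEY, Base64.getEncoder().encodeToString(separator));
  }

  // Task side: recover the original bytes, falling back to a default when unset.
  static byte[] getSeparator(Configuration conf, byte[] defaultSeparator) {
    String encoded = conf.get(SEPARATOR_B64_KEY);
    return encoded == null ? defaultSeparator : Base64.getDecoder().decode(encoded);
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    setSeparator(conf, "|".getBytes(StandardCharsets.UTF_8));
    byte[] separator = getSeparator(conf, "\t".getBytes(StandardCharsets.UTF_8));
    System.out.println(new String(separator, StandardCharsets.UTF_8));  // prints |
  }
}

Base64.getEncoder().encodeToString(...) is equivalent to the Bytes.toString(Base64.getEncoder().encode(...)) form used in the patch, since Base64 output contains only ASCII characters.
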
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java index 7744ea7457..b0258dfe96 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java @@ -17,10 +17,12 @@ */ package org.apache.hadoop.hbase.mapreduce; +import java.io.IOException; +import java.util.Base64; + import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; -import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Counter; @@ -28,8 +30,6 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; -import java.io.IOException; - /** * Write table content out to map output files. */ @@ -63,7 +63,7 @@ extends Mapper /** * Handles initializing this class with objects specific to it (i.e., the parser). - * Common initialization that might be leveraged by a subsclass is done in + * Common initialization that might be leveraged by a subclass is done in * doSetup. Hence a subclass may choose to override this method * and call doSetup as well before handling it's own custom params. * @@ -94,7 +94,7 @@ extends Mapper if (separator == null) { separator = ImportTsv.DEFAULT_SEPARATOR; } else { - separator = new String(Base64.decode(separator)); + separator = new String(Base64.getDecoder().decode(separator)); } skipBadLines = context.getConfiguration().getBoolean(ImportTsv.SKIP_LINES_CONF_KEY, true); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java index 3dfa50aac0..dcfeec2097 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.thrift; import java.io.IOException; import java.security.PrivilegedExceptionAction; +import java.util.Base64; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; @@ -30,7 +31,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.security.SecurityUtil; -import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AuthorizationException; import org.apache.hadoop.security.authorize.ProxyUsers; @@ -186,10 +186,10 @@ public class ThriftHttpServlet extends TServlet { gssContext = manager.createContext(serverCreds); // Get service ticket from the authorization header String serviceTicketBase64 = getAuthHeader(request); - byte[] inToken = Base64.decode(serviceTicketBase64); + byte[] inToken = Base64.getDecoder().decode(serviceTicketBase64); byte[] res = gssContext.acceptSecContext(inToken, 0, inToken.length); if(res != null) { - outToken = Base64.encodeBytes(res).replace("\n", ""); + outToken = Base64.getEncoder().encodeToString(res).replace("\n", ""); } // Authenticate or deny based on its context completion 
      if (!gssContext.isEstablished()) {
--
2.16.1
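
For readers who have not used java.util.Base64: the mechanical substitution made throughout this patch maps the removed helper's encodeBytes(byte[]) to Base64.getEncoder().encode(byte[]) (usually wrapped in Bytes.toString, or replaced by encodeToString) and its decode(String) to Base64.getDecoder().decode(String). A minimal before/after sketch, not part of the patch; the class and method names are illustrative only.

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class Base64MigrationSketch {
  // Old style (org.apache.hadoop.hbase.util.Base64, removed by this change):
  //   String encoded = Base64.encodeBytes(raw);
  //   byte[] decoded = Base64.decode(encoded);

  // New style (java.util.Base64):
  static String encode(byte[] raw) {
    return Base64.getEncoder().encodeToString(raw);
  }

  static byte[] decode(String encoded) {
    return Base64.getDecoder().decode(encoded);
  }

  public static void main(String[] args) {
    String token = encode("row-1".getBytes(StandardCharsets.UTF_8));
    System.out.println(token);                                             // cm93LTE=
    System.out.println(new String(decode(token), StandardCharsets.UTF_8)); // row-1
  }
}
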
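
The Thrift hunks build a SPNEGO "Negotiate" header from a GSSAPI token. The removed helper could insert line breaks into its output (see its DONT_BREAK_LINES option and the .replace("\n", "") calls being removed), whereas the JDK's basic encoder never emits line separators; that is why HttpDoAsClient can drop the strip, and the one ThriftHttpServlet still performs is now a harmless no-op. A small sketch of the header construction, not part of the patch, with a made-up token value:

import java.util.Base64;

public class NegotiateHeaderSketch {
  // Builds the Authorization header value from a raw GSSAPI token.
  // Base64.getEncoder() produces a single line; Base64.getMimeEncoder() is the
  // variant that would reintroduce 76-character line wrapping.
  static String negotiateHeader(byte[] gssToken) {
    return "Negotiate " + Base64.getEncoder().encodeToString(gssToken);
  }

  public static void main(String[] args) {
    byte[] fakeToken = new byte[] { 0x60, 0x28, 0x06, 0x06, 0x2b, 0x06, 0x01, 0x05, 0x05, 0x02 };
    System.out.println(negotiateHeader(fakeToken));
  }
}
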
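
One compatibility point worth keeping in mind with this migration: Base64.getDecoder() throws IllegalArgumentException on any character outside the Base64 alphabet, including the line breaks that the old helper's wrapped output could contain, while Base64.getMimeDecoder() skips such characters. A small demonstration, not part of the patch:

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class LenientDecodingSketch {
  public static void main(String[] args) {
    String wrapped = "cm93\nLTE=";   // valid Base64 for "row-1", but with an embedded newline
    try {
      Base64.getDecoder().decode(wrapped);                 // basic decoder: rejects the '\n'
    } catch (IllegalArgumentException expected) {
      System.out.println("basic decoder refused: " + expected.getMessage());
    }
    byte[] raw = Base64.getMimeDecoder().decode(wrapped);  // MIME decoder: ignores it
    System.out.println(new String(raw, StandardCharsets.UTF_8));  // row-1
  }
}
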