diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index 77aeb20c07..24e0d150a6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -80,6 +80,8 @@ import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import static org.apache.commons.lang3.ArrayUtils.EMPTY_LONG_ARRAY;
+
/**
*
* Read/write operations on region and assignment information store in hbase:meta.
@@ -2110,7 +2112,7 @@ public class MetaTableAccessor {
for (Result result;;) {
result = scanner.next();
if (result == null) {
- return new ReplicationBarrierResult(new long[0], null, Collections.emptyList());
+ return new ReplicationBarrierResult(EMPTY_LONG_ARRAY, null, Collections.emptyList());
}
byte[] regionName = result.getRow();
// TODO: we may look up a region which has already been split or merged so we need to check
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index fb69a2530b..c605a9bb3d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -76,6 +76,7 @@ import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
+import static org.apache.commons.lang3.ArrayUtils.EMPTY_BOOLEAN_ARRAY;
import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
/**
@@ -854,7 +855,7 @@ public class HTable implements Table {
@Override
public boolean[] exists(List<Get> gets) throws IOException {
- if (gets.isEmpty()) return new boolean[]{};
+ if (gets.isEmpty()) return EMPTY_BOOLEAN_ARRAY;
if (gets.size() == 1) return new boolean[]{exists(gets.get(0))};
ArrayList<Get> exists = new ArrayList<>(gets.size());
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
index 7401e4cc38..960f6f909b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
@@ -26,6 +26,8 @@ import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.yetus.audience.InterfaceAudience;
+import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
+
/**
* Abstract base class to help you implement new Filters. Common "ignore" or NOOP type
* methods can go here, helping to reduce boiler plate in an ever-expanding filter
@@ -167,7 +169,7 @@ public abstract class FilterBase extends Filter {
*/
@Override
public byte[] toByteArray() throws IOException {
- return new byte[0];
+ return EMPTY_BYTE_ARRAY;
}
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
index de3edb9d7c..3e2201e6ed 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
@@ -27,6 +27,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
+
/**
* A binary comparator which lexicographically compares against the specified
* byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}.
@@ -36,7 +38,7 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce
public class NullComparator extends ByteArrayComparable {
public NullComparator() {
- super(new byte[0]);
+ super(EMPTY_BYTE_ARRAY);
}
@Override
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
index 108919740c..de7882bf77 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
@@ -30,6 +30,8 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.apache.commons.lang3.ArrayUtils.EMPTY_STRING_ARRAY;
+
/**
* Keeps lists of superusers and super groups loaded from HBase configuration,
* checks if certain user is regarded as superuser.
@@ -68,7 +70,7 @@ public final class Superusers {
LOG.trace("Current user name is {}", currentUser);
superUsers.add(currentUser);
- String[] superUserList = conf.getStrings(SUPERUSER_CONF_KEY, new String[0]);
+ String[] superUserList = conf.getStrings(SUPERUSER_CONF_KEY, EMPTY_STRING_ARRAY);
for (String name : superUserList) {
if (AuthUtil.isGroupPrincipal(name)) {
superGroups.add(AuthUtil.getGroupName(name));
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java
index 733a658ad4..cbdbf75aac 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java
@@ -42,6 +42,8 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache;
+import static org.apache.commons.lang3.ArrayUtils.EMPTY_STRING_ARRAY;
+
/**
* Wrapper to abstract out usage of user and group information in HBase.
*
@@ -330,7 +332,7 @@ public abstract class User {
try {
return this.cache.get(getShortName());
} catch (ExecutionException e) {
- return new String[0];
+ return EMPTY_STRING_ARRAY;
}
}
return ugi.getGroupNames();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
index 17796ee56d..5aaad7b0b8 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
@@ -40,6 +40,8 @@ import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningEx
import org.apache.hbase.thirdparty.com.google.common.util.concurrent.MoreExecutors;
import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+import static org.apache.commons.lang3.ArrayUtils.EMPTY_STRING_ARRAY;
+
/**
* Provide an instance of a user. Allows custom {@link User} creation.
*/
@@ -94,7 +96,7 @@ public class UserProvider extends BaseConfigurable {
Set<String> result = new LinkedHashSet<>(groups.getGroups(ugi));
return result.toArray(new String[result.size()]);
} catch (Exception e) {
- return new String[0];
+ return EMPTY_STRING_ARRAY;
}
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index dcc1266df9..2716d714c1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -2442,7 +2442,7 @@ public class Bytes implements Comparable<Bytes> {
public static boolean isSorted(Collection<byte[]> arrays) {
if (!CollectionUtils.isEmpty(arrays)) {
- byte[] previous = new byte[0];
+ byte[] previous = EMPTY_BYTE_ARRAY;
for (byte[] array : arrays) {
if (Bytes.compareTo(previous, array) > 0) {
return false;
@@ -2595,7 +2595,7 @@ public class Bytes implements Comparable<Bytes> {
*/
public static byte[] multiple(byte[] srcBytes, int multiNum) {
if (multiNum <= 0) {
- return new byte[0];
+ return EMPTY_BYTE_ARRAY;
}
byte[] result = new byte[srcBytes.length * multiNum];
for (int i = 0; i < multiNum; i++) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
index ff2fd45383..d8ffa955e2 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.util;
+import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
import static org.apache.hadoop.hbase.util.Order.ASCENDING;
import static org.apache.hadoop.hbase.util.Order.DESCENDING;
@@ -1094,7 +1095,7 @@ public class OrderedBytes {
if (src.peek() == ord.apply(TERM)) {
// skip empty input buffer.
src.get();
- return new byte[0];
+ return EMPTY_BYTE_ARRAY;
}
final int offset = src.getOffset(), start = src.getPosition();
int end;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
index 61ecb4f832..f836ad07d3 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
@@ -35,6 +35,8 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.primitives.Bytes;
+import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
+
/**
* Generate list of key values which are very useful to test data block encoding
* and compression.
@@ -45,7 +47,7 @@ import org.apache.hbase.thirdparty.com.google.common.primitives.Bytes;
@InterfaceAudience.Private
public class RedundantKVGenerator {
// row settings
- static byte[] DEFAULT_COMMON_PREFIX = new byte[0];
+ static byte[] DEFAULT_COMMON_PREFIX = EMPTY_BYTE_ARRAY;
static int DEFAULT_NUMBER_OF_ROW_PREFIXES = 10;
static int DEFAULT_AVERAGE_PREFIX_LENGTH = 6;
static int DEFAULT_PREFIX_LENGTH_VARIANCE = 3;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
index edd8c7fbc8..9b082c9716 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
@@ -58,6 +58,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSn
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
+import static org.apache.commons.lang3.ArrayUtils.EMPTY_STRING_ARRAY;
+
/**
* Hadoop MR API-agnostic implementation for mapreduce over table snapshots.
*/
@@ -120,7 +122,7 @@ public class TableSnapshotInputFormatImpl {
this.htd = htd;
this.regionInfo = regionInfo;
if (locations == null || locations.isEmpty()) {
- this.locations = new String[0];
+ this.locations = EMPTY_STRING_ARRAY;
} else {
this.locations = locations.toArray(new String[locations.size()]);
}
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
index 1815412721..e2214520b6 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java
@@ -50,6 +50,8 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.StringUtils;
+import static org.apache.commons.lang3.ArrayUtils.EMPTY_STRING_ARRAY;
+
/**
* Simple {@link InputFormat} for {@link org.apache.hadoop.hbase.wal.WAL} files.
*/
@@ -97,7 +99,7 @@ public class WALInputFormat extends InputFormat<WALKey, WALEdit> {
@Override
public String[] getLocations() throws IOException, InterruptedException {
// TODO: Find the data node with the most blocks for this WAL?
- return new String[] {};
+ return EMPTY_STRING_ARRAY;
}
public String getLogFileName() {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java
index 3203f0c898..addaa611f5 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java
@@ -33,6 +33,8 @@ import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import static org.apache.commons.lang3.ArrayUtils.EMPTY_STRING_ARRAY;
+
/**
* Input format that creates a configurable number of map tasks
* each provided with a single row of NullWritables. This can be
@@ -76,7 +78,7 @@ public class NMapInputFormat extends InputFormat<NullWritable, NullWritable> {
@Override
public String[] getLocations() throws IOException, InterruptedException {
- return new String[] {};
+ return EMPTY_STRING_ARRAY;
}
@Override
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
index 21d25e289e..0239a9f87d 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
@@ -94,6 +94,8 @@ import org.apache.hadoop.util.ToolRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.apache.commons.lang3.ArrayUtils.EMPTY_STRING_ARRAY;
+
/**
* Script used evaluating Stargate performance and scalability. Runs a SG
* client that steps through one of a set of hardcoded tests or 'experiments'
@@ -290,7 +292,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
@Override
public String[] getLocations() throws IOException, InterruptedException {
- return new String[0];
+ return EMPTY_STRING_ARRAY;
}
public int getStartRow() {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
index 4b69244383..6e53872ade 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
@@ -30,6 +30,8 @@ import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
+
/**
* Distributed procedure member's Subprocedure. A procedure is sarted on a ProcedureCoordinator
* which communicates with ProcedureMembers who create and start its part of the Procedure. This
@@ -334,7 +336,7 @@ abstract public class Subprocedure implements Callable<Void> {
@Override
public byte[] insideBarrier() throws ForeignException {
- return new byte[0];
+ return EMPTY_BYTE_ARRAY;
}
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
index 57d929da54..326a0140cc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
@@ -32,6 +32,8 @@ import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
+
/**
* ZooKeeper based controller for a procedure member.
*
@@ -271,7 +273,7 @@ public class ZKProcedureMemberRpcs implements ProcedureMemberRpcs {
ZNodePaths.joinZNode(zkController.getReachedBarrierNode(procName), memberName);
// ProtobufUtil.prependPBMagic does not take care of null
if (data == null) {
- data = new byte[0];
+ data = EMPTY_BYTE_ARRAY;
}
try {
ZKUtil.createAndFailSilent(zkController.getWatcher(), joinPath,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
index 566a6b688f..459e6aae94 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java
@@ -62,6 +62,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHF
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest;
+import static org.apache.commons.lang3.ArrayUtils.EMPTY_STRING_ARRAY;
+
/**
* Bulk loads in secure mode.
*
@@ -322,7 +324,7 @@ public class SecureBulkLoadManager {
// this is for testing
if (userProvider.isHadoopSecurityEnabled() &&
"simple".equalsIgnoreCase(conf.get(User.HBASE_SECURITY_CONF_KEY))) {
- return User.createUserForTesting(conf, user.getShortName(), new String[] {});
+ return User.createUserForTesting(conf, user.getShortName(), EMPTY_STRING_ARRAY);
}
return user;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java
index e1d46ef92e..377ef41528 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java
@@ -35,6 +35,8 @@ import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
+
public class SimpleMasterProcedureManager extends MasterProcedureManager {
public static final String SIMPLE_SIGNATURE = "simple_test";
@@ -88,7 +90,7 @@ public class SimpleMasterProcedureManager extends MasterProcedureManager {
for (ServerName sn : serverNames) {
servers.add(sn.toString());
}
- Procedure proc = coordinator.startProcedure(monitor, desc.getInstance(), new byte[0], servers);
+ Procedure proc = coordinator.startProcedure(monitor, desc.getInstance(), EMPTY_BYTE_ARRAY, servers);
if (proc == null) {
String msg = "Failed to submit distributed procedure for '"
+ getProcedureSignature() + "'";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
index 3655352bf5..3bf4059949 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
@@ -69,6 +69,7 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
@@ -156,7 +157,7 @@ public class SecureTestUtil {
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Table acl = connection.getTable(table)) {
AccessControlService.BlockingInterface protocol =
- AccessControlService.newBlockingStub(acl.coprocessorService(new byte[0]));
+ AccessControlService.newBlockingStub(acl.coprocessorService(EMPTY_BYTE_ARRAY));
try {
protocol.checkPermissions(null, request.build());
} catch (ServiceException se) {
@@ -756,7 +757,7 @@ public class SecureTestUtil {
}
try(Connection conn = ConnectionFactory.createConnection(testUtil.getConfiguration());
Table acl = conn.getTable(AccessControlLists.ACL_TABLE_NAME)) {
- BlockingRpcChannel channel = acl.coprocessorService(new byte[0]);
+ BlockingRpcChannel channel = acl.coprocessorService(EMPTY_BYTE_ARRAY);
AccessControlService.BlockingInterface protocol =
AccessControlService.newBlockingStub(channel);
try {
@@ -786,7 +787,7 @@ public class SecureTestUtil {
try(Connection conn = ConnectionFactory.createConnection(testUtil.getConfiguration());
Table acl = conn.getTable(table)) {
AccessControlService.BlockingInterface protocol =
- AccessControlService.newBlockingStub(acl.coprocessorService(new byte[0]));
+ AccessControlService.newBlockingStub(acl.coprocessorService(EMPTY_BYTE_ARRAY));
try {
protocol.checkPermissions(null, request.build());
} catch (ServiceException se) {
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index 57c847c0fe..7be09b70b1 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -77,6 +77,8 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
+import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
+
/**
* Internal HBase utility class for ZooKeeper.
*
@@ -1115,7 +1117,7 @@ public final class ZKUtil {
*/
public static void createAndFailSilent(ZKWatcher zkw,
String znode) throws KeeperException {
- createAndFailSilent(zkw, znode, new byte[0]);
+ createAndFailSilent(zkw, znode, EMPTY_BYTE_ARRAY);
}
/**
@@ -1173,7 +1175,7 @@ public final class ZKUtil {
*/
public static void createWithParents(ZKWatcher zkw, String znode)
throws KeeperException {
- createWithParents(zkw, znode, new byte[0]);
+ createWithParents(zkw, znode, EMPTY_BYTE_ARRAY);
}
/**
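
Background on the pattern, for reviewers: every occurrence of new byte[0],
new String[0], and the like allocates a fresh zero-length array, whereas the
HConstants.EMPTY_BYTE_ARRAY / ArrayUtils.EMPTY_*_ARRAY constants substituted
above are allocated once and shared. A zero-length array has no elements to
mutate, so returning one shared instance from every call site is safe. A
minimal, self-contained sketch of the idiom (the EmptyArrays class below is
hypothetical, for illustration only; the patch itself relies on the existing
HConstants and commons-lang3 ArrayUtils constants):

public final class EmptyArrays {
  // One shared instance per element type; zero-length arrays are
  // effectively immutable, so handing the same object to every
  // caller cannot leak state between them.
  public static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
  public static final String[] EMPTY_STRING_ARRAY = new String[0];

  private EmptyArrays() {
  }

  public static void main(String[] args) {
    // Each literal allocation produces a distinct object...
    System.out.println(new byte[0] == new byte[0]);           // false
    // ...while the constant is always the same instance, so hot
    // paths that return "empty" generate no per-call garbage.
    System.out.println(EMPTY_BYTE_ARRAY == EMPTY_BYTE_ARRAY); // true
  }
}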