From 94d88798339cce9a558ae4edbca4f5892e446c2a Mon Sep 17 00:00:00 2001 From: Mike Drob Date: Fri, 2 Feb 2018 16:58:56 -0600 Subject: [PATCH] HBASE-19920 Split ProtobufUtil into two classes PU has static initializers that will attempt to do local fs operations for the dynamic jar classloaders. Split the class to avoid those operations when they aren't needed. --- .../hbase/client/ConnectionImplementation.java | 6 +- .../org/apache/hadoop/hbase/client/HBaseAdmin.java | 13 ++-- .../hbase/protobuf/ProtobufExceptionUtil.java | 86 ++++++++++++++++++++++ .../apache/hadoop/hbase/protobuf/ProtobufUtil.java | 52 +++---------- .../shaded/protobuf/ProtobufExceptionUtil.java | 71 ++++++++++++++++++ .../hadoop/hbase/shaded/protobuf/ProtobufUtil.java | 48 +++--------- .../apache/hadoop/hbase/util/ClassLoaderBase.java | 5 +- .../hadoop/hbase/util/DynamicClassLoader.java | 11 ++- .../hadoop/hbase/security/token/TokenUtil.java | 16 +++- .../hadoop/hbase/security/token/TestTokenUtil.java | 75 +++++++++++++++++++ 10 files changed, 290 insertions(+), 93 deletions(-) create mode 100644 hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufExceptionUtil.java create mode 100644 hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufExceptionUtil.java create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenUtil.java diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java index 8807884fdc..6fc449dcc9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java @@ -85,7 +85,7 @@ import org.apache.hbase.thirdparty.com.google.common.base.Throwables; import org.apache.hbase.thirdparty.com.google.protobuf.BlockingRpcChannel; import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; -import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufExceptionUtil; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos; @@ -1011,7 +1011,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable { try { response = this.stub.isMasterRunning(null, RequestConverter.buildIsMasterRunningRequest()); } catch (Exception e) { - throw ProtobufUtil.handleRemoteException(e); + throw ProtobufExceptionUtil.handleRemoteException(e); } return response != null? 
response.getIsMasterRunning(): false; } @@ -1104,7 +1104,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable { try { stub.isMasterRunning(null, RequestConverter.buildIsMasterRunningRequest()); } catch (ServiceException e) { - throw ProtobufUtil.handleRemoteException(e); + throw ProtobufExceptionUtil.handleRemoteException(e); } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index 86859847be..3c70ef47e6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -105,6 +105,7 @@ import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufExceptionUtil; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos; @@ -1510,7 +1511,7 @@ public class HBaseAdmin implements Admin { response = admin.clearRegionBlockCache(controller, request); return ProtobufUtil.toCacheEvictionStats(response.getStats()); } catch (ServiceException se) { - throw ProtobufUtil.getRemoteException(se); + throw ProtobufExceptionUtil.getRemoteException(se); } } @@ -2070,7 +2071,7 @@ public class HBaseAdmin implements Admin { try { admin.stopServer(controller, request); } catch (Exception e) { - throw ProtobufUtil.handleRemoteException(e); + throw ProtobufExceptionUtil.handleRemoteException(e); } } @@ -2110,7 +2111,7 @@ public class HBaseAdmin implements Admin { return admin.getRegionLoad(controller, request).getRegionLoadsList().stream() .map(RegionMetricsBuilder::toRegionMetrics).collect(Collectors.toList()); } catch (ServiceException se) { - throw ProtobufUtil.getRemoteException(se); + throw ProtobufExceptionUtil.getRemoteException(se); } } @@ -2397,7 +2398,7 @@ public class HBaseAdmin implements Admin { try { return admin.rollWALWriter(controller, request); } catch (ServiceException e) { - throw ProtobufUtil.handleRemoteException(e); + throw ProtobufExceptionUtil.handleRemoteException(e); } } @@ -2471,7 +2472,7 @@ public class HBaseAdmin implements Admin { try { response = admin.getRegionInfo(controller, request); } catch (ServiceException e) { - throw ProtobufUtil.handleRemoteException(e); + throw ProtobufExceptionUtil.handleRemoteException(e); } if (response.getCompactionState() != null) { return ProtobufUtil.createCompactionState(response.getCompactionState()); @@ -3134,7 +3135,7 @@ public class HBaseAdmin implements Admin { execRegionServerService(connection.getRpcControllerFactory().newController(), csr); return CoprocessorRpcUtils.getResponse(result, responsePrototype); } catch (ServiceException e) { - throw ProtobufUtil.handleRemoteException(e); + throw ProtobufExceptionUtil.handleRemoteException(e); } } }; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufExceptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufExceptionUtil.java new file mode 100644 index 0000000000..a2f97118f7 --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufExceptionUtil.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor 
license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.protobuf; + +import java.io.IOException; + +import org.apache.hadoop.hbase.HBaseIOException; +import org.apache.hadoop.hbase.util.ExceptionUtil; +import org.apache.hadoop.ipc.RemoteException; +import org.apache.yetus.audience.InterfaceAudience; + +@InterfaceAudience.Private +public final class ProtobufExceptionUtil { + private ProtobufExceptionUtil() { + // utility constructor + } + + /** + * Return the IOException thrown by the remote server wrapped in + * ServiceException as cause. + * + * @param se {@link com.google.protobuf.ServiceException} that wraps IO exception thrown by the server + * @return Exception wrapped in ServiceException or + * a new IOException that wraps the unexpected ServiceException. + */ + public static IOException getRemoteException(com.google.protobuf.ServiceException se) { + return makeIOExceptionOfException(se); + } + + /** + * Return the Exception thrown by the remote server wrapped in + * ServiceException as cause. RemoteException are left untouched. + * + * @param e {@link org.apache.hbase.thirdparty.com.google.protobuf.ServiceException} + * that wraps IO exception thrown by the server + * @return IOException wrapped in ServiceException. + */ + public static IOException getServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException e) { + Throwable t = e.getCause(); + if (ExceptionUtil.isInterrupt(t)) { + return ExceptionUtil.asInterrupt(t); + } + return t instanceof IOException ? (IOException) t : new HBaseIOException(t); + } + + /** + * Like {@link #getRemoteException(ServiceException)} but more generic, able to handle more than + * just ServiceException. Prefer this method to + * {@link #getRemoteException(ServiceException)} because trying to + * contain direct protobuf references. + * @param e the exception to inspect + */ + public static IOException handleRemoteException(Exception e) { + return makeIOExceptionOfException(e); + } + + private static IOException makeIOExceptionOfException(Exception e) { + Throwable t = e; + if (e instanceof com.google.protobuf.ServiceException || + e instanceof org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) { + t = e.getCause(); + } + if (ExceptionUtil.isInterrupt(t)) { + return ExceptionUtil.asInterrupt(t); + } + if (t instanceof RemoteException) { + t = ((RemoteException)t).unwrapRemoteException(); + } + return t instanceof IOException? 
(IOException)t: new HBaseIOException(t); + } +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 29ff2a2a61..c8067c3a5e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -50,7 +50,6 @@ import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.ServerName; @@ -99,9 +98,7 @@ import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.DynamicClassLoader; -import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.hbase.util.Methods; -import org.apache.hadoop.ipc.RemoteException; import org.apache.yetus.audience.InterfaceAudience; /** @@ -229,56 +226,27 @@ public final class ProtobufUtil { } /** - * Return the IOException thrown by the remote server wrapped in - * ServiceException as cause. - * - * @param se ServiceException that wraps IO exception thrown by the server - * @return Exception wrapped in ServiceException or - * a new IOException that wraps the unexpected ServiceException. + * @deprecated use {@link ProtobufExceptionUtil} instead */ + @Deprecated public static IOException getRemoteException(ServiceException se) { - return makeIOExceptionOfException(se); + return ProtobufExceptionUtil.getRemoteException(se); } /** - * Return the Exception thrown by the remote server wrapped in - * ServiceException as cause. RemoteException are left untouched. - * - * @param e ServiceException that wraps IO exception thrown by the server - * @return Exception wrapped in ServiceException. + * @deprecated use {@link ProtobufExceptionUtil} instead */ + @Deprecated public static IOException getServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException e) { - Throwable t = e.getCause(); - if (ExceptionUtil.isInterrupt(t)) { - return ExceptionUtil.asInterrupt(t); - } - return t instanceof IOException ? (IOException) t : new HBaseIOException(t); + return ProtobufExceptionUtil.getServiceException(e); } /** - * Like {@link #getRemoteException(ServiceException)} but more generic, able to handle more than - * just {@link ServiceException}. Prefer this method to - * {@link #getRemoteException(ServiceException)} because trying to - * contain direct protobuf references. - * @param e + * @deprecated use {@link ProtobufExceptionUtil} instead */ + @Deprecated public static IOException handleRemoteException(Exception e) { - return makeIOExceptionOfException(e); - } - - private static IOException makeIOExceptionOfException(Exception e) { - Throwable t = e; - if (e instanceof ServiceException || - e instanceof org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) { - t = e.getCause(); - } - if (ExceptionUtil.isInterrupt(t)) { - return ExceptionUtil.asInterrupt(t); - } - if (t instanceof RemoteException) { - t = ((RemoteException)t).unwrapRemoteException(); - } - return t instanceof IOException? 
(IOException)t: new HBaseIOException(t); + return ProtobufExceptionUtil.handleRemoteException(e); } /** @@ -1581,7 +1549,7 @@ public final class ProtobufUtil { GetServerInfoResponse response = admin.getServerInfo(controller, request); return response.getServerInfo(); } catch (ServiceException se) { - throw getRemoteException(se); + throw ProtobufUtil.getRemoteException(se); } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufExceptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufExceptionUtil.java new file mode 100644 index 0000000000..2bde94234e --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufExceptionUtil.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shaded.protobuf; + +import java.io.IOException; + +import org.apache.hadoop.hbase.HBaseIOException; +import org.apache.hadoop.hbase.util.ExceptionUtil; +import org.apache.hadoop.ipc.RemoteException; +import org.apache.yetus.audience.InterfaceAudience; + +import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; + +@InterfaceAudience.Private +public final class ProtobufExceptionUtil { + private ProtobufExceptionUtil() { + // utility constructor + } + + /** + * Return the IOException thrown by the remote server wrapped in + * ServiceException as cause. + * + * @param se ServiceException that wraps IO exception thrown by the server + * @return Exception wrapped in ServiceException or + * a new IOException that wraps the unexpected ServiceException. + */ + public static IOException getRemoteException(ServiceException se) { + return makeIOExceptionOfException(se); + } + + /** + * Like {@link #getRemoteException(ServiceException)} but more generic, able to handle more than + * just {@link ServiceException}. Prefer this method to + * {@link #getRemoteException(ServiceException)} because trying to + * contain direct protobuf references. + * @param e the exception to inspect + */ + public static IOException handleRemoteException(Exception e) { + return makeIOExceptionOfException(e); + } + + private static IOException makeIOExceptionOfException(Exception e) { + Throwable t = e; + if (e instanceof ServiceException) { + t = e.getCause(); + } + if (ExceptionUtil.isInterrupt(t)) { + return ExceptionUtil.asInterrupt(t); + } + if (t instanceof RemoteException) { + t = ((RemoteException)t).unwrapRemoteException(); + } + return t instanceof IOException? 
(IOException)t: new HBaseIOException(t); + } +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java index 5bb3b4ba04..e28911b177 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java @@ -53,7 +53,6 @@ import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.NamespaceDescriptor; @@ -100,10 +99,8 @@ import org.apache.hadoop.hbase.security.visibility.CellVisibility; import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.DynamicClassLoader; -import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.hbase.util.Methods; import org.apache.hadoop.hbase.util.VersionInfo; -import org.apache.hadoop.ipc.RemoteException; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hbase.thirdparty.com.google.common.io.ByteStreams; @@ -313,40 +310,19 @@ public final class ProtobufUtil { } /** - * Return the IOException thrown by the remote server wrapped in - * ServiceException as cause. - * - * @param se ServiceException that wraps IO exception thrown by the server - * @return Exception wrapped in ServiceException or - * a new IOException that wraps the unexpected ServiceException. + * @deprecated use {@link ProtobufExceptionUtil#getRemoteException(ServiceException)} */ + @Deprecated public static IOException getRemoteException(ServiceException se) { - return makeIOExceptionOfException(se); + return ProtobufExceptionUtil.getRemoteException(se); } /** - * Like {@link #getRemoteException(ServiceException)} but more generic, able to handle more than - * just {@link ServiceException}. Prefer this method to - * {@link #getRemoteException(ServiceException)} because trying to - * contain direct protobuf references. - * @param e + * @deprecated use {@link ProtobufExceptionUtil#handleRemoteException(Exception)} */ + @Deprecated public static IOException handleRemoteException(Exception e) { - return makeIOExceptionOfException(e); - } - - private static IOException makeIOExceptionOfException(Exception e) { - Throwable t = e; - if (e instanceof ServiceException) { - t = e.getCause(); - } - if (ExceptionUtil.isInterrupt(t)) { - return ExceptionUtil.asInterrupt(t); - } - if (t instanceof RemoteException) { - t = ((RemoteException)t).unwrapRemoteException(); - } - return t instanceof IOException? 
(IOException)t: new HBaseIOException(t); + return ProtobufExceptionUtil.handleRemoteException(e); } /** @@ -1744,7 +1720,7 @@ public final class ProtobufUtil { admin.getRegionInfo(controller, request); return toRegionInfo(response.getRegionInfo()); } catch (ServiceException se) { - throw getRemoteException(se); + throw ProtobufUtil.getRemoteException(se); } } @@ -1774,7 +1750,7 @@ public final class ProtobufUtil { try { admin.closeRegion(controller, closeRegionRequest); } catch (ServiceException se) { - throw getRemoteException(se); + throw ProtobufUtil.getRemoteException(se); } } @@ -1794,8 +1770,8 @@ public final class ProtobufUtil { RequestConverter.buildWarmupRegionRequest(regionInfo); admin.warmupRegion(controller, warmupRegionRequest); - } catch (ServiceException e) { - throw getRemoteException(e); + } catch (ServiceException se) { + throw ProtobufUtil.getRemoteException(se); } } @@ -1843,7 +1819,7 @@ public final class ProtobufUtil { try { response = admin.getOnlineRegion(controller, request); } catch (ServiceException se) { - throw getRemoteException(se); + throw ProtobufUtil.getRemoteException(se); } return getRegionInfos(response); } @@ -1875,7 +1851,7 @@ public final class ProtobufUtil { GetServerInfoResponse response = admin.getServerInfo(controller, request); return response.getServerInfo(); } catch (ServiceException se) { - throw getRemoteException(se); + throw ProtobufUtil.getRemoteException(se); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java index 7c566334af..ab525aba84 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java @@ -22,6 +22,7 @@ import java.net.URLClassLoader; import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; /** @@ -31,7 +32,9 @@ import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; public class ClassLoaderBase extends URLClassLoader { protected static final String DEFAULT_LOCAL_DIR = "/tmp/hbase-local-dir"; - protected static final String LOCAL_DIR_KEY = "hbase.local.dir"; + + @VisibleForTesting + public static final String LOCAL_DIR_KEY = "hbase.local.dir"; /** * Parent class loader. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java index 28fce21b1c..da62857810 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java @@ -31,6 +31,8 @@ import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; + /** * This is a class loader that can load classes dynamically from new * jar files under a configured folder. 
The paths to the jar files are @@ -61,12 +63,15 @@ public class DynamicClassLoader extends ClassLoaderBase { LoggerFactory.getLogger(DynamicClassLoader.class); // Dynamic jars are put under ${hbase.local.dir}/jars/ - private static final String DYNAMIC_JARS_DIR = File.separator + @VisibleForTesting + public static final String DYNAMIC_JARS_DIR = File.separator + "jars" + File.separator; - private static final String DYNAMIC_JARS_DIR_KEY = "hbase.dynamic.jars.dir"; + @VisibleForTesting + public static final String DYNAMIC_JARS_DIR_KEY = "hbase.dynamic.jars.dir"; - private static final String DYNAMIC_JARS_OPTIONAL_CONF_KEY = "hbase.use.dynamic.jars"; + @VisibleForTesting + public static final String DYNAMIC_JARS_OPTIONAL_CONF_KEY = "hbase.use.dynamic.jars"; private static final boolean DYNAMIC_JARS_OPTIONAL_DEFAULT = true; private boolean useDynamicJars; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java index 5461760137..c0ab834cdf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos; import org.apache.hadoop.hbase.security.User; -import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufExceptionUtil; import org.apache.hadoop.hbase.zookeeper.ZKClusterId; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobConf; @@ -45,6 +45,8 @@ import org.apache.zookeeper.KeeperException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; + /** * Utility methods for obtaining authentication tokens. */ @@ -53,6 +55,9 @@ public class TokenUtil { // This class is referenced indirectly by User out in common; instances are created by reflection private static final Logger LOG = LoggerFactory.getLogger(TokenUtil.class); + @VisibleForTesting + protected static boolean shouldInjectFault = false; + /** * Obtain and return an authentication token for the current user. * @param conn The HBase cluster connection @@ -63,6 +68,8 @@ public class TokenUtil { Connection conn) throws IOException { Table meta = null; try { + injectFault(); + meta = conn.getTable(TableName.META_TABLE_NAME); CoprocessorRpcChannel rpcChannel = meta.coprocessorService(HConstants.EMPTY_START_ROW); AuthenticationProtos.AuthenticationService.BlockingInterface service = @@ -72,7 +79,7 @@ public class TokenUtil { return toToken(response.getToken()); } catch (ServiceException se) { - throw ProtobufUtil.handleRemoteException(se); + throw ProtobufExceptionUtil.handleRemoteException(se); } finally { if (meta != null) { meta.close(); @@ -80,6 +87,11 @@ public class TokenUtil { } } + private static void injectFault() throws ServiceException { + if (shouldInjectFault) { + throw new ServiceException("injected"); + } + } /** * Converts a Token instance (with embedded identifier) to the protobuf representation. 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenUtil.java new file mode 100644 index 0000000000..84718cb5b6 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenUtil.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.security.token; + +import static org.apache.hadoop.hbase.util.ClassLoaderBase.LOCAL_DIR_KEY; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.fail; + +import java.io.File; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category({ SmallTests.class }) +public class TestTokenUtil { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestTokenUtil.class); + + private File localDir; + + @Before + public void setUp() throws Exception { + // Must read from default configuration because that's what DynamicClassLoader will use + Configuration conf = HBaseConfiguration.create(); + + String localDirPath = conf.get(LOCAL_DIR_KEY); + localDir = new File(localDirPath, "jars"); + + if (localDir.exists() && !localDir.delete()) { + fail("Could not delete dir to setup test"); + } + } + + @Test + public void testObtainToken() throws Exception { + try { + TokenUtil.shouldInjectFault = true; + TokenUtil.obtainToken(null); + } catch (Exception e) { + // don't care about results, since we are checking classloading effects + } + + // See HBASE-19920: TokenUtil would load ProtobufUtil which loads DynamicClassLoader + // which would touch local FS when there was no reason to. + assertFalse("An extra dir was created during test", localDir.exists()); + } + + @After + public void teardown() { + TokenUtil.shouldInjectFault = false; + } +} -- 2.15.1
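
Note (not part of the patch): a minimal caller-side sketch of the intended migration. RPC error paths translate ServiceExceptions through the new ProtobufExceptionUtil so that handling a failure never forces class initialization of ProtobufUtil, whose static initializer builds a DynamicClassLoader and can touch the local filesystem under ${hbase.local.dir}/jars. The RpcCall interface and doRpc() helper below are hypothetical stand-ins for any blocking stub call that throws the shaded ServiceException; only the ProtobufExceptionUtil class comes from this patch.

import java.io.IOException;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufExceptionUtil;
import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;

public final class ErrorPathSketch {
  /** Hypothetical stand-in for a blocking stub call. */
  interface RpcCall<T> {
    T call() throws ServiceException;
  }

  static <T> T doRpc(RpcCall<T> call) throws IOException {
    try {
      return call.call();
    } catch (ServiceException e) {
      // Unwraps the ServiceException (and any RemoteException inside it) into an
      // IOException, as the deprecated ProtobufUtil helpers do, but without loading
      // ProtobufUtil and its dynamic-classloader machinery on the error path.
      throw ProtobufExceptionUtil.handleRemoteException(e);
    }
  }

  private ErrorPathSketch() {
  }
}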
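
Also not part of the patch: a sketch, under the assumption that the constants this change promotes to public/@VisibleForTesting (LOCAL_DIR_KEY, DYNAMIC_JARS_DIR_KEY, DYNAMIC_JARS_OPTIONAL_CONF_KEY) are used as shown, of how a test could keep the dynamic-jar classloader away from the default /tmp/hbase-local-dir or disable it outright. The class name and scratchDir parameter are illustrative only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.ClassLoaderBase;
import org.apache.hadoop.hbase.util.DynamicClassLoader;

public final class DynamicJarsConfSketch {
  static Configuration isolatedConf(String scratchDir) {
    Configuration conf = HBaseConfiguration.create();
    // Local cache root; dynamic jars are cached under ${hbase.local.dir}/jars.
    conf.set(ClassLoaderBase.LOCAL_DIR_KEY, scratchDir);
    // Directory scanned for new jars; point it somewhere under the scratch dir.
    conf.set(DynamicClassLoader.DYNAMIC_JARS_DIR_KEY, scratchDir + "/dynamic-jars");
    // Or opt out of dynamic jar loading entirely (the default is enabled).
    conf.setBoolean(DynamicClassLoader.DYNAMIC_JARS_OPTIONAL_CONF_KEY, false);
    return conf;
  }

  private DynamicJarsConfSketch() {
  }
}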