From fadc9f1b97adbb083a4f7f1381156eddb403b2e2 Mon Sep 17 00:00:00 2001
From: Mike Drob
Date: Fri, 2 Feb 2018 16:58:56 -0600
Subject: [PATCH] HBASE-19920 Split ProtobufUtil into two classes

ProtobufUtil has static initializers that will attempt to do local
filesystem operations for the dynamic jar classloaders. Split the class to
avoid those operations when they aren't needed.
---
 .../hbase/client/ConnectionImplementation.java     |  5 +-
 .../org/apache/hadoop/hbase/client/HBaseAdmin.java | 13 ++--
 .../hbase/protobuf/ProtobufExceptionUtil.java      | 81 ++++++++++++++++++++++
 .../apache/hadoop/hbase/protobuf/ProtobufUtil.java | 52 +++-----------
 .../shaded/protobuf/ProtobufExceptionUtil.java     | 66 ++++++++++++++++++
 .../hadoop/hbase/shaded/protobuf/ProtobufUtil.java | 48 ++++---------
 .../hadoop/hbase/security/token/TokenUtil.java     |  4 +-
 7 files changed, 181 insertions(+), 88 deletions(-)
 create mode 100644 hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufExceptionUtil.java
 create mode 100644 hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufExceptionUtil.java

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index 8807884fdc..0e927ba449 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -85,6 +85,7 @@ import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
 import org.apache.hbase.thirdparty.com.google.protobuf.BlockingRpcChannel;
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
 import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufExceptionUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
@@ -1011,7 +1012,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
       try {
         response = this.stub.isMasterRunning(null, RequestConverter.buildIsMasterRunningRequest());
       } catch (Exception e) {
-        throw ProtobufUtil.handleRemoteException(e);
+        throw ProtobufExceptionUtil.handleRemoteException(e);
       }
       return response != null? response.getIsMasterRunning(): false;
     }
@@ -1104,7 +1105,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
       try {
         stub.isMasterRunning(null, RequestConverter.buildIsMasterRunningRequest());
       } catch (ServiceException e) {
-        throw ProtobufUtil.handleRemoteException(e);
+        throw ProtobufExceptionUtil.handleRemoteException(e);
       }
     }

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 86859847be..ad0e57ecbc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -106,6 +106,7 @@ import org.slf4j.LoggerFactory;
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufExceptionUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
@@ -1510,7 +1511,7 @@ public class HBaseAdmin implements Admin {
       response = admin.clearRegionBlockCache(controller, request);
       return ProtobufUtil.toCacheEvictionStats(response.getStats());
     } catch (ServiceException se) {
-      throw ProtobufUtil.getRemoteException(se);
+      throw ProtobufExceptionUtil.getRemoteException(se);
     }
   }

@@ -2070,7 +2071,7 @@ public class HBaseAdmin implements Admin {
     try {
       admin.stopServer(controller, request);
     } catch (Exception e) {
-      throw ProtobufUtil.handleRemoteException(e);
+      throw ProtobufExceptionUtil.handleRemoteException(e);
     }
   }

@@ -2110,7 +2111,7 @@ public class HBaseAdmin implements Admin {
       return admin.getRegionLoad(controller, request).getRegionLoadsList().stream()
         .map(RegionMetricsBuilder::toRegionMetrics).collect(Collectors.toList());
     } catch (ServiceException se) {
-      throw ProtobufUtil.getRemoteException(se);
+      throw ProtobufExceptionUtil.getRemoteException(se);
     }
   }

@@ -2397,7 +2398,7 @@ public class HBaseAdmin implements Admin {
     try {
       return admin.rollWALWriter(controller, request);
     } catch (ServiceException e) {
-      throw ProtobufUtil.handleRemoteException(e);
+      throw ProtobufExceptionUtil.handleRemoteException(e);
     }
   }

@@ -2471,7 +2472,7 @@ public class HBaseAdmin implements Admin {
     try {
       response = admin.getRegionInfo(controller, request);
     } catch (ServiceException e) {
-      throw ProtobufUtil.handleRemoteException(e);
+      throw ProtobufExceptionUtil.handleRemoteException(e);
     }
     if (response.getCompactionState() != null) {
       return ProtobufUtil.createCompactionState(response.getCompactionState());
@@ -3134,7 +3135,7 @@ public class HBaseAdmin implements Admin {
             execRegionServerService(connection.getRpcControllerFactory().newController(), csr);
           return CoprocessorRpcUtils.getResponse(result, responsePrototype);
         } catch (ServiceException e) {
-          throw ProtobufUtil.handleRemoteException(e);
+          throw ProtobufExceptionUtil.handleRemoteException(e);
         }
       }
     };
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufExceptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufExceptionUtil.java
new file mode 100644
index 0000000000..6e0af0a2ee
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufExceptionUtil.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.protobuf;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.HBaseIOException;
+import org.apache.hadoop.hbase.util.ExceptionUtil;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class ProtobufExceptionUtil {
+  /**
+   * Return the IOException thrown by the remote server wrapped in
+   * ServiceException as cause.
+   *
+   * @param se ServiceException that wraps IO exception thrown by the server
+   * @return Exception wrapped in ServiceException or
+   *   a new IOException that wraps the unexpected ServiceException.
+   */
+  public static IOException getRemoteException(com.google.protobuf.ServiceException se) {
+    return makeIOExceptionOfException(se);
+  }
+
+  /**
+   * Return the Exception thrown by the remote server wrapped in
+   * ServiceException as cause. RemoteExceptions are left untouched.
+   *
+   * @param e ServiceException that wraps IO exception thrown by the server
+   * @return Exception wrapped in ServiceException.
+   */
+  public static IOException getServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException e) {
+    Throwable t = e.getCause();
+    if (ExceptionUtil.isInterrupt(t)) {
+      return ExceptionUtil.asInterrupt(t);
+    }
+    return t instanceof IOException ? (IOException) t : new HBaseIOException(t);
+  }
+
+  /**
+   * Like {@link #getRemoteException(ServiceException)} but more generic, able to handle more than
+   * just {@link ServiceException}. Prefer this method to
+   * {@link #getRemoteException(ServiceException)} because it tries to
+   * contain direct protobuf references.
+   * @param e the exception to inspect
+   */
+  public static IOException handleRemoteException(Exception e) {
+    return makeIOExceptionOfException(e);
+  }
+
+  private static IOException makeIOExceptionOfException(Exception e) {
+    Throwable t = e;
+    if (e instanceof com.google.protobuf.ServiceException ||
+        e instanceof org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) {
+      t = e.getCause();
+    }
+    if (ExceptionUtil.isInterrupt(t)) {
+      return ExceptionUtil.asInterrupt(t);
+    }
+    if (t instanceof RemoteException) {
+      t = ((RemoteException)t).unwrapRemoteException();
+    }
+    return t instanceof IOException? (IOException)t: new HBaseIOException(t);
+  }
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 417cfa5f6b..cd701109cd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -51,7 +51,6 @@ import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.ExtendedCellBuilder;
 import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.ServerName;
@@ -100,9 +99,7 @@ import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.DynamicClassLoader;
-import org.apache.hadoop.hbase.util.ExceptionUtil;
 import org.apache.hadoop.hbase.util.Methods;
-import org.apache.hadoop.ipc.RemoteException;
 import org.apache.yetus.audience.InterfaceAudience;

 /**
@@ -230,56 +227,27 @@ public final class ProtobufUtil {
   }

   /**
-   * Return the IOException thrown by the remote server wrapped in
-   * ServiceException as cause.
-   *
-   * @param se ServiceException that wraps IO exception thrown by the server
-   * @return Exception wrapped in ServiceException or
-   *   a new IOException that wraps the unexpected ServiceException.
+   * @deprecated use {@link ProtobufExceptionUtil} instead
    */
+  @Deprecated
   public static IOException getRemoteException(ServiceException se) {
-    return makeIOExceptionOfException(se);
+    return ProtobufExceptionUtil.getRemoteException(se);
   }

   /**
-   * Return the Exception thrown by the remote server wrapped in
-   * ServiceException as cause. RemoteException are left untouched.
-   *
-   * @param e ServiceException that wraps IO exception thrown by the server
-   * @return Exception wrapped in ServiceException.
+   * @deprecated use {@link ProtobufExceptionUtil} instead
    */
+  @Deprecated
   public static IOException getServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException e) {
-    Throwable t = e.getCause();
-    if (ExceptionUtil.isInterrupt(t)) {
-      return ExceptionUtil.asInterrupt(t);
-    }
-    return t instanceof IOException ? (IOException) t : new HBaseIOException(t);
+    return ProtobufExceptionUtil.getServiceException(e);
   }

   /**
-   * Like {@link #getRemoteException(ServiceException)} but more generic, able to handle more than
-   * just {@link ServiceException}. Prefer this method to
-   * {@link #getRemoteException(ServiceException)} because trying to
-   * contain direct protobuf references.
-   * @param e
+   * @deprecated use {@link ProtobufExceptionUtil} instead
    */
+  @Deprecated
   public static IOException handleRemoteException(Exception e) {
-    return makeIOExceptionOfException(e);
-  }
-
-  private static IOException makeIOExceptionOfException(Exception e) {
-    Throwable t = e;
-    if (e instanceof ServiceException ||
-        e instanceof org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) {
-      t = e.getCause();
-    }
-    if (ExceptionUtil.isInterrupt(t)) {
-      return ExceptionUtil.asInterrupt(t);
-    }
-    if (t instanceof RemoteException) {
-      t = ((RemoteException)t).unwrapRemoteException();
-    }
-    return t instanceof IOException? (IOException)t: new HBaseIOException(t);
+    return ProtobufExceptionUtil.handleRemoteException(e);
   }

   /**
@@ -1582,7 +1550,7 @@ public final class ProtobufUtil {
       GetServerInfoResponse response = admin.getServerInfo(controller, request);
       return response.getServerInfo();
     } catch (ServiceException se) {
-      throw getRemoteException(se);
+      throw ProtobufUtil.getRemoteException(se);
     }
   }

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufExceptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufExceptionUtil.java
new file mode 100644
index 0000000000..b5afcde6ab
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufExceptionUtil.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shaded.protobuf;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.HBaseIOException;
+import org.apache.hadoop.hbase.util.ExceptionUtil;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class ProtobufExceptionUtil {
+  /**
+   * Return the IOException thrown by the remote server wrapped in
+   * ServiceException as cause.
+   *
+   * @param se ServiceException that wraps IO exception thrown by the server
+   * @return Exception wrapped in ServiceException or
+   *   a new IOException that wraps the unexpected ServiceException.
+   */
+  public static IOException getRemoteException(ServiceException se) {
+    return makeIOExceptionOfException(se);
+  }
+
+  /**
+   * Like {@link #getRemoteException(ServiceException)} but more generic, able to handle more than
+   * just {@link ServiceException}. Prefer this method to
+   * {@link #getRemoteException(ServiceException)} because it tries to
+   * contain direct protobuf references.
+   * @param e the exception to inspect
+   */
+  public static IOException handleRemoteException(Exception e) {
+    return makeIOExceptionOfException(e);
+  }
+
+  private static IOException makeIOExceptionOfException(Exception e) {
+    Throwable t = e;
+    if (e instanceof ServiceException) {
+      t = e.getCause();
+    }
+    if (ExceptionUtil.isInterrupt(t)) {
+      return ExceptionUtil.asInterrupt(t);
+    }
+    if (t instanceof RemoteException) {
+      t = ((RemoteException)t).unwrapRemoteException();
+    }
+    return t instanceof IOException? (IOException)t: new HBaseIOException(t);
+  }
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index bd9ac98fc0..e9ab6f2cb5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -54,7 +54,6 @@ import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.ExtendedCellBuilder;
 import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -101,10 +100,8 @@ import org.apache.hadoop.hbase.security.visibility.CellVisibility;
 import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.DynamicClassLoader;
-import org.apache.hadoop.hbase.util.ExceptionUtil;
 import org.apache.hadoop.hbase.util.Methods;
 import org.apache.hadoop.hbase.util.VersionInfo;
-import org.apache.hadoop.ipc.RemoteException;
 import org.apache.yetus.audience.InterfaceAudience;

 import org.apache.hbase.thirdparty.com.google.common.io.ByteStreams;
@@ -314,40 +311,19 @@ public final class ProtobufUtil {
   }

   /**
-   * Return the IOException thrown by the remote server wrapped in
-   * ServiceException as cause.
-   *
-   * @param se ServiceException that wraps IO exception thrown by the server
-   * @return Exception wrapped in ServiceException or
-   *   a new IOException that wraps the unexpected ServiceException.
+   * @deprecated use {@link ProtobufExceptionUtil#getRemoteException(ServiceException)}
    */
+  @Deprecated
   public static IOException getRemoteException(ServiceException se) {
-    return makeIOExceptionOfException(se);
+    return ProtobufExceptionUtil.getRemoteException(se);
   }

   /**
-   * Like {@link #getRemoteException(ServiceException)} but more generic, able to handle more than
-   * just {@link ServiceException}. Prefer this method to
-   * {@link #getRemoteException(ServiceException)} because trying to
-   * contain direct protobuf references.
-   * @param e
+   * @deprecated use {@link ProtobufExceptionUtil#handleRemoteException(Exception)}
    */
+  @Deprecated
   public static IOException handleRemoteException(Exception e) {
-    return makeIOExceptionOfException(e);
-  }
-
-  private static IOException makeIOExceptionOfException(Exception e) {
-    Throwable t = e;
-    if (e instanceof ServiceException) {
-      t = e.getCause();
-    }
-    if (ExceptionUtil.isInterrupt(t)) {
-      return ExceptionUtil.asInterrupt(t);
-    }
-    if (t instanceof RemoteException) {
-      t = ((RemoteException)t).unwrapRemoteException();
-    }
-    return t instanceof IOException? (IOException)t: new HBaseIOException(t);
+    return ProtobufExceptionUtil.handleRemoteException(e);
   }

   /**
@@ -1745,7 +1721,7 @@ public final class ProtobufUtil {
         admin.getRegionInfo(controller, request);
       return toRegionInfo(response.getRegionInfo());
     } catch (ServiceException se) {
-      throw getRemoteException(se);
+      throw ProtobufUtil.getRemoteException(se);
     }
   }

@@ -1775,7 +1751,7 @@ public final class ProtobufUtil {
     try {
       admin.closeRegion(controller, closeRegionRequest);
     } catch (ServiceException se) {
-      throw getRemoteException(se);
+      throw ProtobufUtil.getRemoteException(se);
     }
   }

@@ -1795,8 +1771,8 @@ public final class ProtobufUtil {
         RequestConverter.buildWarmupRegionRequest(regionInfo);

      admin.warmupRegion(controller, warmupRegionRequest);
-    } catch (ServiceException e) {
-      throw getRemoteException(e);
+    } catch (ServiceException se) {
+      throw ProtobufUtil.getRemoteException(se);
     }
   }

@@ -1844,7 +1820,7 @@ public final class ProtobufUtil {
     try {
       response = admin.getOnlineRegion(controller, request);
     } catch (ServiceException se) {
-      throw getRemoteException(se);
+      throw ProtobufUtil.getRemoteException(se);
     }
     return getRegionInfos(response);
   }
@@ -1876,7 +1852,7 @@ public final class ProtobufUtil {
       GetServerInfoResponse response = admin.getServerInfo(controller, request);
       return response.getServerInfo();
     } catch (ServiceException se) {
-      throw getRemoteException(se);
+      throw ProtobufUtil.getRemoteException(se);
     }
   }

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
index 5461760137..86faa6317a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
 import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufExceptionUtil;
 import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
@@ -72,7 +72,7 @@ public class TokenUtil {

       return toToken(response.getToken());
     } catch (ServiceException se) {
-      throw ProtobufUtil.handleRemoteException(se);
+      throw ProtobufExceptionUtil.handleRemoteException(se);
     } finally {
       if (meta != null) {
         meta.close();
-- 
2.15.1
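
For illustration only (not part of the patch): a minimal sketch of the caller-side effect
of this split, assuming hbase-client and the hbase-thirdparty protobuf classes are on the
classpath; the example class name is made up. Callers that only need to unwrap an RPC
exception can now go through ProtobufExceptionUtil without ever referencing ProtobufUtil,
so its static initializers (and their DynamicClassLoader filesystem setup) are never run.

  import java.io.IOException;

  import org.apache.hadoop.hbase.shaded.protobuf.ProtobufExceptionUtil;
  import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;

  public class ProtobufExceptionUtilSketch {
    public static void main(String[] args) {
      // Simulate an RPC stub failure: the server-side IOException arrives
      // wrapped in a (shaded) protobuf ServiceException.
      ServiceException se = new ServiceException(new IOException("region moved"));

      // Unwrap it with the new helper; ProtobufUtil is never loaded.
      IOException ioe = ProtobufExceptionUtil.getRemoteException(se);
      System.out.println(ioe.getMessage());  // prints "region moved"
    }
  }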