diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
index 760f630..7fc3099 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
@@ -196,6 +196,17 @@ public final class ResponseConverter {
     return parameterBuilder.build();
   }
 
+  /**
+   * @param t The exception to convert.
+   * @param trace Stringified form of the exception (e.g. a truncated stack trace).
+   * @return NameBytesPair of the exception name to the stringified version of the exception.
+   */
+  public static NameBytesPair buildException(final Throwable t, String trace) {
+    NameBytesPair.Builder parameterBuilder = NameBytesPair.newBuilder();
+    parameterBuilder.setName(t.getClass().getName());
+    parameterBuilder.setValue(ByteString.copyFromUtf8(trace));
+    return parameterBuilder.build();
+  }
 
   // End utilities for Client
   // Start utilities for Admin
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 27fef8d..c994358 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -18,11 +18,11 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import com.google.common.annotations.VisibleForTesting;
-
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InterruptedIOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
 import java.net.BindException;
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
@@ -30,6 +30,7 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.IdentityHashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -96,6 +97,27 @@ import org.apache.hadoop.hbase.ipc.RpcServerInterface;
 import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
 import org.apache.hadoop.hbase.master.MasterRpcServices;
+import org.apache.hadoop.hbase.quotas.OperationQuota;
+import org.apache.hadoop.hbase.quotas.RegionServerQuotaManager;
+import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;
+import org.apache.hadoop.hbase.regionserver.Leases.Lease;
+import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;
+import org.apache.hadoop.hbase.regionserver.Region.Operation;
+import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
+import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;
+import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;
+import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionForSplitOrMergeRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionForSplitOrMergeResponse;
@@ -107,10 +129,10 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegion
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest;
@@ -136,9 +158,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavor
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
@@ -178,18 +197,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescr
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor;
-import org.apache.hadoop.hbase.quotas.OperationQuota;
-import org.apache.hadoop.hbase.quotas.RegionServerQuotaManager;
-import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;
-import org.apache.hadoop.hbase.regionserver.Leases.Lease;
-import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;
-import org.apache.hadoop.hbase.regionserver.Region.Operation;
-import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
-import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;
-import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;
-import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.DNS;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -202,12 +209,7 @@ import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
 import org.apache.zookeeper.KeeperException;
 
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
+import com.google.common.annotations.VisibleForTesting;
 
 /**
  * Implements the regionserver RPC services.
@@ -695,6 +697,25 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
   }
 
   /**
+   * Make a string representation of the exception.
+   * @param e The exception to stringify
+   * @param numLines limit on number of lines for stack trace
+   * @return A string with exception name and call stack.
+   */
+  public static String stringifyException(Throwable e, int numLines) {
+    StringWriter stm = new StringWriter();
+    PrintWriter wrt = new PrintWriter(stm);
+    StackTraceElement[] elements = e.getStackTrace();
+    int limit = Math.min(numLines, elements.length);
+    wrt.println(e.toString());
+    for (int i = 0; i < limit; i++) {
+      wrt.println(elements[i]);
+    }
+    wrt.close();
+    return stm.toString();
+  }
+
+  /**
    * Run through the regionMutation rm and per Mutation, do the work, and then when
    * done, add an instance of a {@link ResultOrException} that corresponds to each Mutation.
    * @param region
@@ -719,8 +740,11 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
     long maxQuotaResultSize = Math.min(maxScannerResultSize, quota.getReadAvailable());
     IOException sizeIOE = null;
     Object lastBlock = null;
+    ClientProtos.ResultOrException.Builder resultOrExceptionBuilder = ResultOrException.newBuilder();
+    boolean hasResultOrException = false;
+    final int numLines = 10;
     for (ClientProtos.Action action : actions.getActionList()) {
-      ClientProtos.ResultOrException.Builder resultOrExceptionBuilder = null;
+      hasResultOrException = false;
       try {
         Result r = null;
 
@@ -749,8 +773,10 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
           // use it for the response.
          //
           // This will create a copy in the builder.
-          resultOrExceptionBuilder = ResultOrException.newBuilder().
-            setException(ResponseConverter.buildException(sizeIOE));
+          hasResultOrException = true;
+          resultOrExceptionBuilder.clear();
+          resultOrExceptionBuilder.setException(ResponseConverter.buildException(
+              sizeIOE, stringifyException(sizeIOE, numLines)));
           resultOrExceptionBuilder.setIndex(action.getIndex());
           builder.addResultOrException(resultOrExceptionBuilder.build());
           if (cellScanner != null) {
@@ -774,7 +800,8 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
             }
           }
         } else if (action.hasServiceCall()) {
-          resultOrExceptionBuilder = ResultOrException.newBuilder();
+          hasResultOrException = true;
+          resultOrExceptionBuilder.clear();
           try {
             com.google.protobuf.Message result =
               execServiceOnRegion(region, action.getServiceCall());
@@ -788,7 +815,8 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
                 .setValue(UnsafeByteOperations.unsafeWrap(result.toByteArray()))));
           } catch (IOException ioe) {
             rpcServer.getMetrics().exception(ioe);
-            resultOrExceptionBuilder.setException(ResponseConverter.buildException(ioe));
+            resultOrExceptionBuilder.setException(ResponseConverter.buildException(
+                ioe, stringifyException(ioe, numLines)));
           }
         } else if (action.hasMutation()) {
           MutationType type = action.getMutation().getMutateType();
@@ -832,8 +860,9 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
             pbResult = ProtobufUtil.toResult(r);
           }
           lastBlock = addSize(context, r, lastBlock);
-          resultOrExceptionBuilder =
-            ClientProtos.ResultOrException.newBuilder().setResult(pbResult);
+          hasResultOrException = true;
+          resultOrExceptionBuilder.clear();
+          resultOrExceptionBuilder.setResult(pbResult);
         }
         // Could get to here and there was no result and no exception. Presumes we added
         // a Put or Delete to the collecting Mutations List for adding later. In this
@@ -841,10 +870,12 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
         // down in the doBatchOp method call rather than up here.
       } catch (IOException ie) {
         rpcServer.getMetrics().exception(ie);
-        resultOrExceptionBuilder = ResultOrException.newBuilder().
-          setException(ResponseConverter.buildException(ie));
+        hasResultOrException = true;
+        resultOrExceptionBuilder.clear();
+        resultOrExceptionBuilder.setException(ResponseConverter.buildException(
+            ie, stringifyException(ie, numLines)));
       }
-      if (resultOrExceptionBuilder != null) {
+      if (hasResultOrException) {
         // Propagate index.
         resultOrExceptionBuilder.setIndex(action.getIndex());
         builder.addResultOrException(resultOrExceptionBuilder.build());
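Reviewer note (not part of the patch): the two pieces above work together. ResponseConverter.buildException(Throwable, String) carries an arbitrary trace string in the NameBytesPair value, and RSRpcServices.stringifyException() produces that string while capping it at numLines stack frames (numLines = 10 per action in doNonAtomicRegionMutation), so a failing multi-action request cannot balloon the response with full stack traces. The surrounding change also reuses a single ResultOrException.Builder, guarded by a hasResultOrException flag and clear() calls, instead of allocating a new builder per action. The snippet below is a minimal, self-contained sketch of the same truncation technique for readers who want to try it outside HBase; the class name and main() driver are illustrative only and do not appear in the patch.

import java.io.PrintWriter;
import java.io.StringWriter;

public class StackTraceTruncationSketch {

  // Mirrors the stringifyException() helper added above: the exception's toString()
  // followed by at most numLines stack frames, rendered through a StringWriter.
  public static String stringifyException(Throwable e, int numLines) {
    StringWriter stm = new StringWriter();
    PrintWriter wrt = new PrintWriter(stm);
    StackTraceElement[] elements = e.getStackTrace();
    // Never read past the end of the trace, even if numLines is larger.
    int limit = Math.min(numLines, elements.length);
    wrt.println(e.toString());
    for (int i = 0; i < limit; i++) {
      wrt.println(elements[i]);
    }
    wrt.close();
    return stm.toString();
  }

  public static void main(String[] args) {
    try {
      throw new IllegalStateException("example failure");
    } catch (IllegalStateException e) {
      // With numLines = 10, as in doNonAtomicRegionMutation(), the output is the
      // exception's message line plus up to ten frames of the call stack.
      System.out.println(stringifyException(e, 10));
    }
  }
}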