diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java index 528b726..e745c29 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java @@ -55,17 +55,20 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.ConnectionClosingException; import org.apache.hadoop.hbase.io.ByteArrayOutputStream; import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader; import org.apache.hadoop.hbase.security.HBaseSaslRpcClient; +import org.apache.hadoop.hbase.security.SaslUtil; import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.ExceptionUtil; @@ -111,6 +114,8 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { private byte[] connectionHeaderWithLength; + private boolean waitingConnectionHeaderResponse = false; + /** * If the client wants to interrupt its calls easily (i.e. call Thread#interrupt), it gets into a * java issue: an interruption during a write closes the socket/channel. A way to avoid this is to @@ -220,13 +225,6 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { } this.connectionHeaderPreamble = getConnectionHeaderPreamble(); - ConnectionHeader header = getConnectionHeader(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(4 + header.getSerializedSize()); - DataOutputStream dos = new DataOutputStream(baos); - dos.writeInt(header.getSerializedSize()); - header.writeTo(dos); - assert baos.size() == 4 + header.getSerializedSize(); - this.connectionHeaderWithLength = baos.getBuffer(); UserGroupInformation ticket = remoteId.ticket.getUGI(); this.threadName = "IPC Client (" + this.rpcClient.socketFactory.hashCode() + ") connection to " @@ -462,8 +460,8 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { } if (continueSasl) { // Sasl connect is successful. Let's set up Sasl i/o streams. - inStream = saslRpcClient.getInputStream(inStream); - outStream = saslRpcClient.getOutputStream(outStream); + inStream = saslRpcClient.getInputStream(); + outStream = saslRpcClient.getOutputStream(); } else { // fall back to simple auth because server told us so. 
// do not change authMethod and useSasl here, we should start from secure when @@ -474,6 +472,9 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { this.out = new DataOutputStream(new BufferedOutputStream(outStream)); // Now write out the connection header writeConnectionHeader(); + // process the response from server for connection header if necessary + processResponseForConnectionHeader(); + break; } } catch (Throwable t) { @@ -511,10 +512,73 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { * Write the connection header. */ private void writeConnectionHeader() throws IOException { + boolean isCryptoAesEnable = false; + // check if Crypto AES is enabled + if (saslRpcClient != null) { + boolean saslEncryptionEnabled = SaslUtil.QualityOfProtection.PRIVACY. + getSaslQop().equalsIgnoreCase(saslRpcClient.getSaslQOP()); + isCryptoAesEnable = saslEncryptionEnabled && conf.getBoolean( + HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_DEFAULT); + } + + ConnectionHeader.Builder headerBuilder = getConnectionHeaderBuilder(); + // if Crypto AES is enabled, set the transformation and negotiate with the server + if (isCryptoAesEnable) { + headerBuilder.setRpcCryptoCipherTransformation( + conf.get(HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CONF_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CTR)); + waitingConnectionHeaderResponse = true; + } + ConnectionHeader header = headerBuilder.build(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(4 + header.getSerializedSize()); + DataOutputStream dos = new DataOutputStream(baos); + dos.writeInt(header.getSerializedSize()); + header.writeTo(dos); + assert baos.size() == 4 + header.getSerializedSize(); + this.connectionHeaderWithLength = baos.getBuffer(); + + this.out.write(connectionHeaderWithLength); this.out.flush(); } + private void processResponseForConnectionHeader() throws IOException { + // if no response is expected, return immediately + if (!waitingConnectionHeaderResponse) return; + try { + // read the ConnectionHeaderResponse from the server + int len = this.in.readInt(); + byte[] buff = new byte[len]; + // readFully, since a plain read() may return fewer bytes than requested + this.in.readFully(buff); + if (LOG.isDebugEnabled()) { + LOG.debug("Length of response for connection header:" + len); + } + + RPCProtos.ConnectionHeaderResponse connectionHeaderResponse = + RPCProtos.ConnectionHeaderResponse.parseFrom(buff); + + // Get the CryptoCipherMeta, update the HBaseSaslRpcClient for Crypto Cipher + if (connectionHeaderResponse.hasCryptoCipherMeta()) { + negotiateCryptoAes(connectionHeaderResponse.getCryptoCipherMeta()); + } + waitingConnectionHeaderResponse = false; + } catch (SocketTimeoutException ste) { + LOG.fatal("Can't get the connection header response within the rpc timeout, please check" + " if the server has the correct configuration to support this feature.", ste); + // timed out while waiting for the connection header response, abort the connection setup + throw new IOException("Timeout while waiting for the connection header response", ste); + } + } + + private void negotiateCryptoAes(RPCProtos.CryptoCipherMeta cryptoCipherMeta) + throws IOException { + // initialize the Crypto AES cipher with the CryptoCipherMeta from the server + saslRpcClient.initCryptoCipher(cryptoCipherMeta, this.rpcClient.conf); + // reset the inputStream/outputStream for Crypto AES encryption + this.in = new DataInputStream(new BufferedInputStream(saslRpcClient.getInputStream())); + this.out = new DataOutputStream(new
BufferedOutputStream(saslRpcClient.getOutputStream())); + } + private void tracedWriteRequest(Call call) throws IOException { try (TraceScope ignored = Trace.startSpan("RpcClientImpl.tracedWriteRequest", call.span)) { writeRequest(call);
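Note: the negotiation above is doubly gated. writeConnectionHeader() only advertises a cipher transformation when the negotiated SASL QOP is PRIVACY (auth-conf) and hbase.rpc.crypto.encryption.aes.enabled is set; otherwise the connection header is written exactly as before. A minimal client-side configuration sketch, assuming the servers carry matching settings (the constants are the ones this patch adds to HConstants; the wrapper class name is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;

public class AesRpcClientConf {
  public static Configuration create() {
    Configuration conf = HBaseConfiguration.create();
    // a negotiated SASL QOP of "privacy" (auth-conf) is a precondition for the AES upgrade
    conf.set("hbase.rpc.protection", "privacy");
    conf.setBoolean(HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY, true);
    // optional overrides, shown here with the defaults this patch introduces
    conf.set(HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CONF_KEY, "AES/CTR/NoPadding");
    conf.setInt(HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_KEY_SIZE_CONF_KEY, 128);
    return conf;
  }
}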
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java index 8a85580..18ad370 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java @@ -55,7 +55,6 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader; import org.apache.hadoop.hbase.security.NettyHBaseSaslRpcClientHandler; import org.apache.hadoop.hbase.security.SaslChallengeDecoder; -import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.security.UserGroupInformation; @@ -90,10 +89,6 @@ class NettyRpcConnection extends RpcConnection { byte[] connectionHeaderPreamble = getConnectionHeaderPreamble(); this.connectionHeaderPreamble = Unpooled.directBuffer(connectionHeaderPreamble.length) .writeBytes(connectionHeaderPreamble); - ConnectionHeader header = getConnectionHeader(); - this.connectionHeaderWithLength = Unpooled.directBuffer(4 + header.getSerializedSize()); - this.connectionHeaderWithLength.writeInt(header.getSerializedSize()); - header.writeTo(new ByteBufOutputStream(this.connectionHeaderWithLength)); } @Override @@ -130,8 +125,16 @@ class NettyRpcConnection extends RpcConnection { } } - private void established(Channel ch) { - ch.write(connectionHeaderWithLength.retainedDuplicate()); + private void established(Channel ch) throws IOException { + // the NettyHBaseSaslRpcClientHandler has already sent the connection header to the server + if (!useSasl) { + // without SASL, send the connection header to the server here + ConnectionHeader header = getConnectionHeaderBuilder().build(); + this.connectionHeaderWithLength = Unpooled.directBuffer(4 + header.getSerializedSize()); + this.connectionHeaderWithLength.writeInt(header.getSerializedSize()); + header.writeTo(new ByteBufOutputStream(this.connectionHeaderWithLength)); + ch.write(connectionHeaderWithLength.retainedDuplicate()); + } ChannelPipeline p = ch.pipeline(); String addBeforeHandler = p.context(BufferCallBeforeInitHandler.class).name(); p.addBefore(addBeforeHandler, null, @@ -191,8 +194,8 @@ class NettyRpcConnection extends RpcConnection { ChannelHandler saslHandler; try { saslHandler = new NettyHBaseSaslRpcClientHandler(saslPromise, ticket, authMethod, token, - serverPrincipal, rpcClient.fallbackAllowed, this.rpcClient.conf.get( - "hbase.rpc.protection", QualityOfProtection.AUTHENTICATION.name().toLowerCase())); + serverPrincipal, rpcClient.fallbackAllowed, this.rpcClient.conf, + getConnectionHeaderBuilder()); } catch (IOException e) { failInit(ch, e); return; @@ -204,7 +207,6 @@ class NettyRpcConnection extends RpcConnection { public void operationComplete(Future<Boolean> future) throws Exception { if (future.isSuccess()) { ChannelPipeline p = ch.pipeline(); - p.remove(SaslChallengeDecoder.class); p.remove(NettyHBaseSaslRpcClientHandler.class); established(ch); } else { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java index 5e9e97e..43a5b01 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java @@ -72,6 +72,8 @@ abstract class RpcConnection { protected final HashedWheelTimer timeoutTimer; + protected final Configuration conf; + // the last time we were picked up from connection pool. protected long lastTouched; @@ -84,6 +86,7 @@ abstract class RpcConnection { this.timeoutTimer = timeoutTimer; this.codec = codec; this.compressor = compressor; + this.conf = conf; UserGroupInformation ticket = remoteId.getTicket().getUGI(); SecurityInfo securityInfo = SecurityInfo.getInfo(remoteId.getServiceName()); @@ -210,7 +213,7 @@ abstract class RpcConnection { return preamble; } - protected ConnectionHeader getConnectionHeader() { + protected ConnectionHeader.Builder getConnectionHeaderBuilder() { ConnectionHeader.Builder builder = ConnectionHeader.newBuilder(); builder.setServiceName(remoteId.getServiceName()); UserInformation userInfoPB; @@ -224,7 +227,7 @@ abstract class RpcConnection { builder.setCellBlockCompressorClass(this.compressor.getClass().getCanonicalName()); } builder.setVersionInfo(ProtobufUtil.getVersionInfo()); - return builder.build(); + return builder; } protected abstract void callTimeout(Call call); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java new file mode 100644 index 0000000..12e4a7a --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.ipc; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.classification.InterfaceStability; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class UnsupportedCryptoException extends FatalConnectionException { + public UnsupportedCryptoException() { + super(); + } + + public UnsupportedCryptoException(String msg) { + super(msg); + } + + public UnsupportedCryptoException(String msg, Throwable t) { + super(msg, t); + } +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/CryptoAESUnwrapHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/CryptoAESUnwrapHandler.java new file mode 100644 index 0000000..7b335da --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/CryptoAESUnwrapHandler.java @@ -0,0 +1,50 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements.
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.security; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; +import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES; + +/** + * Unwrap messages with Crypto AES. Should be placed after a + * {@link io.netty.handler.codec.LengthFieldBasedFrameDecoder} + */ +public class CryptoAESUnwrapHandler extends SimpleChannelInboundHandler<ByteBuf> { + + private final CryptoAES cryptoAES; + + public CryptoAESUnwrapHandler(CryptoAES cryptoAES) { + this.cryptoAES = cryptoAES; + } + + @Override + public void channelInactive(ChannelHandlerContext ctx) throws Exception { + ctx.fireChannelInactive(); + } + + @Override + protected void channelRead0(ChannelHandlerContext ctx, ByteBuf msg) throws Exception { + byte[] bytes = new byte[msg.readableBytes()]; + msg.readBytes(bytes); + ctx.fireChannelRead(Unpooled.wrappedBuffer(cryptoAES.unwrap(bytes, 0, bytes.length))); + } +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/CryptoAESWrapHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/CryptoAESWrapHandler.java new file mode 100644 index 0000000..6c74ed8 --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/CryptoAESWrapHandler.java @@ -0,0 +1,98 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.security; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelOutboundHandlerAdapter; +import io.netty.channel.ChannelPromise; +import io.netty.channel.CoalescingBufferQueue; +import io.netty.util.ReferenceCountUtil; +import io.netty.util.concurrent.PromiseCombiner; +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES; + +import java.io.IOException; + +/** + * Wrap messages with Crypto AES.
+ */ +@InterfaceAudience.Private +public class CryptoAESWrapHandler extends ChannelOutboundHandlerAdapter { + + private final CryptoAES cryptoAES; + + private CoalescingBufferQueue queue; + + public CryptoAESWrapHandler(CryptoAES cryptoAES) { + this.cryptoAES = cryptoAES; + } + + @Override + public void handlerAdded(ChannelHandlerContext ctx) throws Exception { + queue = new CoalescingBufferQueue(ctx.channel()); + } + + @Override + public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) + throws Exception { + if (msg instanceof ByteBuf) { + queue.add((ByteBuf) msg, promise); + } else { + ctx.write(msg, promise); + } + } + + @Override + public void flush(ChannelHandlerContext ctx) throws Exception { + if (queue.isEmpty()) { + return; + } + ByteBuf buf = null; + try { + ChannelPromise promise = ctx.newPromise(); + int readableBytes = queue.readableBytes(); + buf = queue.remove(readableBytes, promise); + byte[] bytes = new byte[readableBytes]; + buf.readBytes(bytes); + byte[] wrapperBytes = cryptoAES.wrap(bytes, 0, bytes.length); + ChannelPromise lenPromise = ctx.newPromise(); + ctx.write(ctx.alloc().buffer(4).writeInt(wrapperBytes.length), lenPromise); + ChannelPromise contentPromise = ctx.newPromise(); + ctx.write(Unpooled.wrappedBuffer(wrapperBytes), contentPromise); + PromiseCombiner combiner = new PromiseCombiner(); + combiner.addAll(lenPromise, contentPromise); + combiner.finish(promise); + ctx.flush(); + } finally { + if (buf != null) { + ReferenceCountUtil.safeRelease(buf); + } + } + } + + @Override + public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { + if (!queue.isEmpty()) { + queue.releaseAndFailAll(new IOException("Connection closed")); + } + ctx.close(promise); + } +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java index d89d96c..f963b0a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java @@ -23,9 +23,11 @@ import java.io.IOException; import java.security.Key; import java.security.KeyException; import java.security.SecureRandom; +import java.util.Properties; import javax.crypto.spec.SecretKeySpec; +import org.apache.commons.crypto.cipher.CryptoCipherFactory; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -35,7 +37,9 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.crypto.Cipher; import org.apache.hadoop.hbase.io.crypto.Encryption; +import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES; import org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; @@ -255,4 +259,27 @@ public final class EncryptionUtil { } return key; } + + /** + * Helper to create an instance of CryptoAES. + * + * @param cryptoCipherMeta The metadata for creating the CryptoAES instance. + * @param conf The current configuration. + * @return The instance of CryptoAES. + * @throws IOException if creating the CryptoAES instance fails + */ + public static CryptoAES createCryptoAES(RPCProtos.CryptoCipherMeta cryptoCipherMeta, + Configuration conf) throws IOException { + Properties properties = new Properties(); + // the property for cipher class + properties.setProperty(CryptoCipherFactory.CLASSES_KEY, + conf.get(HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_CLASS_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_CLASS_DEFAULT)); + // create the Crypto AES instance for the client + return new CryptoAES(cryptoCipherMeta.getTransformation(), properties, + cryptoCipherMeta.getInKey().toByteArray(), + cryptoCipherMeta.getOutKey().toByteArray(), + cryptoCipherMeta.getInIv().toByteArray(), + cryptoCipherMeta.getOutIv().toByteArray()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java index 3f43f7f..f2144fc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java @@ -22,16 +22,22 @@ import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; +import java.io.FilterInputStream; +import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.ByteBuffer; import javax.security.sasl.Sasl; import javax.security.sasl.SaslException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos; import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.security.SaslInputStream; @@ -47,6 +53,12 @@ import org.apache.hadoop.security.token.TokenIdentifier; public class HBaseSaslRpcClient extends AbstractHBaseSaslRpcClient { private static final Log LOG = LogFactory.getLog(HBaseSaslRpcClient.class); + private boolean cryptoAesEnable; + private CryptoAES cryptoAES; + private InputStream saslInputStream; + private InputStream cryptoInputStream; + private OutputStream saslOutputStream; + private OutputStream cryptoOutputStream; public HBaseSaslRpcClient(AuthMethod method, Token<? extends TokenIdentifier> token, String serverPrincipal, boolean fallbackAllowed) throws IOException { @@ -133,6 +145,15 @@ public class HBaseSaslRpcClient extends AbstractHBaseSaslRpcClient { LOG.debug("SASL client context established.
Negotiated QoP: " + saslClient.getNegotiatedProperty(Sasl.QOP)); } + // initialize the inputStream/outputStream for both SASL encryption and Crypto AES encryption: + // saslInputStream/saslOutputStream are responsible for rpc encryption with the SASL + // mechanism (e.g. 3DES, DES), while cryptoInputStream/cryptoOutputStream are responsible + // for rpc encryption with AES + saslInputStream = new SaslInputStream(inS, saslClient); + cryptoInputStream = new WrappedInputStream(inS); + saslOutputStream = new SaslOutputStream(outS, saslClient); + cryptoOutputStream = new WrappedOutputStream(outS); + return true; } catch (IOException e) { try { @@ -144,29 +165,112 @@ } } + public String getSaslQOP() { + return (String) saslClient.getNegotiatedProperty(Sasl.QOP); + } + + public void initCryptoCipher(RPCProtos.CryptoCipherMeta cryptoCipherMeta, + Configuration conf) throws IOException { + // create the Crypto AES cipher for the client + cryptoAES = EncryptionUtil.createCryptoAES(cryptoCipherMeta, conf); + cryptoAesEnable = true; + } + /** * Get a SASL wrapped InputStream. Can be called only after saslConnect() has been called. - * @param in the InputStream to wrap * @return a SASL wrapped InputStream * @throws IOException */ - public InputStream getInputStream(InputStream in) throws IOException { + public InputStream getInputStream() throws IOException { if (!saslClient.isComplete()) { throw new IOException("Sasl authentication exchange hasn't completed yet"); } - return new SaslInputStream(in, saslClient); + // If Crypto AES is enabled, return cryptoInputStream, which unwraps the data with Crypto AES. + if (cryptoAesEnable) { + return cryptoInputStream; + } + return saslInputStream; + } + + class WrappedInputStream extends FilterInputStream { + private ByteBuffer unwrappedRpcBuffer = ByteBuffer.allocate(0); + public WrappedInputStream(InputStream in) throws IOException { + super(in); + } + + @Override + public int read() throws IOException { + byte[] b = new byte[1]; + int n = read(b, 0, 1); + return (n != -1) ? b[0] : -1; + } + + @Override + public int read(byte[] b) throws IOException { + return read(b, 0, b.length); + } + + @Override + public synchronized int read(byte[] buf, int off, int len) throws IOException { + // fill the buffer with the next RPC message + if (unwrappedRpcBuffer.remaining() == 0) { + readNextRpcPacket(); + } + // satisfy as much of the request as possible + int readLen = Math.min(len, unwrappedRpcBuffer.remaining()); + unwrappedRpcBuffer.get(buf, off, readLen); + return readLen; + } + + // all messages must be wrapped with Crypto AES, else an exception is thrown + private void readNextRpcPacket() throws IOException { + LOG.debug("reading next wrapped RPC packet"); + DataInputStream dis = new DataInputStream(in); + int rpcLen = dis.readInt(); + byte[] rpcBuf = new byte[rpcLen]; + dis.readFully(rpcBuf); + + // unwrap with Crypto AES + rpcBuf = cryptoAES.unwrap(rpcBuf, 0, rpcBuf.length); + if (LOG.isDebugEnabled()) { + LOG.debug("unwrapping token of length:" + rpcBuf.length); + } + unwrappedRpcBuffer = ByteBuffer.wrap(rpcBuf); + } } /** * Get a SASL wrapped OutputStream. Can be called only after saslConnect() has been called.
- * @param out the OutputStream to wrap * @return a SASL wrapped OutputStream * @throws IOException */ - public OutputStream getOutputStream(OutputStream out) throws IOException { + public OutputStream getOutputStream() throws IOException { if (!saslClient.isComplete()) { throw new IOException("Sasl authentication exchange hasn't completed yet"); } - return new SaslOutputStream(out, saslClient); + // If Crypto AES is enabled, return cryptoOutputStream, which wraps the data with Crypto AES. + if (cryptoAesEnable) { + return cryptoOutputStream; + } + return saslOutputStream; + } + + class WrappedOutputStream extends FilterOutputStream { + public WrappedOutputStream(OutputStream out) throws IOException { + super(out); + } + @Override + public void write(byte[] buf, int off, int len) throws IOException { + if (LOG.isDebugEnabled()) { + LOG.debug("wrapping token of length:" + len); + } + + // wrap with Crypto AES + byte[] wrapped = cryptoAES.wrap(buf, off, len); + DataOutputStream dob = new DataOutputStream(out); + dob.writeInt(wrapped.length); + dob.write(wrapped, 0, wrapped.length); + dob.flush(); + } } }
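Note: WrappedOutputStream and WrappedInputStream.readNextRpcPacket() above define the wire framing for the AES path: a 4-byte big-endian length prefix followed by the CryptoAES-wrapped packet. A sketch of one round of that framing against the wrap/unwrap API this patch adds (the helper class and method names are illustrative, not part of the patch):

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES;

final class AesFraming {
  // write side: wrap the plaintext, then emit <4-byte length><wrapped bytes>
  static void writeWrapped(DataOutputStream out, CryptoAES aes, byte[] plain) throws IOException {
    byte[] wrapped = aes.wrap(plain, 0, plain.length);
    out.writeInt(wrapped.length);
    out.write(wrapped, 0, wrapped.length);
    out.flush();
  }

  // read side: the whole wrapped packet must be buffered before unwrap can verify the MAC
  static byte[] readWrapped(DataInputStream in, CryptoAES aes) throws IOException {
    int len = in.readInt();
    byte[] wrapped = new byte[len];
    in.readFully(wrapped);
    return aes.unwrap(wrapped, 0, wrapped.length);
  }
}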
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java index f624608..76ec026 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java @@ -47,6 +47,9 @@ if (LOG.isDebugEnabled()) { LOG.debug("SASL client context established. Negotiated QoP: " + qop); } + // SASL negotiation has finished, remove the SaslChallengeDecoder + p.remove(SaslChallengeDecoder.class); + if (qop == null || "auth".equalsIgnoreCase(qop)) { return; } @@ -55,4 +58,8 @@ new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, 4, 0, 4), new SaslUnwrapHandler(saslClient)); } + + public String getSaslQOP() { + return (String) saslClient.getNegotiatedProperty(Sasl.QOP); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java index 50609b4..1fff0d9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java @@ -19,16 +19,27 @@ package org.apache.hadoop.hbase.security; import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelPipeline; import io.netty.channel.SimpleChannelInboundHandler; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.timeout.ReadTimeoutHandler; import io.netty.util.concurrent.Promise; +import java.io.DataOutputStream; import java.io.IOException; import java.security.PrivilegedExceptionAction; +import java.util.concurrent.TimeUnit; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.io.ByteArrayOutputStream; +import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES; import org.apache.hadoop.hbase.ipc.FallbackDisallowedException; +import org.apache.hadoop.hbase.ipc.RpcClient; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; @@ -47,17 +58,28 @@ public class NettyHBaseSaslRpcClientHandler extends SimpleChannelInboundHandler<ByteBuf> private final NettyHBaseSaslRpcClient saslRpcClient; + private final Configuration conf; + + // set while waiting for the server's connection header response to negotiate Crypto AES encryption + private boolean waitingConnectionHeaderResponse = false; + + private RPCProtos.ConnectionHeader.Builder builder; + /** * @param saslPromise {@code true} if success, {@code false} if server tells us to fall back to * simple. */ public NettyHBaseSaslRpcClientHandler(Promise<Boolean> saslPromise, UserGroupInformation ugi, AuthMethod method, Token<? extends TokenIdentifier> token, String serverPrincipal, - boolean fallbackAllowed, String rpcProtection) throws IOException { + boolean fallbackAllowed, Configuration conf, RPCProtos.ConnectionHeader.Builder builder) + throws IOException { this.saslPromise = saslPromise; this.ugi = ugi; + this.conf = conf; + this.builder = builder; this.saslRpcClient = new NettyHBaseSaslRpcClient(method, token, serverPrincipal, - fallbackAllowed, rpcProtection); + fallbackAllowed, conf.get( + "hbase.rpc.protection", SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase())); } private void writeResponse(ChannelHandlerContext ctx, byte[] response) { @@ -68,14 +90,87 @@ ctx.alloc().buffer(4 + response.length).writeInt(response.length).writeBytes(response)); } - private void tryComplete(ChannelHandlerContext ctx) { + private void tryComplete(ChannelHandlerContext ctx) throws IOException { if (!saslRpcClient.isComplete()) { return; } + saslRpcClient.setupSaslHandler(ctx.pipeline()); + + // if Crypto AES is enabled, set the transformation to negotiate with the server + if (isCryptoAesEnable()) { + builder.setRpcCryptoCipherTransformation( + conf.get(HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CONF_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CTR)); + // add a ReadTimeoutHandler in case the server does not respond to the connection header + ctx.pipeline().addFirst(new ReadTimeoutHandler( + RpcClient.DEFAULT_SOCKET_TIMEOUT_READ, TimeUnit.MILLISECONDS)); + waitingConnectionHeaderResponse = true; + } + + // send the connection header to the server + writeConnectionHeader(ctx); + + // if Crypto AES is enabled, wait for the response from the server + if (waitingConnectionHeaderResponse) { + return; + } + + saslPromise.setSuccess(true); + } + + private void writeConnectionHeader(ChannelHandlerContext ctx) throws IOException { + RPCProtos.ConnectionHeader header = builder.build(); + int length = 4 + header.getSerializedSize(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(length); + DataOutputStream dos = new DataOutputStream(baos); + dos.writeInt(header.getSerializedSize()); + header.writeTo(dos); + ctx.writeAndFlush(ctx.alloc().buffer(length).writeBytes(baos.getBuffer())); + } + + private void negotiateConnectionHeader(ChannelHandlerContext ctx, ByteBuf msg) throws Exception { + if (waitingConnectionHeaderResponse) { + // read the ConnectionHeaderResponse from the server + int len = msg.readInt(); + byte[] buff = new byte[len]; + msg.readBytes(buff); + + RPCProtos.ConnectionHeaderResponse connectionHeaderResponse =
RPCProtos.ConnectionHeaderResponse.parseFrom(buff); + + // Get the CryptoCipherMeta, update the HBaseSaslRpcClient for Crypto Cipher + if (connectionHeaderResponse.hasCryptoCipherMeta()) { + CryptoAES cryptoAES = EncryptionUtil.createCryptoAES( + connectionHeaderResponse.getCryptoCipherMeta(), conf); + // replace the SASL handlers with the Crypto AES handlers + setupCryptoAESHandler(ctx.pipeline(), cryptoAES); + } + waitingConnectionHeaderResponse = false; + } saslPromise.setSuccess(true); } + /** + * Remove the handlers for SASL encryption and add the handlers for Crypto AES encryption + */ + private void setupCryptoAESHandler(ChannelPipeline p, CryptoAES cryptoAES) { + p.remove(SaslWrapHandler.class); + p.remove(SaslUnwrapHandler.class); + p.remove(ReadTimeoutHandler.class); + String lengthDecoder = p.context(LengthFieldBasedFrameDecoder.class).name(); + p.addAfter(lengthDecoder, null, new CryptoAESUnwrapHandler(cryptoAES)); + p.addAfter(lengthDecoder, null, new CryptoAESWrapHandler(cryptoAES)); + } + + private boolean isCryptoAesEnable() { + boolean saslEncryptionEnabled = SaslUtil.QualityOfProtection.PRIVACY. + getSaslQop().equalsIgnoreCase(saslRpcClient.getSaslQOP()); + return saslEncryptionEnabled && conf.getBoolean( + HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_DEFAULT); + } + @Override public void handlerAdded(ChannelHandlerContext ctx) { try { @@ -99,6 +194,11 @@ @Override protected void channelRead0(ChannelHandlerContext ctx, ByteBuf msg) throws Exception { + // if we are waiting for the connection header response, consume the message and negotiate + if (waitingConnectionHeaderResponse) { + negotiateConnectionHeader(ctx, msg); + return; + } int len = msg.readInt(); if (len == SaslUtil.SWITCH_TO_SIMPLE_AUTH) { saslRpcClient.dispose(); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java index 12b3661..1c4619e 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java @@ -211,14 +211,14 @@ public class TestHBaseSaslRpcClient { }; try { - rpcClient.getInputStream(Mockito.mock(InputStream.class)); + rpcClient.getInputStream(); } catch(IOException ex) { //Sasl authentication exchange hasn't completed yet inState = true; } try { - rpcClient.getOutputStream(Mockito.mock(OutputStream.class)); + rpcClient.getOutputStream(); } catch(IOException ex) { //Sasl authentication exchange hasn't completed yet outState = true; diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml index ee56970..0c3f1bf 100644 --- a/hbase-common/pom.xml +++ b/hbase-common/pom.xml @@ -279,6 +279,16 @@ <groupId>org.apache.htrace</groupId> <artifactId>htrace-core</artifactId> </dependency> + <dependency> + <groupId>org.apache.commons</groupId> + <artifactId>commons-crypto</artifactId> + <exclusions> + <exclusion> + <groupId>net.java.dev.jna</groupId> + <artifactId>jna</artifactId> + </exclusion> + </exclusions> + </dependency> diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index 4f8facc..e0f3af1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1196,6 +1196,39 @@ public final class HConstants { public static final String CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY = "hbase.crypto.alternate.key.algorithm"; + /** Configuration key for enabling Crypto AES
for rpc encryption */ + public static final String RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY = + "hbase.rpc.crypto.encryption.aes.enabled"; + + public static final boolean RPC_CRYPTO_ENCRYPTION_AES_ENABLED_DEFAULT = false; + + /** Configuration key for the transformation of the Crypto AES cipher */ + public static final String RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CONF_KEY = + "hbase.rpc.crypto.encryption.aes.cipher.transform"; + + public static final String RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CTR = + "AES/CTR/NoPadding"; + + /** Configuration key for the secure random implementation used by the Crypto AES cipher */ + public static final String RPC_CRYPTO_ENCRYPTION_RANDOM_CONF_KEY = + "hbase.crypto.sasl.encryption.aes.crypto.random"; + + public static final String RPC_CRYPTO_ENCRYPTION_RANDOM_DEFAULT = + "org.apache.commons.crypto.random.JavaCryptoRandom"; + + /** Configuration key for the key size of the Crypto AES cipher */ + public static final String RPC_CRYPTO_ENCRYPTION_AES_CIPHER_KEY_SIZE_CONF_KEY = + "hbase.rpc.crypto.encryption.aes.cipher.keySizeBits"; + + public static final int RPC_CRYPTO_ENCRYPTION_AES_CIPHER_KEY_SIZE_DEFAULT = 128; + + /** Configuration key for the cipher class used by Crypto AES */ + public static final String RPC_CRYPTO_ENCRYPTION_AES_CIPHER_CLASS_KEY = + "hbase.rpc.crypto.encryption.aes.cipher.class"; + + public static final String RPC_CRYPTO_ENCRYPTION_AES_CIPHER_CLASS_DEFAULT = + "org.apache.commons.crypto.cipher.JceCipher"; + /** Configuration key for enabling WAL encryption, a boolean */ public static final String ENABLE_WAL_ENCRYPTION = "hbase.regionserver.wal.encryption"; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.java new file mode 100644 index 0000000..43f241e --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.java @@ -0,0 +1,242 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.hbase.io.crypto.aes; + +import org.apache.commons.crypto.cipher.CryptoCipher; +import org.apache.commons.crypto.utils.Utils; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.classification.InterfaceStability; + +import javax.crypto.Cipher; +import javax.crypto.Mac; +import javax.crypto.SecretKey; +import javax.crypto.ShortBufferException; +import javax.crypto.spec.IvParameterSpec; +import javax.crypto.spec.SecretKeySpec; +import javax.security.sasl.SaslException; +import java.io.IOException; +import java.security.InvalidAlgorithmParameterException; +import java.security.InvalidKeyException; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.Properties; + +/** + * AES encryption and decryption. + */ +@InterfaceAudience.Private +@InterfaceStability.Evolving +public class CryptoAES { + + private final CryptoCipher encryptor; + private final CryptoCipher decryptor; + + private final Integrity integrity; + + public CryptoAES(String transformation, Properties properties, + byte[] inKey, byte[] outKey, byte[] inIv, byte[] outIv) throws IOException { + checkTransformation(transformation); + // encryptor + encryptor = Utils.getCipherInstance(transformation, properties); + try { + SecretKeySpec outKEYSpec = new SecretKeySpec(outKey, "AES"); + IvParameterSpec outIVSpec = new IvParameterSpec(outIv); + encryptor.init(Cipher.ENCRYPT_MODE, outKEYSpec, outIVSpec); + } catch (InvalidKeyException | InvalidAlgorithmParameterException e) { + throw new IOException("Failed to initialize encryptor", e); + } + + // decryptor + decryptor = Utils.getCipherInstance(transformation, properties); + try { + SecretKeySpec inKEYSpec = new SecretKeySpec(inKey, "AES"); + IvParameterSpec inIVSpec = new IvParameterSpec(inIv); + decryptor.init(Cipher.DECRYPT_MODE, inKEYSpec, inIVSpec); + } catch (InvalidKeyException | InvalidAlgorithmParameterException e) { + throw new IOException("Failed to initialize decryptor", e); + } + + integrity = new Integrity(outKey, inKey); + } + + /** + * Encrypts input data. The result consists of (msg, padding if needed, mac) and sequence num. + * @param data the input byte array + * @param offset the offset in input where the input starts + * @param len the input length + * @return the new encrypted byte array. + * @throws SaslException if an error occurs + */ + public byte[] wrap(byte[] data, int offset, int len) throws SaslException { + // mac + byte[] mac = integrity.getHMAC(data, offset, len); + integrity.incMySeqNum(); + + // encrypt + byte[] encrypted = new byte[len + 10]; + try { + int n = encryptor.update(data, offset, len, encrypted, 0); + encryptor.update(mac, 0, 10, encrypted, n); + } catch (ShortBufferException sbe) { + // this should not happen + throw new SaslException("Error while encrypting data", sbe); + } + + // append seqNum used for mac + byte[] wrapped = new byte[encrypted.length + 4]; + System.arraycopy(encrypted, 0, wrapped, 0, encrypted.length); + System.arraycopy(integrity.getSeqNum(), 0, wrapped, encrypted.length, 4); + + return wrapped; + } + + /** + * Decrypts input data. The input consists of (msg, padding if needed, mac) and sequence num. + * The result is msg. + * @param data the input byte array + * @param offset the offset in input where the input starts + * @param len the input length + * @return the new decrypted byte array.
+ * @throws SaslException if an error occurs + */ + public byte[] unwrap(byte[] data, int offset, int len) throws SaslException { + // get plaintext and seqNum + byte[] decrypted = new byte[len - 4]; + byte[] peerSeqNum = new byte[4]; + try { + decryptor.update(data, offset, len - 4, decrypted, 0); + } catch (ShortBufferException sbe) { + // this should not happen + throw new SaslException("Error while decrypting data", sbe); + } + System.arraycopy(data, offset + decrypted.length, peerSeqNum, 0, 4); + + // get msg and mac + byte[] msg = new byte[decrypted.length - 10]; + byte[] mac = new byte[10]; + System.arraycopy(decrypted, 0, msg, 0, msg.length); + System.arraycopy(decrypted, msg.length, mac, 0, 10); + + // check mac integrity and msg sequence + if (!integrity.compareHMAC(mac, peerSeqNum, msg, 0, msg.length)) { + throw new SaslException("Unmatched MAC"); + } + if (!integrity.comparePeerSeqNum(peerSeqNum)) { + throw new SaslException("Out of order sequencing of messages. Got: " + integrity.byteToInt(peerSeqNum) + " Expected: " + integrity.peerSeqNum); + } + integrity.incPeerSeqNum(); + + return msg; + } + + private void checkTransformation(String transformation) throws IOException { + if (HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CTR.equals(transformation)) { + return; + } + throw new IOException("AES cipher transformation is not supported: " + transformation); + } + + /** + * Helper class for providing integrity protection. + */ + private static class Integrity { + + private int mySeqNum = 0; + private int peerSeqNum = 0; + private byte[] seqNum = new byte[4]; + + private byte[] myKey; + private byte[] peerKey; + + Integrity(byte[] outKey, byte[] inKey) throws IOException { + myKey = outKey; + peerKey = inKey; + } + + byte[] getHMAC(byte[] msg, int start, int len) throws SaslException { + intToByte(mySeqNum); + return calculateHMAC(myKey, seqNum, msg, start, len); + } + + boolean compareHMAC(byte[] expectedHMAC, byte[] peerSeqNum, byte[] msg, int start, + int len) throws SaslException { + byte[] mac = calculateHMAC(peerKey, peerSeqNum, msg, start, len); + return Arrays.equals(mac, expectedHMAC); + } + + boolean comparePeerSeqNum(byte[] peerSeqNum) { + return this.peerSeqNum == byteToInt(peerSeqNum); + } + + byte[] getSeqNum() { + return seqNum; + } + + void incMySeqNum() { + mySeqNum++; + } + + void incPeerSeqNum() { + peerSeqNum++; + } + + private byte[] calculateHMAC(byte[] key, byte[] seqNum, byte[] msg, int start, + int len) throws SaslException { + byte[] seqAndMsg = new byte[4 + len]; + System.arraycopy(seqNum, 0, seqAndMsg, 0, 4); + System.arraycopy(msg, start, seqAndMsg, 4, len); + + try { + SecretKey keyKi = new SecretKeySpec(key, "HmacMD5"); + Mac m = Mac.getInstance("HmacMD5"); + m.init(keyKi); + m.update(seqAndMsg); + byte[] hMAC_MD5 = m.doFinal(); + + /* First 10 bytes of HMAC_MD5 digest */ + byte[] macBuffer = new byte[10]; + System.arraycopy(hMAC_MD5, 0, macBuffer, 0, 10); + + return macBuffer; + } catch (InvalidKeyException e) { + throw new SaslException("Invalid bytes used for key of HMAC-MD5 hash.", e); + } catch (NoSuchAlgorithmException e) { + throw new SaslException("Error creating instance of MD5 MAC algorithm", e); + } + } + + private void intToByte(int num) { + for (int i = 3; i >= 0; i--) { + seqNum[i] = (byte)(num & 0xff); + num >>>= 8; + } + } + + private int byteToInt(byte[] seqNum) { + int answer = 0; + for (int i = 0; i < 4; i++) { + answer <<= 8; + answer |= ((int)seqNum[i] & 0xff); + } + return answer; + } } }
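Note: a useful property of the class above is that two CryptoAES instances built with mirrored in/out keys and IVs, which is what client and server hold once CryptoCipherMeta has been exchanged, can wrap and unwrap each other's traffic, including the HMAC and sequence-number checks. A round-trip sketch under that assumption; the class name is illustrative and the key and IV values are random placeholders, not something the patch prescribes:

import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.Properties;
import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES;

public class CryptoAESRoundTrip {
  public static void main(String[] args) throws Exception {
    SecureRandom rng = new SecureRandom();
    byte[] keyA = new byte[16], keyB = new byte[16], ivA = new byte[16], ivB = new byte[16];
    rng.nextBytes(keyA); rng.nextBytes(keyB); rng.nextBytes(ivA); rng.nextBytes(ivB);
    Properties props = new Properties(); // empty: commons-crypto picks its default cipher provider
    // constructor order is (transformation, properties, inKey, outKey, inIv, outIv),
    // so the server's in-direction mirrors the client's out-direction and vice versa
    CryptoAES client = new CryptoAES("AES/CTR/NoPadding", props, keyA, keyB, ivA, ivB);
    CryptoAES server = new CryptoAES("AES/CTR/NoPadding", props, keyB, keyA, ivB, ivA);
    byte[] msg = "hello rpc".getBytes(StandardCharsets.UTF_8);
    byte[] onWire = client.wrap(msg, 0, msg.length);              // encrypt + MAC + seqNum
    byte[] roundTripped = server.unwrap(onWire, 0, onWire.length); // verify + decrypt
    System.out.println(Arrays.equals(msg, roundTripped)); // expect: true
  }
}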
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java index d05eb57..f6819e2 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java @@ -803,6 +803,33 @@ public final class RPCProtos { * optional .hbase.pb.VersionInfo version_info = 5; */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder(); + + // optional string rpc_crypto_cipher_transformation = 6; + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+     * the transformation for rpc AES encryption with Apache Commons Crypto
+     * 
+ */ + boolean hasRpcCryptoCipherTransformation(); + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+     * the transformation for rpc AES encryption with Apache Commons Crypto
+     * 
+ */ + java.lang.String getRpcCryptoCipherTransformation(); + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+     * the transformation for rpc AES encryption with Apache Commons Crypto
+     * 
+ */ + com.google.protobuf.ByteString + getRpcCryptoCipherTransformationBytes(); } /** * Protobuf type {@code hbase.pb.ConnectionHeader} @@ -900,6 +927,11 @@ public final class RPCProtos { bitField0_ |= 0x00000010; break; } + case 50: { + bitField0_ |= 0x00000020; + rpcCryptoCipherTransformation_ = input.readBytes(); + break; + } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { @@ -1143,12 +1175,68 @@ public final class RPCProtos { return versionInfo_; } + // optional string rpc_crypto_cipher_transformation = 6; + public static final int RPC_CRYPTO_CIPHER_TRANSFORMATION_FIELD_NUMBER = 6; + private java.lang.Object rpcCryptoCipherTransformation_; + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+     * the transformation for rpc AES encryption with Apache Commons Crypto
+     * 
+ */ + public boolean hasRpcCryptoCipherTransformation() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+     * the transformation for rpc AES encryption with Apache Commons Crypto
+     * 
+ */ + public java.lang.String getRpcCryptoCipherTransformation() { + java.lang.Object ref = rpcCryptoCipherTransformation_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + rpcCryptoCipherTransformation_ = s; + } + return s; + } + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+     * the transformation for rpc AES encryption with Apache Commons Crypto
+     * 
+ */ + public com.google.protobuf.ByteString + getRpcCryptoCipherTransformationBytes() { + java.lang.Object ref = rpcCryptoCipherTransformation_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + rpcCryptoCipherTransformation_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private void initFields() { userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); serviceName_ = ""; cellBlockCodecClass_ = ""; cellBlockCompressorClass_ = ""; versionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); + rpcCryptoCipherTransformation_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -1189,6 +1277,9 @@ public final class RPCProtos { if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeMessage(5, versionInfo_); } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBytes(6, getRpcCryptoCipherTransformationBytes()); + } getUnknownFields().writeTo(output); } @@ -1218,6 +1309,10 @@ public final class RPCProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, versionInfo_); } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(6, getRpcCryptoCipherTransformationBytes()); + } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -1266,6 +1361,11 @@ public final class RPCProtos { result = result && getVersionInfo() .equals(other.getVersionInfo()); } + result = result && (hasRpcCryptoCipherTransformation() == other.hasRpcCryptoCipherTransformation()); + if (hasRpcCryptoCipherTransformation()) { + result = result && getRpcCryptoCipherTransformation() + .equals(other.getRpcCryptoCipherTransformation()); + } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -1299,6 +1399,10 @@ public final class RPCProtos { hash = (37 * hash) + VERSION_INFO_FIELD_NUMBER; hash = (53 * hash) + getVersionInfo().hashCode(); } + if (hasRpcCryptoCipherTransformation()) { + hash = (37 * hash) + RPC_CRYPTO_CIPHER_TRANSFORMATION_FIELD_NUMBER; + hash = (53 * hash) + getRpcCryptoCipherTransformation().hashCode(); + } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; @@ -1432,6 +1536,8 @@ public final class RPCProtos { versionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); + rpcCryptoCipherTransformation_ = ""; + bitField0_ = (bitField0_ & ~0x00000020); return this; } @@ -1488,6 +1594,10 @@ public final class RPCProtos { } else { result.versionInfo_ = versionInfoBuilder_.build(); } + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.rpcCryptoCipherTransformation_ = rpcCryptoCipherTransformation_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -1525,6 +1635,11 @@ public final class RPCProtos { if (other.hasVersionInfo()) { mergeVersionInfo(other.getVersionInfo()); } + if (other.hasRpcCryptoCipherTransformation()) { + bitField0_ |= 0x00000020; + rpcCryptoCipherTransformation_ = other.rpcCryptoCipherTransformation_; + onChanged(); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -2080,6 +2195,104 @@ public final class RPCProtos { return versionInfoBuilder_; } + // optional string rpc_crypto_cipher_transformation = 6; + private java.lang.Object 
rpcCryptoCipherTransformation_ = ""; + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+       * the transformation for rpc AES encryption with Apache Commons Crypto
+       * 
+ */ + public boolean hasRpcCryptoCipherTransformation() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+       * the transformation for rpc AES encryption with Apache Commons Crypto
+       * 
+ */ + public java.lang.String getRpcCryptoCipherTransformation() { + java.lang.Object ref = rpcCryptoCipherTransformation_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + rpcCryptoCipherTransformation_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+       * the transformation for rpc AES encryption with Apache Commons Crypto
+       * 
+ */ + public com.google.protobuf.ByteString + getRpcCryptoCipherTransformationBytes() { + java.lang.Object ref = rpcCryptoCipherTransformation_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + rpcCryptoCipherTransformation_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+       * the transformation for rpc AES encryption with Apache Commons Crypto
+       * 
+ */ + public Builder setRpcCryptoCipherTransformation( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000020; + rpcCryptoCipherTransformation_ = value; + onChanged(); + return this; + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+       * the transformation for rpc AES encryption with Apache Commons Crypto
+       * 
+ */ + public Builder clearRpcCryptoCipherTransformation() { + bitField0_ = (bitField0_ & ~0x00000020); + rpcCryptoCipherTransformation_ = getDefaultInstance().getRpcCryptoCipherTransformation(); + onChanged(); + return this; + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+       * the transformation for rpc AES encryption with Apache Commons Crypto
+       * 
+ */ + public Builder setRpcCryptoCipherTransformationBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000020; + rpcCryptoCipherTransformation_ = value; + onChanged(); + return this; + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ConnectionHeader) } @@ -2091,50 +2304,58 @@ public final class RPCProtos { // @@protoc_insertion_point(class_scope:hbase.pb.ConnectionHeader) } - public interface CellBlockMetaOrBuilder + public interface ConnectionHeaderResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - // optional uint32 length = 1; + // optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; /** - * optional uint32 length = 1; + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; * *
-     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+     * To use Apache Commons Crypto, negotiate the metadata
      * 
*/ - boolean hasLength(); + boolean hasCryptoCipherMeta(); /** - * optional uint32 length = 1; + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; * *
-     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+     * To use Apache Commons Crypto, negotiate the metadata
      * 
*/ - int getLength(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta getCryptoCipherMeta(); + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+     * To use Apache Commons Crypto, negotiate the metadata
+     * 
+ */ + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder getCryptoCipherMetaOrBuilder(); } /** - * Protobuf type {@code hbase.pb.CellBlockMeta} + * Protobuf type {@code hbase.pb.ConnectionHeaderResponse} * *
-   * Optional Cell block Message.  Included in client RequestHeader
+   * This is sent by the rpc server to negotiate connection metadata if necessary
    * 
*/ - public static final class CellBlockMeta extends + public static final class ConnectionHeaderResponse extends com.google.protobuf.GeneratedMessage - implements CellBlockMetaOrBuilder { - // Use CellBlockMeta.newBuilder() to construct. - private CellBlockMeta(com.google.protobuf.GeneratedMessage.Builder builder) { + implements ConnectionHeaderResponseOrBuilder { + // Use ConnectionHeaderResponse.newBuilder() to construct. + private ConnectionHeaderResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private CellBlockMeta(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private ConnectionHeaderResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final CellBlockMeta defaultInstance; - public static CellBlockMeta getDefaultInstance() { + private static final ConnectionHeaderResponse defaultInstance; + public static ConnectionHeaderResponse getDefaultInstance() { return defaultInstance; } - public CellBlockMeta getDefaultInstanceForType() { + public ConnectionHeaderResponse getDefaultInstanceForType() { return defaultInstance; } @@ -2144,7 +2365,7 @@ public final class RPCProtos { getUnknownFields() { return this.unknownFields; } - private CellBlockMeta( + private ConnectionHeaderResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -2167,9 +2388,17 @@ public final class RPCProtos { } break; } - case 8: { + case 10: { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = cryptoCipherMeta_.toBuilder(); + } + cryptoCipherMeta_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(cryptoCipherMeta_); + cryptoCipherMeta_ = subBuilder.buildPartial(); + } bitField0_ |= 0x00000001; - length_ = input.readUInt32(); break; } } @@ -2186,64 +2415,80 @@ public final class RPCProtos { } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CellBlockMeta parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public 
ConnectionHeaderResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new CellBlockMeta(input, extensionRegistry); + return new ConnectionHeaderResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } private int bitField0_; - // optional uint32 length = 1; - public static final int LENGTH_FIELD_NUMBER = 1; - private int length_; + // optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + public static final int CRYPTO_CIPHER_META_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta cryptoCipherMeta_; /** - * optional uint32 length = 1; + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; * *
-     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+     * To use Apache Commons Crypto, negotiate the metadata
      * 
*/ - public boolean hasLength() { + public boolean hasCryptoCipherMeta() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint32 length = 1; + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; * *
-     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+     * To use Apache Commons Crypto, negotiate the metadata
      * 
*/ - public int getLength() { - return length_; + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta getCryptoCipherMeta() { + return cryptoCipherMeta_; + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+     * To use Apache Commons Crypto, negotiate the metadata
+     * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder getCryptoCipherMetaOrBuilder() { + return cryptoCipherMeta_; } private void initFields() { - length_ = 0; + cryptoCipherMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; + if (hasCryptoCipherMeta()) { + if (!getCryptoCipherMeta().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } memoizedIsInitialized = 1; return true; } @@ -2252,7 +2497,7 @@ public final class RPCProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt32(1, length_); + output.writeMessage(1, cryptoCipherMeta_); } getUnknownFields().writeTo(output); } @@ -2265,7 +2510,7 @@ public final class RPCProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(1, length_); + .computeMessageSize(1, cryptoCipherMeta_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -2284,16 +2529,16 @@ public final class RPCProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) obj; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse) obj; boolean result = true; - result = result && (hasLength() == other.hasLength()); - if (hasLength()) { - result = result && (getLength() - == other.getLength()); + result = result && (hasCryptoCipherMeta() == other.hasCryptoCipherMeta()); + if (hasCryptoCipherMeta()) { + result = result && getCryptoCipherMeta() + .equals(other.getCryptoCipherMeta()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -2308,62 +2553,62 @@ public final class RPCProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLength()) { - hash = (37 * hash) + LENGTH_FIELD_NUMBER; - hash = (53 * hash) + getLength(); + if (hasCryptoCipherMeta()) { + hash = (37 * hash) + CRYPTO_CIPHER_META_FIELD_NUMBER; + hash = (53 * hash) + getCryptoCipherMeta().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2372,7 +2617,7 @@ public final class RPCProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -2384,28 +2629,28 @@ public final class RPCProtos { return builder; } /** - * Protobuf type {@code hbase.pb.CellBlockMeta} + * Protobuf type {@code hbase.pb.ConnectionHeaderResponse} * *
-     * Optional Cell block Message.  Included in client RequestHeader
+     * This is sent by the rpc server to negotiate connection metadata if necessary
      * 
*/ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -2417,6 +2662,7 @@ public final class RPCProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getCryptoCipherMetaFieldBuilder(); } } private static Builder create() { @@ -2425,7 +2671,11 @@ public final class RPCProtos { public Builder clear() { super.clear(); - length_ = 0; + if (cryptoCipherMetaBuilder_ == null) { + cryptoCipherMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance(); + } else { + cryptoCipherMetaBuilder_.clear(); + } bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ -2436,53 +2686,63 @@ public final class RPCProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta(this); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.length_ = length_; + if (cryptoCipherMetaBuilder_ == null) { + result.cryptoCipherMeta_ = cryptoCipherMeta_; + } else { + result.cryptoCipherMeta_ = cryptoCipherMetaBuilder_.build(); + } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) return this; - if (other.hasLength()) { - setLength(other.getLength()); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.getDefaultInstance()) return this; + if (other.hasCryptoCipherMeta()) { + mergeCryptoCipherMeta(other.getCryptoCipherMeta()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { + if (hasCryptoCipherMeta()) { + if (!getCryptoCipherMeta().isInitialized()) { + + return false; + } + } return true; } @@ -2490,11 +2750,11 @@ public final class RPCProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -2505,205 +2765,1943 @@ public final class RPCProtos { } private int bitField0_; - // optional uint32 length = 1; + // optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta cryptoCipherMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder> cryptoCipherMetaBuilder_; + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public boolean hasCryptoCipherMeta() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta getCryptoCipherMeta() { + if (cryptoCipherMetaBuilder_ == null) { + return cryptoCipherMeta_; + } else { + return cryptoCipherMetaBuilder_.getMessage(); + } + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public Builder setCryptoCipherMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta value) { + if (cryptoCipherMetaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + cryptoCipherMeta_ = value; + onChanged(); + } else { + cryptoCipherMetaBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public Builder setCryptoCipherMeta( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder builderForValue) { + if (cryptoCipherMetaBuilder_ == null) { + cryptoCipherMeta_ = builderForValue.build(); + onChanged(); + } else { + cryptoCipherMetaBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public Builder mergeCryptoCipherMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta value) { + if (cryptoCipherMetaBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + cryptoCipherMeta_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance()) { + cryptoCipherMeta_ = + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.newBuilder(cryptoCipherMeta_).mergeFrom(value).buildPartial(); + } else { + cryptoCipherMeta_ = value; + } + onChanged(); + } else { + cryptoCipherMetaBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public Builder clearCryptoCipherMeta() { + if (cryptoCipherMetaBuilder_ == null) { + cryptoCipherMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance(); + onChanged(); + } else { + cryptoCipherMetaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder getCryptoCipherMetaBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getCryptoCipherMetaFieldBuilder().getBuilder(); + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder getCryptoCipherMetaOrBuilder() { + if (cryptoCipherMetaBuilder_ != null) { + return cryptoCipherMetaBuilder_.getMessageOrBuilder(); + } else { + return cryptoCipherMeta_; + } + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder> + getCryptoCipherMetaFieldBuilder() { + if (cryptoCipherMetaBuilder_ == null) { + cryptoCipherMetaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder>( + cryptoCipherMeta_, + getParentForChildren(), + isClean()); + cryptoCipherMeta_ = null; + } + return cryptoCipherMetaBuilder_; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.ConnectionHeaderResponse) + } + + static { + defaultInstance = new ConnectionHeaderResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.ConnectionHeaderResponse) + } + + public interface CellBlockMetaOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional uint32 length = 1; + /** + * optional uint32 length = 1; + * + *
+     * Length of the following cell block.  Could calculate it but convenient having it to hand.
+     * 
+ */ + boolean hasLength(); + /** + * optional uint32 length = 1; + * + *
+     * Length of the following cell block.  Could calculate it but convenient having it to hand.
+     * 
+ */ + int getLength(); + } + /** + * Protobuf type {@code hbase.pb.CellBlockMeta} + * + *
+   * Optional Cell block Message.  Included in client RequestHeader
+   * 
+ */ + public static final class CellBlockMeta extends + com.google.protobuf.GeneratedMessage + implements CellBlockMetaOrBuilder { + // Use CellBlockMeta.newBuilder() to construct. + private CellBlockMeta(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private CellBlockMeta(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final CellBlockMeta defaultInstance; + public static CellBlockMeta getDefaultInstance() { + return defaultInstance; + } + + public CellBlockMeta getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CellBlockMeta( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + length_ = input.readUInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CellBlockMeta parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CellBlockMeta(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional uint32 length = 1; + public static final int LENGTH_FIELD_NUMBER = 1; + private int length_; + /** + * optional uint32 length = 1; + * + *
+     * Length of the following cell block.  Could calculate it but convenient having it to hand.
+     * 
+ */ + public boolean hasLength() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional uint32 length = 1; + * + *
+     * Length of the following cell block.  Could calculate it but convenient having it to hand.
+     * 
+ */ + public int getLength() { + return length_; + } + + private void initFields() { + length_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt32(1, length_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(1, length_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) obj; + + boolean result = true; + result = result && (hasLength() == other.hasLength()); + if (hasLength()) { + result = result && (getLength() + == other.getLength()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLength()) { + hash = (37 * hash) + LENGTH_FIELD_NUMBER; + hash = (53 * hash) + getLength(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.CellBlockMeta} + * + *
+     * Optional Cell block Message.  Included in client RequestHeader
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + length_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.length_ = length_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) return this; + if (other.hasLength()) { + setLength(other.getLength()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional uint32 length = 1; private int length_ ; /** - * optional uint32 length = 1; + * optional uint32 length = 1; + * + *
+       * Length of the following cell block.  Could calculate it but convenient having it to hand.
+       * 
+ */ + public boolean hasLength() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional uint32 length = 1; + * + *
+       * Length of the following cell block.  Could calculate it but convenient having it to hand.
+       * 
+ */ + public int getLength() { + return length_; + } + /** + * optional uint32 length = 1; + * + *
+       * Length of the following cell block.  Could calculate it but convenient having it to hand.
+       * 
+ */ + public Builder setLength(int value) { + bitField0_ |= 0x00000001; + length_ = value; + onChanged(); + return this; + } + /** + * optional uint32 length = 1; + * + *
+       * Length of the following cell block.  Could calculate it but convenient having it to hand.
+       * 
+ */ + public Builder clearLength() { + bitField0_ = (bitField0_ & ~0x00000001); + length_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.CellBlockMeta) + } + + static { + defaultInstance = new CellBlockMeta(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.CellBlockMeta) + } + + public interface ExceptionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional string exception_class_name = 1; + /** + * optional string exception_class_name = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + boolean hasExceptionClassName(); + /** + * optional string exception_class_name = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + java.lang.String getExceptionClassName(); + /** + * optional string exception_class_name = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + com.google.protobuf.ByteString + getExceptionClassNameBytes(); + + // optional string stack_trace = 2; + /** + * optional string stack_trace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + boolean hasStackTrace(); + /** + * optional string stack_trace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + java.lang.String getStackTrace(); + /** + * optional string stack_trace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + com.google.protobuf.ByteString + getStackTraceBytes(); + + // optional string hostname = 3; + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions such as region moved
+     * where the exception gives a clue as to where the region may have moved.
+     * 
+ */ + boolean hasHostname(); + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions such as region moved
+     * where the exception gives a clue as to where the region may have moved.
+     * 
+ */ + java.lang.String getHostname(); + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions such as region moved
+     * where the exception gives a clue as to where the region may have moved.
+     * 
+ */ + com.google.protobuf.ByteString + getHostnameBytes(); + + // optional int32 port = 4; + /** + * optional int32 port = 4; + */ + boolean hasPort(); + /** + * optional int32 port = 4; + */ + int getPort(); + + // optional bool do_not_retry = 5; + /** + * optional bool do_not_retry = 5; + * + *
+     * Set if we are NOT to retry on receipt of this exception
+     * 
+ */ + boolean hasDoNotRetry(); + /** + * optional bool do_not_retry = 5; + * + *
+     * Set if we are NOT to retry on receipt of this exception
+     * 
+ */ + boolean getDoNotRetry(); + } + /** + * Protobuf type {@code hbase.pb.ExceptionResponse} + * + *
+   * At the RPC layer, this message is used to carry
+   * the server side exception to the RPC client.
+   * 
+ */ + public static final class ExceptionResponse extends + com.google.protobuf.GeneratedMessage + implements ExceptionResponseOrBuilder { + // Use ExceptionResponse.newBuilder() to construct. + private ExceptionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ExceptionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ExceptionResponse defaultInstance; + public static ExceptionResponse getDefaultInstance() { + return defaultInstance; + } + + public ExceptionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ExceptionResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + exceptionClassName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + stackTrace_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + hostname_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + port_ = input.readInt32(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + doNotRetry_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ExceptionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExceptionResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional string exception_class_name = 1; + public static final int 
EXCEPTION_CLASS_NAME_FIELD_NUMBER = 1; + private java.lang.Object exceptionClassName_; + /** + * optional string exception_class_name = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + public boolean hasExceptionClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string exception_class_name = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + public java.lang.String getExceptionClassName() { + java.lang.Object ref = exceptionClassName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + exceptionClassName_ = s; + } + return s; + } + } + /** + * optional string exception_class_name = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + public com.google.protobuf.ByteString + getExceptionClassNameBytes() { + java.lang.Object ref = exceptionClassName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + exceptionClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string stack_trace = 2; + public static final int STACK_TRACE_FIELD_NUMBER = 2; + private java.lang.Object stackTrace_; + /** + * optional string stack_trace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + public boolean hasStackTrace() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional string stack_trace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + public java.lang.String getStackTrace() { + java.lang.Object ref = stackTrace_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + stackTrace_ = s; + } + return s; + } + } + /** + * optional string stack_trace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + public com.google.protobuf.ByteString + getStackTraceBytes() { + java.lang.Object ref = stackTrace_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + stackTrace_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string hostname = 3; + public static final int HOSTNAME_FIELD_NUMBER = 3; + private java.lang.Object hostname_; + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions such as region moved
+     * where the exception gives a clue as to where the region may have moved.
+     * 
+ */ + public boolean hasHostname() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions such as region moved
+     * where the exception gives a clue as to where the region may have moved.
+     * 
+ */ + public java.lang.String getHostname() { + java.lang.Object ref = hostname_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + hostname_ = s; + } + return s; + } + } + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions such as region moved
+     * where the exception gives a clue as to where the region may have moved.
+     * 
+ */ + public com.google.protobuf.ByteString + getHostnameBytes() { + java.lang.Object ref = hostname_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + hostname_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional int32 port = 4; + public static final int PORT_FIELD_NUMBER = 4; + private int port_; + /** + * optional int32 port = 4; + */ + public boolean hasPort() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional int32 port = 4; + */ + public int getPort() { + return port_; + } + + // optional bool do_not_retry = 5; + public static final int DO_NOT_RETRY_FIELD_NUMBER = 5; + private boolean doNotRetry_; + /** + * optional bool do_not_retry = 5; + * + *
+     * Set if we are NOT to retry on receipt of this exception
+     * 
+ */ + public boolean hasDoNotRetry() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional bool do_not_retry = 5; + * + *
+     * Set if we are NOT to retry on receipt of this exception
+     * 
+ */ + public boolean getDoNotRetry() { + return doNotRetry_; + } + + private void initFields() { + exceptionClassName_ = ""; + stackTrace_ = ""; + hostname_ = ""; + port_ = 0; + doNotRetry_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getExceptionClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getStackTraceBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getHostnameBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeInt32(4, port_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(5, doNotRetry_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getExceptionClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getStackTraceBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getHostnameBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(4, port_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, doNotRetry_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) obj; + + boolean result = true; + result = result && (hasExceptionClassName() == other.hasExceptionClassName()); + if (hasExceptionClassName()) { + result = result && getExceptionClassName() + .equals(other.getExceptionClassName()); + } + result = result && (hasStackTrace() == other.hasStackTrace()); + if (hasStackTrace()) { + result = result && getStackTrace() + .equals(other.getStackTrace()); + } + result = result && (hasHostname() == other.hasHostname()); + if (hasHostname()) { + result = result && getHostname() + .equals(other.getHostname()); + } + result = result && (hasPort() == other.hasPort()); + if (hasPort()) { + result = result && (getPort() + == other.getPort()); + } + result = result && (hasDoNotRetry() == other.hasDoNotRetry()); + if (hasDoNotRetry()) { + result = result && (getDoNotRetry() + == other.getDoNotRetry()); + } + result = result && + 
getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasExceptionClassName()) { + hash = (37 * hash) + EXCEPTION_CLASS_NAME_FIELD_NUMBER; + hash = (53 * hash) + getExceptionClassName().hashCode(); + } + if (hasStackTrace()) { + hash = (37 * hash) + STACK_TRACE_FIELD_NUMBER; + hash = (53 * hash) + getStackTrace().hashCode(); + } + if (hasHostname()) { + hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; + hash = (53 * hash) + getHostname().hashCode(); + } + if (hasPort()) { + hash = (37 * hash) + PORT_FIELD_NUMBER; + hash = (53 * hash) + getPort(); + } + if (hasDoNotRetry()) { + hash = (37 * hash) + DO_NOT_RETRY_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getDoNotRetry()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, 
extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.ExceptionResponse} + * + *
+     * At the RPC layer, this message is used to carry
+     * the server side exception to the RPC client.
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + exceptionClassName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + stackTrace_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + hostname_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + port_ = 0; + bitField0_ = (bitField0_ & ~0x00000008); + doNotRetry_ = false; + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.exceptionClassName_ = exceptionClassName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.stackTrace_ = stackTrace_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.hostname_ = hostname_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.port_ = port_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.doNotRetry_ = doNotRetry_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + 
public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance()) return this; + if (other.hasExceptionClassName()) { + bitField0_ |= 0x00000001; + exceptionClassName_ = other.exceptionClassName_; + onChanged(); + } + if (other.hasStackTrace()) { + bitField0_ |= 0x00000002; + stackTrace_ = other.stackTrace_; + onChanged(); + } + if (other.hasHostname()) { + bitField0_ |= 0x00000004; + hostname_ = other.hostname_; + onChanged(); + } + if (other.hasPort()) { + setPort(other.getPort()); + } + if (other.hasDoNotRetry()) { + setDoNotRetry(other.getDoNotRetry()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional string exception_class_name = 1; + private java.lang.Object exceptionClassName_ = ""; + /** + * optional string exception_class_name = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public boolean hasExceptionClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string exception_class_name = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public java.lang.String getExceptionClassName() { + java.lang.Object ref = exceptionClassName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + exceptionClassName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string exception_class_name = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public com.google.protobuf.ByteString + getExceptionClassNameBytes() { + java.lang.Object ref = exceptionClassName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + exceptionClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string exception_class_name = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public Builder setExceptionClassName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + exceptionClassName_ = value; + onChanged(); + return this; + } + /** + * optional string exception_class_name = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public Builder clearExceptionClassName() { + bitField0_ = (bitField0_ & ~0x00000001); + exceptionClassName_ = getDefaultInstance().getExceptionClassName(); + onChanged(); + return this; + } + /** + * optional string exception_class_name = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public Builder setExceptionClassNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + exceptionClassName_ = value; + onChanged(); + return this; + } + + // optional string stack_trace = 2; + private java.lang.Object stackTrace_ = ""; + /** + * optional string stack_trace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public boolean hasStackTrace() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional string stack_trace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public java.lang.String getStackTrace() { + java.lang.Object ref = stackTrace_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + stackTrace_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string stack_trace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public com.google.protobuf.ByteString + getStackTraceBytes() { + java.lang.Object ref = stackTrace_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + stackTrace_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string stack_trace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public Builder setStackTrace( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + stackTrace_ = value; + onChanged(); + return this; + } + /** + * optional string stack_trace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public Builder clearStackTrace() { + bitField0_ = (bitField0_ & ~0x00000002); + stackTrace_ = getDefaultInstance().getStackTrace(); + onChanged(); + return this; + } + /** + * optional string stack_trace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public Builder setStackTraceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + stackTrace_ = value; + onChanged(); + return this; + } + + // optional string hostname = 3; + private java.lang.Object hostname_ = ""; + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions such as region moved
+       * where exception gives clue on where the region may have moved.
+       * 
+ */ + public boolean hasHostname() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions such as region moved
+       * where exception gives clue on where the region may have moved.
+       * 
+ */ + public java.lang.String getHostname() { + java.lang.Object ref = hostname_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + hostname_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions such as region moved
+       * where exception gives clue on where the region may have moved.
+       * 
+ */ + public com.google.protobuf.ByteString + getHostnameBytes() { + java.lang.Object ref = hostname_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + hostname_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions such as region moved
+       * where exception gives clue on where the region may have moved.
+       * 
+ */ + public Builder setHostname( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + hostname_ = value; + onChanged(); + return this; + } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions such as region moved
+       * where exception gives clue on where the region may have moved.
+       * 
+ */ + public Builder clearHostname() { + bitField0_ = (bitField0_ & ~0x00000004); + hostname_ = getDefaultInstance().getHostname(); + onChanged(); + return this; + } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions such as region moved
+       * where exception gives clue on where the region may have moved.
+       * 
+ */ + public Builder setHostnameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + hostname_ = value; + onChanged(); + return this; + } + + // optional int32 port = 4; + private int port_ ; + /** + * optional int32 port = 4; + */ + public boolean hasPort() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional int32 port = 4; + */ + public int getPort() { + return port_; + } + /** + * optional int32 port = 4; + */ + public Builder setPort(int value) { + bitField0_ |= 0x00000008; + port_ = value; + onChanged(); + return this; + } + /** + * optional int32 port = 4; + */ + public Builder clearPort() { + bitField0_ = (bitField0_ & ~0x00000008); + port_ = 0; + onChanged(); + return this; + } + + // optional bool do_not_retry = 5; + private boolean doNotRetry_ ; + /** + * optional bool do_not_retry = 5; * *
-       * Length of the following cell block.  Could calculate it but convenient having it too hand.
+       * Set if we are NOT to retry on receipt of this exception
        * 
       */
-      public boolean hasLength() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
+      public boolean hasDoNotRetry() {
+        return ((bitField0_ & 0x00000010) == 0x00000010);
       }
       /**
-       * optional uint32 length = 1;
+       * optional bool do_not_retry = 5;
        *
        *
-       * Length of the following cell block.  Could calculate it but convenient having it too hand.
+       * Set if we are NOT to retry on receipt of this exception
        * 
       */
-      public int getLength() {
-        return length_;
+      public boolean getDoNotRetry() {
+        return doNotRetry_;
       }
       /**
-       * optional uint32 length = 1;
+       * optional bool do_not_retry = 5;
        *
        *
-       * Length of the following cell block.  Could calculate it but convenient having it too hand.
+       * Set if we are NOT to retry on receipt of this exception
        * 
       */
-      public Builder setLength(int value) {
-        bitField0_ |= 0x00000001;
-        length_ = value;
+      public Builder setDoNotRetry(boolean value) {
+        bitField0_ |= 0x00000010;
+        doNotRetry_ = value;
         onChanged();
         return this;
       }
       /**
-       * optional uint32 length = 1;
+       * optional bool do_not_retry = 5;
        *
        *
-       * Length of the following cell block.  Could calculate it but convenient having it too hand.
+       * Set if we are NOT to retry on receipt of this exception
        * 
       */
-      public Builder clearLength() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        length_ = 0;
+      public Builder clearDoNotRetry() {
+        bitField0_ = (bitField0_ & ~0x00000010);
+        doNotRetry_ = false;
         onChanged();
         return this;
       }

-      // @@protoc_insertion_point(builder_scope:hbase.pb.CellBlockMeta)
+      // @@protoc_insertion_point(builder_scope:hbase.pb.ExceptionResponse)
     }

     static {
-      defaultInstance = new CellBlockMeta(true);
+      defaultInstance = new ExceptionResponse(true);
       defaultInstance.initFields();
     }

-    // @@protoc_insertion_point(class_scope:hbase.pb.CellBlockMeta)
+    // @@protoc_insertion_point(class_scope:hbase.pb.ExceptionResponse)
   }

-  public interface ExceptionResponseOrBuilder
+  public interface CryptoCipherMetaOrBuilder
       extends com.google.protobuf.MessageOrBuilder {

-    // optional string exception_class_name = 1;
+    // required string transformation = 1;
     /**
-     * optional string exception_class_name = 1;
-     *
-     *
-     * Class name of the exception thrown from the server
-     * 
+ * required string transformation = 1; */ - boolean hasExceptionClassName(); + boolean hasTransformation(); /** - * optional string exception_class_name = 1; - * - *
-     * Class name of the exception thrown from the server
-     * 
+ * required string transformation = 1; */ - java.lang.String getExceptionClassName(); + java.lang.String getTransformation(); /** - * optional string exception_class_name = 1; - * - *
-     * Class name of the exception thrown from the server
-     * 
+ * required string transformation = 1; */ com.google.protobuf.ByteString - getExceptionClassNameBytes(); + getTransformationBytes(); - // optional string stack_trace = 2; - /** - * optional string stack_trace = 2; - * - *
-     * Exception stack trace from the server side
-     * 
- */ - boolean hasStackTrace(); + // optional bytes inKey = 2; /** - * optional string stack_trace = 2; - * - *
-     * Exception stack trace from the server side
-     * 
+ * optional bytes inKey = 2; */ - java.lang.String getStackTrace(); + boolean hasInKey(); /** - * optional string stack_trace = 2; - * - *
-     * Exception stack trace from the server side
-     * 
+ * optional bytes inKey = 2; */ - com.google.protobuf.ByteString - getStackTraceBytes(); + com.google.protobuf.ByteString getInKey(); - // optional string hostname = 3; + // optional bytes inIv = 3; /** - * optional string hostname = 3; - * - *
-     * Optional hostname.  Filled in for some exceptions such as region moved
-     * where exception gives clue on where the region may have moved.
-     * 
- */ - boolean hasHostname(); - /** - * optional string hostname = 3; - * - *
-     * Optional hostname.  Filled in for some exceptions such as region moved
-     * where exception gives clue on where the region may have moved.
-     * 
+ * optional bytes inIv = 3; */ - java.lang.String getHostname(); + boolean hasInIv(); /** - * optional string hostname = 3; - * - *
-     * Optional hostname.  Filled in for some exceptions such as region moved
-     * where exception gives clue on where the region may have moved.
-     * 
+ * optional bytes inIv = 3; */ - com.google.protobuf.ByteString - getHostnameBytes(); + com.google.protobuf.ByteString getInIv(); - // optional int32 port = 4; + // optional bytes outKey = 4; /** - * optional int32 port = 4; + * optional bytes outKey = 4; */ - boolean hasPort(); + boolean hasOutKey(); /** - * optional int32 port = 4; + * optional bytes outKey = 4; */ - int getPort(); + com.google.protobuf.ByteString getOutKey(); - // optional bool do_not_retry = 5; + // optional bytes outIv = 5; /** - * optional bool do_not_retry = 5; - * - *
-     * Set if we are NOT to retry on receipt of this exception
-     * 
+ * optional bytes outIv = 5; */ - boolean hasDoNotRetry(); + boolean hasOutIv(); /** - * optional bool do_not_retry = 5; - * - *
-     * Set if we are NOT to retry on receipt of this exception
-     * 
+ * optional bytes outIv = 5; */ - boolean getDoNotRetry(); + com.google.protobuf.ByteString getOutIv(); } /** - * Protobuf type {@code hbase.pb.ExceptionResponse} + * Protobuf type {@code hbase.pb.CryptoCipherMeta} * *
-   * At the RPC layer, this message is used to carry
-   * the server side exception to the RPC client.
+   **
+   * Cipher meta for Crypto
    * 
*/ - public static final class ExceptionResponse extends + public static final class CryptoCipherMeta extends com.google.protobuf.GeneratedMessage - implements ExceptionResponseOrBuilder { - // Use ExceptionResponse.newBuilder() to construct. - private ExceptionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + implements CryptoCipherMetaOrBuilder { + // Use CryptoCipherMeta.newBuilder() to construct. + private CryptoCipherMeta(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private ExceptionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private CryptoCipherMeta(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final ExceptionResponse defaultInstance; - public static ExceptionResponse getDefaultInstance() { + private static final CryptoCipherMeta defaultInstance; + public static CryptoCipherMeta getDefaultInstance() { return defaultInstance; } - public ExceptionResponse getDefaultInstanceForType() { + public CryptoCipherMeta getDefaultInstanceForType() { return defaultInstance; } @@ -2713,7 +4711,7 @@ public final class RPCProtos { getUnknownFields() { return this.unknownFields; } - private ExceptionResponse( + private CryptoCipherMeta( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -2738,27 +4736,27 @@ public final class RPCProtos { } case 10: { bitField0_ |= 0x00000001; - exceptionClassName_ = input.readBytes(); + transformation_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; - stackTrace_ = input.readBytes(); + inKey_ = input.readBytes(); break; } case 26: { bitField0_ |= 0x00000004; - hostname_ = input.readBytes(); + inIv_ = input.readBytes(); break; } - case 32: { + case 34: { bitField0_ |= 0x00000008; - port_ = input.readInt32(); + outKey_ = input.readBytes(); break; } - case 40: { + case 42: { bitField0_ |= 0x00000010; - doNotRetry_ = input.readBool(); + outIv_ = input.readBytes(); break; } } @@ -2772,112 +4770,49 @@ public final class RPCProtos { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ExceptionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ExceptionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // optional string exception_class_name = 1; - public static final int 
EXCEPTION_CLASS_NAME_FIELD_NUMBER = 1; - private java.lang.Object exceptionClassName_; - /** - * optional string exception_class_name = 1; - * - *
-     * Class name of the exception thrown from the server
-     * 
- */ - public boolean hasExceptionClassName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional string exception_class_name = 1; - * - *
-     * Class name of the exception thrown from the server
-     * 
- */ - public java.lang.String getExceptionClassName() { - java.lang.Object ref = exceptionClassName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - exceptionClassName_ = s; - } - return s; - } - } - /** - * optional string exception_class_name = 1; - * - *
-     * Class name of the exception thrown from the server
-     * 
- */ - public com.google.protobuf.ByteString - getExceptionClassNameBytes() { - java.lang.Object ref = exceptionClassName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - exceptionClassName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CryptoCipherMeta parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CryptoCipherMeta(input, extensionRegistry); } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - // optional string stack_trace = 2; - public static final int STACK_TRACE_FIELD_NUMBER = 2; - private java.lang.Object stackTrace_; + private int bitField0_; + // required string transformation = 1; + public static final int TRANSFORMATION_FIELD_NUMBER = 1; + private java.lang.Object transformation_; /** - * optional string stack_trace = 2; - * - *
-     * Exception stack trace from the server side
-     * 
+ * required string transformation = 1; */ - public boolean hasStackTrace() { - return ((bitField0_ & 0x00000002) == 0x00000002); + public boolean hasTransformation() { + return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional string stack_trace = 2; - * - *
-     * Exception stack trace from the server side
-     * 
+ * required string transformation = 1; */ - public java.lang.String getStackTrace() { - java.lang.Object ref = stackTrace_; + public java.lang.String getTransformation() { + java.lang.Object ref = transformation_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { @@ -2885,142 +4820,108 @@ public final class RPCProtos { (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { - stackTrace_ = s; + transformation_ = s; } return s; } } /** - * optional string stack_trace = 2; - * - *
-     * Exception stack trace from the server side
-     * 
+ * required string transformation = 1; */ public com.google.protobuf.ByteString - getStackTraceBytes() { - java.lang.Object ref = stackTrace_; + getTransformationBytes() { + java.lang.Object ref = transformation_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - stackTrace_ = b; + transformation_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - // optional string hostname = 3; - public static final int HOSTNAME_FIELD_NUMBER = 3; - private java.lang.Object hostname_; + // optional bytes inKey = 2; + public static final int INKEY_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString inKey_; /** - * optional string hostname = 3; - * - *
-     * Optional hostname.  Filled in for some exceptions such as region moved
-     * where exception gives clue on where the region may have moved.
-     * 
+ * optional bytes inKey = 2; */ - public boolean hasHostname() { - return ((bitField0_ & 0x00000004) == 0x00000004); + public boolean hasInKey() { + return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional string hostname = 3; - * - *
-     * Optional hostname.  Filled in for some exceptions such as region moved
-     * where exception gives clue on where the region may have moved.
-     * 
+ * optional bytes inKey = 2; */ - public java.lang.String getHostname() { - java.lang.Object ref = hostname_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - hostname_ = s; - } - return s; - } + public com.google.protobuf.ByteString getInKey() { + return inKey_; } + + // optional bytes inIv = 3; + public static final int INIV_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString inIv_; /** - * optional string hostname = 3; - * - *
-     * Optional hostname.  Filled in for some exceptions such as region moved
-     * where exception gives clue on where the region may have moved.
-     * 
+ * optional bytes inIv = 3; */ - public com.google.protobuf.ByteString - getHostnameBytes() { - java.lang.Object ref = hostname_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - hostname_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } + public boolean hasInIv() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional bytes inIv = 3; + */ + public com.google.protobuf.ByteString getInIv() { + return inIv_; } - // optional int32 port = 4; - public static final int PORT_FIELD_NUMBER = 4; - private int port_; + // optional bytes outKey = 4; + public static final int OUTKEY_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString outKey_; /** - * optional int32 port = 4; + * optional bytes outKey = 4; */ - public boolean hasPort() { + public boolean hasOutKey() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional int32 port = 4; + * optional bytes outKey = 4; */ - public int getPort() { - return port_; + public com.google.protobuf.ByteString getOutKey() { + return outKey_; } - // optional bool do_not_retry = 5; - public static final int DO_NOT_RETRY_FIELD_NUMBER = 5; - private boolean doNotRetry_; + // optional bytes outIv = 5; + public static final int OUTIV_FIELD_NUMBER = 5; + private com.google.protobuf.ByteString outIv_; /** - * optional bool do_not_retry = 5; - * - *
-     * Set if we are NOT to retry on receipt of this exception
-     * 
+ * optional bytes outIv = 5; */ - public boolean hasDoNotRetry() { + public boolean hasOutIv() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional bool do_not_retry = 5; - * - *
-     * Set if we are NOT to retry on receipt of this exception
-     * 
+ * optional bytes outIv = 5; */ - public boolean getDoNotRetry() { - return doNotRetry_; + public com.google.protobuf.ByteString getOutIv() { + return outIv_; } private void initFields() { - exceptionClassName_ = ""; - stackTrace_ = ""; - hostname_ = ""; - port_ = 0; - doNotRetry_ = false; + transformation_ = ""; + inKey_ = com.google.protobuf.ByteString.EMPTY; + inIv_ = com.google.protobuf.ByteString.EMPTY; + outKey_ = com.google.protobuf.ByteString.EMPTY; + outIv_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; + if (!hasTransformation()) { + memoizedIsInitialized = 0; + return false; + } memoizedIsInitialized = 1; return true; } @@ -3029,19 +4930,19 @@ public final class RPCProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getExceptionClassNameBytes()); + output.writeBytes(1, getTransformationBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getStackTraceBytes()); + output.writeBytes(2, inKey_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getHostnameBytes()); + output.writeBytes(3, inIv_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeInt32(4, port_); + output.writeBytes(4, outKey_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeBool(5, doNotRetry_); + output.writeBytes(5, outIv_); } getUnknownFields().writeTo(output); } @@ -3054,23 +4955,23 @@ public final class RPCProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getExceptionClassNameBytes()); + .computeBytesSize(1, getTransformationBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getStackTraceBytes()); + .computeBytesSize(2, inKey_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getHostnameBytes()); + .computeBytesSize(3, inIv_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeInt32Size(4, port_); + .computeBytesSize(4, outKey_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - .computeBoolSize(5, doNotRetry_); + .computeBytesSize(5, outIv_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -3089,36 +4990,36 @@ public final class RPCProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta) obj; boolean result = true; - result = result && (hasExceptionClassName() == other.hasExceptionClassName()); - if (hasExceptionClassName()) { - result = result && getExceptionClassName() - .equals(other.getExceptionClassName()); - } - result = result && (hasStackTrace() == other.hasStackTrace()); - if 
(hasStackTrace()) { - result = result && getStackTrace() - .equals(other.getStackTrace()); - } - result = result && (hasHostname() == other.hasHostname()); - if (hasHostname()) { - result = result && getHostname() - .equals(other.getHostname()); - } - result = result && (hasPort() == other.hasPort()); - if (hasPort()) { - result = result && (getPort() - == other.getPort()); - } - result = result && (hasDoNotRetry() == other.hasDoNotRetry()); - if (hasDoNotRetry()) { - result = result && (getDoNotRetry() - == other.getDoNotRetry()); + result = result && (hasTransformation() == other.hasTransformation()); + if (hasTransformation()) { + result = result && getTransformation() + .equals(other.getTransformation()); + } + result = result && (hasInKey() == other.hasInKey()); + if (hasInKey()) { + result = result && getInKey() + .equals(other.getInKey()); + } + result = result && (hasInIv() == other.hasInIv()); + if (hasInIv()) { + result = result && getInIv() + .equals(other.getInIv()); + } + result = result && (hasOutKey() == other.hasOutKey()); + if (hasOutKey()) { + result = result && getOutKey() + .equals(other.getOutKey()); + } + result = result && (hasOutIv() == other.hasOutIv()); + if (hasOutIv()) { + result = result && getOutIv() + .equals(other.getOutIv()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -3133,78 +5034,78 @@ public final class RPCProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasExceptionClassName()) { - hash = (37 * hash) + EXCEPTION_CLASS_NAME_FIELD_NUMBER; - hash = (53 * hash) + getExceptionClassName().hashCode(); + if (hasTransformation()) { + hash = (37 * hash) + TRANSFORMATION_FIELD_NUMBER; + hash = (53 * hash) + getTransformation().hashCode(); } - if (hasStackTrace()) { - hash = (37 * hash) + STACK_TRACE_FIELD_NUMBER; - hash = (53 * hash) + getStackTrace().hashCode(); + if (hasInKey()) { + hash = (37 * hash) + INKEY_FIELD_NUMBER; + hash = (53 * hash) + getInKey().hashCode(); } - if (hasHostname()) { - hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; - hash = (53 * hash) + getHostname().hashCode(); + if (hasInIv()) { + hash = (37 * hash) + INIV_FIELD_NUMBER; + hash = (53 * hash) + getInIv().hashCode(); } - if (hasPort()) { - hash = (37 * hash) + PORT_FIELD_NUMBER; - hash = (53 * hash) + getPort(); + if (hasOutKey()) { + hash = (37 * hash) + OUTKEY_FIELD_NUMBER; + hash = (53 * hash) + getOutKey().hashCode(); } - if (hasDoNotRetry()) { - hash = (37 * hash) + DO_NOT_RETRY_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getDoNotRetry()); + if (hasOutIv()) { + hash = (37 * hash) + OUTIV_FIELD_NUMBER; + hash = (53 * hash) + getOutIv().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -3213,7 +5114,7 @@ public final class RPCProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -3225,29 +5126,29 @@ public final class RPCProtos { return builder; } /** - * Protobuf type {@code hbase.pb.ExceptionResponse} + * Protobuf type {@code hbase.pb.CryptoCipherMeta} * *
-     * At the RPC layer, this message is used to carry
-     * the server side exception to the RPC client.
+     **
+     * Cipher meta for Crypto
      * 
*/ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -3267,15 +5168,15 @@ public final class RPCProtos { public Builder clear() { super.clear(); - exceptionClassName_ = ""; + transformation_ = ""; bitField0_ = (bitField0_ & ~0x00000001); - stackTrace_ = ""; + inKey_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); - hostname_ = ""; + inIv_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); - port_ = 0; + outKey_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); - doNotRetry_ = false; + outIv_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000010); return this; } @@ -3286,87 +5187,87 @@ public final class RPCProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse 
buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.exceptionClassName_ = exceptionClassName_; + result.transformation_ = transformation_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } - result.stackTrace_ = stackTrace_; + result.inKey_ = inKey_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } - result.hostname_ = hostname_; + result.inIv_ = inIv_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } - result.port_ = port_; + result.outKey_ = outKey_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } - result.doNotRetry_ = doNotRetry_; + result.outIv_ = outIv_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance()) return this; - if (other.hasExceptionClassName()) { + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance()) return this; + if (other.hasTransformation()) { bitField0_ |= 0x00000001; - exceptionClassName_ = other.exceptionClassName_; + transformation_ = other.transformation_; onChanged(); } - if (other.hasStackTrace()) { - bitField0_ |= 0x00000002; - stackTrace_ = other.stackTrace_; - onChanged(); + if (other.hasInKey()) { + setInKey(other.getInKey()); } - if (other.hasHostname()) { - bitField0_ |= 0x00000004; - hostname_ = other.hostname_; - onChanged(); + if (other.hasInIv()) { + setInIv(other.getInIv()); } - if (other.hasPort()) { - setPort(other.getPort()); + if (other.hasOutKey()) { + setOutKey(other.getOutKey()); } - if (other.hasDoNotRetry()) { - setDoNotRetry(other.getDoNotRetry()); + if (other.hasOutIv()) { + setOutIv(other.getOutIv()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { + if (!hasTransformation()) { + + return false; + } return true; } @@ -3374,11 +5275,11 @@ public final class RPCProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parsedMessage = null; + 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -3389,397 +5290,233 @@ public final class RPCProtos { } private int bitField0_; - // optional string exception_class_name = 1; - private java.lang.Object exceptionClassName_ = ""; + // required string transformation = 1; + private java.lang.Object transformation_ = ""; /** - * optional string exception_class_name = 1; - * - *
-       * Class name of the exception thrown from the server
-       * 
+ * required string transformation = 1; */ - public boolean hasExceptionClassName() { + public boolean hasTransformation() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional string exception_class_name = 1; - * - *
-       * Class name of the exception thrown from the server
-       * 
+ * required string transformation = 1; */ - public java.lang.String getExceptionClassName() { - java.lang.Object ref = exceptionClassName_; + public java.lang.String getTransformation() { + java.lang.Object ref = transformation_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); - exceptionClassName_ = s; + transformation_ = s; return s; } else { return (java.lang.String) ref; } } /** - * optional string exception_class_name = 1; - * - *
-       * Class name of the exception thrown from the server
-       * 
+ * required string transformation = 1; */ public com.google.protobuf.ByteString - getExceptionClassNameBytes() { - java.lang.Object ref = exceptionClassName_; + getTransformationBytes() { + java.lang.Object ref = transformation_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - exceptionClassName_ = b; + transformation_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** - * optional string exception_class_name = 1; - * - *
-       * Class name of the exception thrown from the server
-       * 
+ * required string transformation = 1; */ - public Builder setExceptionClassName( + public Builder setTransformation( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; - exceptionClassName_ = value; + transformation_ = value; onChanged(); return this; } /** - * optional string exception_class_name = 1; - * - *
-       * Class name of the exception thrown from the server
-       * 
+ * required string transformation = 1; */ - public Builder clearExceptionClassName() { + public Builder clearTransformation() { bitField0_ = (bitField0_ & ~0x00000001); - exceptionClassName_ = getDefaultInstance().getExceptionClassName(); + transformation_ = getDefaultInstance().getTransformation(); onChanged(); return this; } /** - * optional string exception_class_name = 1; - * - *
-       * Class name of the exception thrown from the server
-       * 
+ * required string transformation = 1; */ - public Builder setExceptionClassNameBytes( + public Builder setTransformationBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; - exceptionClassName_ = value; + transformation_ = value; onChanged(); return this; } - // optional string stack_trace = 2; - private java.lang.Object stackTrace_ = ""; + // optional bytes inKey = 2; + private com.google.protobuf.ByteString inKey_ = com.google.protobuf.ByteString.EMPTY; /** - * optional string stack_trace = 2; - * - *
-       * Exception stack trace from the server side
-       * 
+ * optional bytes inKey = 2; */ - public boolean hasStackTrace() { + public boolean hasInKey() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional string stack_trace = 2; - * - *
-       * Exception stack trace from the server side
-       * 
- */ - public java.lang.String getStackTrace() { - java.lang.Object ref = stackTrace_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - stackTrace_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string stack_trace = 2; - * - *
-       * Exception stack trace from the server side
-       * 
+       * <code>optional bytes inKey = 2;</code>
        */
-      public com.google.protobuf.ByteString
-          getStackTraceBytes() {
-        java.lang.Object ref = stackTrace_;
-        if (ref instanceof String) {
-          com.google.protobuf.ByteString b =
-              com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          stackTrace_ = b;
-          return b;
-        } else {
-          return (com.google.protobuf.ByteString) ref;
-        }
+      public com.google.protobuf.ByteString getInKey() {
+        return inKey_;
      }
      /**
-       * <code>optional string stack_trace = 2;</code>
-       *
-       * <pre>
-       * Exception stack trace from the server side
-       * </pre>
+       * <code>optional bytes inKey = 2;</code>
        */
-      public Builder setStackTrace(
-          java.lang.String value) {
+      public Builder setInKey(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
-        stackTrace_ = value;
+        inKey_ = value;
        onChanged();
        return this;
      }
      /**
-       * <code>optional string stack_trace = 2;</code>
-       *
-       * <pre>
-       * Exception stack trace from the server side
-       * </pre>
+       * <code>optional bytes inKey = 2;</code>
        */
-      public Builder clearStackTrace() {
+      public Builder clearInKey() {
        bitField0_ = (bitField0_ & ~0x00000002);
-        stackTrace_ = getDefaultInstance().getStackTrace();
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional string stack_trace = 2;</code>
-       *
-       * <pre>
-       * Exception stack trace from the server side
-       * </pre>
-       */
-      public Builder setStackTraceBytes(
-          com.google.protobuf.ByteString value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000002;
-        stackTrace_ = value;
+        inKey_ = getDefaultInstance().getInKey();
        onChanged();
        return this;
      }

-      // optional string hostname = 3;
-      private java.lang.Object hostname_ = "";
+      // optional bytes inIv = 3;
+      private com.google.protobuf.ByteString inIv_ = com.google.protobuf.ByteString.EMPTY;
      /**
-       * <code>optional string hostname = 3;</code>
-       *
-       * <pre>
-       * Optional hostname.  Filled in for some exceptions such as region moved
-       * where exception gives clue on where the region may have moved.
-       * </pre>
+       * <code>optional bytes inIv = 3;</code>
        */
-      public boolean hasHostname() {
+      public boolean hasInIv() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
-       * <code>optional string hostname = 3;</code>
-       *
-       * <pre>
-       * Optional hostname.  Filled in for some exceptions such as region moved
-       * where exception gives clue on where the region may have moved.
-       * </pre>
-       */
-      public java.lang.String getHostname() {
-        java.lang.Object ref = hostname_;
-        if (!(ref instanceof java.lang.String)) {
-          java.lang.String s = ((com.google.protobuf.ByteString) ref)
-              .toStringUtf8();
-          hostname_ = s;
-          return s;
-        } else {
-          return (java.lang.String) ref;
-        }
-      }
-      /**
-       * <code>optional string hostname = 3;</code>
-       *
-       * <pre>
-       * Optional hostname.  Filled in for some exceptions such as region moved
-       * where exception gives clue on where the region may have moved.
-       * </pre>
+       * <code>optional bytes inIv = 3;</code>
        */
-      public com.google.protobuf.ByteString
-          getHostnameBytes() {
-        java.lang.Object ref = hostname_;
-        if (ref instanceof String) {
-          com.google.protobuf.ByteString b =
-              com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          hostname_ = b;
-          return b;
-        } else {
-          return (com.google.protobuf.ByteString) ref;
-        }
+      public com.google.protobuf.ByteString getInIv() {
+        return inIv_;
      }
      /**
-       * <code>optional string hostname = 3;</code>
-       *
-       * <pre>
-       * Optional hostname.  Filled in for some exceptions such as region moved
-       * where exception gives clue on where the region may have moved.
-       * </pre>
+       * <code>optional bytes inIv = 3;</code>
        */
-      public Builder setHostname(
-          java.lang.String value) {
+      public Builder setInIv(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
-        hostname_ = value;
+        inIv_ = value;
        onChanged();
        return this;
      }
      /**
-       * <code>optional string hostname = 3;</code>
-       *
-       * <pre>
-       * Optional hostname.  Filled in for some exceptions such as region moved
-       * where exception gives clue on where the region may have moved.
-       * </pre>
+       * <code>optional bytes inIv = 3;</code>
        */
-      public Builder clearHostname() {
+      public Builder clearInIv() {
        bitField0_ = (bitField0_ & ~0x00000004);
-        hostname_ = getDefaultInstance().getHostname();
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional string hostname = 3;</code>
-       *
-       * <pre>
-       * Optional hostname.  Filled in for some exceptions such as region moved
-       * where exception gives clue on where the region may have moved.
-       * </pre>
-       */
-      public Builder setHostnameBytes(
-          com.google.protobuf.ByteString value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000004;
-        hostname_ = value;
+        inIv_ = getDefaultInstance().getInIv();
        onChanged();
        return this;
      }

-      // optional int32 port = 4;
-      private int port_ ;
+      // optional bytes outKey = 4;
+      private com.google.protobuf.ByteString outKey_ = com.google.protobuf.ByteString.EMPTY;
      /**
-       * <code>optional int32 port = 4;</code>
+       * <code>optional bytes outKey = 4;</code>
        */
-      public boolean hasPort() {
+      public boolean hasOutKey() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
-       * <code>optional int32 port = 4;</code>
+       * <code>optional bytes outKey = 4;</code>
        */
-      public int getPort() {
-        return port_;
+      public com.google.protobuf.ByteString getOutKey() {
+        return outKey_;
      }
      /**
-       * <code>optional int32 port = 4;</code>
+       * <code>optional bytes outKey = 4;</code>
        */
-      public Builder setPort(int value) {
-        bitField0_ |= 0x00000008;
-        port_ = value;
+      public Builder setOutKey(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000008;
+        outKey_ = value;
        onChanged();
        return this;
      }
      /**
-       * <code>optional int32 port = 4;</code>
+       * <code>optional bytes outKey = 4;</code>
        */
-      public Builder clearPort() {
+      public Builder clearOutKey() {
        bitField0_ = (bitField0_ & ~0x00000008);
-        port_ = 0;
+        outKey_ = getDefaultInstance().getOutKey();
        onChanged();
        return this;
      }

-      // optional bool do_not_retry = 5;
-      private boolean doNotRetry_ ;
+      // optional bytes outIv = 5;
+      private com.google.protobuf.ByteString outIv_ = com.google.protobuf.ByteString.EMPTY;
      /**
-       * <code>optional bool do_not_retry = 5;</code>
-       *
-       * <pre>
-       * Set if we are NOT to retry on receipt of this exception
-       * </pre>
+       * <code>optional bytes outIv = 5;</code>
        */
-      public boolean hasDoNotRetry() {
+      public boolean hasOutIv() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
-       * <code>optional bool do_not_retry = 5;</code>
-       *
-       * <pre>
-       * Set if we are NOT to retry on receipt of this exception
-       * </pre>
+       * <code>optional bytes outIv = 5;</code>
        */
-      public boolean getDoNotRetry() {
-        return doNotRetry_;
+      public com.google.protobuf.ByteString getOutIv() {
+        return outIv_;
      }
      /**
-       * <code>optional bool do_not_retry = 5;</code>
-       *
-       * <pre>
-       * Set if we are NOT to retry on receipt of this exception
-       * </pre>
+       * <code>optional bytes outIv = 5;</code>
        */
-      public Builder setDoNotRetry(boolean value) {
-        bitField0_ |= 0x00000010;
-        doNotRetry_ = value;
+      public Builder setOutIv(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000010;
+        outIv_ = value;
        onChanged();
        return this;
      }
      /**
-       * <code>optional bool do_not_retry = 5;</code>
-       *
-       * <pre>
-       * Set if we are NOT to retry on receipt of this exception
-       * </pre>
+       * <code>optional bytes outIv = 5;</code>
        */
-      public Builder clearDoNotRetry() {
+      public Builder clearOutIv() {
        bitField0_ = (bitField0_ & ~0x00000010);
-        doNotRetry_ = false;
+        outIv_ = getDefaultInstance().getOutIv();
        onChanged();
        return this;
      }

-      // @@protoc_insertion_point(builder_scope:hbase.pb.ExceptionResponse)
+      // @@protoc_insertion_point(builder_scope:hbase.pb.CryptoCipherMeta)
    }

    static {
-      defaultInstance = new ExceptionResponse(true);
+      defaultInstance = new CryptoCipherMeta(true);
      defaultInstance.initFields();
    }

-    // @@protoc_insertion_point(class_scope:hbase.pb.ExceptionResponse)
+    // @@protoc_insertion_point(class_scope:hbase.pb.CryptoCipherMeta)
  }

  public interface RequestHeaderOrBuilder
@@ -6192,6 +7929,11 @@ public final class RPCProtos {
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_ConnectionHeaderResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_ConnectionHeaderResponse_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_CellBlockMeta_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable;
@@ -6202,6 +7944,11 @@ public final class RPCProtos {
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_CryptoCipherMeta_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_CryptoCipherMeta_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_hbase_pb_RequestHeader_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
@@ -6222,26 +7969,32 @@ public final class RPCProtos {
    java.lang.String[] descriptorData = {
      "\n\tRPC.proto\022\010hbase.pb\032\rTracing.proto\032\013HB" +
      "ase.proto\"<\n\017UserInformation\022\026\n\016effectiv" +
-      "e_user\030\001 \002(\t\022\021\n\treal_user\030\002 \001(\t\"\310\001\n\020Conn" +
+      "e_user\030\001 \002(\t\022\021\n\treal_user\030\002 \001(\t\"\362\001\n\020Conn" +
      "ectionHeader\022,\n\tuser_info\030\001 \001(\0132\031.hbase." +
+ "pb.UserInformation\022\024\n\014service_name\030\002 \001(\t" + "\022\036\n\026cell_block_codec_class\030\003 \001(\t\022#\n\033cell" + "_block_compressor_class\030\004 \001(\t\022+\n\014version" + - "_info\030\005 \001(\0132\025.hbase.pb.VersionInfo\"\037\n\rCe" + - "llBlockMeta\022\016\n\006length\030\001 \001(\r\"|\n\021Exception" + - "Response\022\034\n\024exception_class_name\030\001 \001(\t\022\023", - "\n\013stack_trace\030\002 \001(\t\022\020\n\010hostname\030\003 \001(\t\022\014\n" + - "\004port\030\004 \001(\005\022\024\n\014do_not_retry\030\005 \001(\010\"\311\001\n\rRe" + - "questHeader\022\017\n\007call_id\030\001 \001(\r\022&\n\ntrace_in" + - "fo\030\002 \001(\0132\022.hbase.pb.RPCTInfo\022\023\n\013method_n" + - "ame\030\003 \001(\t\022\025\n\rrequest_param\030\004 \001(\010\0220\n\017cell" + - "_block_meta\030\005 \001(\0132\027.hbase.pb.CellBlockMe" + - "ta\022\020\n\010priority\030\006 \001(\r\022\017\n\007timeout\030\007 \001(\r\"\203\001" + - "\n\016ResponseHeader\022\017\n\007call_id\030\001 \001(\r\022.\n\texc" + - "eption\030\002 \001(\0132\033.hbase.pb.ExceptionRespons" + - "e\0220\n\017cell_block_meta\030\003 \001(\0132\027.hbase.pb.Ce", - "llBlockMetaB<\n*org.apache.hadoop.hbase.p" + - "rotobuf.generatedB\tRPCProtosH\001\240\001\001" + "_info\030\005 \001(\0132\025.hbase.pb.VersionInfo\022(\n rp" + + "c_crypto_cipher_transformation\030\006 \001(\t\"R\n\030" + + "ConnectionHeaderResponse\0226\n\022crypto_ciphe", + "r_meta\030\001 \001(\0132\032.hbase.pb.CryptoCipherMeta" + + "\"\037\n\rCellBlockMeta\022\016\n\006length\030\001 \001(\r\"|\n\021Exc" + + "eptionResponse\022\034\n\024exception_class_name\030\001" + + " \001(\t\022\023\n\013stack_trace\030\002 \001(\t\022\020\n\010hostname\030\003 " + + "\001(\t\022\014\n\004port\030\004 \001(\005\022\024\n\014do_not_retry\030\005 \001(\010\"" + + "f\n\020CryptoCipherMeta\022\026\n\016transformation\030\001 " + + "\002(\t\022\r\n\005inKey\030\002 \001(\014\022\014\n\004inIv\030\003 \001(\014\022\016\n\006outK" + + "ey\030\004 \001(\014\022\r\n\005outIv\030\005 \001(\014\"\311\001\n\rRequestHeade" + + "r\022\017\n\007call_id\030\001 \001(\r\022&\n\ntrace_info\030\002 \001(\0132\022" + + ".hbase.pb.RPCTInfo\022\023\n\013method_name\030\003 \001(\t\022", + "\025\n\rrequest_param\030\004 \001(\010\0220\n\017cell_block_met" + + "a\030\005 \001(\0132\027.hbase.pb.CellBlockMeta\022\020\n\010prio" + + "rity\030\006 \001(\r\022\017\n\007timeout\030\007 \001(\r\"\203\001\n\016Response" + + "Header\022\017\n\007call_id\030\001 \001(\r\022.\n\texception\030\002 \001" + + "(\0132\033.hbase.pb.ExceptionResponse\0220\n\017cell_" + + "block_meta\030\003 \001(\0132\027.hbase.pb.CellBlockMet" + + "aB<\n*org.apache.hadoop.hbase.protobuf.ge" + + "neratedB\tRPCProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -6259,27 +8012,39 @@ public final class RPCProtos { internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ConnectionHeader_descriptor, - new java.lang.String[] { "UserInfo", "ServiceName", "CellBlockCodecClass", "CellBlockCompressorClass", "VersionInfo", }); - internal_static_hbase_pb_CellBlockMeta_descriptor = + new java.lang.String[] { "UserInfo", "ServiceName", "CellBlockCodecClass", "CellBlockCompressorClass", "VersionInfo", "RpcCryptoCipherTransformation", }); + 
internal_static_hbase_pb_ConnectionHeaderResponse_descriptor = getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_ConnectionHeaderResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_ConnectionHeaderResponse_descriptor, + new java.lang.String[] { "CryptoCipherMeta", }); + internal_static_hbase_pb_CellBlockMeta_descriptor = + getDescriptor().getMessageTypes().get(3); internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_CellBlockMeta_descriptor, new java.lang.String[] { "Length", }); internal_static_hbase_pb_ExceptionResponse_descriptor = - getDescriptor().getMessageTypes().get(3); + getDescriptor().getMessageTypes().get(4); internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ExceptionResponse_descriptor, new java.lang.String[] { "ExceptionClassName", "StackTrace", "Hostname", "Port", "DoNotRetry", }); + internal_static_hbase_pb_CryptoCipherMeta_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_CryptoCipherMeta_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_CryptoCipherMeta_descriptor, + new java.lang.String[] { "Transformation", "InKey", "InIv", "OutKey", "OutIv", }); internal_static_hbase_pb_RequestHeader_descriptor = - getDescriptor().getMessageTypes().get(4); + getDescriptor().getMessageTypes().get(6); internal_static_hbase_pb_RequestHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_RequestHeader_descriptor, new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParam", "CellBlockMeta", "Priority", "Timeout", }); internal_static_hbase_pb_ResponseHeader_descriptor = - getDescriptor().getMessageTypes().get(5); + getDescriptor().getMessageTypes().get(7); internal_static_hbase_pb_ResponseHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ResponseHeader_descriptor, diff --git a/hbase-protocol/src/main/protobuf/RPC.proto b/hbase-protocol/src/main/protobuf/RPC.proto index 8413d25..04d5703 100644 --- a/hbase-protocol/src/main/protobuf/RPC.proto +++ b/hbase-protocol/src/main/protobuf/RPC.proto @@ -89,6 +89,14 @@ message ConnectionHeader { // Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. optional string cell_block_compressor_class = 4; optional VersionInfo version_info = 5; + // the transformation for rpc AES encryption with Apache Commons Crypto + optional string rpc_crypto_cipher_transformation = 6; +} + +// This is sent by rpc server to negotiate the data if necessary +message ConnectionHeaderResponse { + // To use Apache Commons Crypto, negotiate the metadata + optional CryptoCipherMeta crypto_cipher_meta = 1; } // Optional Cell block Message. Included in client RequestHeader @@ -112,6 +120,17 @@ message ExceptionResponse { optional bool do_not_retry = 5; } +/** + * Cipher meta for Crypto + */ +message CryptoCipherMeta { + required string transformation = 1; + optional bytes inKey = 2; + optional bytes inIv = 3; + optional bytes outKey = 4; + optional bytes outIv = 5; +} + // Header sent making a request. 
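The two messages above are ordinary protobuf types, so the negotiation payload can be exercised independently of the RPC stack. A minimal sketch (not part of the patch; the class name and the zeroed demo keys are invented) that builds a CryptoCipherMeta, wraps it in a ConnectionHeaderResponse, and re-parses it with the generated parseFrom, the same API the client side uses:

    // Illustrative sketch only -- builds and re-parses the messages added above.
    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;

    public class ConnectionHeaderResponseDemo {
      public static void main(String[] args) throws Exception {
        RPCProtos.CryptoCipherMeta meta = RPCProtos.CryptoCipherMeta.newBuilder()
            .setTransformation("AES/CTR/NoPadding")  // the only required field
            .setInKey(ByteString.copyFrom(new byte[16]))  // demo keys/ivs only;
            .setInIv(ByteString.copyFrom(new byte[16]))   // real ones come from CryptoRandom
            .setOutKey(ByteString.copyFrom(new byte[16]))
            .setOutIv(ByteString.copyFrom(new byte[16]))
            .build();
        byte[] onWire = RPCProtos.ConnectionHeaderResponse.newBuilder()
            .setCryptoCipherMeta(meta)
            .build().toByteArray();
        RPCProtos.ConnectionHeaderResponse parsed =
            RPCProtos.ConnectionHeaderResponse.parseFrom(onWire);
        if (parsed.hasCryptoCipherMeta()) {
          System.out.println(parsed.getCryptoCipherMeta().getTransformation());
        }
      }
    }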
 // Header sent making a request.
 message RequestHeader {
   // Monotonically increasing call_id to keep track of RPC requests and their response
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index 0dbaf04..6433fe1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -51,6 +51,7 @@ import java.nio.channels.Selector;
 import java.nio.channels.ServerSocketChannel;
 import java.nio.channels.SocketChannel;
 import java.nio.channels.WritableByteChannel;
+import java.security.GeneralSecurityException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -59,6 +60,7 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 import java.util.Set;
 import java.util.Timer;
 import java.util.TimerTask;
@@ -76,6 +78,10 @@ import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;

+import com.google.protobuf.ByteString;
+import org.apache.commons.crypto.cipher.CryptoCipherFactory;
+import org.apache.commons.crypto.random.CryptoRandom;
+import org.apache.commons.crypto.random.CryptoRandomFactory;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -96,10 +102,12 @@ import org.apache.hadoop.hbase.io.ByteBufferInputStream;
 import org.apache.hadoop.hbase.io.ByteBufferListOutputStream;
 import org.apache.hadoop.hbase.io.ByteBufferOutputStream;
 import org.apache.hadoop.hbase.io.ByteBufferPool;
+import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES;
 import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo;
+import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse;
@@ -416,6 +424,12 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver {
       this.response = new BufferChain(responseBufs);
     }

+    protected synchronized void setConnectionHeaderResponse(ByteBuffer response) {
+      ByteBuffer[] responseBufs = new ByteBuffer[1];
+      responseBufs[0] = response;
+      this.response = new BufferChain(responseBufs);
+    }
+
     protected synchronized void setResponse(Object m, final CellScanner cells,
         Throwable t, String errorMsg) {
       if (this.isError) return;
@@ -558,9 +572,16 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver {
       byte [] responseBytes = bc.getBytes();
       byte [] token;
       // synchronization may be needed since there can be multiple Handler
-      // threads using saslServer to wrap responses.
-      synchronized (connection.saslServer) {
-        token = connection.saslServer.wrap(responseBytes, 0, responseBytes.length);
+      // threads using saslServer or Crypto AES to wrap responses.
+      if (connection.useCryptoAesWrap) {
+        // wrap with Crypto AES
+        synchronized (connection.cryptoAES) {
+          token = connection.cryptoAES.wrap(responseBytes, 0, responseBytes.length);
+        }
+      } else {
+        synchronized (connection.saslServer) {
+          token = connection.saslServer.wrap(responseBytes, 0, responseBytes.length);
+        }
       }
       if (LOG.isTraceEnabled()) {
         LOG.trace("Adding saslServer wrapped token of size " + token.length
@@ -1245,7 +1266,9 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver {
     private ByteBuffer unwrappedDataLengthBuffer = ByteBuffer.allocate(4);
     boolean useSasl;
     SaslServer saslServer;
+    private CryptoAES cryptoAES;
     private boolean useWrap = false;
+    private boolean useCryptoAesWrap = false;
     // Fake 'call' for failed authorization response
     private static final int AUTHORIZATION_FAILED_CALLID = -1;
     private final Call authFailedCall = new Call(AUTHORIZATION_FAILED_CALLID, null, null, null,
@@ -1256,6 +1279,10 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver {
     private static final int SASL_CALLID = -33;
     private final Call saslCall = new Call(SASL_CALLID, null, null, null, null, null, this, null,
         0, null, null, 0);
+    // Fake 'call' for the connection header response
+    private static final int CONNECTION_HEADER_RESPONSE_CALLID = -34;
+    private final Call setConnectionHeaderResponseCall = new Call(CONNECTION_HEADER_RESPONSE_CALLID,
+        null, null, null, null, null, this, null, 0, null, null, 0);
     // was authentication allowed with a fallback to simple auth
     private boolean authenticatedWithFallback;
@@ -1366,7 +1393,13 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver {
           processOneRpc(saslToken);
         } else {
           byte[] b = saslToken.array();
-          byte [] plaintextData = saslServer.unwrap(b, saslToken.position(), saslToken.limit());
+          byte [] plaintextData;
+          if (useCryptoAesWrap) {
+            // unwrap with CryptoAES
+            plaintextData = cryptoAES.unwrap(b, saslToken.position(), saslToken.limit());
+          } else {
+            plaintextData = saslServer.unwrap(b, saslToken.position(), saslToken.limit());
+          }
           processUnwrappedData(plaintextData);
         }
       } else {
@@ -1493,6 +1526,31 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver {
       }
     }

+    /**
+     * Send the response for the connection header
+     */
+    private void doConnectionHeaderResponse(byte[] wrappedCipherMetaData) throws IOException {
+      ByteBufferOutputStream response = null;
+      DataOutputStream out = null;
+      try {
+        response = new ByteBufferOutputStream(wrappedCipherMetaData.length + 4);
+        out = new DataOutputStream(response);
+        out.writeInt(wrappedCipherMetaData.length);
+        out.write(wrappedCipherMetaData);
+
+        setConnectionHeaderResponseCall.setConnectionHeaderResponse(response.getByteBuffer());
+        setConnectionHeaderResponseCall.responder = responder;
+        setConnectionHeaderResponseCall.sendResponseIfReady();
+      } finally {
+        if (response != null) {
+          response.close();
+        }
+        if (out != null) {
+          out.close();
+        }
+      }
+    }
+
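Note the double length prefix: doConnectionHeaderResponse() above frames the already SASL-wrapped blob, while responseConnectionHeader() further down length-prefixes the raw protobuf before wrapping, so the wire carries [int n][n wrapped bytes] whose unwrapped payload is itself [int m][m protobuf bytes]. A sketch of the outer frame, with a hypothetical helper name:

    // Hypothetical helper mirroring the framing done in doConnectionHeaderResponse().
    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public final class HeaderResponseFraming {
      static byte[] frame(byte[] saslWrapped) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream(4 + saslWrapped.length);
        DataOutputStream out = new DataOutputStream(bos);
        out.writeInt(saslWrapped.length); // outer length, read first by the client
        out.write(saslWrapped);           // SASL-wrapped [len][ConnectionHeaderResponse]
        out.flush();
        return bos.toByteArray();
      }
    }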
     private void disposeSasl() {
       if (saslServer != null) {
         try {
@@ -1689,6 +1747,10 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver {
       this.service = getService(services, serviceName);
       if (this.service == null) throw new UnknownServiceException(serviceName);
       setupCellBlockCodecs(this.connectionHeader);
+      RPCProtos.ConnectionHeaderResponse.Builder chrBuilder =
+          RPCProtos.ConnectionHeaderResponse.newBuilder();
+      setupCryptoCipher(this.connectionHeader, chrBuilder);
+      responseConnectionHeader(chrBuilder);
       UserGroupInformation protocolUser = createUser(connectionHeader);
       if (!useSasl) {
         ugi = protocolUser;
@@ -1737,8 +1799,6 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver {
         AUDITLOG.info("Connection from " + this.hostAddress
             + " port: " + this.remotePort + " with unknown version info");
       }
-
-
     }

     /**
@@ -1765,6 +1825,94 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver {
       }
     }

+    /**
+     * Set up the cipher for rpc encryption with Apache Commons Crypto
+     * @throws FatalConnectionException
+     */
+    private void setupCryptoCipher(final ConnectionHeader header,
+        RPCProtos.ConnectionHeaderResponse.Builder chrBuilder) throws FatalConnectionException {
+      // If simple auth, return
+      if (saslServer == null) return;
+      // check if rpc encryption with Crypto AES is enabled
+      String qop = (String) saslServer.getNegotiatedProperty(Sasl.QOP);
+      boolean isEncryption = SaslUtil.QualityOfProtection.PRIVACY
+          .getSaslQop().equalsIgnoreCase(qop);
+      boolean isCryptoAesEncryption = isEncryption && conf.getBoolean(
+          HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY,
+          HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_DEFAULT);
+      if (!isCryptoAesEncryption) return;
+      if (!header.hasRpcCryptoCipherTransformation()) return;
+      String transformation = header.getRpcCryptoCipherTransformation();
+      if (transformation == null || transformation.length() == 0) return;
+      // Negotiate AES once the SASL handshake has completed.
+      // The cipher metadata needs to be encrypted and sent to the client.
+      Properties properties = new Properties();
+      // the property for the SecureRandomFactory
+      properties.setProperty(CryptoRandomFactory.CLASSES_KEY,
+          conf.get(HConstants.RPC_CRYPTO_ENCRYPTION_RANDOM_CONF_KEY,
+              HConstants.RPC_CRYPTO_ENCRYPTION_RANDOM_DEFAULT));
+      // the property for the cipher class
+      properties.setProperty(CryptoCipherFactory.CLASSES_KEY,
+          conf.get(HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_CLASS_KEY,
+              HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_CLASS_DEFAULT));
+
+      int cipherKeyBits = conf.getInt(
+          HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_KEY_SIZE_CONF_KEY,
+          HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_KEY_SIZE_DEFAULT);
+      // generate the key and iv
+      if (cipherKeyBits % 8 != 0) {
+        throw new IllegalArgumentException("The AES cipher key size in bits" +
+            " should be a multiple of 8");
+      }
+      int len = cipherKeyBits / 8;
+      byte[] inKey = new byte[len];
+      byte[] outKey = new byte[len];
+      byte[] inIv = new byte[len];
+      byte[] outIv = new byte[len];
+
+      try {
+        // generate the cipher metadata with SecureRandom
+        CryptoRandom secureRandom = CryptoRandomFactory.getCryptoRandom(properties);
+        secureRandom.nextBytes(inKey);
+        secureRandom.nextBytes(outKey);
+        secureRandom.nextBytes(inIv);
+        secureRandom.nextBytes(outIv);
+
+        // create the CryptoAES for the server
+        cryptoAES = new CryptoAES(transformation, properties,
+            inKey, outKey, inIv, outIv);
+        // create the CryptoCipherMeta and send it to the client;
+        // for the client, the [inKey, outKey] and [inIv, outIv] pairs are reversed
+        RPCProtos.CryptoCipherMeta.Builder ccmBuilder = RPCProtos.CryptoCipherMeta.newBuilder();
+        ccmBuilder.setTransformation(transformation);
+        ccmBuilder.setInIv(getByteString(outIv));
+        ccmBuilder.setInKey(getByteString(outKey));
+        ccmBuilder.setOutIv(getByteString(inIv));
+        ccmBuilder.setOutKey(getByteString(inKey));
+        chrBuilder.setCryptoCipherMeta(ccmBuilder);
+        useCryptoAesWrap = true;
+      } catch (GeneralSecurityException | IOException ex) {
+        throw new UnsupportedCryptoException(ex.getMessage(), ex);
+      }
+    }
+
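The key/iv reversal above is the crux of the negotiation: the server's inbound pair must equal the client's outbound pair and vice versa. Under that assumption, a paired round trip looks like the sketch below (illustrative only; it instantiates CryptoAES directly with default commons-crypto properties instead of going through the RPC machinery, and the class name is invented):

    // Illustrative only: server and client CryptoAES instances with reversed pairs.
    import java.nio.charset.StandardCharsets;
    import java.security.SecureRandom;
    import java.util.Properties;
    import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES;

    public class CryptoAesRoundTrip {
      public static void main(String[] args) throws Exception {
        Properties props = new Properties(); // empty -> commons-crypto defaults
        SecureRandom rnd = new SecureRandom();
        byte[] inKey = new byte[16], outKey = new byte[16]; // 128-bit AES key
        byte[] inIv = new byte[16], outIv = new byte[16];   // AES block-sized ivs
        rnd.nextBytes(inKey); rnd.nextBytes(outKey);
        rnd.nextBytes(inIv);  rnd.nextBytes(outIv);
        CryptoAES server = new CryptoAES("AES/CTR/NoPadding", props,
            inKey, outKey, inIv, outIv);
        CryptoAES client = new CryptoAES("AES/CTR/NoPadding", props,
            outKey, inKey, outIv, inIv); // pairs reversed, as in setupCryptoCipher()
        byte[] msg = "hello".getBytes(StandardCharsets.UTF_8);
        byte[] wrapped = server.wrap(msg, 0, msg.length);         // server -> client
        byte[] plain = client.unwrap(wrapped, 0, wrapped.length); // verifies integrity
        System.out.println(new String(plain, StandardCharsets.UTF_8)); // hello
      }
    }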
+    private void responseConnectionHeader(RPCProtos.ConnectionHeaderResponse.Builder chrBuilder)
+        throws FatalConnectionException {
+      // Respond to the connection header if Crypto AES is enabled
+      if (!chrBuilder.hasCryptoCipherMeta()) return;
+      try {
+        byte[] connectionHeaderResBytes = chrBuilder.build().toByteArray();
+        // encrypt the Crypto AES cipher metadata with the sasl server and send it to the client
+        byte[] unwrapped = new byte[connectionHeaderResBytes.length + 4];
+        Bytes.putBytes(unwrapped, 0, Bytes.toBytes(connectionHeaderResBytes.length), 0, 4);
+        Bytes.putBytes(unwrapped, 4, connectionHeaderResBytes, 0, connectionHeaderResBytes.length);
+
+        doConnectionHeaderResponse(saslServer.wrap(unwrapped, 0, unwrapped.length));
+      } catch (IOException ex) {
+        throw new UnsupportedCryptoException(ex.getMessage(), ex);
+      }
+    }
+
     private void processUnwrappedData(byte[] inBuf) throws IOException,
         InterruptedException {
       ReadableByteChannel ch = Channels.newChannel(new ByteArrayInputStream(inBuf));
@@ -1803,7 +1951,6 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver {
       }
     }

-
     private void processOneRpc(ByteBuffer buf) throws IOException,
         InterruptedException {
       if (connectionHeaderRead) {
         processRequest(buf);
@@ -1933,6 +2080,11 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver {
       }
     }

+    private ByteString getByteString(byte[] bytes) {
+      // return the singleton to reduce object allocation
+      return (bytes.length == 0) ? ByteString.EMPTY : ByteString.copyFrom(bytes);
+    }
+
     private boolean authorizeConnection() throws IOException {
       try {
         // If auth method is DIGEST, the token was obtained by the
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java
index 9a2a1e8..f943fe9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java
@@ -26,6 +26,7 @@ import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getSecuredConf
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotSame;
 import static org.junit.Assert.assertSame;
+import static org.junit.Assert.fail;

 import com.google.common.collect.Lists;
 import com.google.protobuf.ServiceException;
@@ -96,8 +97,8 @@ public class TestSecureIPC {

   @Parameters(name = "{index}: rpcClientImpl={0}")
   public static Collection<Object[]> parameters() {
-    return Arrays.asList(new Object[] { BlockingRpcClient.class.getName() },
-        new Object[] { NettyRpcClient.class.getName() });
+    return Arrays.asList(new Object[]{BlockingRpcClient.class.getName()},
+        new Object[]{NettyRpcClient.class.getName()});
   }

   @Parameter
@@ -192,6 +193,42 @@ public class TestSecureIPC {
     callRpcService(User.create(ugi));
   }

+  /**
+   * Test sasl encryption with Crypto AES.
+   * @throws Exception
+   */
+  @Test
+  public void testSaslWithCryptoAES() throws Exception {
+    setRpcProtection("privacy", "privacy");
+    setCryptoAES("true", "true");
+    callRpcService(User.create(ugi));
+  }
+
+  /**
+   * Test various combinations of server and client Crypto AES configuration.
+   * @throws Exception
+   */
+  @Test
+  public void testDifferentConfWithCryptoAES() throws Exception {
+    setRpcProtection("privacy", "privacy");
+    // Crypto AES won't be enabled; SaslWrapHandler/SaslUnwrapHandler are used for encryption instead
+    setCryptoAES("false", "true");
+    callRpcService(User.create(ugi));
+
+    setCryptoAES("true", "false");
+    try {
+      callRpcService(User.create(ugi));
+      fail("An exception should have been thrown due to the rpc timeout.");
+    } catch (Exception e) {
+      // ignore the expected exception
+    }
+  }
+
+  void setCryptoAES(String clientCryptoAES, String serverCryptoAES) {
+    clientConf.set(HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY, clientCryptoAES);
+    serverConf.set(HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY, serverCryptoAES);
+  }
+
   private UserGroupInformation loginKerberosPrincipal(String krbKeytab, String krbPrincipal)
       throws Exception {
     Configuration cnf = new Configuration();
diff --git a/pom.xml b/pom.xml
index ae7a80a..dffddf4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1219,6 +1219,7 @@
     2.11.6
     1.46
     <kerby.version>1.0.0-RC2</kerby.version>
+    <commons-crypto.version>1.0.0</commons-crypto.version>
     2.4
     1.8
@@ -1787,6 +1788,17 @@
       <artifactId>kerb-simplekdc</artifactId>
       <version>${kerby.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-crypto</artifactId>
+      <version>${commons-crypto.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>net.java.dev.jna</groupId>
+          <artifactId>jna</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
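For completeness, the feature only engages when SASL has already negotiated QOP auth-conf ("privacy"), as the tests above show. A sketch of the client-side knobs (the protection key is pre-existing HBase configuration; only the AES-enabled key is new in this patch, and the class name is invented):

    // Sketch of the configuration exercised by setRpcProtection/setCryptoAES above.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;

    public class EnableRpcCryptoAes {
      public static Configuration withCryptoAes() {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.rpc.protection", "privacy"); // SASL QOP auth-conf is a precondition
        conf.setBoolean(HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY, true);
        return conf;
      }
    }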