diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java index 2ec5adc..86eedb0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java @@ -46,6 +46,8 @@ import java.util.Map; import java.util.Random; import java.util.concurrent.TimeUnit; +import javax.security.sasl.Sasl; +import javax.security.sasl.SaslClient; import javax.security.sasl.SaslException; import org.apache.commons.logging.Log; @@ -264,8 +266,8 @@ public class AsyncRpcChannel { return new SaslClientHandler(realTicket, authMethod, token, serverPrincipal, client.fallbackAllowed, client.conf.get("hbase.rpc.protection", - SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)), - getChannelHeaderBytes(authMethod), + SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)), + authMethod, client.conf, new SaslClientHandler.SaslExceptionHandler() { @Override public void handle(int retryCount, Random random, Throwable cause) { @@ -274,22 +276,22 @@ public class AsyncRpcChannel { handleSaslConnectionFailure(retryCount, cause, realTicket); retryOrClose(bootstrap, failureCounter++, random.nextInt(reloginMaxBackoff) + 1, - cause); + cause); } catch (IOException | InterruptedException e) { close(e); } } }, new SaslClientHandler.SaslSuccessfulConnectHandler() { - @Override - public void onSuccess(Channel channel) { - startHBaseConnection(channel); - } + @Override + public void onSuccess(Channel channel) { + startHBaseConnection(channel); + } - @Override - public void onSaslProtectionSucess(Channel channel) { - startConnectionWithEncryption(channel); - } - }); + @Override + public void onSaslProtectionSucess(Channel channel) { + startConnectionWithEncryption(channel); + } + }, this); } /** @@ -367,7 +369,7 @@ public class AsyncRpcChannel { * @throws 
java.io.IOException on failure to write */ private ChannelFuture writeChannelHeader(Channel channel) throws IOException { - RPCProtos.ConnectionHeader header = getChannelHeader(authMethod); + RPCProtos.ConnectionHeader header = getChannelHeader(authMethod, null); int totalSize = IPCUtil.getTotalSizeWhenWrittenDelimited(header); ByteBuf b = channel.alloc().directBuffer(totalSize); @@ -377,15 +379,7 @@ public class AsyncRpcChannel { return channel.writeAndFlush(b); } - private byte[] getChannelHeaderBytes(AuthMethod authMethod) { - RPCProtos.ConnectionHeader header = getChannelHeader(authMethod); - ByteBuffer b = ByteBuffer.allocate(header.getSerializedSize() + 4); - b.putInt(header.getSerializedSize()); - b.put(header.toByteArray()); - return b.array(); - } - - private RPCProtos.ConnectionHeader getChannelHeader(AuthMethod authMethod) { + public RPCProtos.ConnectionHeader getChannelHeader(AuthMethod authMethod, SaslClient saslClient) { RPCProtos.ConnectionHeader.Builder headerBuilder = RPCProtos.ConnectionHeader.newBuilder() .setServiceName(serviceName); @@ -402,6 +396,23 @@ public class AsyncRpcChannel { } headerBuilder.setVersionInfo(ProtobufUtil.getVersionInfo()); + + // if rpc security is enabled and encryption with Crypto AES, + // set the cipher transformation in connection header to negotiate with server + if (saslClient != null && authMethod != AuthMethod.SIMPLE) { + String qop = (String) saslClient.getNegotiatedProperty(Sasl.QOP); + boolean saslEncryptionEnabled = + SaslUtil.QualityOfProtection.PRIVACY.getSaslQop().equalsIgnoreCase(qop); + // if rpc with Crypto AES is enabled + boolean rpcCryptoAesEnabled = saslEncryptionEnabled && client.conf.getBoolean( + HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_DEFAULT); + if (rpcCryptoAesEnabled) { + headerBuilder.setRpcCryptoCipherTransformation( + client.conf.get(HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CONF_KEY, + 
HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CTR)); + } + } return headerBuilder.build(); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java index 4546c8d..259ab93 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java @@ -70,6 +70,7 @@ import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.exceptions.ConnectionClosingException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse; @@ -79,6 +80,7 @@ import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation; import org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo; import org.apache.hadoop.hbase.security.AuthMethod; import org.apache.hadoop.hbase.security.HBaseSaslRpcClient; +import org.apache.hadoop.hbase.security.SaslUtil; import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection; import org.apache.hadoop.hbase.security.SecurityInfo; import org.apache.hadoop.hbase.security.User; @@ -158,7 +160,6 @@ public class RpcClientImpl extends AbstractRpcClient { * socket connected to a remote address. Calls are multiplexed through this * socket: responses may be delivered out of order. 
*/ protected class Connection extends Thread { - private final ConnectionHeader header; // connection header protected ConnectionId remoteId; protected Socket socket = null; // connected socket protected DataInputStream in; @@ -173,6 +174,7 @@ public class RpcClientImpl extends AbstractRpcClient { private final int reloginMaxBackoff; // max pause before relogin on sasl failure private final Codec codec; private final CompressionCodec compressor; + private boolean waitingConnectionHeaderResponse; // currently active calls protected final ConcurrentSkipListMap calls = @@ -352,21 +354,6 @@ public class RpcClientImpl extends AbstractRpcClient { reloginMaxBackoff = conf.getInt("hbase.security.relogin.maxbackoff", 5000); this.remoteId = remoteId; - ConnectionHeader.Builder builder = ConnectionHeader.newBuilder(); - builder.setServiceName(remoteId.getServiceName()); - UserInformation userInfoPB = getUserInfo(ticket); - if (userInfoPB != null) { - builder.setUserInfo(userInfoPB); - } - if (this.codec != null) { - builder.setCellBlockCodecClass(this.codec.getClass().getCanonicalName()); - } - if (this.compressor != null) { - builder.setCellBlockCompressorClass(this.compressor.getClass().getCanonicalName()); - } - builder.setVersionInfo(ProtobufUtil.getVersionInfo()); - this.header = builder.build(); - this.setName("IPC Client (" + socketFactory.hashCode() +") connection to " + remoteId.getAddress().toString() + ((ticket==null)?" from an unknown user": (" from " @@ -759,8 +746,8 @@ public class RpcClientImpl extends AbstractRpcClient { } if (continueSasl) { // Sasl connect is successful. Let's set up Sasl i/o streams. - inStream = saslRpcClient.getInputStream(inStream); - outStream = saslRpcClient.getOutputStream(outStream); + inStream = saslRpcClient.getInputStream(); + outStream = saslRpcClient.getOutputStream(); } else { // fall back to simple auth because server told us so. 
authMethod = AuthMethod.SIMPLE; @@ -773,6 +760,8 @@ public class RpcClientImpl extends AbstractRpcClient { } // Now write out the connection header writeConnectionHeader(); + // process the response from server for connection header if necessary + processResponseForConnectionHeader(); // start the receiver thread after the socket connection has been set up start(); @@ -822,12 +811,72 @@ public class RpcClientImpl extends AbstractRpcClient { */ private synchronized void writeConnectionHeader() throws IOException { synchronized (this.outLock) { - this.out.writeInt(this.header.getSerializedSize()); - this.header.writeTo(this.out); + UserGroupInformation ticket = remoteId.getTicket().getUGI(); + ConnectionHeader.Builder builder = ConnectionHeader.newBuilder(); + builder.setServiceName(remoteId.getServiceName()); + UserInformation userInfoPB = getUserInfo(ticket); + if (userInfoPB != null) { + builder.setUserInfo(userInfoPB); + } + if (this.codec != null) { + builder.setCellBlockCodecClass(this.codec.getClass().getCanonicalName()); + } + if (this.compressor != null) { + builder.setCellBlockCompressorClass(this.compressor.getClass().getCanonicalName()); + } + builder.setVersionInfo(ProtobufUtil.getVersionInfo()); + + // if security is enable and Crypto AES is enabled, + // set the cipher transformation in connection header to negotiate with server + if (saslRpcClient != null) { + String qop = saslRpcClient.getSaslQOP(); + boolean saslEncryptionEnabled = + SaslUtil.QualityOfProtection.PRIVACY.getSaslQop().equalsIgnoreCase(qop); + boolean isWriteAesCipherTransformation = saslEncryptionEnabled && conf.getBoolean( + HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_DEFAULT); + if (isWriteAesCipherTransformation) { + builder.setRpcCryptoCipherTransformation( + conf.get(HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CONF_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CTR)); + 
waitingConnectionHeaderResponse = true; + } + } + ConnectionHeader header = builder.build(); + this.out.writeInt(header.getSerializedSize()); + header.writeTo(this.out); this.out.flush(); } } + private void processResponseForConnectionHeader() throws IOException { + // if no response excepted, return + if (!waitingConnectionHeaderResponse) return; + // read the ConnectionHeaderResponse from server + int len = this.in.readInt(); + byte[] buff = new byte[len]; + this.in.read(buff); + + RPCProtos.ConnectionHeaderResponse connectionHeaderResponse = + RPCProtos.ConnectionHeaderResponse.parseFrom(buff); + + // Get the CryptoCipherMeta, update the HBaseSaslRpcClient for Crypto Cipher + if (connectionHeaderResponse.hasCryptoCipherMeta()) { + negotiateCryptoAes(connectionHeaderResponse.getCryptoCipherMeta()); + } + waitingConnectionHeaderResponse = false; + } + + private void negotiateCryptoAes(RPCProtos.CryptoCipherMeta cryptoCipherMeta) + throws IOException { + saslRpcClient.initCryptoCipher(cryptoCipherMeta, conf); + // reset the inputStream. outputStream for Crypto AES encryption + this.in = new DataInputStream(new BufferedInputStream(saslRpcClient.getInputStream())); + synchronized (this.outLock) { + this.out = new DataOutputStream(new BufferedOutputStream(saslRpcClient.getOutputStream())); + } + } + /** Close the connection. */ protected synchronized void close() { if (!shouldCloseConnection.get()) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java new file mode 100644 index 0000000..12e4a7a --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.ipc; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.classification.InterfaceStability; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class UnsupportedCryptoException extends FatalConnectionException { + public UnsupportedCryptoException() { + super(); + } + + public UnsupportedCryptoException(String msg) { + super(msg); + } + + public UnsupportedCryptoException(String msg, Throwable t) { + super(msg, t); + } +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java index d89d96c..f963b0a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java @@ -23,9 +23,11 @@ import java.io.IOException; import java.security.Key; import java.security.KeyException; import java.security.SecureRandom; +import java.util.Properties; import javax.crypto.spec.SecretKeySpec; +import org.apache.commons.crypto.cipher.CryptoCipherFactory; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.conf.Configuration; @@ -35,7 +37,9 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.crypto.Cipher; import org.apache.hadoop.hbase.io.crypto.Encryption; +import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES; import org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; @@ -255,4 +259,27 @@ public final class EncryptionUtil { } return key; } + + /** + * Helper to create an instance of CryptoAES. + * + * @param conf The current configuration. + * @param cryptoCipherMeta The metadata for create CryptoAES. + * @return The instance of CryptoAES. + * @throws IOException if create CryptoAES failed + */ + public static CryptoAES createCryptoAES(RPCProtos.CryptoCipherMeta cryptoCipherMeta, + Configuration conf) throws IOException { + Properties properties = new Properties(); + // the property for cipher class + properties.setProperty(CryptoCipherFactory.CLASSES_KEY, + conf.get(HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_CLASS_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_CLASS_DEFAULT)); + // create SaslAES for client + return new CryptoAES(cryptoCipherMeta.getTransformation(), properties, + cryptoCipherMeta.getInKey().toByteArray(), + cryptoCipherMeta.getOutKey().toByteArray(), + cryptoCipherMeta.getInIv().toByteArray(), + cryptoCipherMeta.getOutIv().toByteArray()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java index ce32ed9..f203e4a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java @@ -22,10 +22,14 
@@ import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; +import java.io.FilterInputStream; +import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.ByteBuffer; import java.util.Map; +import java.util.Properties; import javax.security.auth.callback.Callback; import javax.security.auth.callback.CallbackHandler; @@ -38,9 +42,14 @@ import javax.security.sasl.Sasl; import javax.security.sasl.SaslClient; import javax.security.sasl.SaslException; +import org.apache.commons.crypto.cipher.CryptoCipherFactory; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos; import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.security.SaslInputStream; @@ -61,6 +70,13 @@ public class HBaseSaslRpcClient { private final SaslClient saslClient; private final boolean fallbackAllowed; protected final Map saslProps; + private boolean cryptoAesEnable; + private CryptoAES cryptoAES; + private InputStream saslInputStream; + private InputStream cryptoInputStream; + private OutputStream saslOutputStream; + private OutputStream cryptoOutputStream; + /** * Create a HBaseSaslRpcClient for an authentication method * @@ -231,6 +247,15 @@ public class HBaseSaslRpcClient { LOG.debug("SASL client context established. 
Negotiated QoP: " + saslClient.getNegotiatedProperty(Sasl.QOP)); } + // initial the inputStream, outputStream for both Sasl encryption and Crypto encryption + // if Cryption encryption enabled, the saslInputStream/saslOutputStream is + // responsible for rpc encryption with sasl(eg, 3DES, DES), + // cryptoInputStream/cryptoOutputStream is responsible for rpc encryption with AES + saslInputStream = new SaslInputStream(inS, saslClient); + cryptoInputStream = new WrappedInputStream(inS); + saslOutputStream = new SaslOutputStream(outS, saslClient); + cryptoOutputStream = new WrappedOutputStream(outS); + return true; } catch (IOException e) { try { @@ -242,36 +267,117 @@ public class HBaseSaslRpcClient { } } + public String getSaslQOP() { + return (String) saslClient.getNegotiatedProperty(Sasl.QOP); + } + + public void initCryptoCipher(RPCProtos.CryptoCipherMeta cryptoCipherMeta, + Configuration conf) throws IOException { + // create SaslAES for client + cryptoAES = EncryptionUtil.createCryptoAES(cryptoCipherMeta, conf); + cryptoAesEnable = true; + } + /** * Get a SASL wrapped InputStream. Can be called only after saslConnect() has * been called. * - * @param in - * the InputStream to wrap * @return a SASL wrapped InputStream * @throws IOException */ - public InputStream getInputStream(InputStream in) throws IOException { + public InputStream getInputStream() throws IOException { if (!saslClient.isComplete()) { throw new IOException("Sasl authentication exchange hasn't completed yet"); } - return new SaslInputStream(in, saslClient); + // If Crypto AES is enabled, return cryptoInputStream which unwrap the data with Crypto AES. 
+ if (cryptoAesEnable) { + return cryptoInputStream; + } + return saslInputStream; + } + + class WrappedInputStream extends FilterInputStream { + private ByteBuffer unwrappedRpcBuffer = ByteBuffer.allocate(0); + public WrappedInputStream(InputStream in) throws IOException { + super(in); + } + + @Override + public int read() throws IOException { + byte[] b = new byte[1]; + int n = read(b, 0, 1); + return (n != -1) ? b[0] : -1; + } + + @Override + public int read(byte b[]) throws IOException { + return read(b, 0, b.length); + } + + @Override + public synchronized int read(byte[] buf, int off, int len) throws IOException { + // fill the buffer with the next RPC message + if (unwrappedRpcBuffer.remaining() == 0) { + readNextRpcPacket(); + } + // satisfy as much of the request as possible + int readLen = Math.min(len, unwrappedRpcBuffer.remaining()); + unwrappedRpcBuffer.get(buf, off, readLen); + return readLen; + } + + // all messages must be wrapped by saslAES, else an exception is thrown + private void readNextRpcPacket() throws IOException { + LOG.debug("reading next wrapped RPC packet"); + DataInputStream dis = new DataInputStream(in); + int rpcLen = dis.readInt(); + byte[] rpcBuf = new byte[rpcLen]; + dis.readFully(rpcBuf); + + // unwrap with saslAES + rpcBuf = cryptoAES.unwrap(rpcBuf, 0, rpcBuf.length); + if (LOG.isDebugEnabled()) { + LOG.debug("unwrapping token of length:" + rpcBuf.length); + } + unwrappedRpcBuffer = ByteBuffer.wrap(rpcBuf); + } } /** * Get a SASL wrapped OutputStream. Can be called only after saslConnect() has * been called. 
* - * @param out - * the OutputStream to wrap * @return a SASL wrapped OutputStream * @throws IOException */ - public OutputStream getOutputStream(OutputStream out) throws IOException { + public OutputStream getOutputStream() throws IOException { if (!saslClient.isComplete()) { throw new IOException("Sasl authentication exchange hasn't completed yet"); } - return new SaslOutputStream(out, saslClient); + // If Crypto AES is enabled, return cryptoOutputStream which wrap the data with Crypto AES. + if (cryptoAesEnable) { + return cryptoOutputStream; + } + return saslOutputStream; + } + + class WrappedOutputStream extends FilterOutputStream { + public WrappedOutputStream(OutputStream out) throws IOException { + super(out); + } + @Override + public void write(byte[] buf, int off, int len) throws IOException { + if (LOG.isDebugEnabled()) { + LOG.debug("wrapping token of length:" + len); + } + + // wrap with saslAES + byte[] wrapped = cryptoAES.wrap(buf, off, len); + DataOutputStream dob = new DataOutputStream(out); + dob.writeInt(wrapped.length); + dob.write(wrapped, 0, wrapped.length); + dob.flush(); + } } /** Release resources used by wrapped saslClient */ diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslClientHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslClientHandler.java index d583e20..f77c54b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslClientHandler.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslClientHandler.java @@ -27,7 +27,12 @@ import io.netty.channel.ChannelPromise; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES; +import org.apache.hadoop.hbase.ipc.AsyncRpcChannel; +import 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; @@ -64,8 +69,13 @@ public class SaslClientHandler extends ChannelDuplexHandler { private final SaslExceptionHandler exceptionHandler; private final SaslSuccessfulConnectHandler successfulConnectHandler; private byte[] saslToken; - private byte[] connectionHeader; + private AuthMethod authMethod; private boolean firstRead = true; + private boolean cryptoAesEnable; + private boolean waitingConnectionHeaderResponse; + private CryptoAES cryptoAES; + private final Configuration conf; + private final AsyncRpcChannel asyncRpcChannel; private int retryCount = 0; private Random random; @@ -82,14 +92,17 @@ public class SaslClientHandler extends ChannelDuplexHandler { */ public SaslClientHandler(UserGroupInformation ticket, AuthMethod method, Token token, String serverPrincipal, boolean fallbackAllowed, - String rpcProtection, byte[] connectionHeader, SaslExceptionHandler exceptionHandler, - SaslSuccessfulConnectHandler successfulConnectHandler) throws IOException { + String rpcProtection, AuthMethod authMethod, Configuration conf, SaslExceptionHandler exceptionHandler, + SaslSuccessfulConnectHandler successfulConnectHandler, + AsyncRpcChannel asyncRpcChannel) throws IOException { + this.conf = conf; this.ticket = ticket; this.fallbackAllowed = fallbackAllowed; - this.connectionHeader = connectionHeader; + this.authMethod = authMethod; this.exceptionHandler = exceptionHandler; this.successfulConnectHandler = successfulConnectHandler; + this.asyncRpcChannel = asyncRpcChannel; saslProps = SaslUtil.initSaslProperties(rpcProtection); switch (method) { @@ -232,16 +245,28 @@ public class SaslClientHandler extends ChannelDuplexHandler { ctx.pipeline().remove(this); successfulConnectHandler.onSuccess(ctx.channel()); } else { + byte[] connectionHeader = getChannelHeaderBytes(); byte[] 
wrappedCH = saslClient.wrap(connectionHeader, 0, connectionHeader.length); // write connection header writeSaslToken(ctx, wrappedCH); - successfulConnectHandler.onSaslProtectionSucess(ctx.channel()); + // don't send the call if client is waiting the response from server + waitingConnectionHeaderResponse = isWaitingConnectionHeaderResponse(conf); + if (!waitingConnectionHeaderResponse) { + successfulConnectHandler.onSaslProtectionSucess(ctx.channel()); + } } } } // Normal wrapped reading else { try { + if (waitingConnectionHeaderResponse) { + // process the connection header response from server + processConnectionHeaderResponse(in); + waitingConnectionHeaderResponse = false; + successfulConnectHandler.onSaslProtectionSucess(ctx.channel()); + return; + } int length = in.readInt(); if (LOG.isDebugEnabled()) { LOG.debug("Actual length is " + length); @@ -256,7 +281,13 @@ public class SaslClientHandler extends ChannelDuplexHandler { try { ByteBuf b = ctx.channel().alloc().buffer(saslToken.length); - b.writeBytes(saslClient.unwrap(saslToken, 0, saslToken.length)); + byte[] unwrapped; + if (cryptoAesEnable) { + unwrapped = cryptoAES.unwrap(saslToken, 0, saslToken.length); + } else { + unwrapped = saslClient.unwrap(saslToken, 0, saslToken.length); + } + b.writeBytes(unwrapped); ctx.fireChannelRead(b); } catch (SaslException se) { @@ -270,6 +301,45 @@ public class SaslClientHandler extends ChannelDuplexHandler { } } + /** + * check if client need wait server response by connection header, will wait the response + * if Crypto AES is enable + */ + private boolean isWaitingConnectionHeaderResponse(Configuration conf) throws IOException { + String qop = (String) saslClient.getNegotiatedProperty(Sasl.QOP); + boolean saslWrapEnabled = + SaslUtil.QualityOfProtection.PRIVACY.getSaslQop().equalsIgnoreCase(qop); + return saslWrapEnabled && conf.getBoolean( + HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_DEFAULT); + } + + 
private void processConnectionHeaderResponse(ByteBuf in) + throws IOException { + // read the ConnectionHeaderResponse from server + int len = in.readInt(); + byte[] buff = new byte[len]; + in.readBytes(buff); + + // unwrap the SaslCipherMeta message with sasl client + byte[] unwrappedData = saslClient.unwrap(buff, 0, buff.length); + byte[] headResponseData = new byte[unwrappedData.length - 4]; + System.arraycopy(unwrappedData, 4, headResponseData, 0, headResponseData.length); + + RPCProtos.ConnectionHeaderResponse connectionHeaderResponse = + RPCProtos.ConnectionHeaderResponse.parseFrom(headResponseData); + // if CryptoCipherMeta exist, create the instance of CryptoAES + if (connectionHeaderResponse.hasCryptoCipherMeta()) { + initCryptoCipher(connectionHeaderResponse.getCryptoCipherMeta()); + } + } + + private void initCryptoCipher(RPCProtos.CryptoCipherMeta cryptoCipherMeta) throws IOException { + // create SaslAES for client + cryptoAES = EncryptionUtil.createCryptoAES(cryptoCipherMeta, conf); + cryptoAesEnable = true; + } + private void writeSaslToken(final ChannelHandlerContext ctx, byte[] saslToken) { ByteBuf b = ctx.alloc().buffer(4 + saslToken.length); b.writeInt(saslToken.length); @@ -284,6 +354,15 @@ public class SaslClientHandler extends ChannelDuplexHandler { }); } + private byte[] getChannelHeaderBytes() { + RPCProtos.ConnectionHeader header = + asyncRpcChannel.getChannelHeader(authMethod, saslClient); + ByteBuffer b = ByteBuffer.allocate(header.getSerializedSize() + 4); + b.putInt(header.getSerializedSize()); + b.put(header.toByteArray()); + return b.array(); + } + /** * Get the read status */ @@ -295,7 +374,8 @@ public class SaslClientHandler extends ChannelDuplexHandler { } } - @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { saslClient.dispose(); @@ -321,7 +401,11 @@ public class SaslClientHandler extends 
ChannelDuplexHandler { in.release(); try { - saslToken = saslClient.wrap(unwrapped, 0, unwrapped.length); + if (cryptoAesEnable) { + saslToken = cryptoAES.wrap(unwrapped, 0, unwrapped.length); + } else { + saslToken = saslClient.wrap(unwrapped, 0, unwrapped.length); + } } catch (SaslException se) { try { saslClient.dispose(); @@ -336,7 +420,8 @@ public class SaslClientHandler extends ChannelDuplexHandler { out.writeBytes(saslToken, 0, saslToken.length); ctx.write(out).addListener(new ChannelFutureListener() { - @Override public void operationComplete(ChannelFuture future) throws Exception { + @Override + public void operationComplete(ChannelFuture future) throws Exception { if (!future.isSuccess()) { exceptionCaught(ctx, future.cause()); } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java index 0e3aeab..6be0e2b 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java @@ -211,14 +211,14 @@ public class TestHBaseSaslRpcClient { }; try { - rpcClient.getInputStream(Mockito.mock(InputStream.class)); + rpcClient.getInputStream(); } catch(IOException ex) { //Sasl authentication exchange hasn't completed yet inState = true; } try { - rpcClient.getOutputStream(Mockito.mock(OutputStream.class)); + rpcClient.getOutputStream(); } catch(IOException ex) { //Sasl authentication exchange hasn't completed yet outState = true; diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml index 5b43553..9e5c87b 100644 --- a/hbase-common/pom.xml +++ b/hbase-common/pom.xml @@ -279,6 +279,10 @@ org.apache.htrace htrace-core + + org.apache.commons + commons-crypto + diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index 
4c499a2..0772886 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -1184,6 +1184,39 @@ public final class HConstants { public static final String CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY = "hbase.crypto.alternate.key.algorithm"; + /** Configuration key for if enable Crypto AES for rpc encryption */ + public static final String RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY = + "hbase.rpc.crypto.encryption.aes.enabled"; + + public static final boolean RPC_CRYPTO_ENCRYPTION_AES_ENABLED_DEFAULT = false; + + /** Configuration key for the transformation of Crypto AES cipher */ + public static final String RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CONF_KEY = + "hbase.rpc.crypto.encryption.aes.cipher.transform"; + + public static final String RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CTR = + "AES/CTR/NoPadding"; + + /** Configuration key for the random of Crypto AES cipher */ + public static final String RPC_CRYPTO_ENCRYPTION_RANDOM_CONF_KEY = + "hbase.crypto.sasl.encryption.aes.crypto.random"; + + public static final String RPC_CRYPTO_ENCRYPTION_RANDOM_DEFAULT = + "org.apache.commons.crypto.random.JavaCryptoRandom"; + + /** Configuration key for the key size of Crypto AES cipher */ + public static final String RPC_CRYPTO_ENCRYPTION_AES_CIPHER_KEY_SIZE_CONF_KEY = + "hbase.rpc.crypto.encryption.aes.cipher.keySizeBits"; + + public static final int RPC_CRYPTO_ENCRYPTION_AES_CIPHER_KEY_SIZE_DEFAULT = 128; + + /** Configuration key for the cipher class of Crypto AES cipher */ + public static final String RPC_CRYPTO_ENCRYPTION_AES_CIPHER_CLASS_KEY = + "hbase.rpc.crypto.encryption.aes.cipher.class"; + + public static final String RPC_CRYPTO_ENCRYPTION_AES_CIPHER_CLASS_DEFAULT = + "org.apache.commons.crypto.cipher.JceCipher"; + /** Configuration key for enabling WAL encryption, a boolean */ public static final String ENABLE_WAL_ENCRYPTION = "hbase.regionserver.wal.encryption"; 
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.java new file mode 100644 index 0000000..43f241e --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CryptoAES.java @@ -0,0 +1,242 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.io.crypto.aes; + +import org.apache.commons.crypto.cipher.CryptoCipher; +import org.apache.commons.crypto.utils.Utils; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.classification.InterfaceStability; + +import javax.crypto.Cipher; +import javax.crypto.Mac; +import javax.crypto.SecretKey; +import javax.crypto.ShortBufferException; +import javax.crypto.spec.IvParameterSpec; +import javax.crypto.spec.SecretKeySpec; +import javax.security.sasl.SaslException; +import java.io.IOException; +import java.security.InvalidAlgorithmParameterException; +import java.security.InvalidKeyException; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.Properties; + +/** + * AES encryption and decryption. + */ +@InterfaceAudience.Private +@InterfaceStability.Evolving +public class CryptoAES { + + private final CryptoCipher encryptor; + private final CryptoCipher decryptor; + + private final Integrity integrity; + + public CryptoAES(String transformation, Properties properties, + byte[] inKey, byte[] outKey, byte[] inIv, byte[] outIv) throws IOException { + checkTransformation(transformation); + // encryptor + encryptor = Utils.getCipherInstance(transformation, properties); + try { + SecretKeySpec outKEYSpec = new SecretKeySpec(outKey, "AES"); + IvParameterSpec outIVSpec = new IvParameterSpec(outIv); + encryptor.init(Cipher.ENCRYPT_MODE, outKEYSpec, outIVSpec); + } catch (InvalidKeyException | InvalidAlgorithmParameterException e) { + throw new IOException("Failed to initialize encryptor", e); + } + + // decryptor + decryptor = Utils.getCipherInstance(transformation, properties); + try { + SecretKeySpec inKEYSpec = new SecretKeySpec(inKey, "AES"); + IvParameterSpec inIVSpec = new IvParameterSpec(inIv); + decryptor.init(Cipher.DECRYPT_MODE, inKEYSpec, inIVSpec); + } catch (InvalidKeyException | 
InvalidAlgorithmParameterException e) { + throw new IOException("Failed to initialize decryptor", e); + } + + integrity = new Integrity(outKey, inKey); + } + + /** + * Encrypts input data. The result composes of (msg, padding if needed, mac) and sequence num. + * @param data the input byte array + * @param offset the offset in input where the input starts + * @param len the input length + * @return the new encrypted byte array. + * @throws SaslException if error happens + */ + public byte[] wrap(byte[] data, int offset, int len) throws SaslException { + // mac + byte[] mac = integrity.getHMAC(data, offset, len); + integrity.incMySeqNum(); + + // encrypt + byte[] encrypted = new byte[len + 10]; + try { + int n = encryptor.update(data, offset, len, encrypted, 0); + encryptor.update(mac, 0, 10, encrypted, n); + } catch (ShortBufferException sbe) { + // this should not happen + throw new SaslException("Error happens during encrypt data", sbe); + } + + // append seqNum used for mac + byte[] wrapped = new byte[encrypted.length + 4]; + System.arraycopy(encrypted, 0, wrapped, 0, encrypted.length); + System.arraycopy(integrity.getSeqNum(), 0, wrapped, encrypted.length, 4); + + return wrapped; + } + + /** + * Decrypts input data. The input composes of (msg, padding if needed, mac) and sequence num. + * The result is msg. + * @param data the input byte array + * @param offset the offset in input where the input starts + * @param len the input length + * @return the new decrypted byte array. 
+ * @throws SaslException if error happens + */ + public byte[] unwrap(byte[] data, int offset, int len) throws SaslException { + // get plaintext and seqNum + byte[] decrypted = new byte[len - 4]; + byte[] peerSeqNum = new byte[4]; + try { + decryptor.update(data, offset, len - 4, decrypted, 0); + } catch (ShortBufferException sbe) { + // this should not happen + throw new SaslException("Error happens during decrypt data", sbe); + } + System.arraycopy(data, offset + decrypted.length, peerSeqNum, 0, 4); + + // get msg and mac + byte[] msg = new byte[decrypted.length - 10]; + byte[] mac = new byte[10]; + System.arraycopy(decrypted, 0, msg, 0, msg.length); + System.arraycopy(decrypted, msg.length, mac, 0, 10); + + // check mac integrity and msg sequence + if (!integrity.compareHMAC(mac, peerSeqNum, msg, 0, msg.length)) { + throw new SaslException("Unmatched MAC"); + } + if (!integrity.comparePeerSeqNum(peerSeqNum)) { + throw new SaslException("Out of order sequencing of messages. Got: " + integrity.byteToInt + (peerSeqNum) + " Expected: " + integrity.peerSeqNum); + } + integrity.incPeerSeqNum(); + + return msg; + } + + private void checkTransformation(String transformation) throws IOException { + if (HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_TRANSFORM_CTR.equals(transformation)) { + return; + } + throw new IOException("AES cipher transformation is not supported: " + transformation); + } + + /** + * Helper class for providing integrity protection. 
+ */ + private static class Integrity { + + private int mySeqNum = 0; + private int peerSeqNum = 0; + private byte[] seqNum = new byte[4]; + + private byte[] myKey; + private byte[] peerKey; + + Integrity(byte[] outKey, byte[] inKey) throws IOException { + myKey = outKey; + peerKey = inKey; + } + + byte[] getHMAC(byte[] msg, int start, int len) throws SaslException { + intToByte(mySeqNum); + return calculateHMAC(myKey, seqNum, msg, start, len); + } + + boolean compareHMAC(byte[] expectedHMAC, byte[] peerSeqNum, byte[] msg, int start, + int len) throws SaslException { + byte[] mac = calculateHMAC(peerKey, peerSeqNum, msg, start, len); + return Arrays.equals(mac, expectedHMAC); + } + + boolean comparePeerSeqNum(byte[] peerSeqNum) { + return this.peerSeqNum == byteToInt(peerSeqNum); + } + + byte[] getSeqNum() { + return seqNum; + } + + void incMySeqNum() { + mySeqNum ++; + } + + void incPeerSeqNum() { + peerSeqNum ++; + } + + private byte[] calculateHMAC(byte[] key, byte[] seqNum, byte[] msg, int start, + int len) throws SaslException { + byte[] seqAndMsg = new byte[4+len]; + System.arraycopy(seqNum, 0, seqAndMsg, 0, 4); + System.arraycopy(msg, start, seqAndMsg, 4, len); + + try { + SecretKey keyKi = new SecretKeySpec(key, "HmacMD5"); + Mac m = Mac.getInstance("HmacMD5"); + m.init(keyKi); + m.update(seqAndMsg); + byte[] hMAC_MD5 = m.doFinal(); + + /* First 10 bytes of HMAC_MD5 digest */ + byte macBuffer[] = new byte[10]; + System.arraycopy(hMAC_MD5, 0, macBuffer, 0, 10); + + return macBuffer; + } catch (InvalidKeyException e) { + throw new SaslException("Invalid bytes used for key of HMAC-MD5 hash.", e); + } catch (NoSuchAlgorithmException e) { + throw new SaslException("Error creating instance of MD5 MAC algorithm", e); + } + } + + private void intToByte(int num) { + for(int i = 3; i >= 0; i --) { + seqNum[i] = (byte)(num & 0xff); + num >>>= 8; + } + } + + private int byteToInt(byte[] seqNum) { + int answer = 0; + for (int i = 0; i < 4; i ++) { + answer <<= 8; + 
answer |= ((int)seqNum[i] & 0xff); + } + return answer; + } + } +} diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java index d05eb57..f6819e2 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java @@ -803,6 +803,33 @@ public final class RPCProtos { * optional .hbase.pb.VersionInfo version_info = 5; */ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfoOrBuilder getVersionInfoOrBuilder(); + + // optional string rpc_crypto_cipher_transformation = 6; + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+     * the transformation for rpc AES encryption with Apache Commons Crypto
+     * 
+ */ + boolean hasRpcCryptoCipherTransformation(); + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+     * the transformation for rpc AES encryption with Apache Commons Crypto
+     * 
+ */ + java.lang.String getRpcCryptoCipherTransformation(); + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+     * the transformation for rpc AES encryption with Apache Commons Crypto
+     * 
+ */ + com.google.protobuf.ByteString + getRpcCryptoCipherTransformationBytes(); } /** * Protobuf type {@code hbase.pb.ConnectionHeader} @@ -900,6 +927,11 @@ public final class RPCProtos { bitField0_ |= 0x00000010; break; } + case 50: { + bitField0_ |= 0x00000020; + rpcCryptoCipherTransformation_ = input.readBytes(); + break; + } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { @@ -1143,12 +1175,68 @@ public final class RPCProtos { return versionInfo_; } + // optional string rpc_crypto_cipher_transformation = 6; + public static final int RPC_CRYPTO_CIPHER_TRANSFORMATION_FIELD_NUMBER = 6; + private java.lang.Object rpcCryptoCipherTransformation_; + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+     * the transformation for rpc AES encryption with Apache Commons Crypto
+     * 
+ */ + public boolean hasRpcCryptoCipherTransformation() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+     * the transformation for rpc AES encryption with Apache Commons Crypto
+     * 
+ */ + public java.lang.String getRpcCryptoCipherTransformation() { + java.lang.Object ref = rpcCryptoCipherTransformation_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + rpcCryptoCipherTransformation_ = s; + } + return s; + } + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+     * the transformation for rpc AES encryption with Apache Commons Crypto
+     * 
+ */ + public com.google.protobuf.ByteString + getRpcCryptoCipherTransformationBytes() { + java.lang.Object ref = rpcCryptoCipherTransformation_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + rpcCryptoCipherTransformation_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private void initFields() { userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance(); serviceName_ = ""; cellBlockCodecClass_ = ""; cellBlockCompressorClass_ = ""; versionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo.getDefaultInstance(); + rpcCryptoCipherTransformation_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -1189,6 +1277,9 @@ public final class RPCProtos { if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeMessage(5, versionInfo_); } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBytes(6, getRpcCryptoCipherTransformationBytes()); + } getUnknownFields().writeTo(output); } @@ -1218,6 +1309,10 @@ public final class RPCProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, versionInfo_); } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(6, getRpcCryptoCipherTransformationBytes()); + } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -1266,6 +1361,11 @@ public final class RPCProtos { result = result && getVersionInfo() .equals(other.getVersionInfo()); } + result = result && (hasRpcCryptoCipherTransformation() == other.hasRpcCryptoCipherTransformation()); + if (hasRpcCryptoCipherTransformation()) { + result = result && getRpcCryptoCipherTransformation() + .equals(other.getRpcCryptoCipherTransformation()); + } result = result && 
getUnknownFields().equals(other.getUnknownFields()); return result; @@ -1299,6 +1399,10 @@ public final class RPCProtos { hash = (37 * hash) + VERSION_INFO_FIELD_NUMBER; hash = (53 * hash) + getVersionInfo().hashCode(); } + if (hasRpcCryptoCipherTransformation()) { + hash = (37 * hash) + RPC_CRYPTO_CIPHER_TRANSFORMATION_FIELD_NUMBER; + hash = (53 * hash) + getRpcCryptoCipherTransformation().hashCode(); + } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; @@ -1432,6 +1536,8 @@ public final class RPCProtos { versionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); + rpcCryptoCipherTransformation_ = ""; + bitField0_ = (bitField0_ & ~0x00000020); return this; } @@ -1488,6 +1594,10 @@ public final class RPCProtos { } else { result.versionInfo_ = versionInfoBuilder_.build(); } + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.rpcCryptoCipherTransformation_ = rpcCryptoCipherTransformation_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -1525,6 +1635,11 @@ public final class RPCProtos { if (other.hasVersionInfo()) { mergeVersionInfo(other.getVersionInfo()); } + if (other.hasRpcCryptoCipherTransformation()) { + bitField0_ |= 0x00000020; + rpcCryptoCipherTransformation_ = other.rpcCryptoCipherTransformation_; + onChanged(); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -2080,6 +2195,104 @@ public final class RPCProtos { return versionInfoBuilder_; } + // optional string rpc_crypto_cipher_transformation = 6; + private java.lang.Object rpcCryptoCipherTransformation_ = ""; + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+       * the transformation for rpc AES encryption with Apache Commons Crypto
+       * 
+ */ + public boolean hasRpcCryptoCipherTransformation() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+       * the transformation for rpc AES encryption with Apache Commons Crypto
+       * 
+ */ + public java.lang.String getRpcCryptoCipherTransformation() { + java.lang.Object ref = rpcCryptoCipherTransformation_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + rpcCryptoCipherTransformation_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+       * the transformation for rpc AES encryption with Apache Commons Crypto
+       * 
+ */ + public com.google.protobuf.ByteString + getRpcCryptoCipherTransformationBytes() { + java.lang.Object ref = rpcCryptoCipherTransformation_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + rpcCryptoCipherTransformation_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+       * the transformation for rpc AES encryption with Apache Commons Crypto
+       * 
+ */ + public Builder setRpcCryptoCipherTransformation( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000020; + rpcCryptoCipherTransformation_ = value; + onChanged(); + return this; + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+       * the transformation for rpc AES encryption with Apache Commons Crypto
+       * 
+ */ + public Builder clearRpcCryptoCipherTransformation() { + bitField0_ = (bitField0_ & ~0x00000020); + rpcCryptoCipherTransformation_ = getDefaultInstance().getRpcCryptoCipherTransformation(); + onChanged(); + return this; + } + /** + * optional string rpc_crypto_cipher_transformation = 6; + * + *
+       * the transformation for rpc AES encryption with Apache Commons Crypto
+       * 
+ */ + public Builder setRpcCryptoCipherTransformationBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000020; + rpcCryptoCipherTransformation_ = value; + onChanged(); + return this; + } + // @@protoc_insertion_point(builder_scope:hbase.pb.ConnectionHeader) } @@ -2091,50 +2304,58 @@ public final class RPCProtos { // @@protoc_insertion_point(class_scope:hbase.pb.ConnectionHeader) } - public interface CellBlockMetaOrBuilder + public interface ConnectionHeaderResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - // optional uint32 length = 1; + // optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; /** - * optional uint32 length = 1; + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; * *
-     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+     * To use Apache Commons Crypto, negotiate the metadata
      * 
*/ - boolean hasLength(); + boolean hasCryptoCipherMeta(); /** - * optional uint32 length = 1; + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; * *
-     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+     * To use Apache Commons Crypto, negotiate the metadata
      * 
*/ - int getLength(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta getCryptoCipherMeta(); + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+     * To use Apache Commons Crypto, negotiate the metadata
+     * 
+ */ + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder getCryptoCipherMetaOrBuilder(); } /** - * Protobuf type {@code hbase.pb.CellBlockMeta} + * Protobuf type {@code hbase.pb.ConnectionHeaderResponse} * *
-   * Optional Cell block Message.  Included in client RequestHeader
+   * This is sent by rpc server to negotiate the data if necessary
    * 
*/ - public static final class CellBlockMeta extends + public static final class ConnectionHeaderResponse extends com.google.protobuf.GeneratedMessage - implements CellBlockMetaOrBuilder { - // Use CellBlockMeta.newBuilder() to construct. - private CellBlockMeta(com.google.protobuf.GeneratedMessage.Builder builder) { + implements ConnectionHeaderResponseOrBuilder { + // Use ConnectionHeaderResponse.newBuilder() to construct. + private ConnectionHeaderResponse(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private CellBlockMeta(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private ConnectionHeaderResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final CellBlockMeta defaultInstance; - public static CellBlockMeta getDefaultInstance() { + private static final ConnectionHeaderResponse defaultInstance; + public static ConnectionHeaderResponse getDefaultInstance() { return defaultInstance; } - public CellBlockMeta getDefaultInstanceForType() { + public ConnectionHeaderResponse getDefaultInstanceForType() { return defaultInstance; } @@ -2144,7 +2365,7 @@ public final class RPCProtos { getUnknownFields() { return this.unknownFields; } - private CellBlockMeta( + private ConnectionHeaderResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -2167,9 +2388,17 @@ public final class RPCProtos { } break; } - case 8: { + case 10: { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = cryptoCipherMeta_.toBuilder(); + } + cryptoCipherMeta_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.PARSER, 
extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(cryptoCipherMeta_); + cryptoCipherMeta_ = subBuilder.buildPartial(); + } bitField0_ |= 0x00000001; - length_ = input.readUInt32(); break; } } @@ -2186,64 +2415,80 @@ public final class RPCProtos { } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CellBlockMeta parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ConnectionHeaderResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new CellBlockMeta(input, extensionRegistry); + return new ConnectionHeaderResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public 
com.google.protobuf.Parser getParserForType() { return PARSER; } private int bitField0_; - // optional uint32 length = 1; - public static final int LENGTH_FIELD_NUMBER = 1; - private int length_; + // optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + public static final int CRYPTO_CIPHER_META_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta cryptoCipherMeta_; /** - * optional uint32 length = 1; + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; * *
-     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+     * To use Apache Commons Crypto, negotiate the metadata
      * 
*/ - public boolean hasLength() { + public boolean hasCryptoCipherMeta() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional uint32 length = 1; + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; * *
-     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+     * To use Apache Commons Crypto, negotiate the metadata
      * 
*/ - public int getLength() { - return length_; + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta getCryptoCipherMeta() { + return cryptoCipherMeta_; + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+     * To use Apache Commons Crypto, negotiate the metadata
+     * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder getCryptoCipherMetaOrBuilder() { + return cryptoCipherMeta_; } private void initFields() { - length_ = 0; + cryptoCipherMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; + if (hasCryptoCipherMeta()) { + if (!getCryptoCipherMeta().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } memoizedIsInitialized = 1; return true; } @@ -2252,7 +2497,7 @@ public final class RPCProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt32(1, length_); + output.writeMessage(1, cryptoCipherMeta_); } getUnknownFields().writeTo(output); } @@ -2265,7 +2510,7 @@ public final class RPCProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(1, length_); + .computeMessageSize(1, cryptoCipherMeta_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -2284,16 +2529,16 @@ public final class RPCProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) obj; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse) obj; boolean result = true; - result = result && (hasLength() == other.hasLength()); - if (hasLength()) { - 
result = result && (getLength() - == other.getLength()); + result = result && (hasCryptoCipherMeta() == other.hasCryptoCipherMeta()); + if (hasCryptoCipherMeta()) { + result = result && getCryptoCipherMeta() + .equals(other.getCryptoCipherMeta()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -2308,62 +2553,62 @@ public final class RPCProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLength()) { - hash = (37 * hash) + LENGTH_FIELD_NUMBER; - hash = (53 * hash) + getLength(); + if (hasCryptoCipherMeta()) { + hash = (37 * hash) + CRYPTO_CIPHER_META_FIELD_NUMBER; + hash = (53 * hash) + getCryptoCipherMeta().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); 
} - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2372,7 +2617,7 @@ public final class RPCProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -2384,28 +2629,28 @@ public final class RPCProtos { return builder; } /** - * Protobuf type {@code hbase.pb.CellBlockMeta} + * Protobuf type {@code hbase.pb.ConnectionHeaderResponse} * *
-     * Optional Cell block Message.  Included in client RequestHeader
+     * This is sent by rpc server to negotiate the data if necessary
      * 
*/ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -2417,6 +2662,7 @@ public final class RPCProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getCryptoCipherMetaFieldBuilder(); } } private static Builder create() { @@ -2425,7 +2671,11 @@ public final class RPCProtos { public Builder clear() { super.clear(); - length_ = 0; + if (cryptoCipherMetaBuilder_ == null) { + 
cryptoCipherMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance(); + } else { + cryptoCipherMetaBuilder_.clear(); + } bitField0_ = (bitField0_ & ~0x00000001); return this; } @@ -2436,53 +2686,63 @@ public final class RPCProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ConnectionHeaderResponse_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta(this); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse result = new 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.length_ = length_; + if (cryptoCipherMetaBuilder_ == null) { + result.cryptoCipherMeta_ = cryptoCipherMeta_; + } else { + result.cryptoCipherMeta_ = cryptoCipherMetaBuilder_.build(); + } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) return this; - if (other.hasLength()) { - setLength(other.getLength()); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse.getDefaultInstance()) return this; + if (other.hasCryptoCipherMeta()) { + mergeCryptoCipherMeta(other.getCryptoCipherMeta()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { + if (hasCryptoCipherMeta()) { + if (!getCryptoCipherMeta().isInitialized()) { + + return false; + } + } return true; } @@ -2490,11 +2750,11 @@ public final class RPCProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeaderResponse) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -2505,205 +2765,1943 @@ public final class RPCProtos { } private int bitField0_; - // optional uint32 length = 1; + // optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta cryptoCipherMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder> cryptoCipherMetaBuilder_; + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public boolean hasCryptoCipherMeta() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta getCryptoCipherMeta() { + if (cryptoCipherMetaBuilder_ == null) { + return cryptoCipherMeta_; + } else { + return cryptoCipherMetaBuilder_.getMessage(); + } + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public Builder setCryptoCipherMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta value) { + if (cryptoCipherMetaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + cryptoCipherMeta_ = value; + onChanged(); + } else { + cryptoCipherMetaBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public Builder setCryptoCipherMeta( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder builderForValue) { + if (cryptoCipherMetaBuilder_ == null) { + cryptoCipherMeta_ = builderForValue.build(); + onChanged(); + } else { + cryptoCipherMetaBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public Builder mergeCryptoCipherMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta value) { + if (cryptoCipherMetaBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + cryptoCipherMeta_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance()) { + cryptoCipherMeta_ = + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.newBuilder(cryptoCipherMeta_).mergeFrom(value).buildPartial(); + } else { + cryptoCipherMeta_ = value; + } + onChanged(); + } else { + cryptoCipherMetaBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public Builder clearCryptoCipherMeta() { + if (cryptoCipherMetaBuilder_ == null) { + cryptoCipherMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance(); + onChanged(); + } else { + cryptoCipherMetaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder getCryptoCipherMetaBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getCryptoCipherMetaFieldBuilder().getBuilder(); + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder getCryptoCipherMetaOrBuilder() { + if (cryptoCipherMetaBuilder_ != null) { + return cryptoCipherMetaBuilder_.getMessageOrBuilder(); + } else { + return cryptoCipherMeta_; + } + } + /** + * optional .hbase.pb.CryptoCipherMeta crypto_cipher_meta = 1; + * + *
+       * To use Apache Commons Crypto, negotiate the metadata
+       * 
+ */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder> + getCryptoCipherMetaFieldBuilder() { + if (cryptoCipherMetaBuilder_ == null) { + cryptoCipherMetaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder>( + cryptoCipherMeta_, + getParentForChildren(), + isClean()); + cryptoCipherMeta_ = null; + } + return cryptoCipherMetaBuilder_; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.ConnectionHeaderResponse) + } + + static { + defaultInstance = new ConnectionHeaderResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.ConnectionHeaderResponse) + } + + public interface CellBlockMetaOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional uint32 length = 1; + /** + * optional uint32 length = 1; + * + *
+     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+     * 
+ */ + boolean hasLength(); + /** + * optional uint32 length = 1; + * + *
+     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+     * 
+ */ + int getLength(); + } + /** + * Protobuf type {@code hbase.pb.CellBlockMeta} + * + *
+   * Optional Cell block Message.  Included in client RequestHeader
+   * 
+ */ + public static final class CellBlockMeta extends + com.google.protobuf.GeneratedMessage + implements CellBlockMetaOrBuilder { + // Use CellBlockMeta.newBuilder() to construct. + private CellBlockMeta(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private CellBlockMeta(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final CellBlockMeta defaultInstance; + public static CellBlockMeta getDefaultInstance() { + return defaultInstance; + } + + public CellBlockMeta getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CellBlockMeta( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + length_ = input.readUInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + 
getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CellBlockMeta parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CellBlockMeta(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional uint32 length = 1; + public static final int LENGTH_FIELD_NUMBER = 1; + private int length_; + /** + * optional uint32 length = 1; + * + *
+     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+     * 
+ */ + public boolean hasLength() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional uint32 length = 1; + * + *
+     * Length of the following cell block.  Could calculate it but convenient having it too hand.
+     * 
+ */ + public int getLength() { + return length_; + } + + private void initFields() { + length_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt32(1, length_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(1, length_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) obj; + + boolean result = true; + result = result && (hasLength() == other.hasLength()); + if (hasLength()) { + result = result && (getLength() + == other.getLength()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + 
int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLength()) { + hash = (37 * hash) + LENGTH_FIELD_NUMBER; + hash = (53 * hash) + getLength(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public 
static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.CellBlockMeta} + * + *
+     * Optional Cell block Message.  Included in client RequestHeader
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMetaOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + length_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CellBlockMeta_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.length_ = length_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta.getDefaultInstance()) return this; + if (other.hasLength()) { + setLength(other.getLength()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = 
(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional uint32 length = 1; private int length_ ; /** - * optional uint32 length = 1; + * optional uint32 length = 1; + * + *
+       * Length of the following cell block.  Could calculate it but convenient having it too hand.
+       * 
+ */ + public boolean hasLength() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional uint32 length = 1; + * + *
+       * Length of the following cell block.  Could calculate it but convenient having it too hand.
+       * 
+ */ + public int getLength() { + return length_; + } + /** + * optional uint32 length = 1; + * + *
+       * Length of the following cell block.  Could calculate it but convenient having it too hand.
+       * 
+ */ + public Builder setLength(int value) { + bitField0_ |= 0x00000001; + length_ = value; + onChanged(); + return this; + } + /** + * optional uint32 length = 1; + * + *
+       * Length of the following cell block.  Could calculate it but convenient having it too hand.
+       * 
+ */ + public Builder clearLength() { + bitField0_ = (bitField0_ & ~0x00000001); + length_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.CellBlockMeta) + } + + static { + defaultInstance = new CellBlockMeta(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.CellBlockMeta) + } + + public interface ExceptionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional string exception_class_name = 1; + /** + * optional string exception_class_name = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + boolean hasExceptionClassName(); + /** + * optional string exception_class_name = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + java.lang.String getExceptionClassName(); + /** + * optional string exception_class_name = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + com.google.protobuf.ByteString + getExceptionClassNameBytes(); + + // optional string stack_trace = 2; + /** + * optional string stack_trace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + boolean hasStackTrace(); + /** + * optional string stack_trace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + java.lang.String getStackTrace(); + /** + * optional string stack_trace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + com.google.protobuf.ByteString + getStackTraceBytes(); + + // optional string hostname = 3; + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions such as region moved
+     * where exception gives clue on where the region may have moved.
+     * 
+ */ + boolean hasHostname(); + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions such as region moved
+     * where exception gives clue on where the region may have moved.
+     * 
+ */ + java.lang.String getHostname(); + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions such as region moved
+     * where exception gives clue on where the region may have moved.
+     * 
+ */ + com.google.protobuf.ByteString + getHostnameBytes(); + + // optional int32 port = 4; + /** + * optional int32 port = 4; + */ + boolean hasPort(); + /** + * optional int32 port = 4; + */ + int getPort(); + + // optional bool do_not_retry = 5; + /** + * optional bool do_not_retry = 5; + * + *
+     * Set if we are NOT to retry on receipt of this exception
+     * 
+ */ + boolean hasDoNotRetry(); + /** + * optional bool do_not_retry = 5; + * + *
+     * Set if we are NOT to retry on receipt of this exception
+     * 
+ */ + boolean getDoNotRetry(); + } + /** + * Protobuf type {@code hbase.pb.ExceptionResponse} + * + *
+   * At the RPC layer, this message is used to carry
+   * the server side exception to the RPC client.
+   * 
+ */ + public static final class ExceptionResponse extends + com.google.protobuf.GeneratedMessage + implements ExceptionResponseOrBuilder { + // Use ExceptionResponse.newBuilder() to construct. + private ExceptionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ExceptionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ExceptionResponse defaultInstance; + public static ExceptionResponse getDefaultInstance() { + return defaultInstance; + } + + public ExceptionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ExceptionResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + exceptionClassName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + stackTrace_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + hostname_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + port_ = input.readInt32(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + doNotRetry_ = input.readBool(); + break; + } + } + } + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ExceptionResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExceptionResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional string exception_class_name = 1; + public static final int EXCEPTION_CLASS_NAME_FIELD_NUMBER = 1; + private java.lang.Object exceptionClassName_; + /** + * optional string exception_class_name = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + public boolean hasExceptionClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string exception_class_name = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + public java.lang.String getExceptionClassName() { + java.lang.Object ref = exceptionClassName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + exceptionClassName_ = s; + } + return s; + } + } + /** + * optional string exception_class_name = 1; + * + *
+     * Class name of the exception thrown from the server
+     * 
+ */ + public com.google.protobuf.ByteString + getExceptionClassNameBytes() { + java.lang.Object ref = exceptionClassName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + exceptionClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string stack_trace = 2; + public static final int STACK_TRACE_FIELD_NUMBER = 2; + private java.lang.Object stackTrace_; + /** + * optional string stack_trace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + public boolean hasStackTrace() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional string stack_trace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + public java.lang.String getStackTrace() { + java.lang.Object ref = stackTrace_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + stackTrace_ = s; + } + return s; + } + } + /** + * optional string stack_trace = 2; + * + *
+     * Exception stack trace from the server side
+     * 
+ */ + public com.google.protobuf.ByteString + getStackTraceBytes() { + java.lang.Object ref = stackTrace_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + stackTrace_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string hostname = 3; + public static final int HOSTNAME_FIELD_NUMBER = 3; + private java.lang.Object hostname_; + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions such as region moved
+     * where exception gives clue on where the region may have moved.
+     * 
+ */ + public boolean hasHostname() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions such as region moved
+     * where exception gives clue on where the region may have moved.
+     * 
+ */ + public java.lang.String getHostname() { + java.lang.Object ref = hostname_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + hostname_ = s; + } + return s; + } + } + /** + * optional string hostname = 3; + * + *
+     * Optional hostname.  Filled in for some exceptions such as region moved
+     * where exception gives clue on where the region may have moved.
+     * 
+ */ + public com.google.protobuf.ByteString + getHostnameBytes() { + java.lang.Object ref = hostname_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + hostname_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional int32 port = 4; + public static final int PORT_FIELD_NUMBER = 4; + private int port_; + /** + * optional int32 port = 4; + */ + public boolean hasPort() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional int32 port = 4; + */ + public int getPort() { + return port_; + } + + // optional bool do_not_retry = 5; + public static final int DO_NOT_RETRY_FIELD_NUMBER = 5; + private boolean doNotRetry_; + /** + * optional bool do_not_retry = 5; + * + *
+     * Set if we are NOT to retry on receipt of this exception
+     * 
+ */ + public boolean hasDoNotRetry() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional bool do_not_retry = 5; + * + *
+     * Set if we are NOT to retry on receipt of this exception
+     * 
+ */ + public boolean getDoNotRetry() { + return doNotRetry_; + } + + private void initFields() { + exceptionClassName_ = ""; + stackTrace_ = ""; + hostname_ = ""; + port_ = 0; + doNotRetry_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getExceptionClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getStackTraceBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getHostnameBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeInt32(4, port_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(5, doNotRetry_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getExceptionClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getStackTraceBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getHostnameBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(4, port_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, doNotRetry_); + } + size += 
getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) obj; + + boolean result = true; + result = result && (hasExceptionClassName() == other.hasExceptionClassName()); + if (hasExceptionClassName()) { + result = result && getExceptionClassName() + .equals(other.getExceptionClassName()); + } + result = result && (hasStackTrace() == other.hasStackTrace()); + if (hasStackTrace()) { + result = result && getStackTrace() + .equals(other.getStackTrace()); + } + result = result && (hasHostname() == other.hasHostname()); + if (hasHostname()) { + result = result && getHostname() + .equals(other.getHostname()); + } + result = result && (hasPort() == other.hasPort()); + if (hasPort()) { + result = result && (getPort() + == other.getPort()); + } + result = result && (hasDoNotRetry() == other.hasDoNotRetry()); + if (hasDoNotRetry()) { + result = result && (getDoNotRetry() + == other.getDoNotRetry()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasExceptionClassName()) { + hash = (37 * hash) + EXCEPTION_CLASS_NAME_FIELD_NUMBER; + hash = (53 * hash) + 
getExceptionClassName().hashCode(); + } + if (hasStackTrace()) { + hash = (37 * hash) + STACK_TRACE_FIELD_NUMBER; + hash = (53 * hash) + getStackTrace().hashCode(); + } + if (hasHostname()) { + hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; + hash = (53 * hash) + getHostname().hashCode(); + } + if (hasPort()) { + hash = (37 * hash) + PORT_FIELD_NUMBER; + hash = (53 * hash) + getPort(); + } + if (hasDoNotRetry()) { + hash = (37 * hash) + DO_NOT_RETRY_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getDoNotRetry()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + java.io.InputStream 
input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.ExceptionResponse} + * + *
+     * At the RPC layer, this message is used to carry
+     * the server side exception to the RPC client.
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + exceptionClassName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + stackTrace_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + hostname_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + port_ = 0; + bitField0_ = (bitField0_ & ~0x00000008); + doNotRetry_ = false; + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; + } 
+ + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.exceptionClassName_ = exceptionClassName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.stackTrace_ = stackTrace_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.hostname_ = hostname_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.port_ = port_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.doNotRetry_ = doNotRetry_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance()) return this; + if (other.hasExceptionClassName()) { + bitField0_ |= 0x00000001; + exceptionClassName_ = other.exceptionClassName_; + onChanged(); + } + if (other.hasStackTrace()) { + bitField0_ |= 0x00000002; + stackTrace_ = other.stackTrace_; + onChanged(); + } + if (other.hasHostname()) { + bitField0_ |= 0x00000004; + hostname_ = other.hostname_; + onChanged(); + } + if (other.hasPort()) { + setPort(other.getPort()); + } + if (other.hasDoNotRetry()) { + setDoNotRetry(other.getDoNotRetry()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional string exception_class_name = 1; + private java.lang.Object exceptionClassName_ = ""; + /** + * optional string exception_class_name = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public boolean hasExceptionClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string exception_class_name = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public java.lang.String getExceptionClassName() { + java.lang.Object ref = exceptionClassName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + exceptionClassName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string exception_class_name = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public com.google.protobuf.ByteString + getExceptionClassNameBytes() { + java.lang.Object ref = exceptionClassName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + exceptionClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string exception_class_name = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public Builder setExceptionClassName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + exceptionClassName_ = value; + onChanged(); + return this; + } + /** + * optional string exception_class_name = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public Builder clearExceptionClassName() { + bitField0_ = (bitField0_ & ~0x00000001); + exceptionClassName_ = getDefaultInstance().getExceptionClassName(); + onChanged(); + return this; + } + /** + * optional string exception_class_name = 1; + * + *
+       * Class name of the exception thrown from the server
+       * 
+ */ + public Builder setExceptionClassNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + exceptionClassName_ = value; + onChanged(); + return this; + } + + // optional string stack_trace = 2; + private java.lang.Object stackTrace_ = ""; + /** + * optional string stack_trace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public boolean hasStackTrace() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional string stack_trace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public java.lang.String getStackTrace() { + java.lang.Object ref = stackTrace_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + stackTrace_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string stack_trace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public com.google.protobuf.ByteString + getStackTraceBytes() { + java.lang.Object ref = stackTrace_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + stackTrace_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string stack_trace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public Builder setStackTrace( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + stackTrace_ = value; + onChanged(); + return this; + } + /** + * optional string stack_trace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public Builder clearStackTrace() { + bitField0_ = (bitField0_ & ~0x00000002); + stackTrace_ = getDefaultInstance().getStackTrace(); + onChanged(); + return this; + } + /** + * optional string stack_trace = 2; + * + *
+       * Exception stack trace from the server side
+       * 
+ */ + public Builder setStackTraceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + stackTrace_ = value; + onChanged(); + return this; + } + + // optional string hostname = 3; + private java.lang.Object hostname_ = ""; + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions such as region moved
+       * where exception gives clue on where the region may have moved.
+       * 
+ */ + public boolean hasHostname() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions such as region moved
+       * where exception gives clue on where the region may have moved.
+       * 
+ */ + public java.lang.String getHostname() { + java.lang.Object ref = hostname_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + hostname_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions such as region moved
+       * where exception gives clue on where the region may have moved.
+       * 
+ */ + public com.google.protobuf.ByteString + getHostnameBytes() { + java.lang.Object ref = hostname_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + hostname_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions such as region moved
+       * where exception gives clue on where the region may have moved.
+       * 
+ */ + public Builder setHostname( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + hostname_ = value; + onChanged(); + return this; + } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions such as region moved
+       * where exception gives clue on where the region may have moved.
+       * 
+ */ + public Builder clearHostname() { + bitField0_ = (bitField0_ & ~0x00000004); + hostname_ = getDefaultInstance().getHostname(); + onChanged(); + return this; + } + /** + * optional string hostname = 3; + * + *
+       * Optional hostname.  Filled in for some exceptions such as region moved
+       * where exception gives clue on where the region may have moved.
+       * 
+ */ + public Builder setHostnameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + hostname_ = value; + onChanged(); + return this; + } + + // optional int32 port = 4; + private int port_ ; + /** + * optional int32 port = 4; + */ + public boolean hasPort() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional int32 port = 4; + */ + public int getPort() { + return port_; + } + /** + * optional int32 port = 4; + */ + public Builder setPort(int value) { + bitField0_ |= 0x00000008; + port_ = value; + onChanged(); + return this; + } + /** + * optional int32 port = 4; + */ + public Builder clearPort() { + bitField0_ = (bitField0_ & ~0x00000008); + port_ = 0; + onChanged(); + return this; + } + + // optional bool do_not_retry = 5; + private boolean doNotRetry_ ; + /** + * optional bool do_not_retry = 5; * *
-       * Length of the following cell block.  Could calculate it but convenient having it too hand.
+       * Set if we are NOT to retry on receipt of this exception
        * 
*/ - public boolean hasLength() { - return ((bitField0_ & 0x00000001) == 0x00000001); + public boolean hasDoNotRetry() { + return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional uint32 length = 1; + * optional bool do_not_retry = 5; * *
-       * Length of the following cell block.  Could calculate it but convenient having it too hand.
+       * Set if we are NOT to retry on receipt of this exception
        * 
*/ - public int getLength() { - return length_; + public boolean getDoNotRetry() { + return doNotRetry_; } /** - * optional uint32 length = 1; + * optional bool do_not_retry = 5; * *
-       * Length of the following cell block.  Could calculate it but convenient having it too hand.
+       * Set if we are NOT to retry on receipt of this exception
        * 
*/ - public Builder setLength(int value) { - bitField0_ |= 0x00000001; - length_ = value; + public Builder setDoNotRetry(boolean value) { + bitField0_ |= 0x00000010; + doNotRetry_ = value; onChanged(); return this; } /** - * optional uint32 length = 1; + * optional bool do_not_retry = 5; * *
-       * Length of the following cell block.  Could calculate it but convenient having it too hand.
+       * Set if we are NOT to retry on receipt of this exception
        * 
*/ - public Builder clearLength() { - bitField0_ = (bitField0_ & ~0x00000001); - length_ = 0; + public Builder clearDoNotRetry() { + bitField0_ = (bitField0_ & ~0x00000010); + doNotRetry_ = false; onChanged(); return this; } - // @@protoc_insertion_point(builder_scope:hbase.pb.CellBlockMeta) + // @@protoc_insertion_point(builder_scope:hbase.pb.ExceptionResponse) } static { - defaultInstance = new CellBlockMeta(true); + defaultInstance = new ExceptionResponse(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:hbase.pb.CellBlockMeta) + // @@protoc_insertion_point(class_scope:hbase.pb.ExceptionResponse) } - public interface ExceptionResponseOrBuilder + public interface CryptoCipherMetaOrBuilder extends com.google.protobuf.MessageOrBuilder { - // optional string exception_class_name = 1; + // required string transformation = 1; /** - * optional string exception_class_name = 1; - * - *
-     * Class name of the exception thrown from the server
-     * 
+ * required string transformation = 1; */ - boolean hasExceptionClassName(); + boolean hasTransformation(); /** - * optional string exception_class_name = 1; - * - *
-     * Class name of the exception thrown from the server
-     * 
+ * required string transformation = 1; */ - java.lang.String getExceptionClassName(); + java.lang.String getTransformation(); /** - * optional string exception_class_name = 1; - * - *
-     * Class name of the exception thrown from the server
-     * 
+ * required string transformation = 1; */ com.google.protobuf.ByteString - getExceptionClassNameBytes(); + getTransformationBytes(); - // optional string stack_trace = 2; - /** - * optional string stack_trace = 2; - * - *
-     * Exception stack trace from the server side
-     * 
- */ - boolean hasStackTrace(); + // optional bytes inKey = 2; /** - * optional string stack_trace = 2; - * - *
-     * Exception stack trace from the server side
-     * 
+ * optional bytes inKey = 2; */ - java.lang.String getStackTrace(); + boolean hasInKey(); /** - * optional string stack_trace = 2; - * - *
-     * Exception stack trace from the server side
-     * 
+ * optional bytes inKey = 2; */ - com.google.protobuf.ByteString - getStackTraceBytes(); + com.google.protobuf.ByteString getInKey(); - // optional string hostname = 3; + // optional bytes inIv = 3; /** - * optional string hostname = 3; - * - *
-     * Optional hostname.  Filled in for some exceptions such as region moved
-     * where exception gives clue on where the region may have moved.
-     * 
- */ - boolean hasHostname(); - /** - * optional string hostname = 3; - * - *
-     * Optional hostname.  Filled in for some exceptions such as region moved
-     * where exception gives clue on where the region may have moved.
-     * 
+ * optional bytes inIv = 3; */ - java.lang.String getHostname(); + boolean hasInIv(); /** - * optional string hostname = 3; - * - *
-     * Optional hostname.  Filled in for some exceptions such as region moved
-     * where exception gives clue on where the region may have moved.
-     * 
+ * optional bytes inIv = 3; */ - com.google.protobuf.ByteString - getHostnameBytes(); + com.google.protobuf.ByteString getInIv(); - // optional int32 port = 4; + // optional bytes outKey = 4; /** - * optional int32 port = 4; + * optional bytes outKey = 4; */ - boolean hasPort(); + boolean hasOutKey(); /** - * optional int32 port = 4; + * optional bytes outKey = 4; */ - int getPort(); + com.google.protobuf.ByteString getOutKey(); - // optional bool do_not_retry = 5; + // optional bytes outIv = 5; /** - * optional bool do_not_retry = 5; - * - *
-     * Set if we are NOT to retry on receipt of this exception
-     * 
+ * optional bytes outIv = 5; */ - boolean hasDoNotRetry(); + boolean hasOutIv(); /** - * optional bool do_not_retry = 5; - * - *
-     * Set if we are NOT to retry on receipt of this exception
-     * 
+ * optional bytes outIv = 5; */ - boolean getDoNotRetry(); + com.google.protobuf.ByteString getOutIv(); } /** - * Protobuf type {@code hbase.pb.ExceptionResponse} + * Protobuf type {@code hbase.pb.CryptoCipherMeta} * *
-   * At the RPC layer, this message is used to carry
-   * the server side exception to the RPC client.
+   **
+   * Cipher meta for Crypto
    * 
*/ - public static final class ExceptionResponse extends + public static final class CryptoCipherMeta extends com.google.protobuf.GeneratedMessage - implements ExceptionResponseOrBuilder { - // Use ExceptionResponse.newBuilder() to construct. - private ExceptionResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + implements CryptoCipherMetaOrBuilder { + // Use CryptoCipherMeta.newBuilder() to construct. + private CryptoCipherMeta(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private ExceptionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private CryptoCipherMeta(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final ExceptionResponse defaultInstance; - public static ExceptionResponse getDefaultInstance() { + private static final CryptoCipherMeta defaultInstance; + public static CryptoCipherMeta getDefaultInstance() { return defaultInstance; } - public ExceptionResponse getDefaultInstanceForType() { + public CryptoCipherMeta getDefaultInstanceForType() { return defaultInstance; } @@ -2713,7 +4711,7 @@ public final class RPCProtos { getUnknownFields() { return this.unknownFields; } - private ExceptionResponse( + private CryptoCipherMeta( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -2738,27 +4736,27 @@ public final class RPCProtos { } case 10: { bitField0_ |= 0x00000001; - exceptionClassName_ = input.readBytes(); + transformation_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; - stackTrace_ = input.readBytes(); + inKey_ = input.readBytes(); break; } case 26: { bitField0_ |= 0x00000004; - hostname_ = input.readBytes(); + inIv_ = input.readBytes(); break; } - case 32: { + case 34: { bitField0_ |= 0x00000008; - 
port_ = input.readInt32(); + outKey_ = input.readBytes(); break; } - case 40: { + case 42: { bitField0_ |= 0x00000010; - doNotRetry_ = input.readBool(); + outIv_ = input.readBytes(); break; } } @@ -2772,112 +4770,49 @@ public final class RPCProtos { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ExceptionResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ExceptionResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // optional string exception_class_name = 1; - public static final int EXCEPTION_CLASS_NAME_FIELD_NUMBER = 1; - private java.lang.Object exceptionClassName_; - /** - * optional string exception_class_name = 1; - * - *
-     * Class name of the exception thrown from the server
-     * 
- */ - public boolean hasExceptionClassName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional string exception_class_name = 1; - * - *
-     * Class name of the exception thrown from the server
-     * 
- */ - public java.lang.String getExceptionClassName() { - java.lang.Object ref = exceptionClassName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - exceptionClassName_ = s; - } - return s; - } - } - /** - * optional string exception_class_name = 1; - * - *
-     * Class name of the exception thrown from the server
-     * 
- */ - public com.google.protobuf.ByteString - getExceptionClassNameBytes() { - java.lang.Object ref = exceptionClassName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - exceptionClassName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CryptoCipherMeta parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CryptoCipherMeta(input, extensionRegistry); } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; } - // optional string stack_trace = 2; - public static final int STACK_TRACE_FIELD_NUMBER = 2; - private java.lang.Object stackTrace_; + private int bitField0_; + // required string transformation = 1; + public static final int TRANSFORMATION_FIELD_NUMBER = 1; + private java.lang.Object transformation_; /** - * optional string stack_trace = 2; - * - *
-     * Exception stack trace from the server side
-     * 
+ * required string transformation = 1; */ - public boolean hasStackTrace() { - return ((bitField0_ & 0x00000002) == 0x00000002); + public boolean hasTransformation() { + return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional string stack_trace = 2; - * - *
-     * Exception stack trace from the server side
-     * 
+ * required string transformation = 1; */ - public java.lang.String getStackTrace() { - java.lang.Object ref = stackTrace_; + public java.lang.String getTransformation() { + java.lang.Object ref = transformation_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { @@ -2885,142 +4820,108 @@ public final class RPCProtos { (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { - stackTrace_ = s; + transformation_ = s; } return s; } } /** - * optional string stack_trace = 2; - * - *
-     * Exception stack trace from the server side
-     * 
+ * required string transformation = 1; */ public com.google.protobuf.ByteString - getStackTraceBytes() { - java.lang.Object ref = stackTrace_; + getTransformationBytes() { + java.lang.Object ref = transformation_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - stackTrace_ = b; + transformation_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - // optional string hostname = 3; - public static final int HOSTNAME_FIELD_NUMBER = 3; - private java.lang.Object hostname_; + // optional bytes inKey = 2; + public static final int INKEY_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString inKey_; /** - * optional string hostname = 3; - * - *
-     * Optional hostname.  Filled in for some exceptions such as region moved
-     * where exception gives clue on where the region may have moved.
-     * 
+ * optional bytes inKey = 2; */ - public boolean hasHostname() { - return ((bitField0_ & 0x00000004) == 0x00000004); + public boolean hasInKey() { + return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional string hostname = 3; - * - *
-     * Optional hostname.  Filled in for some exceptions such as region moved
-     * where exception gives clue on where the region may have moved.
-     * 
+ * optional bytes inKey = 2; */ - public java.lang.String getHostname() { - java.lang.Object ref = hostname_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - hostname_ = s; - } - return s; - } + public com.google.protobuf.ByteString getInKey() { + return inKey_; } + + // optional bytes inIv = 3; + public static final int INIV_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString inIv_; /** - * optional string hostname = 3; - * - *
-     * Optional hostname.  Filled in for some exceptions such as region moved
-     * where exception gives clue on where the region may have moved.
-     * 
+ * optional bytes inIv = 3; */ - public com.google.protobuf.ByteString - getHostnameBytes() { - java.lang.Object ref = hostname_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - hostname_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } + public boolean hasInIv() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional bytes inIv = 3; + */ + public com.google.protobuf.ByteString getInIv() { + return inIv_; } - // optional int32 port = 4; - public static final int PORT_FIELD_NUMBER = 4; - private int port_; + // optional bytes outKey = 4; + public static final int OUTKEY_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString outKey_; /** - * optional int32 port = 4; + * optional bytes outKey = 4; */ - public boolean hasPort() { + public boolean hasOutKey() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional int32 port = 4; + * optional bytes outKey = 4; */ - public int getPort() { - return port_; + public com.google.protobuf.ByteString getOutKey() { + return outKey_; } - // optional bool do_not_retry = 5; - public static final int DO_NOT_RETRY_FIELD_NUMBER = 5; - private boolean doNotRetry_; + // optional bytes outIv = 5; + public static final int OUTIV_FIELD_NUMBER = 5; + private com.google.protobuf.ByteString outIv_; /** - * optional bool do_not_retry = 5; - * - *
-     * Set if we are NOT to retry on receipt of this exception
-     * 
+ * optional bytes outIv = 5; */ - public boolean hasDoNotRetry() { + public boolean hasOutIv() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional bool do_not_retry = 5; - * - *
-     * Set if we are NOT to retry on receipt of this exception
-     * 
+ * optional bytes outIv = 5; */ - public boolean getDoNotRetry() { - return doNotRetry_; + public com.google.protobuf.ByteString getOutIv() { + return outIv_; } private void initFields() { - exceptionClassName_ = ""; - stackTrace_ = ""; - hostname_ = ""; - port_ = 0; - doNotRetry_ = false; + transformation_ = ""; + inKey_ = com.google.protobuf.ByteString.EMPTY; + inIv_ = com.google.protobuf.ByteString.EMPTY; + outKey_ = com.google.protobuf.ByteString.EMPTY; + outIv_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; + if (!hasTransformation()) { + memoizedIsInitialized = 0; + return false; + } memoizedIsInitialized = 1; return true; } @@ -3029,19 +4930,19 @@ public final class RPCProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getExceptionClassNameBytes()); + output.writeBytes(1, getTransformationBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getStackTraceBytes()); + output.writeBytes(2, inKey_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getHostnameBytes()); + output.writeBytes(3, inIv_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeInt32(4, port_); + output.writeBytes(4, outKey_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeBool(5, doNotRetry_); + output.writeBytes(5, outIv_); } getUnknownFields().writeTo(output); } @@ -3054,23 +4955,23 @@ public final class RPCProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getExceptionClassNameBytes()); + .computeBytesSize(1, getTransformationBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, 
getStackTraceBytes()); + .computeBytesSize(2, inKey_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getHostnameBytes()); + .computeBytesSize(3, inIv_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream - .computeInt32Size(4, port_); + .computeBytesSize(4, outKey_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream - .computeBoolSize(5, doNotRetry_); + .computeBytesSize(5, outIv_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -3089,36 +4990,36 @@ public final class RPCProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) obj; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta) obj; boolean result = true; - result = result && (hasExceptionClassName() == other.hasExceptionClassName()); - if (hasExceptionClassName()) { - result = result && getExceptionClassName() - .equals(other.getExceptionClassName()); - } - result = result && (hasStackTrace() == other.hasStackTrace()); - if (hasStackTrace()) { - result = result && getStackTrace() - .equals(other.getStackTrace()); - } - result = result && (hasHostname() == other.hasHostname()); - if (hasHostname()) { - result = result && getHostname() - .equals(other.getHostname()); - } - result = result && (hasPort() == other.hasPort()); - if (hasPort()) { - result = result && (getPort() - == other.getPort()); - } - result = result && (hasDoNotRetry() == 
other.hasDoNotRetry()); - if (hasDoNotRetry()) { - result = result && (getDoNotRetry() - == other.getDoNotRetry()); + result = result && (hasTransformation() == other.hasTransformation()); + if (hasTransformation()) { + result = result && getTransformation() + .equals(other.getTransformation()); + } + result = result && (hasInKey() == other.hasInKey()); + if (hasInKey()) { + result = result && getInKey() + .equals(other.getInKey()); + } + result = result && (hasInIv() == other.hasInIv()); + if (hasInIv()) { + result = result && getInIv() + .equals(other.getInIv()); + } + result = result && (hasOutKey() == other.hasOutKey()); + if (hasOutKey()) { + result = result && getOutKey() + .equals(other.getOutKey()); + } + result = result && (hasOutIv() == other.hasOutIv()); + if (hasOutIv()) { + result = result && getOutIv() + .equals(other.getOutIv()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -3133,78 +5034,78 @@ public final class RPCProtos { } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasExceptionClassName()) { - hash = (37 * hash) + EXCEPTION_CLASS_NAME_FIELD_NUMBER; - hash = (53 * hash) + getExceptionClassName().hashCode(); + if (hasTransformation()) { + hash = (37 * hash) + TRANSFORMATION_FIELD_NUMBER; + hash = (53 * hash) + getTransformation().hashCode(); } - if (hasStackTrace()) { - hash = (37 * hash) + STACK_TRACE_FIELD_NUMBER; - hash = (53 * hash) + getStackTrace().hashCode(); + if (hasInKey()) { + hash = (37 * hash) + INKEY_FIELD_NUMBER; + hash = (53 * hash) + getInKey().hashCode(); } - if (hasHostname()) { - hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; - hash = (53 * hash) + getHostname().hashCode(); + if (hasInIv()) { + hash = (37 * hash) + INIV_FIELD_NUMBER; + hash = (53 * hash) + getInIv().hashCode(); } - if (hasPort()) { - hash = (37 * hash) + PORT_FIELD_NUMBER; - hash = (53 * hash) + getPort(); + if (hasOutKey()) { + hash = (37 * hash) + OUTKEY_FIELD_NUMBER; + hash = (53 * hash) + 
getOutKey().hashCode(); } - if (hasDoNotRetry()) { - hash = (37 * hash) + DO_NOT_RETRY_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getDoNotRetry()); + if (hasOutIv()) { + hash = (37 * hash) + OUTIV_FIELD_NUMBER; + hash = (53 * hash) + getOutIv().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta 
parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -3213,7 +5114,7 @@ public final class RPCProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); 
} - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -3225,29 +5126,29 @@ public final class RPCProtos { return builder; } /** - * Protobuf type {@code hbase.pb.ExceptionResponse} + * Protobuf type {@code hbase.pb.CryptoCipherMeta} * *
-     * At the RPC layer, this message is used to carry
-     * the server side exception to the RPC client.
+     **
+     * Cipher meta for Crypto
      * 
*/ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMetaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.Builder.class); } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -3267,15 +5168,15 @@ public final class RPCProtos { public Builder clear() { super.clear(); - exceptionClassName_ = ""; + transformation_ = ""; bitField0_ = (bitField0_ & ~0x00000001); - stackTrace_ = ""; + inKey_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); - hostname_ = ""; + inIv_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); - port_ = 0; + 
outKey_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000008); - doNotRetry_ = false; + outIv_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000010); return this; } @@ -3286,87 +5187,87 @@ public final class RPCProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_ExceptionResponse_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_hbase_pb_CryptoCipherMeta_descriptor; } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse(this); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta(this); 
int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.exceptionClassName_ = exceptionClassName_; + result.transformation_ = transformation_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } - result.stackTrace_ = stackTrace_; + result.inKey_ = inKey_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } - result.hostname_ = hostname_; + result.inIv_ = inIv_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } - result.port_ = port_; + result.outKey_ = outKey_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } - result.doNotRetry_ = doNotRetry_; + result.outIv_ = outIv_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance()) return this; - if (other.hasExceptionClassName()) { + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta.getDefaultInstance()) return this; + if (other.hasTransformation()) { bitField0_ |= 0x00000001; - exceptionClassName_ = other.exceptionClassName_; + transformation_ = 
other.transformation_; onChanged(); } - if (other.hasStackTrace()) { - bitField0_ |= 0x00000002; - stackTrace_ = other.stackTrace_; - onChanged(); + if (other.hasInKey()) { + setInKey(other.getInKey()); } - if (other.hasHostname()) { - bitField0_ |= 0x00000004; - hostname_ = other.hostname_; - onChanged(); + if (other.hasInIv()) { + setInIv(other.getInIv()); } - if (other.hasPort()) { - setPort(other.getPort()); + if (other.hasOutKey()) { + setOutKey(other.getOutKey()); } - if (other.hasDoNotRetry()) { - setDoNotRetry(other.getDoNotRetry()); + if (other.hasOutIv()) { + setOutIv(other.getOutIv()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { + if (!hasTransformation()) { + + return false; + } return true; } @@ -3374,11 +5275,11 @@ public final class RPCProtos { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parsedMessage = null; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CryptoCipherMeta) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -3389,397 +5290,233 @@ public final class RPCProtos { } private int bitField0_; - // optional string exception_class_name = 1; - private java.lang.Object exceptionClassName_ = ""; + // required string transformation = 1; + private java.lang.Object transformation_ = ""; /** - * optional string exception_class_name = 1; - * - *
-       * Class name of the exception thrown from the server
-       * 
+ * required string transformation = 1; */ - public boolean hasExceptionClassName() { + public boolean hasTransformation() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * optional string exception_class_name = 1; - * - *
-       * Class name of the exception thrown from the server
-       * 
+ * required string transformation = 1; */ - public java.lang.String getExceptionClassName() { - java.lang.Object ref = exceptionClassName_; + public java.lang.String getTransformation() { + java.lang.Object ref = transformation_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); - exceptionClassName_ = s; + transformation_ = s; return s; } else { return (java.lang.String) ref; } } /** - * optional string exception_class_name = 1; - * - *
-       * Class name of the exception thrown from the server
-       * 
+ * required string transformation = 1; */ public com.google.protobuf.ByteString - getExceptionClassNameBytes() { - java.lang.Object ref = exceptionClassName_; + getTransformationBytes() { + java.lang.Object ref = transformation_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - exceptionClassName_ = b; + transformation_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** - * optional string exception_class_name = 1; - * - *
-       * Class name of the exception thrown from the server
-       * 
+ * required string transformation = 1; */ - public Builder setExceptionClassName( + public Builder setTransformation( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; - exceptionClassName_ = value; + transformation_ = value; onChanged(); return this; } /** - * optional string exception_class_name = 1; - * - *
-       * Class name of the exception thrown from the server
-       * 
+ * required string transformation = 1; */ - public Builder clearExceptionClassName() { + public Builder clearTransformation() { bitField0_ = (bitField0_ & ~0x00000001); - exceptionClassName_ = getDefaultInstance().getExceptionClassName(); + transformation_ = getDefaultInstance().getTransformation(); onChanged(); return this; } /** - * optional string exception_class_name = 1; - * - *
-       * Class name of the exception thrown from the server
-       * 
+ * required string transformation = 1; */ - public Builder setExceptionClassNameBytes( + public Builder setTransformationBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; - exceptionClassName_ = value; + transformation_ = value; onChanged(); return this; } - // optional string stack_trace = 2; - private java.lang.Object stackTrace_ = ""; + // optional bytes inKey = 2; + private com.google.protobuf.ByteString inKey_ = com.google.protobuf.ByteString.EMPTY; /** - * optional string stack_trace = 2; - * - *
-       * Exception stack trace from the server side
-       * 
+ * optional bytes inKey = 2; */ - public boolean hasStackTrace() { + public boolean hasInKey() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional string stack_trace = 2; - * - *
-       * Exception stack trace from the server side
-       * 
- */ - public java.lang.String getStackTrace() { - java.lang.Object ref = stackTrace_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - stackTrace_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string stack_trace = 2; - * - *
-       * Exception stack trace from the server side
-       * 
+ * optional bytes inKey = 2; */ - public com.google.protobuf.ByteString - getStackTraceBytes() { - java.lang.Object ref = stackTrace_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - stackTrace_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } + public com.google.protobuf.ByteString getInKey() { + return inKey_; } /** - * optional string stack_trace = 2; - * - *
-       * Exception stack trace from the server side
-       * 
+ * optional bytes inKey = 2; */ - public Builder setStackTrace( - java.lang.String value) { + public Builder setInKey(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; - stackTrace_ = value; + inKey_ = value; onChanged(); return this; } /** - * optional string stack_trace = 2; - * - *
-       * Exception stack trace from the server side
-       * 
+ * optional bytes inKey = 2; */ - public Builder clearStackTrace() { + public Builder clearInKey() { bitField0_ = (bitField0_ & ~0x00000002); - stackTrace_ = getDefaultInstance().getStackTrace(); - onChanged(); - return this; - } - /** - * optional string stack_trace = 2; - * - *
-       * Exception stack trace from the server side
-       * 
- */ - public Builder setStackTraceBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - stackTrace_ = value; + inKey_ = getDefaultInstance().getInKey(); onChanged(); return this; } - // optional string hostname = 3; - private java.lang.Object hostname_ = ""; + // optional bytes inIv = 3; + private com.google.protobuf.ByteString inIv_ = com.google.protobuf.ByteString.EMPTY; /** - * optional string hostname = 3; - * - *
-       * Optional hostname.  Filled in for some exceptions such as region moved
-       * where exception gives clue on where the region may have moved.
-       * 
+ * optional bytes inIv = 3; */ - public boolean hasHostname() { + public boolean hasInIv() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * optional string hostname = 3; - * - *
-       * Optional hostname.  Filled in for some exceptions such as region moved
-       * where exception gives clue on where the region may have moved.
-       * 
- */ - public java.lang.String getHostname() { - java.lang.Object ref = hostname_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - hostname_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string hostname = 3; - * - *
-       * Optional hostname.  Filled in for some exceptions such as region moved
-       * where exception gives clue on where the region may have moved.
-       * 
+ * optional bytes inIv = 3; */ - public com.google.protobuf.ByteString - getHostnameBytes() { - java.lang.Object ref = hostname_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - hostname_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } + public com.google.protobuf.ByteString getInIv() { + return inIv_; } /** - * optional string hostname = 3; - * - *
-       * Optional hostname.  Filled in for some exceptions such as region moved
-       * where exception gives clue on where the region may have moved.
-       * 
+ * optional bytes inIv = 3; */ - public Builder setHostname( - java.lang.String value) { + public Builder setInIv(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; - hostname_ = value; + inIv_ = value; onChanged(); return this; } /** - * optional string hostname = 3; - * - *
-       * Optional hostname.  Filled in for some exceptions such as region moved
-       * where exception gives clue on where the region may have moved.
-       * 
+ * optional bytes inIv = 3; */ - public Builder clearHostname() { + public Builder clearInIv() { bitField0_ = (bitField0_ & ~0x00000004); - hostname_ = getDefaultInstance().getHostname(); - onChanged(); - return this; - } - /** - * optional string hostname = 3; - * - *
-       * Optional hostname.  Filled in for some exceptions such as region moved
-       * where exception gives clue on where the region may have moved.
-       * 
- */ - public Builder setHostnameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - hostname_ = value; + inIv_ = getDefaultInstance().getInIv(); onChanged(); return this; } - // optional int32 port = 4; - private int port_ ; + // optional bytes outKey = 4; + private com.google.protobuf.ByteString outKey_ = com.google.protobuf.ByteString.EMPTY; /** - * optional int32 port = 4; + * optional bytes outKey = 4; */ - public boolean hasPort() { + public boolean hasOutKey() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** - * optional int32 port = 4; + * optional bytes outKey = 4; */ - public int getPort() { - return port_; + public com.google.protobuf.ByteString getOutKey() { + return outKey_; } /** - * optional int32 port = 4; + * optional bytes outKey = 4; */ - public Builder setPort(int value) { - bitField0_ |= 0x00000008; - port_ = value; + public Builder setOutKey(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + outKey_ = value; onChanged(); return this; } /** - * optional int32 port = 4; + * optional bytes outKey = 4; */ - public Builder clearPort() { + public Builder clearOutKey() { bitField0_ = (bitField0_ & ~0x00000008); - port_ = 0; + outKey_ = getDefaultInstance().getOutKey(); onChanged(); return this; } - // optional bool do_not_retry = 5; - private boolean doNotRetry_ ; + // optional bytes outIv = 5; + private com.google.protobuf.ByteString outIv_ = com.google.protobuf.ByteString.EMPTY; /** - * optional bool do_not_retry = 5; - * - *
-       * Set if we are NOT to retry on receipt of this exception
-       * 
+ * optional bytes outIv = 5; */ - public boolean hasDoNotRetry() { + public boolean hasOutIv() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** - * optional bool do_not_retry = 5; - * - *
-       * Set if we are NOT to retry on receipt of this exception
-       * 
+ * optional bytes outIv = 5; */ - public boolean getDoNotRetry() { - return doNotRetry_; + public com.google.protobuf.ByteString getOutIv() { + return outIv_; } /** - * optional bool do_not_retry = 5; - * - *
-       * Set if we are NOT to retry on receipt of this exception
-       * 
+ * optional bytes outIv = 5; */ - public Builder setDoNotRetry(boolean value) { - bitField0_ |= 0x00000010; - doNotRetry_ = value; + public Builder setOutIv(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + outIv_ = value; onChanged(); return this; } /** - * optional bool do_not_retry = 5; - * - *
-       * Set if we are NOT to retry on receipt of this exception
-       * 
+ * optional bytes outIv = 5; */ - public Builder clearDoNotRetry() { + public Builder clearOutIv() { bitField0_ = (bitField0_ & ~0x00000010); - doNotRetry_ = false; + outIv_ = getDefaultInstance().getOutIv(); onChanged(); return this; } - // @@protoc_insertion_point(builder_scope:hbase.pb.ExceptionResponse) + // @@protoc_insertion_point(builder_scope:hbase.pb.CryptoCipherMeta) } static { - defaultInstance = new ExceptionResponse(true); + defaultInstance = new CryptoCipherMeta(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:hbase.pb.ExceptionResponse) + // @@protoc_insertion_point(class_scope:hbase.pb.CryptoCipherMeta) } public interface RequestHeaderOrBuilder @@ -6192,6 +7929,11 @@ public final class RPCProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_ConnectionHeaderResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_ConnectionHeaderResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CellBlockMeta_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -6202,6 +7944,11 @@ public final class RPCProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_CryptoCipherMeta_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_CryptoCipherMeta_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_RequestHeader_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -6222,26 +7969,32 @@ public final class RPCProtos { 
java.lang.String[] descriptorData = { "\n\tRPC.proto\022\010hbase.pb\032\rTracing.proto\032\013HB" + "ase.proto\"<\n\017UserInformation\022\026\n\016effectiv" + - "e_user\030\001 \002(\t\022\021\n\treal_user\030\002 \001(\t\"\310\001\n\020Conn" + + "e_user\030\001 \002(\t\022\021\n\treal_user\030\002 \001(\t\"\362\001\n\020Conn" + "ectionHeader\022,\n\tuser_info\030\001 \001(\0132\031.hbase." + "pb.UserInformation\022\024\n\014service_name\030\002 \001(\t" + "\022\036\n\026cell_block_codec_class\030\003 \001(\t\022#\n\033cell" + "_block_compressor_class\030\004 \001(\t\022+\n\014version" + - "_info\030\005 \001(\0132\025.hbase.pb.VersionInfo\"\037\n\rCe" + - "llBlockMeta\022\016\n\006length\030\001 \001(\r\"|\n\021Exception" + - "Response\022\034\n\024exception_class_name\030\001 \001(\t\022\023", - "\n\013stack_trace\030\002 \001(\t\022\020\n\010hostname\030\003 \001(\t\022\014\n" + - "\004port\030\004 \001(\005\022\024\n\014do_not_retry\030\005 \001(\010\"\311\001\n\rRe" + - "questHeader\022\017\n\007call_id\030\001 \001(\r\022&\n\ntrace_in" + - "fo\030\002 \001(\0132\022.hbase.pb.RPCTInfo\022\023\n\013method_n" + - "ame\030\003 \001(\t\022\025\n\rrequest_param\030\004 \001(\010\0220\n\017cell" + - "_block_meta\030\005 \001(\0132\027.hbase.pb.CellBlockMe" + - "ta\022\020\n\010priority\030\006 \001(\r\022\017\n\007timeout\030\007 \001(\r\"\203\001" + - "\n\016ResponseHeader\022\017\n\007call_id\030\001 \001(\r\022.\n\texc" + - "eption\030\002 \001(\0132\033.hbase.pb.ExceptionRespons" + - "e\0220\n\017cell_block_meta\030\003 \001(\0132\027.hbase.pb.Ce", - "llBlockMetaB<\n*org.apache.hadoop.hbase.p" + - "rotobuf.generatedB\tRPCProtosH\001\240\001\001" + "_info\030\005 \001(\0132\025.hbase.pb.VersionInfo\022(\n rp" + + "c_crypto_cipher_transformation\030\006 \001(\t\"R\n\030" + + "ConnectionHeaderResponse\0226\n\022crypto_ciphe", + "r_meta\030\001 \001(\0132\032.hbase.pb.CryptoCipherMeta" + + "\"\037\n\rCellBlockMeta\022\016\n\006length\030\001 \001(\r\"|\n\021Exc" + 
+ "eptionResponse\022\034\n\024exception_class_name\030\001" + + " \001(\t\022\023\n\013stack_trace\030\002 \001(\t\022\020\n\010hostname\030\003 " + + "\001(\t\022\014\n\004port\030\004 \001(\005\022\024\n\014do_not_retry\030\005 \001(\010\"" + + "f\n\020CryptoCipherMeta\022\026\n\016transformation\030\001 " + + "\002(\t\022\r\n\005inKey\030\002 \001(\014\022\014\n\004inIv\030\003 \001(\014\022\016\n\006outK" + + "ey\030\004 \001(\014\022\r\n\005outIv\030\005 \001(\014\"\311\001\n\rRequestHeade" + + "r\022\017\n\007call_id\030\001 \001(\r\022&\n\ntrace_info\030\002 \001(\0132\022" + + ".hbase.pb.RPCTInfo\022\023\n\013method_name\030\003 \001(\t\022", + "\025\n\rrequest_param\030\004 \001(\010\0220\n\017cell_block_met" + + "a\030\005 \001(\0132\027.hbase.pb.CellBlockMeta\022\020\n\010prio" + + "rity\030\006 \001(\r\022\017\n\007timeout\030\007 \001(\r\"\203\001\n\016Response" + + "Header\022\017\n\007call_id\030\001 \001(\r\022.\n\texception\030\002 \001" + + "(\0132\033.hbase.pb.ExceptionResponse\0220\n\017cell_" + + "block_meta\030\003 \001(\0132\027.hbase.pb.CellBlockMet" + + "aB<\n*org.apache.hadoop.hbase.protobuf.ge" + + "neratedB\tRPCProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -6259,27 +8012,39 @@ public final class RPCProtos { internal_static_hbase_pb_ConnectionHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ConnectionHeader_descriptor, - new java.lang.String[] { "UserInfo", "ServiceName", "CellBlockCodecClass", "CellBlockCompressorClass", "VersionInfo", }); - internal_static_hbase_pb_CellBlockMeta_descriptor = + new java.lang.String[] { "UserInfo", "ServiceName", "CellBlockCodecClass", "CellBlockCompressorClass", "VersionInfo", "RpcCryptoCipherTransformation", }); + internal_static_hbase_pb_ConnectionHeaderResponse_descriptor = 
getDescriptor().getMessageTypes().get(2); + internal_static_hbase_pb_ConnectionHeaderResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_ConnectionHeaderResponse_descriptor, + new java.lang.String[] { "CryptoCipherMeta", }); + internal_static_hbase_pb_CellBlockMeta_descriptor = + getDescriptor().getMessageTypes().get(3); internal_static_hbase_pb_CellBlockMeta_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_CellBlockMeta_descriptor, new java.lang.String[] { "Length", }); internal_static_hbase_pb_ExceptionResponse_descriptor = - getDescriptor().getMessageTypes().get(3); + getDescriptor().getMessageTypes().get(4); internal_static_hbase_pb_ExceptionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ExceptionResponse_descriptor, new java.lang.String[] { "ExceptionClassName", "StackTrace", "Hostname", "Port", "DoNotRetry", }); + internal_static_hbase_pb_CryptoCipherMeta_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_hbase_pb_CryptoCipherMeta_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_CryptoCipherMeta_descriptor, + new java.lang.String[] { "Transformation", "InKey", "InIv", "OutKey", "OutIv", }); internal_static_hbase_pb_RequestHeader_descriptor = - getDescriptor().getMessageTypes().get(4); + getDescriptor().getMessageTypes().get(6); internal_static_hbase_pb_RequestHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_RequestHeader_descriptor, new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParam", "CellBlockMeta", "Priority", "Timeout", }); internal_static_hbase_pb_ResponseHeader_descriptor = - getDescriptor().getMessageTypes().get(5); + getDescriptor().getMessageTypes().get(7); 
internal_static_hbase_pb_ResponseHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_ResponseHeader_descriptor, diff --git a/hbase-protocol/src/main/protobuf/RPC.proto b/hbase-protocol/src/main/protobuf/RPC.proto index 8413d25..04d5703 100644 --- a/hbase-protocol/src/main/protobuf/RPC.proto +++ b/hbase-protocol/src/main/protobuf/RPC.proto @@ -89,6 +89,14 @@ message ConnectionHeader { // Class must implement hadoop's CompressionCodec Interface. Can't compress if no codec. optional string cell_block_compressor_class = 4; optional VersionInfo version_info = 5; + // the transformation for rpc AES encryption with Apache Commons Crypto + optional string rpc_crypto_cipher_transformation = 6; +} + +// This is sent by rpc server to negotiate the data if necessary +message ConnectionHeaderResponse { + // To use Apache Commons Crypto, negotiate the metadata + optional CryptoCipherMeta crypto_cipher_meta = 1; } // Optional Cell block Message. Included in client RequestHeader @@ -112,6 +120,17 @@ message ExceptionResponse { optional bool do_not_retry = 5; } +/** + * Cipher meta for Crypto + */ +message CryptoCipherMeta { + required string transformation = 1; + optional bytes inKey = 2; + optional bytes inIv = 3; + optional bytes outKey = 4; + optional bytes outIv = 5; +} + // Header sent making a request. 
message RequestHeader { // Monotonically increasing call_id to keep track of RPC requests and their response diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index f611796..33119e5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -42,6 +42,7 @@ import java.nio.channels.Selector; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.nio.channels.WritableByteChannel; +import java.security.GeneralSecurityException; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; @@ -50,6 +51,7 @@ import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Properties; import java.util.Set; import java.util.Timer; import java.util.TimerTask; @@ -66,6 +68,10 @@ import javax.security.sasl.Sasl; import javax.security.sasl.SaslException; import javax.security.sasl.SaslServer; +import com.google.protobuf.ByteString; +import org.apache.commons.crypto.cipher.CryptoCipherFactory; +import org.apache.commons.crypto.random.CryptoRandom; +import org.apache.commons.crypto.random.CryptoRandomFactory; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -86,10 +92,12 @@ import org.apache.hadoop.hbase.io.ByteBufferInputStream; import org.apache.hadoop.hbase.io.ByteBufferListOutputStream; import org.apache.hadoop.hbase.io.ByteBufferOutputStream; import org.apache.hadoop.hbase.io.ByteBufferPool; +import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; import org.apache.hadoop.hbase.monitoring.TaskMonitor; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.VersionInfo; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.CellBlockMeta; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse; @@ -414,6 +422,10 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { this.response = new BufferChain(response); } + protected synchronized void setConnectionHeaderResponse(ByteBuffer response) { + this.response = new BufferChain(response); + } + protected synchronized void setResponse(Object m, final CellScanner cells, Throwable t, String errorMsg) { if (this.isError) return; @@ -546,9 +558,16 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { byte [] responseBytes = bc.getBytes(); byte [] token; // synchronization may be needed since there can be multiple Handler - // threads using saslServer to wrap responses. - synchronized (connection.saslServer) { - token = connection.saslServer.wrap(responseBytes, 0, responseBytes.length); + // threads using saslServer or Crypto AES to wrap responses. 
+ if (connection.useCryptoAesWrap) { + // wrap with Crypto AES + synchronized (connection.cryptoAES) { + token = connection.cryptoAES.wrap(responseBytes, 0, responseBytes.length); + } + } else { + synchronized (connection.saslServer) { + token = connection.saslServer.wrap(responseBytes, 0, responseBytes.length); + } } if (LOG.isTraceEnabled()) { LOG.trace("Adding saslServer wrapped token of size " + token.length @@ -1230,7 +1249,9 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { private ByteBuffer unwrappedDataLengthBuffer = ByteBuffer.allocate(4); boolean useSasl; SaslServer saslServer; + private CryptoAES cryptoAES; private boolean useWrap = false; + private boolean useCryptoAesWrap = false; // Fake 'call' for failed authorization response private static final int AUTHORIZATION_FAILED_CALLID = -1; private final Call authFailedCall = new Call(AUTHORIZATION_FAILED_CALLID, null, null, null, @@ -1241,6 +1262,10 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { private static final int SASL_CALLID = -33; private final Call saslCall = new Call(SASL_CALLID, null, null, null, null, null, this, null, 0, null, null, 0); + // Fake 'call' for connection header response + private static final int CONNECTION_HEADER_RESPONSE_CALLID = -34; + private final Call setConnectionHeaderResponseCall = new Call(CONNECTION_HEADER_RESPONSE_CALLID, + null, null, null, null, null, this, null, 0, null, null, 0); // was authentication allowed with a fallback to simple auth private boolean authenticatedWithFallback; @@ -1351,7 +1376,13 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { processOneRpc(saslToken); } else { byte[] b = saslToken.array(); - byte [] plaintextData = saslServer.unwrap(b, saslToken.position(), saslToken.limit()); + byte [] plaintextData; + if (useCryptoAesWrap) { + // unwrap with CryptoAES + plaintextData = cryptoAES.unwrap(b, saslToken.position(), saslToken.limit()); + } else { 
+ plaintextData = saslServer.unwrap(b, saslToken.position(), saslToken.limit()); + } processUnwrappedData(plaintextData); } } else { @@ -1478,6 +1509,31 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { } } + /** + * Send the response for connection header + */ + private void doConnectionHeaderResponse(byte[] wrappedCipherMetaData) throws IOException { + ByteBufferOutputStream response = null; + DataOutputStream out = null; + try { + response = new ByteBufferOutputStream(wrappedCipherMetaData.length + 4); + out = new DataOutputStream(response); + out.writeInt(wrappedCipherMetaData.length); + out.write(wrappedCipherMetaData); + + setConnectionHeaderResponseCall.setConnectionHeaderResponse(response.getByteBuffer()); + setConnectionHeaderResponseCall.responder = responder; + setConnectionHeaderResponseCall.sendResponseIfReady(); + } finally { + if (response != null) { + response.close(); + } + if (out != null) { + out.close(); + } + } + } + private void disposeSasl() { if (saslServer != null) { try { @@ -1674,6 +1730,7 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { this.service = getService(services, serviceName); if (this.service == null) throw new UnknownServiceException(serviceName); setupCellBlockCodecs(this.connectionHeader); + setupCryptoCipher(this.connectionHeader); UserGroupInformation protocolUser = createUser(connectionHeader); if (!useSasl) { ugi = protocolUser; @@ -1722,8 +1779,6 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { AUDITLOG.info("Connection from " + this.hostAddress + " port: " + this.remotePort + " with unknown version info"); } - - } /** @@ -1750,6 +1805,86 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { } } + /** + * Set up cipher for rpc encryption with Apache Commons Crypto + * @throws FatalConnectionException + */ + private void setupCryptoCipher(final ConnectionHeader header) + throws 
FatalConnectionException { + // If simple auth, return + if (saslServer == null) return; + // check if rpc encryption with Crypto AES + String qop = (String) saslServer.getNegotiatedProperty(Sasl.QOP); + boolean isEncryption = SaslUtil.QualityOfProtection.PRIVACY + .getSaslQop().equalsIgnoreCase(qop); + boolean isCryptoAesEncryption = isEncryption && conf.getBoolean( + HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_DEFAULT); + if (!isCryptoAesEncryption) return; + if (!header.hasRpcCryptoCipherTransformation()) return; + String transformation = header.getRpcCryptoCipherTransformation(); + if (transformation == null || transformation.length() == 0) return; + // Negotiates AES based on complete saslServer. + // The Crypto metadata need to be encrypted and send to client. + Properties properties = new Properties(); + // the property for SecureRandomFactory + properties.setProperty(CryptoRandomFactory.CLASSES_KEY, + conf.get(HConstants.RPC_CRYPTO_ENCRYPTION_RANDOM_CONF_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_RANDOM_DEFAULT)); + // the property for cipher class + properties.setProperty(CryptoCipherFactory.CLASSES_KEY, + conf.get(HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_CLASS_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_CLASS_DEFAULT)); + + int cipherKeyBits = conf.getInt( + HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_KEY_SIZE_CONF_KEY, + HConstants.RPC_CRYPTO_ENCRYPTION_AES_CIPHER_KEY_SIZE_DEFAULT); + // generate key and iv + if (cipherKeyBits % 8 != 0) { + throw new IllegalArgumentException("The AES cipher key size in bits" + + " should be a multiple of byte"); + } + int len = cipherKeyBits / 8; + byte[] inKey = new byte[len]; + byte[] outKey = new byte[len]; + byte[] inIv = new byte[len]; + byte[] outIv = new byte[len]; + + try { + // generate the cipher meta data with SecureRandom + CryptoRandom secureRandom = CryptoRandomFactory.getCryptoRandom(properties); + secureRandom.nextBytes(inKey); + 
secureRandom.nextBytes(outKey); + secureRandom.nextBytes(inIv); + secureRandom.nextBytes(outIv); + + // create CryptoAES for server + cryptoAES = new CryptoAES(transformation, properties, + inKey, outKey, inIv, outIv); + // create CryptoCipherMeta and send to client, + // for client, the [inKey, outKey], [inIv, outIv] should be reversed + RPCProtos.CryptoCipherMeta.Builder ccmBuilder = RPCProtos.CryptoCipherMeta.newBuilder(); + ccmBuilder.setTransformation(transformation); + ccmBuilder.setInIv(getByteString(outIv)); + ccmBuilder.setInKey(getByteString(outKey)); + ccmBuilder.setOutIv(getByteString(inIv)); + ccmBuilder.setOutKey(getByteString(inKey)); + RPCProtos.ConnectionHeaderResponse.Builder chrBuilder = + RPCProtos.ConnectionHeaderResponse.newBuilder(); + chrBuilder.setCryptoCipherMeta(ccmBuilder); + byte[] connectionHeaderResBytes = chrBuilder.build().toByteArray(); + // encrypt the Crypto AES cipher meta data with sasl server, and send to client + byte[] unwrapped = new byte[connectionHeaderResBytes.length + 4]; + Bytes.putBytes(unwrapped, 0, Bytes.toBytes(connectionHeaderResBytes.length), 0, 4); + Bytes.putBytes(unwrapped, 4, connectionHeaderResBytes, 0, connectionHeaderResBytes.length); + + doConnectionHeaderResponse(saslServer.wrap(unwrapped, 0, unwrapped.length)); + useCryptoAesWrap = true; + } catch (GeneralSecurityException | IOException ex) { + throw new UnsupportedCryptoException(ex.getMessage(), ex); + } + } + private void processUnwrappedData(byte[] inBuf) throws IOException, InterruptedException { ReadableByteChannel ch = Channels.newChannel(new ByteArrayInputStream(inBuf)); @@ -1788,7 +1923,6 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { } } - private void processOneRpc(ByteBuffer buf) throws IOException, InterruptedException { if (connectionHeaderRead) { processRequest(buf); @@ -1912,6 +2046,11 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { } } + private ByteString
getByteString(byte[] bytes) { + // return singleton to reduce object allocation + return (bytes.length == 0) ? ByteString.EMPTY : ByteString.copyFrom(bytes); + } + private boolean authorizeConnection() throws IOException { try { // If auth method is DIGEST, the token was obtained by the diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java index 4637a01..065b22f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java @@ -192,7 +192,7 @@ public class TestSaslFanOutOneBlockAsyncDFSOutput { setHdfsSecuredConfiguration(TEST_UTIL.getConfiguration()); HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath()); HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm()); - HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration()); + HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration(), false); UserGroupInformation.setConfiguration(TEST_UTIL.getConfiguration()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java index c1b8de7..567a633 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java @@ -82,6 +82,8 @@ public abstract class AbstractTestSecureIPC { abstract Class getRpcClientClass(); + abstract boolean isAesEnabled(); + @Rule public ExpectedException exception = ExpectedException.none(); @@ -110,9 +112,9 @@ public abstract class AbstractTestSecureIPC { krbKeytab = getKeytabFileForTesting(); 
krbPrincipal = getPrincipalForTesting(); ugi = loginKerberosPrincipal(krbKeytab, krbPrincipal); - clientConf = getSecuredConfiguration(); + clientConf = getSecuredConfiguration(isAesEnabled()); clientConf.set(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY, getRpcClientClass().getName()); - serverConf = getSecuredConfiguration(); + serverConf = getSecuredConfiguration(isAesEnabled()); } @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java index 237efe9..620cbc4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; import com.google.common.base.Strings; @@ -67,18 +68,21 @@ public class HBaseKerberosUtils { return conf; } - public static Configuration getSecuredConfiguration() { + public static Configuration getSecuredConfiguration(boolean isAesEnabled) { Configuration conf = HBaseConfiguration.create(); - setSecuredConfiguration(conf); + setSecuredConfiguration(conf, isAesEnabled); return conf; } - public static void setSecuredConfiguration(Configuration conf) { + public static void setSecuredConfiguration(Configuration conf, boolean isAesEnabled) { conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); conf.set(User.HBASE_SECURITY_CONF_KEY, "kerberos"); conf.setBoolean(User.HBASE_SECURITY_AUTHORIZATION_CONF_KEY, true); conf.set(KRB_KEYTAB_FILE, System.getProperty(KRB_KEYTAB_FILE)); conf.set(KRB_PRINCIPAL, System.getProperty(KRB_PRINCIPAL)); 
conf.set(MASTER_KRB_PRINCIPAL, System.getProperty(KRB_PRINCIPAL)); + if (isAesEnabled) { + conf.set(HConstants.RPC_CRYPTO_ENCRYPTION_AES_ENABLED_CONF_KEY, "true"); + } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestAsyncSecureIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestAsyncSecureIPC.java index ea37915..37342bb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestAsyncSecureIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestAsyncSecureIPC.java @@ -30,4 +30,8 @@ public class TestAsyncSecureIPC extends AbstractTestSecureIPC { Class getRpcClientClass() { return AsyncRpcClient.class; } + + boolean isAesEnabled() { + return false; + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestAsyncSecureIPCWithAes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestAsyncSecureIPCWithAes.java new file mode 100644 index 0000000..341d541 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestAsyncSecureIPCWithAes.java @@ -0,0 +1,36 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.security; + +import org.apache.hadoop.hbase.ipc.AsyncRpcClient; +import org.apache.hadoop.hbase.ipc.RpcClient; +import org.apache.hadoop.hbase.testclassification.SecurityTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.experimental.categories.Category; + +@Category({ SecurityTests.class, SmallTests.class }) +public class TestAsyncSecureIPCWithAes extends AbstractTestSecureIPC { + Class getRpcClientClass() { + return AsyncRpcClient.class; + } + + boolean isAesEnabled() { + return true; + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java index 98ea221..0a7a53a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java @@ -30,4 +30,8 @@ public class TestSecureIPC extends AbstractTestSecureIPC { Class getRpcClientClass() { return RpcClientImpl.class; } + + boolean isAesEnabled() { + return false; + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPCWithAes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPCWithAes.java new file mode 100644 index 0000000..525d85d --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPCWithAes.java @@ -0,0 +1,37 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.security; + +import org.apache.hadoop.hbase.ipc.RpcClient; +import org.apache.hadoop.hbase.ipc.RpcClientImpl; +import org.apache.hadoop.hbase.testclassification.SecurityTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.experimental.categories.Category; + +@Category({ SecurityTests.class, SmallTests.class }) +public class TestSecureIPCWithAes extends AbstractTestSecureIPC { + + Class getRpcClientClass() { + return RpcClientImpl.class; + } + + boolean isAesEnabled() { + return true; + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUsersOperationsWithSecureHadoop.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUsersOperationsWithSecureHadoop.java index 0226d49..9861696 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUsersOperationsWithSecureHadoop.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUsersOperationsWithSecureHadoop.java @@ -99,7 +99,7 @@ public class TestUsersOperationsWithSecureHadoop { assertNotNull("KerberosKeytab was not specified", nnKeyTab); assertNotNull("KerberosPrincipal was not specified", dnPrincipal); - conf = getSecuredConfiguration(); + conf = getSecuredConfiguration(false); UserGroupInformation.setConfiguration(conf); User.login(conf, HBaseKerberosUtils.KRB_KEYTAB_FILE, HBaseKerberosUtils.KRB_PRINCIPAL, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java index a469537..bf6e61e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java @@ -97,7 +97,7 @@ public class SecureTestCluster { HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath()); HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm()); - HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration()); + HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration(), false); setHdfsSecuredConfiguration(TEST_UTIL.getConfiguration()); UserGroupInformation.setConfiguration(TEST_UTIL.getConfiguration()); TEST_UTIL.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, diff --git a/pom.xml b/pom.xml index fb3ead0..fab0ce5 100644 --- a/pom.xml +++ b/pom.xml @@ -1219,6 +1219,7 @@ 2.11.6 1.46 1.0.0-RC2 + 1.0.0 2.4 1.8 @@ -1787,6 +1788,17 @@ kerb-simplekdc ${kerby.version} + + org.apache.commons + commons-crypto + ${commons-crypto.version} + + + net.java.dev.jna + jna + + +