Index: hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java =================================================================== --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java (revision 1542892) +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java (working copy) @@ -61,7 +61,7 @@ */ @InterfaceAudience.Public @InterfaceStability.Stable -public class Get extends OperationWithAttributes +public class Get extends Query implements Row, Comparable { private byte [] row = null; Index: hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java =================================================================== --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java (revision 1542892) +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java (working copy) @@ -36,7 +36,11 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.io.HeapSize; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.security.visibility.CellVisibility; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; @@ -290,6 +294,26 @@ } /** + * Sets the visibility expression associated with cells in this Mutation. + * It is illegal to set CellVisibility on Delete mutation. + * @param expression + */ + public void setCellVisibility(CellVisibility expression) { + this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, ProtobufUtil + .toCellVisibility(expression).toByteArray()); + } + + /** + * @return CellVisibility associated with cells in this Mutation. 
+ * @throws DeserializationException + */ + public CellVisibility getCellVisibility() throws DeserializationException { + byte[] cellVisibilityBytes = this.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY); + if (cellVisibilityBytes == null) return null; + return ProtobufUtil.toCellVisibility(cellVisibilityBytes); + } + + /** * Number of KeyValues carried by this Mutation. * @return the total number of KeyValues */ Index: hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java =================================================================== --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java (revision 0) +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java (working copy) @@ -0,0 +1,49 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.client; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.security.visibility.Authorizations; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public abstract class Query extends OperationWithAttributes { + + /** + * Sets the authorizations to be used by this Query + * @param authorizations + */ + public void setAuthorizations(Authorizations authorizations) { + this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, ProtobufUtil + .toAuthorizations(authorizations).toByteArray()); + } + + /** + * @return The authorizations this Query is associated with. + * @throws DeserializationException + */ + public Authorizations getAuthorizations() throws DeserializationException { + byte[] authorizationsBytes = this.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY); + if (authorizationsBytes == null) return null; + return ProtobufUtil.toAuthorizations(authorizationsBytes); + } +} Index: hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java =================================================================== --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java (revision 1542892) +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java (working copy) @@ -81,7 +81,7 @@ */ @InterfaceAudience.Public @InterfaceStability.Stable -public class Scan extends OperationWithAttributes { +public class Scan extends Query { private static final String RAW_ATTR = "_raw_"; private static final String ISOLATION_LEVEL = "_isolationlevel_"; Index: hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java 
=================================================================== --- hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java (revision 1542892) +++ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java (working copy) @@ -119,6 +119,8 @@ import org.apache.hadoop.hbase.security.access.TablePermission; import org.apache.hadoop.hbase.security.access.UserPermission; import org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier; +import org.apache.hadoop.hbase.security.visibility.Authorizations; +import org.apache.hadoop.hbase.security.visibility.CellVisibility; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.DynamicClassLoader; import org.apache.hadoop.hbase.util.Methods; @@ -2464,4 +2466,89 @@ return tableNames; } + /** + * Convert a protocol buffer CellVisibility to a client CellVisibility + * + * @param proto + * @return the converted client CellVisibility + */ + public static CellVisibility toCellVisibility(ClientProtos.CellVisibility proto) { + if (proto == null) return null; + return new CellVisibility(proto.getExpression()); + } + + /** + * Convert a protocol buffer CellVisibility bytes to a client CellVisibility + * + * @param protoBytes + * @return the converted client CellVisibility + * @throws DeserializationException + */ + public static CellVisibility toCellVisibility(byte[] protoBytes) throws DeserializationException { + if (protoBytes == null) return null; + ClientProtos.CellVisibility.Builder builder = ClientProtos.CellVisibility.newBuilder(); + ClientProtos.CellVisibility proto = null; + try { + proto = builder.mergeFrom(protoBytes).build(); + } catch (InvalidProtocolBufferException e) { + throw new DeserializationException(e); + } + return toCellVisibility(proto); + } + + /** + * Create a protocol buffer CellVisibility based on a client CellVisibility. 
+ * + * @param cellVisibility + * @return a protocol buffer CellVisibility + */ + public static ClientProtos.CellVisibility toCellVisibility(CellVisibility cellVisibility) { + ClientProtos.CellVisibility.Builder builder = ClientProtos.CellVisibility.newBuilder(); + builder.setExpression(cellVisibility.getExpression()); + return builder.build(); + } + + /** + * Convert a protocol buffer Authorizations to a client Authorizations + * + * @param proto + * @return the converted client Authorizations + */ + public static Authorizations toAuthorizations(ClientProtos.Authorizations proto) { + if (proto == null) return null; + return new Authorizations(proto.getLabelList()); + } + + /** + * Convert a protocol buffer Authorizations bytes to a client Authorizations + * + * @param protoBytes + * @return the converted client Authorizations + * @throws DeserializationException + */ + public static Authorizations toAuthorizations(byte[] protoBytes) throws DeserializationException { + if (protoBytes == null) return null; + ClientProtos.Authorizations.Builder builder = ClientProtos.Authorizations.newBuilder(); + ClientProtos.Authorizations proto = null; + try { + proto = builder.mergeFrom(protoBytes).build(); + } catch (InvalidProtocolBufferException e) { + throw new DeserializationException(e); + } + return toAuthorizations(proto); + } + + /** + * Create a protocol buffer Authorizations based on a client Authorizations. 
+ * + * @param authorizations + * @return a protocol buffer Authorizations + */ + public static ClientProtos.Authorizations toAuthorizations(Authorizations authorizations) { + ClientProtos.Authorizations.Builder builder = ClientProtos.Authorizations.newBuilder(); + for (String label : authorizations.getLabels()) { + builder.addLabel(label); + } + return builder.build(); + } } Index: hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java =================================================================== --- hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java (revision 0) +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java (working copy) @@ -0,0 +1,56 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; + +/** + * This class contains visibility labels associated with a Scan/Get, deciding which labeled data + * the current scan/get can access. + */ +@InterfaceAudience.Public +@InterfaceStability.Stable +public class Authorizations { + + private List<String> labels; + + public Authorizations(String... labels) { + this.labels = new ArrayList<String>(labels.length); + for (String label : labels) { + this.labels.add(label); + } + } + + public Authorizations(List<String> labels) { + this.labels = labels; + } + + public List<String> getLabels() { + return Collections.unmodifiableList(this.labels); + } + + @Override + public String toString() { + return this.labels.toString(); + } +} \ No newline at end of file Index: hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java =================================================================== --- hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java (revision 0) +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java (working copy) @@ -0,0 +1,44 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; + +/** + * This contains a visibility expression which can be associated with a cell. When it is set with a + * Mutation, all the cells in that mutation will get associated with this expression. A visibility + * expression can contain visibility labels combined with logical operators AND(&), OR(|) and NOT(!) + */ +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class CellVisibility { + + private String expression; + + public CellVisibility(String expression) { + this.expression = expression; + } + + /** + * @return The visibility expression + */ + public String getExpression() { + return this.expression; + } +} Index: hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java =================================================================== --- hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java (revision 0) +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java (working copy) @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.DoNotRetryIOException; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class InvalidLabelException extends DoNotRetryIOException { + private static final long serialVersionUID = 1L; + + public InvalidLabelException(String msg) { + super(msg); + } +} + Index: hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java =================================================================== --- hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java (revision 0) +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java (working copy) @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.DoNotRetryIOException; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class LabelAlreadyExistsException extends DoNotRetryIOException { + private static final long serialVersionUID = 1L; + + public LabelAlreadyExistsException(String msg) { + super(msg); + } + +} Index: hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java =================================================================== --- hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java (revision 0) +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java (working copy) @@ -0,0 +1,207 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; + +import java.io.IOException; +import java.util.Map; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService; +import org.apache.hadoop.hbase.util.Bytes; + +import com.google.protobuf.ByteString; +import com.google.protobuf.ServiceException; + +/** + * Utility client for doing visibility labels admin operations. 
+ */ +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class VisibilityClient { + + /** + * Utility method for adding a label to the system. + * + * @param conf + * @param label + * @return VisibilityLabelsResponse + * @throws Throwable + */ + public static VisibilityLabelsResponse addLabel(Configuration conf, final String label) + throws Throwable { + return addLabels(conf, new String[] { label }); + } + + /** + * Utility method for adding labels to the system. + * + * @param conf + * @param labels + * @return VisibilityLabelsResponse + * @throws Throwable + */ + public static VisibilityLabelsResponse addLabels(Configuration conf, final String[] labels) + throws Throwable { + HTable ht = null; + try { + ht = new HTable(conf, LABELS_TABLE_NAME.getName()); + Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse> callable = + new Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse>() { + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback<VisibilityLabelsResponse> rpcCallback = + new BlockingRpcCallback<VisibilityLabelsResponse>(); + + public VisibilityLabelsResponse call(VisibilityLabelsService service) throws IOException { + VisibilityLabelsRequest.Builder builder = VisibilityLabelsRequest.newBuilder(); + for (String label : labels) { + if (label.length() > 0) { + VisibilityLabel.Builder newBuilder = VisibilityLabel.newBuilder(); + newBuilder.setLabel(ByteString.copyFrom(Bytes.toBytes(label))); + builder.addVisLabel(newBuilder.build()); + } + } + service.addLabels(controller, builder.build(), rpcCallback); + return rpcCallback.get(); + } + }; + Map<byte[], VisibilityLabelsResponse> result = ht.coprocessorService( + VisibilityLabelsService.class, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, + callable); + return result.values().iterator().next(); // There will be exactly one region for labels + // table and so one entry in result Map. + } finally { + if (ht != null) { + ht.close(); + } + } + } + + /** + * Sets given labels globally authorized for the user.
+ * @param conf + * @param auths + * @param user + * @return VisibilityLabelsResponse + * @throws Throwable + */ + public static VisibilityLabelsResponse setAuths(Configuration conf, final String[] auths, + final String user) throws Throwable { + return setOrClearAuths(conf, auths, user, true); + } + + /** + * @param conf + * @param user + * @return The labels the given user is globally authorized for. + * @throws Throwable + */ + public static GetAuthsResponse getAuths(Configuration conf, final String user) throws Throwable { + HTable ht = null; + try { + ht = new HTable(conf, LABELS_TABLE_NAME.getName()); + Batch.Call<VisibilityLabelsService, GetAuthsResponse> callable = + new Batch.Call<VisibilityLabelsService, GetAuthsResponse>() { + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback<GetAuthsResponse> rpcCallback = + new BlockingRpcCallback<GetAuthsResponse>(); + + public GetAuthsResponse call(VisibilityLabelsService service) throws IOException { + GetAuthsRequest.Builder getAuthReqBuilder = GetAuthsRequest.newBuilder(); + getAuthReqBuilder.setUser(ByteString.copyFrom(Bytes.toBytes(user))); + service.getAuths(controller, getAuthReqBuilder.build(), rpcCallback); + return rpcCallback.get(); + } + }; + Map<byte[], GetAuthsResponse> result = ht.coprocessorService(VisibilityLabelsService.class, + HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, callable); + return result.values().iterator().next(); // There will be exactly one region for labels + // table and so one entry in result Map. + } finally { + if (ht != null) { + ht.close(); + } + } + } + + /** + * Removes given labels from user's globally authorized list of labels.
+ * @param conf + * @param auths + * @param user + * @return VisibilityLabelsResponse + * @throws Throwable + */ + public static VisibilityLabelsResponse clearAuths(Configuration conf, final String[] auths, + final String user) throws Throwable { + return setOrClearAuths(conf, auths, user, false); + } + + private static VisibilityLabelsResponse setOrClearAuths(Configuration conf, final String[] auths, + final String user, final boolean setOrClear) throws IOException, ServiceException, Throwable { + HTable ht = null; + try { + ht = new HTable(conf, LABELS_TABLE_NAME.getName()); + Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse> callable = + new Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse>() { + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback<VisibilityLabelsResponse> rpcCallback = + new BlockingRpcCallback<VisibilityLabelsResponse>(); + + public VisibilityLabelsResponse call(VisibilityLabelsService service) throws IOException { + SetAuthsRequest.Builder setAuthReqBuilder = SetAuthsRequest.newBuilder(); + setAuthReqBuilder.setUser(ByteString.copyFrom(Bytes.toBytes(user))); + for (String auth : auths) { + if (auth.length() > 0) { + setAuthReqBuilder.addAuth(ByteString.copyFrom(Bytes.toBytes(auth))); + } + } + if (setOrClear) { + service.setAuths(controller, setAuthReqBuilder.build(), rpcCallback); + } else { + service.clearAuths(controller, setAuthReqBuilder.build(), rpcCallback); + } + return rpcCallback.get(); + } + }; + Map<byte[], VisibilityLabelsResponse> result = ht.coprocessorService( + VisibilityLabelsService.class, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, + callable); + return result.values().iterator().next(); // There will be exactly one region for labels + // table and so one entry in result Map.
+ } finally { + if (ht != null) { + ht.close(); + } + } + } +} Index: hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java =================================================================== --- hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java (revision 0) +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java (working copy) @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.util.Bytes; + +@InterfaceAudience.Private +public final class VisibilityConstants { + + /** + * The string that is used as key in setting the Operation attributes for visibility labels + */ + public static final String VISIBILITY_LABELS_ATTR_KEY = "VISIBILITY"; + + /** Internal storage table for visibility labels */ + public static final TableName LABELS_TABLE_NAME = TableName.valueOf( + NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "labels"); + + /** Family for the internal storage table for visibility labels */ + public static final byte[] LABELS_TABLE_FAMILY = Bytes.toBytes("f"); + + /** Qualifier for the internal storage table for visibility labels */ + public static final byte[] LABEL_QUALIFIER = new byte[1]; + +} Index: hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsValidator.java =================================================================== --- hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsValidator.java (revision 0) +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsValidator.java (working copy) @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; + +/** + * A simple validator that validates the labels passed + */ +@InterfaceAudience.Private +public class VisibilityLabelsValidator { + // We follow Accumulo parity for valid visibility labels. + private static final boolean[] validAuthChars = new boolean[256]; + + static { + for (int i = 0; i < 256; i++) { + validAuthChars[i] = false; + } + + for (int i = 'a'; i <= 'z'; i++) { + validAuthChars[i] = true; + } + + for (int i = 'A'; i <= 'Z'; i++) { + validAuthChars[i] = true; + } + + for (int i = '0'; i <= '9'; i++) { + validAuthChars[i] = true; + } + + validAuthChars['_'] = true; + validAuthChars['-'] = true; + validAuthChars[':'] = true; + validAuthChars['.'] = true; + validAuthChars['/'] = true; + } + + static final boolean isValidAuthChar(byte b) { + return validAuthChars[0xff & b]; + } + + static final boolean isValidLabel(byte[] label) { + for (int i = 0; i < label.length; i++) { + if (!isValidAuthChar(label[i])) { + return false; + } + } + return true; + } +} Index: hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java =================================================================== --- hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java (revision 1542892) +++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java (working copy) @@ -24,6 +24,7 @@ import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; +import 
org.apache.hadoop.hbase.util.Pair; import com.google.common.base.Preconditions; @@ -38,7 +39,6 @@ public class StreamUtils { public static void writeRawVInt32(OutputStream output, int value) throws IOException { - assert value >= 0; while (true) { if ((value & ~0x7F) == 0) { output.write(value); @@ -118,6 +118,57 @@ return result; } + /** + * Reads a varInt value stored in an array. + * + * @param input + * Input array where the varInt is available + * @param offset + * Offset in the input array where varInt is available + * @return A pair of integers in which first value is the actual decoded varInt value and second + * value as number of bytes taken by this varInt for its storage in the input array. + * @throws IOException + */ + public static Pair<Integer, Integer> readRawVarint32(byte[] input, int offset) throws IOException { + int newOffset = offset; + byte tmp = input[newOffset++]; + if (tmp >= 0) { + return new Pair<Integer, Integer>((int) tmp, newOffset - offset); + } + int result = tmp & 0x7f; + tmp = input[newOffset++]; + if (tmp >= 0) { + result |= tmp << 7; + } else { + result |= (tmp & 0x7f) << 7; + tmp = input[newOffset++]; + if (tmp >= 0) { + result |= tmp << 14; + } else { + result |= (tmp & 0x7f) << 14; + tmp = input[newOffset++]; + if (tmp >= 0) { + result |= tmp << 21; + } else { + result |= (tmp & 0x7f) << 21; + tmp = input[newOffset++]; + result |= tmp << 28; + if (tmp < 0) { + // Discard upper 32 bits.
+ for (int i = 0; i < 5; i++) { + tmp = input[newOffset++]; + if (tmp >= 0) { + return new Pair(result, newOffset - offset); + } + } + throw new IOException("Malformed varint"); + } + } + } + } + return new Pair(result, newOffset - offset); + } + public static short toShort(byte hi, byte lo) { short s = (short) (((hi & 0xFF) << 8) | (lo & 0xFF)); Preconditions.checkArgument(s >= 0); Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java (revision 1542892) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java (working copy) @@ -8,6 +8,1075 @@ public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } + public interface AuthorizationsOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated string label = 1; + /** + * repeated string label = 1; + */ + java.util.List + getLabelList(); + /** + * repeated string label = 1; + */ + int getLabelCount(); + /** + * repeated string label = 1; + */ + java.lang.String getLabel(int index); + /** + * repeated string label = 1; + */ + com.google.protobuf.ByteString + getLabelBytes(int index); + } + /** + * Protobuf type {@code Authorizations} + * + *
+   **
+   * The protocol buffer version of Authorizations.
+   * 
+ */ + public static final class Authorizations extends + com.google.protobuf.GeneratedMessage + implements AuthorizationsOrBuilder { + // Use Authorizations.newBuilder() to construct. + private Authorizations(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private Authorizations(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final Authorizations defaultInstance; + public static Authorizations getDefaultInstance() { + return defaultInstance; + } + + public Authorizations getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Authorizations( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + label_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + label_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if 
(((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + label_ = new com.google.protobuf.UnmodifiableLazyStringList(label_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Authorizations parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Authorizations(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated string label = 1; + public static final int LABEL_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList label_; + /** + * repeated string label = 1; + */ + public java.util.List + getLabelList() { + return label_; + } + /** + * repeated string label = 1; + */ + public int getLabelCount() { + return label_.size(); + } + /** + * repeated string label = 1; + */ + public java.lang.String getLabel(int index) { + return label_.get(index); + } + /** + * repeated string label = 1; + */ + public com.google.protobuf.ByteString + getLabelBytes(int index) { + return label_.getByteString(index); + } + + private void initFields() { + label_ = 
com.google.protobuf.LazyStringArrayList.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < label_.size(); i++) { + output.writeBytes(1, label_.getByteString(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < label_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(label_.getByteString(i)); + } + size += dataSize; + size += 1 * getLabelList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) obj; + + boolean result = true; + result = result && getLabelList() + .equals(other.getLabelList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int 
hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getLabelCount() > 0) { + hash = (37 * hash) + LABEL_FIELD_NUMBER; + hash = (53 * hash) + getLabelList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return 
PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code Authorizations} + * + *
+     **
+     * The protocol buffer version of Authorizations.
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.AuthorizationsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + label_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + label_ = new com.google.protobuf.UnmodifiableLazyStringList( + label_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.label_ = label_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance()) return this; + if (!other.label_.isEmpty()) { + if (label_.isEmpty()) { + label_ = other.label_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureLabelIsMutable(); + label_.addAll(other.label_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parsedMessage = null; + try { + 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated string label = 1; + private com.google.protobuf.LazyStringList label_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureLabelIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + label_ = new com.google.protobuf.LazyStringArrayList(label_); + bitField0_ |= 0x00000001; + } + } + /** + * repeated string label = 1; + */ + public java.util.List + getLabelList() { + return java.util.Collections.unmodifiableList(label_); + } + /** + * repeated string label = 1; + */ + public int getLabelCount() { + return label_.size(); + } + /** + * repeated string label = 1; + */ + public java.lang.String getLabel(int index) { + return label_.get(index); + } + /** + * repeated string label = 1; + */ + public com.google.protobuf.ByteString + getLabelBytes(int index) { + return label_.getByteString(index); + } + /** + * repeated string label = 1; + */ + public Builder setLabel( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureLabelIsMutable(); + label_.set(index, value); + onChanged(); + return this; + } + /** + * repeated string label = 1; + */ + public Builder addLabel( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureLabelIsMutable(); + label_.add(value); + onChanged(); + return this; + } + /** + * repeated string label = 1; + */ + public Builder addAllLabel( + java.lang.Iterable values) { + ensureLabelIsMutable(); + super.addAll(values, label_); + onChanged(); + return this; + } + /** + * repeated string label = 1; + */ + public Builder 
clearLabel() { + label_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * repeated string label = 1; + */ + public Builder addLabelBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureLabelIsMutable(); + label_.add(value); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Authorizations) + } + + static { + defaultInstance = new Authorizations(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Authorizations) + } + + public interface CellVisibilityOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string expression = 1; + /** + * required string expression = 1; + */ + boolean hasExpression(); + /** + * required string expression = 1; + */ + java.lang.String getExpression(); + /** + * required string expression = 1; + */ + com.google.protobuf.ByteString + getExpressionBytes(); + } + /** + * Protobuf type {@code CellVisibility} + * + *
+   **
+   * The protocol buffer version of CellVisibility.
+   * 
+ */ + public static final class CellVisibility extends + com.google.protobuf.GeneratedMessage + implements CellVisibilityOrBuilder { + // Use CellVisibility.newBuilder() to construct. + private CellVisibility(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private CellVisibility(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final CellVisibility defaultInstance; + public static CellVisibility getDefaultInstance() { + return defaultInstance; + } + + public CellVisibility getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CellVisibility( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + expression_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final 
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CellVisibility parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CellVisibility(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required string expression = 1; + public static final int EXPRESSION_FIELD_NUMBER = 1; + private java.lang.Object expression_; + /** + * required string expression = 1; + */ + public boolean hasExpression() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string expression = 1; + */ + public java.lang.String getExpression() { + java.lang.Object ref = expression_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + expression_ = s; + } + return s; + } + } + /** + * required string expression = 1; + */ + public com.google.protobuf.ByteString + getExpressionBytes() { + java.lang.Object ref = expression_; + if (ref instanceof java.lang.String) { + 
com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + expression_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + expression_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasExpression()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getExpressionBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getExpressionBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) obj; + + boolean result = true; + result = result && (hasExpression() == other.hasExpression()); + if (hasExpression()) { + result = result && 
getExpression() + .equals(other.getExpression()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasExpression()) { + hash = (37 * hash) + EXPRESSION_FIELD_NUMBER; + hash = (53 * hash) + getExpression().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code CellVisibility} + * + *
+     **
+     * The protocol buffer version of CellVisibility.
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibilityOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + expression_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.expression_ = expression_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance()) return this; + if (other.hasExpression()) { + bitField0_ |= 0x00000001; + expression_ = other.expression_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasExpression()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + 
} catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string expression = 1; + private java.lang.Object expression_ = ""; + /** + * required string expression = 1; + */ + public boolean hasExpression() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string expression = 1; + */ + public java.lang.String getExpression() { + java.lang.Object ref = expression_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + expression_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string expression = 1; + */ + public com.google.protobuf.ByteString + getExpressionBytes() { + java.lang.Object ref = expression_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + expression_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string expression = 1; + */ + public Builder setExpression( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + expression_ = value; + onChanged(); + return this; + } + /** + * required string expression = 1; + */ + public Builder clearExpression() { + bitField0_ = (bitField0_ & ~0x00000001); + expression_ = getDefaultInstance().getExpression(); + onChanged(); + return this; + } + /** + * required string expression = 1; + */ + public Builder setExpressionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + expression_ = value; + onChanged(); + 
return this; + } + + // @@protoc_insertion_point(builder_scope:CellVisibility) + } + + static { + defaultInstance = new CellVisibility(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CellVisibility) + } + public interface ColumnOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -27744,6 +28813,16 @@ } private static com.google.protobuf.Descriptors.Descriptor + internal_static_Authorizations_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Authorizations_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CellVisibility_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CellVisibility_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor internal_static_Column_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -27883,145 +28962,158 @@ static { java.lang.String[] descriptorData = { "\n\014Client.proto\032\013HBase.proto\032\014Filter.prot" + - "o\032\nCell.proto\032\020Comparator.proto\"+\n\006Colum" + - "n\022\016\n\006family\030\001 \002(\014\022\021\n\tqualifier\030\002 \003(\014\"\251\002\n" + - "\003Get\022\013\n\003row\030\001 \002(\014\022\027\n\006column\030\002 \003(\0132\007.Colu" + - "mn\022!\n\tattribute\030\003 \003(\0132\016.NameBytesPair\022\027\n" + - "\006filter\030\004 \001(\0132\007.Filter\022\036\n\ntime_range\030\005 \001" + - "(\0132\n.TimeRange\022\027\n\014max_versions\030\006 \001(\r:\0011\022" + - "\032\n\014cache_blocks\030\007 \001(\010:\004true\022\023\n\013store_lim" + - "it\030\010 \001(\r\022\024\n\014store_offset\030\t \001(\r\022\035\n\016existe" + - "nce_only\030\n \001(\010:\005false\022!\n\022closest_row_bef", - "ore\030\013 \001(\010:\005false\"L\n\006Result\022\023\n\004cell\030\001 \003(\013" + - "2\005.Cell\022\035\n\025associated_cell_count\030\002 
\001(\005\022\016" + - "\n\006exists\030\003 \001(\010\"A\n\nGetRequest\022 \n\006region\030\001" + - " \002(\0132\020.RegionSpecifier\022\021\n\003get\030\002 \002(\0132\004.Ge" + - "t\"&\n\013GetResponse\022\027\n\006result\030\001 \001(\0132\007.Resul" + - "t\"\200\001\n\tCondition\022\013\n\003row\030\001 \002(\014\022\016\n\006family\030\002" + - " \002(\014\022\021\n\tqualifier\030\003 \002(\014\022\"\n\014compare_type\030" + - "\004 \002(\0162\014.CompareType\022\037\n\ncomparator\030\005 \002(\0132" + - "\013.Comparator\"\265\006\n\rMutationProto\022\013\n\003row\030\001 " + - "\001(\014\0220\n\013mutate_type\030\002 \001(\0162\033.MutationProto", - ".MutationType\0220\n\014column_value\030\003 \003(\0132\032.Mu" + - "tationProto.ColumnValue\022\021\n\ttimestamp\030\004 \001" + - "(\004\022!\n\tattribute\030\005 \003(\0132\016.NameBytesPair\022:\n" + - "\ndurability\030\006 \001(\0162\031.MutationProto.Durabi" + - "lity:\013USE_DEFAULT\022\036\n\ntime_range\030\007 \001(\0132\n." 
+ - "TimeRange\022\035\n\025associated_cell_count\030\010 \001(\005" + - "\022\r\n\005nonce\030\t \001(\004\032\347\001\n\013ColumnValue\022\016\n\006famil" + - "y\030\001 \002(\014\022B\n\017qualifier_value\030\002 \003(\0132).Mutat" + - "ionProto.ColumnValue.QualifierValue\032\203\001\n\016" + - "QualifierValue\022\021\n\tqualifier\030\001 \001(\014\022\r\n\005val", - "ue\030\002 \001(\014\022\021\n\ttimestamp\030\003 \001(\004\022.\n\013delete_ty" + - "pe\030\004 \001(\0162\031.MutationProto.DeleteType\022\014\n\004t" + - "ags\030\005 \001(\014\"W\n\nDurability\022\017\n\013USE_DEFAULT\020\000" + - "\022\014\n\010SKIP_WAL\020\001\022\r\n\tASYNC_WAL\020\002\022\014\n\010SYNC_WA" + - "L\020\003\022\r\n\tFSYNC_WAL\020\004\">\n\014MutationType\022\n\n\006AP" + - "PEND\020\000\022\r\n\tINCREMENT\020\001\022\007\n\003PUT\020\002\022\n\n\006DELETE" + - "\020\003\"p\n\nDeleteType\022\026\n\022DELETE_ONE_VERSION\020\000" + - "\022\034\n\030DELETE_MULTIPLE_VERSIONS\020\001\022\021\n\rDELETE" + - "_FAMILY\020\002\022\031\n\025DELETE_FAMILY_VERSION\020\003\"\207\001\n" + - "\rMutateRequest\022 \n\006region\030\001 \002(\0132\020.RegionS", - "pecifier\022 \n\010mutation\030\002 \002(\0132\016.MutationPro" + - "to\022\035\n\tcondition\030\003 \001(\0132\n.Condition\022\023\n\013non" + - "ce_group\030\004 \001(\004\"<\n\016MutateResponse\022\027\n\006resu" + - "lt\030\001 \001(\0132\007.Result\022\021\n\tprocessed\030\002 \001(\010\"\344\002\n" + - "\004Scan\022\027\n\006column\030\001 \003(\0132\007.Column\022!\n\tattrib" + - "ute\030\002 \003(\0132\016.NameBytesPair\022\021\n\tstart_row\030\003" + - " \001(\014\022\020\n\010stop_row\030\004 \001(\014\022\027\n\006filter\030\005 \001(\0132\007" + - ".Filter\022\036\n\ntime_range\030\006 \001(\0132\n.TimeRange\022" + - "\027\n\014max_versions\030\007 \001(\r:\0011\022\032\n\014cache_blocks" + - "\030\010 \001(\010:\004true\022\022\n\nbatch_size\030\t \001(\r\022\027\n\017max_", - "result_size\030\n 
\001(\004\022\023\n\013store_limit\030\013 \001(\r\022\024" + - "\n\014store_offset\030\014 \001(\r\022&\n\036load_column_fami" + - "lies_on_demand\030\r \001(\010\022\r\n\005small\030\016 \001(\010\"\236\001\n\013" + - "ScanRequest\022 \n\006region\030\001 \001(\0132\020.RegionSpec" + - "ifier\022\023\n\004scan\030\002 \001(\0132\005.Scan\022\022\n\nscanner_id" + - "\030\003 \001(\004\022\026\n\016number_of_rows\030\004 \001(\r\022\025\n\rclose_" + - "scanner\030\005 \001(\010\022\025\n\rnext_call_seq\030\006 \001(\004\"y\n\014" + - "ScanResponse\022\030\n\020cells_per_result\030\001 \003(\r\022\022" + - "\n\nscanner_id\030\002 \001(\004\022\024\n\014more_results\030\003 \001(\010" + - "\022\013\n\003ttl\030\004 \001(\r\022\030\n\007results\030\005 \003(\0132\007.Result\"", - "\263\001\n\024BulkLoadHFileRequest\022 \n\006region\030\001 \002(\013" + - "2\020.RegionSpecifier\0225\n\013family_path\030\002 \003(\0132" + - " .BulkLoadHFileRequest.FamilyPath\022\026\n\016ass" + - "ign_seq_num\030\003 \001(\010\032*\n\nFamilyPath\022\016\n\006famil" + - "y\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkLoadHFileRe" + - "sponse\022\016\n\006loaded\030\001 \002(\010\"a\n\026CoprocessorSer" + - "viceCall\022\013\n\003row\030\001 \002(\014\022\024\n\014service_name\030\002 " + - "\002(\t\022\023\n\013method_name\030\003 \002(\t\022\017\n\007request\030\004 \002(" + - "\014\"d\n\031CoprocessorServiceRequest\022 \n\006region" + - "\030\001 \002(\0132\020.RegionSpecifier\022%\n\004call\030\002 \002(\0132\027", - ".CoprocessorServiceCall\"]\n\032CoprocessorSe" + - "rviceResponse\022 \n\006region\030\001 \002(\0132\020.RegionSp" + - "ecifier\022\035\n\005value\030\002 \002(\0132\016.NameBytesPair\"L" + - "\n\006Action\022\r\n\005index\030\001 \001(\r\022 \n\010mutation\030\002 \001(" + - "\0132\016.MutationProto\022\021\n\003get\030\003 \001(\0132\004.Get\"Y\n\014" + - "RegionAction\022 \n\006region\030\001 \002(\0132\020.RegionSpe" + - 
"cifier\022\016\n\006atomic\030\002 \001(\010\022\027\n\006action\030\003 \003(\0132\007" + - ".Action\"^\n\021ResultOrException\022\r\n\005index\030\001 " + - "\001(\r\022\027\n\006result\030\002 \001(\0132\007.Result\022!\n\texceptio" + - "n\030\003 \001(\0132\016.NameBytesPair\"f\n\022RegionActionR", - "esult\022-\n\021resultOrException\030\001 \003(\0132\022.Resul" + - "tOrException\022!\n\texception\030\002 \001(\0132\016.NameBy" + - "tesPair\"G\n\014MultiRequest\022#\n\014regionAction\030" + - "\001 \003(\0132\r.RegionAction\022\022\n\nnonceGroup\030\002 \001(\004" + - "\"@\n\rMultiResponse\022/\n\022regionActionResult\030" + - "\001 \003(\0132\023.RegionActionResult2\261\002\n\rClientSer" + - "vice\022 \n\003Get\022\013.GetRequest\032\014.GetResponse\022)" + - "\n\006Mutate\022\016.MutateRequest\032\017.MutateRespons" + - "e\022#\n\004Scan\022\014.ScanRequest\032\r.ScanResponse\022>" + - "\n\rBulkLoadHFile\022\025.BulkLoadHFileRequest\032\026", - ".BulkLoadHFileResponse\022F\n\013ExecService\022\032." 
+ - "CoprocessorServiceRequest\032\033.CoprocessorS" + - "erviceResponse\022&\n\005Multi\022\r.MultiRequest\032\016" + - ".MultiResponseBB\n*org.apache.hadoop.hbas" + - "e.protobuf.generatedB\014ClientProtosH\001\210\001\001\240" + - "\001\001" + "o\032\nCell.proto\032\020Comparator.proto\"\037\n\016Autho" + + "rizations\022\r\n\005label\030\001 \003(\t\"$\n\016CellVisibili" + + "ty\022\022\n\nexpression\030\001 \002(\t\"+\n\006Column\022\016\n\006fami" + + "ly\030\001 \002(\014\022\021\n\tqualifier\030\002 \003(\014\"\251\002\n\003Get\022\013\n\003r" + + "ow\030\001 \002(\014\022\027\n\006column\030\002 \003(\0132\007.Column\022!\n\tatt" + + "ribute\030\003 \003(\0132\016.NameBytesPair\022\027\n\006filter\030\004" + + " \001(\0132\007.Filter\022\036\n\ntime_range\030\005 \001(\0132\n.Time" + + "Range\022\027\n\014max_versions\030\006 \001(\r:\0011\022\032\n\014cache_" + + "blocks\030\007 \001(\010:\004true\022\023\n\013store_limit\030\010 \001(\r\022", + "\024\n\014store_offset\030\t \001(\r\022\035\n\016existence_only\030" + + "\n \001(\010:\005false\022!\n\022closest_row_before\030\013 \001(\010" + + ":\005false\"L\n\006Result\022\023\n\004cell\030\001 \003(\0132\005.Cell\022\035" + + "\n\025associated_cell_count\030\002 \001(\005\022\016\n\006exists\030" + + "\003 \001(\010\"A\n\nGetRequest\022 \n\006region\030\001 \002(\0132\020.Re" + + "gionSpecifier\022\021\n\003get\030\002 \002(\0132\004.Get\"&\n\013GetR" + + "esponse\022\027\n\006result\030\001 \001(\0132\007.Result\"\200\001\n\tCon" + + "dition\022\013\n\003row\030\001 \002(\014\022\016\n\006family\030\002 \002(\014\022\021\n\tq" + + "ualifier\030\003 \002(\014\022\"\n\014compare_type\030\004 \002(\0162\014.C" + + "ompareType\022\037\n\ncomparator\030\005 \002(\0132\013.Compara", + "tor\"\265\006\n\rMutationProto\022\013\n\003row\030\001 \001(\014\0220\n\013mu" + + "tate_type\030\002 \001(\0162\033.MutationProto.Mutation" + + "Type\0220\n\014column_value\030\003 \003(\0132\032.MutationPro" + + 
"to.ColumnValue\022\021\n\ttimestamp\030\004 \001(\004\022!\n\tatt" + + "ribute\030\005 \003(\0132\016.NameBytesPair\022:\n\ndurabili" + + "ty\030\006 \001(\0162\031.MutationProto.Durability:\013USE" + + "_DEFAULT\022\036\n\ntime_range\030\007 \001(\0132\n.TimeRange" + + "\022\035\n\025associated_cell_count\030\010 \001(\005\022\r\n\005nonce" + + "\030\t \001(\004\032\347\001\n\013ColumnValue\022\016\n\006family\030\001 \002(\014\022B" + + "\n\017qualifier_value\030\002 \003(\0132).MutationProto.", + "ColumnValue.QualifierValue\032\203\001\n\016Qualifier" + + "Value\022\021\n\tqualifier\030\001 \001(\014\022\r\n\005value\030\002 \001(\014\022" + + "\021\n\ttimestamp\030\003 \001(\004\022.\n\013delete_type\030\004 \001(\0162" + + "\031.MutationProto.DeleteType\022\014\n\004tags\030\005 \001(\014" + + "\"W\n\nDurability\022\017\n\013USE_DEFAULT\020\000\022\014\n\010SKIP_" + + "WAL\020\001\022\r\n\tASYNC_WAL\020\002\022\014\n\010SYNC_WAL\020\003\022\r\n\tFS" + + "YNC_WAL\020\004\">\n\014MutationType\022\n\n\006APPEND\020\000\022\r\n" + + "\tINCREMENT\020\001\022\007\n\003PUT\020\002\022\n\n\006DELETE\020\003\"p\n\nDel" + + "eteType\022\026\n\022DELETE_ONE_VERSION\020\000\022\034\n\030DELET" + + "E_MULTIPLE_VERSIONS\020\001\022\021\n\rDELETE_FAMILY\020\002", + "\022\031\n\025DELETE_FAMILY_VERSION\020\003\"\207\001\n\rMutateRe" + + "quest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022" + + " \n\010mutation\030\002 \002(\0132\016.MutationProto\022\035\n\tcon" + + "dition\030\003 \001(\0132\n.Condition\022\023\n\013nonce_group\030" + + "\004 \001(\004\"<\n\016MutateResponse\022\027\n\006result\030\001 \001(\0132" + + "\007.Result\022\021\n\tprocessed\030\002 \001(\010\"\344\002\n\004Scan\022\027\n\006" + + "column\030\001 \003(\0132\007.Column\022!\n\tattribute\030\002 \003(\013" + + "2\016.NameBytesPair\022\021\n\tstart_row\030\003 \001(\014\022\020\n\010s" + + "top_row\030\004 \001(\014\022\027\n\006filter\030\005 \001(\0132\007.Filter\022\036" + + 
"\n\ntime_range\030\006 \001(\0132\n.TimeRange\022\027\n\014max_ve", + "rsions\030\007 \001(\r:\0011\022\032\n\014cache_blocks\030\010 \001(\010:\004t" + + "rue\022\022\n\nbatch_size\030\t \001(\r\022\027\n\017max_result_si" + + "ze\030\n \001(\004\022\023\n\013store_limit\030\013 \001(\r\022\024\n\014store_o" + + "ffset\030\014 \001(\r\022&\n\036load_column_families_on_d" + + "emand\030\r \001(\010\022\r\n\005small\030\016 \001(\010\"\236\001\n\013ScanReque" + + "st\022 \n\006region\030\001 \001(\0132\020.RegionSpecifier\022\023\n\004" + + "scan\030\002 \001(\0132\005.Scan\022\022\n\nscanner_id\030\003 \001(\004\022\026\n" + + "\016number_of_rows\030\004 \001(\r\022\025\n\rclose_scanner\030\005" + + " \001(\010\022\025\n\rnext_call_seq\030\006 \001(\004\"y\n\014ScanRespo" + + "nse\022\030\n\020cells_per_result\030\001 \003(\r\022\022\n\nscanner", + "_id\030\002 \001(\004\022\024\n\014more_results\030\003 \001(\010\022\013\n\003ttl\030\004" + + " \001(\r\022\030\n\007results\030\005 \003(\0132\007.Result\"\263\001\n\024BulkL" + + "oadHFileRequest\022 \n\006region\030\001 \002(\0132\020.Region" + + "Specifier\0225\n\013family_path\030\002 \003(\0132 .BulkLoa" + + "dHFileRequest.FamilyPath\022\026\n\016assign_seq_n" + + "um\030\003 \001(\010\032*\n\nFamilyPath\022\016\n\006family\030\001 \002(\014\022\014" + + "\n\004path\030\002 \002(\t\"\'\n\025BulkLoadHFileResponse\022\016\n" + + "\006loaded\030\001 \002(\010\"a\n\026CoprocessorServiceCall\022" + + "\013\n\003row\030\001 \002(\014\022\024\n\014service_name\030\002 \002(\t\022\023\n\013me" + + "thod_name\030\003 \002(\t\022\017\n\007request\030\004 \002(\014\"d\n\031Copr", + "ocessorServiceRequest\022 \n\006region\030\001 \002(\0132\020." 
+ + "RegionSpecifier\022%\n\004call\030\002 \002(\0132\027.Coproces" + + "sorServiceCall\"]\n\032CoprocessorServiceResp" + + "onse\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\035" + + "\n\005value\030\002 \002(\0132\016.NameBytesPair\"L\n\006Action\022" + + "\r\n\005index\030\001 \001(\r\022 \n\010mutation\030\002 \001(\0132\016.Mutat" + + "ionProto\022\021\n\003get\030\003 \001(\0132\004.Get\"Y\n\014RegionAct" + + "ion\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\016\n" + + "\006atomic\030\002 \001(\010\022\027\n\006action\030\003 \003(\0132\007.Action\"^" + + "\n\021ResultOrException\022\r\n\005index\030\001 \001(\r\022\027\n\006re", + "sult\030\002 \001(\0132\007.Result\022!\n\texception\030\003 \001(\0132\016" + + ".NameBytesPair\"f\n\022RegionActionResult\022-\n\021" + + "resultOrException\030\001 \003(\0132\022.ResultOrExcept" + + "ion\022!\n\texception\030\002 \001(\0132\016.NameBytesPair\"G" + + "\n\014MultiRequest\022#\n\014regionAction\030\001 \003(\0132\r.R" + + "egionAction\022\022\n\nnonceGroup\030\002 \001(\004\"@\n\rMulti" + + "Response\022/\n\022regionActionResult\030\001 \003(\0132\023.R" + + "egionActionResult2\261\002\n\rClientService\022 \n\003G" + + "et\022\013.GetRequest\032\014.GetResponse\022)\n\006Mutate\022" + + "\016.MutateRequest\032\017.MutateResponse\022#\n\004Scan", + "\022\014.ScanRequest\032\r.ScanResponse\022>\n\rBulkLoa" + + "dHFile\022\025.BulkLoadHFileRequest\032\026.BulkLoad" + + "HFileResponse\022F\n\013ExecService\022\032.Coprocess" + + "orServiceRequest\032\033.CoprocessorServiceRes" + + "ponse\022&\n\005Multi\022\r.MultiRequest\032\016.MultiRes" + + "ponseBB\n*org.apache.hadoop.hbase.protobu" + + "f.generatedB\014ClientProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( 
com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; + internal_static_Authorizations_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_Authorizations_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Authorizations_descriptor, + new java.lang.String[] { "Label", }); + internal_static_CellVisibility_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_CellVisibility_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CellVisibility_descriptor, + new java.lang.String[] { "Expression", }); internal_static_Column_descriptor = - getDescriptor().getMessageTypes().get(0); + getDescriptor().getMessageTypes().get(2); internal_static_Column_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Column_descriptor, new java.lang.String[] { "Family", "Qualifier", }); internal_static_Get_descriptor = - getDescriptor().getMessageTypes().get(1); + getDescriptor().getMessageTypes().get(3); internal_static_Get_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Get_descriptor, new java.lang.String[] { "Row", "Column", "Attribute", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "StoreLimit", "StoreOffset", "ExistenceOnly", "ClosestRowBefore", }); internal_static_Result_descriptor = - getDescriptor().getMessageTypes().get(2); + getDescriptor().getMessageTypes().get(4); internal_static_Result_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Result_descriptor, new java.lang.String[] { "Cell", "AssociatedCellCount", "Exists", }); internal_static_GetRequest_descriptor = - getDescriptor().getMessageTypes().get(3); + getDescriptor().getMessageTypes().get(5); internal_static_GetRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( 
internal_static_GetRequest_descriptor, new java.lang.String[] { "Region", "Get", }); internal_static_GetResponse_descriptor = - getDescriptor().getMessageTypes().get(4); + getDescriptor().getMessageTypes().get(6); internal_static_GetResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetResponse_descriptor, new java.lang.String[] { "Result", }); internal_static_Condition_descriptor = - getDescriptor().getMessageTypes().get(5); + getDescriptor().getMessageTypes().get(7); internal_static_Condition_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Condition_descriptor, new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", }); internal_static_MutationProto_descriptor = - getDescriptor().getMessageTypes().get(6); + getDescriptor().getMessageTypes().get(8); internal_static_MutationProto_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MutationProto_descriptor, @@ -28039,37 +29131,37 @@ internal_static_MutationProto_ColumnValue_QualifierValue_descriptor, new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", "Tags", }); internal_static_MutateRequest_descriptor = - getDescriptor().getMessageTypes().get(7); + getDescriptor().getMessageTypes().get(9); internal_static_MutateRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MutateRequest_descriptor, new java.lang.String[] { "Region", "Mutation", "Condition", "NonceGroup", }); internal_static_MutateResponse_descriptor = - getDescriptor().getMessageTypes().get(8); + getDescriptor().getMessageTypes().get(10); internal_static_MutateResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MutateResponse_descriptor, new java.lang.String[] { "Result", "Processed", }); internal_static_Scan_descriptor = - 
getDescriptor().getMessageTypes().get(9); + getDescriptor().getMessageTypes().get(11); internal_static_Scan_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Scan_descriptor, new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", "LoadColumnFamiliesOnDemand", "Small", }); internal_static_ScanRequest_descriptor = - getDescriptor().getMessageTypes().get(10); + getDescriptor().getMessageTypes().get(12); internal_static_ScanRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ScanRequest_descriptor, new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", "NextCallSeq", }); internal_static_ScanResponse_descriptor = - getDescriptor().getMessageTypes().get(11); + getDescriptor().getMessageTypes().get(13); internal_static_ScanResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ScanResponse_descriptor, new java.lang.String[] { "CellsPerResult", "ScannerId", "MoreResults", "Ttl", "Results", }); internal_static_BulkLoadHFileRequest_descriptor = - getDescriptor().getMessageTypes().get(12); + getDescriptor().getMessageTypes().get(14); internal_static_BulkLoadHFileRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BulkLoadHFileRequest_descriptor, @@ -28081,61 +29173,61 @@ internal_static_BulkLoadHFileRequest_FamilyPath_descriptor, new java.lang.String[] { "Family", "Path", }); internal_static_BulkLoadHFileResponse_descriptor = - getDescriptor().getMessageTypes().get(13); + getDescriptor().getMessageTypes().get(15); internal_static_BulkLoadHFileResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_BulkLoadHFileResponse_descriptor, new java.lang.String[] { 
"Loaded", }); internal_static_CoprocessorServiceCall_descriptor = - getDescriptor().getMessageTypes().get(14); + getDescriptor().getMessageTypes().get(16); internal_static_CoprocessorServiceCall_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CoprocessorServiceCall_descriptor, new java.lang.String[] { "Row", "ServiceName", "MethodName", "Request", }); internal_static_CoprocessorServiceRequest_descriptor = - getDescriptor().getMessageTypes().get(15); + getDescriptor().getMessageTypes().get(17); internal_static_CoprocessorServiceRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CoprocessorServiceRequest_descriptor, new java.lang.String[] { "Region", "Call", }); internal_static_CoprocessorServiceResponse_descriptor = - getDescriptor().getMessageTypes().get(16); + getDescriptor().getMessageTypes().get(18); internal_static_CoprocessorServiceResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_CoprocessorServiceResponse_descriptor, new java.lang.String[] { "Region", "Value", }); internal_static_Action_descriptor = - getDescriptor().getMessageTypes().get(17); + getDescriptor().getMessageTypes().get(19); internal_static_Action_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Action_descriptor, new java.lang.String[] { "Index", "Mutation", "Get", }); internal_static_RegionAction_descriptor = - getDescriptor().getMessageTypes().get(18); + getDescriptor().getMessageTypes().get(20); internal_static_RegionAction_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionAction_descriptor, new java.lang.String[] { "Region", "Atomic", "Action", }); internal_static_ResultOrException_descriptor = - getDescriptor().getMessageTypes().get(19); + getDescriptor().getMessageTypes().get(21); 
internal_static_ResultOrException_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ResultOrException_descriptor, new java.lang.String[] { "Index", "Result", "Exception", }); internal_static_RegionActionResult_descriptor = - getDescriptor().getMessageTypes().get(20); + getDescriptor().getMessageTypes().get(22); internal_static_RegionActionResult_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RegionActionResult_descriptor, new java.lang.String[] { "ResultOrException", "Exception", }); internal_static_MultiRequest_descriptor = - getDescriptor().getMessageTypes().get(21); + getDescriptor().getMessageTypes().get(23); internal_static_MultiRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiRequest_descriptor, new java.lang.String[] { "RegionAction", "NonceGroup", }); internal_static_MultiResponse_descriptor = - getDescriptor().getMessageTypes().get(22); + getDescriptor().getMessageTypes().get(24); internal_static_MultiResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiResponse_descriptor, Index: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/VisibilityLabelsProtos.java =================================================================== --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/VisibilityLabelsProtos.java (revision 0) +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/VisibilityLabelsProtos.java (working copy) @@ -0,0 +1,5543 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: VisibilityLabels.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class VisibilityLabelsProtos { + private VisibilityLabelsProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface VisibilityLabelsRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .VisibilityLabel visLabel = 1; + /** + * repeated .VisibilityLabel visLabel = 1; + */ + java.util.List + getVisLabelList(); + /** + * repeated .VisibilityLabel visLabel = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel getVisLabel(int index); + /** + * repeated .VisibilityLabel visLabel = 1; + */ + int getVisLabelCount(); + /** + * repeated .VisibilityLabel visLabel = 1; + */ + java.util.List + getVisLabelOrBuilderList(); + /** + * repeated .VisibilityLabel visLabel = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder getVisLabelOrBuilder( + int index); + } + /** + * Protobuf type {@code VisibilityLabelsRequest} + */ + public static final class VisibilityLabelsRequest extends + com.google.protobuf.GeneratedMessage + implements VisibilityLabelsRequestOrBuilder { + // Use VisibilityLabelsRequest.newBuilder() to construct. 
+ private VisibilityLabelsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private VisibilityLabelsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final VisibilityLabelsRequest defaultInstance; + public static VisibilityLabelsRequest getDefaultInstance() { + return defaultInstance; + } + + public VisibilityLabelsRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private VisibilityLabelsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + visLabel_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + visLabel_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { + visLabel_ = java.util.Collections.unmodifiableList(visLabel_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public VisibilityLabelsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new VisibilityLabelsRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .VisibilityLabel visLabel = 1; + public static final int VISLABEL_FIELD_NUMBER = 1; + private java.util.List visLabel_; + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List getVisLabelList() { + return visLabel_; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List + getVisLabelOrBuilderList() { + return visLabel_; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public int getVisLabelCount() { + return visLabel_.size(); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel getVisLabel(int index) { + return visLabel_.get(index); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder getVisLabelOrBuilder( + int index) { + return visLabel_.get(index); + } + + private void initFields() { + visLabel_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getVisLabelCount(); i++) { + if (!getVisLabel(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < visLabel_.size(); i++) { + output.writeMessage(1, visLabel_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < visLabel_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, visLabel_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest) obj; + + boolean result = true; + result = result && getVisLabelList() + .equals(other.getVisLabelList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getVisLabelCount() > 0) { + hash = (37 * hash) + VISLABEL_FIELD_NUMBER; + hash = (53 * hash) + getVisLabelList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public 
static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + 
protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code VisibilityLabelsRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getVisLabelFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (visLabelBuilder_ == null) { + visLabel_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + visLabelBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + 
public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest(this); + int from_bitField0_ = bitField0_; + if (visLabelBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + visLabel_ = java.util.Collections.unmodifiableList(visLabel_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.visLabel_ = visLabel_; + } else { + result.visLabel_ = visLabelBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.getDefaultInstance()) return this; + if (visLabelBuilder_ == null) { + if (!other.visLabel_.isEmpty()) { + if (visLabel_.isEmpty()) { + visLabel_ = other.visLabel_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureVisLabelIsMutable(); + visLabel_.addAll(other.visLabel_); + } + onChanged(); + } + } else { + if (!other.visLabel_.isEmpty()) { + if (visLabelBuilder_.isEmpty()) { + visLabelBuilder_.dispose(); + visLabelBuilder_ = null; + visLabel_ = other.visLabel_; + bitField0_ = (bitField0_ & ~0x00000001); + visLabelBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getVisLabelFieldBuilder() : null; + } else { + visLabelBuilder_.addAllMessages(other.visLabel_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getVisLabelCount(); i++) { + if (!getVisLabel(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .VisibilityLabel visLabel = 1; + private java.util.List visLabel_ = + java.util.Collections.emptyList(); + private void ensureVisLabelIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + visLabel_ = new 
java.util.ArrayList(visLabel_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder> visLabelBuilder_; + + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List getVisLabelList() { + if (visLabelBuilder_ == null) { + return java.util.Collections.unmodifiableList(visLabel_); + } else { + return visLabelBuilder_.getMessageList(); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public int getVisLabelCount() { + if (visLabelBuilder_ == null) { + return visLabel_.size(); + } else { + return visLabelBuilder_.getCount(); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel getVisLabel(int index) { + if (visLabelBuilder_ == null) { + return visLabel_.get(index); + } else { + return visLabelBuilder_.getMessage(index); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder setVisLabel( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel value) { + if (visLabelBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureVisLabelIsMutable(); + visLabel_.set(index, value); + onChanged(); + } else { + visLabelBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder setVisLabel( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder builderForValue) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + visLabel_.set(index, builderForValue.build()); + onChanged(); + } else { + visLabelBuilder_.setMessage(index, 
builderForValue.build()); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addVisLabel(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel value) { + if (visLabelBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureVisLabelIsMutable(); + visLabel_.add(value); + onChanged(); + } else { + visLabelBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addVisLabel( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel value) { + if (visLabelBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureVisLabelIsMutable(); + visLabel_.add(index, value); + onChanged(); + } else { + visLabelBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addVisLabel( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder builderForValue) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + visLabel_.add(builderForValue.build()); + onChanged(); + } else { + visLabelBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addVisLabel( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder builderForValue) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + visLabel_.add(index, builderForValue.build()); + onChanged(); + } else { + visLabelBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addAllVisLabel( + java.lang.Iterable values) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + super.addAll(values, visLabel_); + onChanged(); + } else { + 
visLabelBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder clearVisLabel() { + if (visLabelBuilder_ == null) { + visLabel_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + visLabelBuilder_.clear(); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder removeVisLabel(int index) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + visLabel_.remove(index); + onChanged(); + } else { + visLabelBuilder_.remove(index); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder getVisLabelBuilder( + int index) { + return getVisLabelFieldBuilder().getBuilder(index); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder getVisLabelOrBuilder( + int index) { + if (visLabelBuilder_ == null) { + return visLabel_.get(index); } else { + return visLabelBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List + getVisLabelOrBuilderList() { + if (visLabelBuilder_ != null) { + return visLabelBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(visLabel_); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder addVisLabelBuilder() { + return getVisLabelFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.getDefaultInstance()); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder addVisLabelBuilder( + int 
index) { + return getVisLabelFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.getDefaultInstance()); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List + getVisLabelBuilderList() { + return getVisLabelFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder> + getVisLabelFieldBuilder() { + if (visLabelBuilder_ == null) { + visLabelBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder>( + visLabel_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + visLabel_ = null; + } + return visLabelBuilder_; + } + + // @@protoc_insertion_point(builder_scope:VisibilityLabelsRequest) + } + + static { + defaultInstance = new VisibilityLabelsRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:VisibilityLabelsRequest) + } + + public interface VisibilityLabelOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes label = 1; + /** + * required bytes label = 1; + */ + boolean hasLabel(); + /** + * required bytes label = 1; + */ + com.google.protobuf.ByteString getLabel(); + + // optional uint32 ordinal = 2; + /** + * optional uint32 ordinal = 2; + */ + boolean hasOrdinal(); + /** + * optional uint32 ordinal = 2; + */ + int getOrdinal(); + } + /** + * Protobuf type {@code VisibilityLabel} + */ + public static final class 
VisibilityLabel extends + com.google.protobuf.GeneratedMessage + implements VisibilityLabelOrBuilder { + // Use VisibilityLabel.newBuilder() to construct. + private VisibilityLabel(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private VisibilityLabel(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final VisibilityLabel defaultInstance; + public static VisibilityLabel getDefaultInstance() { + return defaultInstance; + } + + public VisibilityLabel getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private VisibilityLabel( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + label_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + ordinal_ = input.readUInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } 
+ public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public VisibilityLabel parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new VisibilityLabel(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes label = 1; + public static final int LABEL_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString label_; + /** + * required bytes label = 1; + */ + public boolean hasLabel() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes label = 1; + */ + public com.google.protobuf.ByteString getLabel() { + return label_; + } + + // optional uint32 ordinal = 2; + public static final int ORDINAL_FIELD_NUMBER = 2; + private int ordinal_; + /** + * optional uint32 ordinal = 2; + */ + public boolean hasOrdinal() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional uint32 ordinal = 2; + */ + public int getOrdinal() { + return ordinal_; + } + + private void initFields() { + label_ = com.google.protobuf.ByteString.EMPTY; + ordinal_ = 0; + } + private 
byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLabel()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, label_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, ordinal_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, label_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, ordinal_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel) obj; + + boolean result = true; + result = result && (hasLabel() == other.hasLabel()); + if (hasLabel()) { + result = result && getLabel() + .equals(other.getLabel()); + } + result = result && 
(hasOrdinal() == other.hasOrdinal()); + if (hasOrdinal()) { + result = result && (getOrdinal() + == other.getOrdinal()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLabel()) { + hash = (37 * hash) + LABEL_FIELD_NUMBER; + hash = (53 * hash) + getLabel().hashCode(); + } + if (hasOrdinal()) { + hash = (37 * hash) + ORDINAL_FIELD_NUMBER; + hash = (53 * hash) + getOrdinal(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom(java.io.InputStream 
input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf 
type {@code VisibilityLabel} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + label_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + ordinal_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_descriptor; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.label_ = label_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.ordinal_ = ordinal_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.getDefaultInstance()) return this; + if (other.hasLabel()) { + setLabel(other.getLabel()); + } + if (other.hasOrdinal()) { + setOrdinal(other.getOrdinal()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return 
this; + } + + public final boolean isInitialized() { + if (!hasLabel()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes label = 1; + private com.google.protobuf.ByteString label_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes label = 1; + */ + public boolean hasLabel() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes label = 1; + */ + public com.google.protobuf.ByteString getLabel() { + return label_; + } + /** + * required bytes label = 1; + */ + public Builder setLabel(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + label_ = value; + onChanged(); + return this; + } + /** + * required bytes label = 1; + */ + public Builder clearLabel() { + bitField0_ = (bitField0_ & ~0x00000001); + label_ = getDefaultInstance().getLabel(); + onChanged(); + return this; + } + + // optional uint32 ordinal = 2; + private int ordinal_ ; + /** + * optional uint32 ordinal = 2; + */ + public boolean hasOrdinal() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional uint32 ordinal = 2; + */ + public int getOrdinal() { + return ordinal_; + } + /** + * optional uint32 ordinal = 2; + */ + public Builder setOrdinal(int value) { + bitField0_ |= 
0x00000002; + ordinal_ = value; + onChanged(); + return this; + } + /** + * optional uint32 ordinal = 2; + */ + public Builder clearOrdinal() { + bitField0_ = (bitField0_ & ~0x00000002); + ordinal_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:VisibilityLabel) + } + + static { + defaultInstance = new VisibilityLabel(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:VisibilityLabel) + } + + public interface VisibilityLabelsResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .RegionActionResult result = 1; + /** + * repeated .RegionActionResult result = 1; + */ + java.util.List + getResultList(); + /** + * repeated .RegionActionResult result = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getResult(int index); + /** + * repeated .RegionActionResult result = 1; + */ + int getResultCount(); + /** + * repeated .RegionActionResult result = 1; + */ + java.util.List + getResultOrBuilderList(); + /** + * repeated .RegionActionResult result = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getResultOrBuilder( + int index); + } + /** + * Protobuf type {@code VisibilityLabelsResponse} + */ + public static final class VisibilityLabelsResponse extends + com.google.protobuf.GeneratedMessage + implements VisibilityLabelsResponseOrBuilder { + // Use VisibilityLabelsResponse.newBuilder() to construct. 
+ private VisibilityLabelsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private VisibilityLabelsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final VisibilityLabelsResponse defaultInstance; + public static VisibilityLabelsResponse getDefaultInstance() { + return defaultInstance; + } + + public VisibilityLabelsResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private VisibilityLabelsResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + result_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public VisibilityLabelsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new VisibilityLabelsResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .RegionActionResult result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private java.util.List result_; + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List getResultList() { + return result_; + } + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List + getResultOrBuilderList() { + return result_; + } + /** + * repeated .RegionActionResult result = 1; + */ + public int getResultCount() { + return result_.size(); + } + /** + * repeated .RegionActionResult result = 1; + */ + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getResult(int index) { + return result_.get(index); + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getResultOrBuilder( + int index) { + return result_.get(index); + } + + private void initFields() { + result_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < result_.size(); i++) { + output.writeMessage(1, result_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < result_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) obj; + + boolean result = true; + result = result && getResultList() + .equals(other.getResultList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getResultCount() > 0) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResultList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + 
protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code VisibilityLabelsResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + resultBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + 
public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse(this); + int from_bitField0_ = bitField0_; + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance()) return this; + if (resultBuilder_ == null) { + if (!other.result_.isEmpty()) { + if (result_.isEmpty()) { + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResultIsMutable(); + result_.addAll(other.result_); + } + onChanged(); + } + } else { + if (!other.result_.isEmpty()) { + if (resultBuilder_.isEmpty()) { + resultBuilder_.dispose(); + resultBuilder_ = null; + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + resultBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getResultFieldBuilder() : null; + } else { + resultBuilder_.addAllMessages(other.result_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .RegionActionResult result = 1; + private java.util.List result_ = + java.util.Collections.emptyList(); + private void ensureResultIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(result_); + bitField0_ |= 
0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> resultBuilder_; + + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List getResultList() { + if (resultBuilder_ == null) { + return java.util.Collections.unmodifiableList(result_); + } else { + return resultBuilder_.getMessageList(); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public int getResultCount() { + if (resultBuilder_ == null) { + return result_.size(); + } else { + return resultBuilder_.getCount(); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getResult(int index) { + if (resultBuilder_ == null) { + return result_.get(index); + } else { + return resultBuilder_.getMessage(index); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.set(index, value); + onChanged(); + } else { + resultBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.set(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder 
addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(value); + onChanged(); + } else { + resultBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(index, value); + onChanged(); + } else { + resultBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder addResult( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder addAllResult( + java.lang.Iterable values) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + super.addAll(values, result_); + onChanged(); + } else { + resultBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = 
java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + resultBuilder_.clear(); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder removeResult(int index) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.remove(index); + onChanged(); + } else { + resultBuilder_.remove(index); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder getResultBuilder( + int index) { + return getResultFieldBuilder().getBuilder(index); + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getResultOrBuilder( + int index) { + if (resultBuilder_ == null) { + return result_.get(index); } else { + return resultBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List + getResultOrBuilderList() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(result_); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addResultBuilder() { + return getResultFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()); + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addResultBuilder( + int index) { + return getResultFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()); + } + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List + getResultBuilderList() { 
+ return getResultFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>( + result_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // @@protoc_insertion_point(builder_scope:VisibilityLabelsResponse) + } + + static { + defaultInstance = new VisibilityLabelsResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:VisibilityLabelsResponse) + } + + public interface SetAuthsRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes user = 1; + /** + * required bytes user = 1; + */ + boolean hasUser(); + /** + * required bytes user = 1; + */ + com.google.protobuf.ByteString getUser(); + + // repeated bytes auth = 2; + /** + * repeated bytes auth = 2; + */ + java.util.List getAuthList(); + /** + * repeated bytes auth = 2; + */ + int getAuthCount(); + /** + * repeated bytes auth = 2; + */ + com.google.protobuf.ByteString getAuth(int index); + } + /** + * Protobuf type {@code SetAuthsRequest} + */ + public static final class SetAuthsRequest extends + com.google.protobuf.GeneratedMessage + implements SetAuthsRequestOrBuilder { + // Use SetAuthsRequest.newBuilder() to construct. 
+ private SetAuthsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private SetAuthsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final SetAuthsRequest defaultInstance; + public static SetAuthsRequest getDefaultInstance() { + return defaultInstance; + } + + public SetAuthsRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SetAuthsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + user_ = input.readBytes(); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + auth_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = java.util.Collections.unmodifiableList(auth_); + } + 
this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_SetAuthsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_SetAuthsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SetAuthsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetAuthsRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes user = 1; + public static final int USER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString user_; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + + // repeated bytes auth = 2; + public static final int AUTH_FIELD_NUMBER = 2; + private java.util.List auth_; + /** + * repeated bytes auth = 2; + */ + public java.util.List + getAuthList() { + return auth_; + } + /** + * repeated bytes auth = 2; + */ + public int getAuthCount() { + return auth_.size(); + } + /** + * repeated bytes auth = 2; + */ + public 
com.google.protobuf.ByteString getAuth(int index) { + return auth_.get(index); + } + + private void initFields() { + user_ = com.google.protobuf.ByteString.EMPTY; + auth_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasUser()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, user_); + } + for (int i = 0; i < auth_.size(); i++) { + output.writeBytes(2, auth_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, user_); + } + { + int dataSize = 0; + for (int i = 0; i < auth_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(auth_.get(i)); + } + size += dataSize; + size += 1 * getAuthList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest) obj; + + boolean result = true; + result = result && (hasUser() == other.hasUser()); + if (hasUser()) { + result = result && getUser() + .equals(other.getUser()); + } + result = result && getAuthList() + .equals(other.getAuthList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasUser()) { + hash = (37 * hash) + USER_FIELD_NUMBER; + hash = (53 * hash) + getUser().hashCode(); + } + if (getAuthCount() > 0) { + hash = (37 * hash) + AUTH_FIELD_NUMBER; + hash = (53 * hash) + getAuthList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest prototype) { + return 
newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code SetAuthsRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_SetAuthsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_SetAuthsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + user_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + auth_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + 
return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_SetAuthsRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.user_ = user_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = java.util.Collections.unmodifiableList(auth_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.auth_ = auth_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest other) { + 
if (other == org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.getDefaultInstance()) return this; + if (other.hasUser()) { + setUser(other.getUser()); + } + if (!other.auth_.isEmpty()) { + if (auth_.isEmpty()) { + auth_ = other.auth_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureAuthIsMutable(); + auth_.addAll(other.auth_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasUser()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes user = 1; + private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + /** + * required bytes user = 1; + */ + public Builder setUser(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + user_ = value; + onChanged(); + return this; + } + /** + * required bytes user = 1; + */ + public Builder clearUser() { + bitField0_ = (bitField0_ & ~0x00000001); + user_ = 
getDefaultInstance().getUser(); + onChanged(); + return this; + } + + // repeated bytes auth = 2; + private java.util.List auth_ = java.util.Collections.emptyList(); + private void ensureAuthIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = new java.util.ArrayList(auth_); + bitField0_ |= 0x00000002; + } + } + /** + * repeated bytes auth = 2; + */ + public java.util.List + getAuthList() { + return java.util.Collections.unmodifiableList(auth_); + } + /** + * repeated bytes auth = 2; + */ + public int getAuthCount() { + return auth_.size(); + } + /** + * repeated bytes auth = 2; + */ + public com.google.protobuf.ByteString getAuth(int index) { + return auth_.get(index); + } + /** + * repeated bytes auth = 2; + */ + public Builder setAuth( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureAuthIsMutable(); + auth_.set(index, value); + onChanged(); + return this; + } + /** + * repeated bytes auth = 2; + */ + public Builder addAuth(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureAuthIsMutable(); + auth_.add(value); + onChanged(); + return this; + } + /** + * repeated bytes auth = 2; + */ + public Builder addAllAuth( + java.lang.Iterable values) { + ensureAuthIsMutable(); + super.addAll(values, auth_); + onChanged(); + return this; + } + /** + * repeated bytes auth = 2; + */ + public Builder clearAuth() { + auth_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SetAuthsRequest) + } + + static { + defaultInstance = new SetAuthsRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SetAuthsRequest) + } + + public interface UserAuthorizationsOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes user = 1; + /** + * required bytes user = 1; + 
*/ + boolean hasUser(); + /** + * required bytes user = 1; + */ + com.google.protobuf.ByteString getUser(); + + // repeated uint32 auth = 2; + /** + * repeated uint32 auth = 2; + */ + java.util.List getAuthList(); + /** + * repeated uint32 auth = 2; + */ + int getAuthCount(); + /** + * repeated uint32 auth = 2; + */ + int getAuth(int index); + } + /** + * Protobuf type {@code UserAuthorizations} + */ + public static final class UserAuthorizations extends + com.google.protobuf.GeneratedMessage + implements UserAuthorizationsOrBuilder { + // Use UserAuthorizations.newBuilder() to construct. + private UserAuthorizations(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private UserAuthorizations(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final UserAuthorizations defaultInstance; + public static UserAuthorizations getDefaultInstance() { + return defaultInstance; + } + + public UserAuthorizations getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private UserAuthorizations( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + user_ = 
input.readBytes(); + break; + } + case 16: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + auth_.add(input.readUInt32()); + break; + } + case 18: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) { + auth_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + while (input.getBytesUntilLimit() > 0) { + auth_.add(input.readUInt32()); + } + input.popLimit(limit); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = java.util.Collections.unmodifiableList(auth_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_UserAuthorizations_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_UserAuthorizations_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public UserAuthorizations parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UserAuthorizations(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes user = 1; + public static final int USER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString user_; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + + // repeated uint32 auth = 2; + public static final int AUTH_FIELD_NUMBER = 2; + private java.util.List auth_; + /** + * repeated uint32 auth = 2; + */ + public java.util.List + getAuthList() { + return auth_; + } + /** + * repeated uint32 auth = 2; + */ + public int getAuthCount() { + return auth_.size(); + } + /** + * repeated uint32 auth = 2; + */ + public int getAuth(int index) { + return auth_.get(index); + } + + private void initFields() { + user_ = com.google.protobuf.ByteString.EMPTY; + auth_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasUser()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, user_); + } + for (int i = 0; i < auth_.size(); i++) { + output.writeUInt32(2, auth_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if 
(size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, user_); + } + { + int dataSize = 0; + for (int i = 0; i < auth_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeUInt32SizeNoTag(auth_.get(i)); + } + size += dataSize; + size += 1 * getAuthList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations) obj; + + boolean result = true; + result = result && (hasUser() == other.hasUser()); + if (hasUser()) { + result = result && getUser() + .equals(other.getUser()); + } + result = result && getAuthList() + .equals(other.getAuthList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasUser()) { + hash = (37 * hash) + USER_FIELD_NUMBER; + hash = (53 * hash) + getUser().hashCode(); + } + if (getAuthCount() > 0) { + hash = (37 * hash) + AUTH_FIELD_NUMBER; + hash = (53 * hash) + getAuthList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode 
= hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseDelimitedFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code UserAuthorizations} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_UserAuthorizations_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_UserAuthorizations_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + user_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + auth_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_UserAuthorizations_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations buildPartial() { + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.user_ = user_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = java.util.Collections.unmodifiableList(auth_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.auth_ = auth_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.getDefaultInstance()) return this; + if (other.hasUser()) { + setUser(other.getUser()); + } + if (!other.auth_.isEmpty()) { + if (auth_.isEmpty()) { + auth_ = other.auth_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureAuthIsMutable(); + auth_.addAll(other.auth_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasUser()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parsedMessage = null; + try { + parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes user = 1; + private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + /** + * required bytes user = 1; + */ + public Builder setUser(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + user_ = value; + onChanged(); + return this; + } + /** + * required bytes user = 1; + */ + public Builder clearUser() { + bitField0_ = (bitField0_ & ~0x00000001); + user_ = getDefaultInstance().getUser(); + onChanged(); + return this; + } + + // repeated uint32 auth = 2; + private java.util.List auth_ = java.util.Collections.emptyList(); + private void ensureAuthIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = new java.util.ArrayList(auth_); + bitField0_ |= 0x00000002; + } + } + /** + * repeated uint32 auth = 2; + */ + public java.util.List + getAuthList() { + return java.util.Collections.unmodifiableList(auth_); + } + /** + * repeated uint32 auth = 2; + */ + public int getAuthCount() { + return auth_.size(); + } + /** + * repeated uint32 auth = 2; + */ + public int getAuth(int index) { + return auth_.get(index); + } + /** + * repeated uint32 auth = 2; + */ + public Builder setAuth( + int index, int value) { + ensureAuthIsMutable(); + auth_.set(index, value); + onChanged(); + return this; + } + /** + * repeated 
uint32 auth = 2; + */ + public Builder addAuth(int value) { + ensureAuthIsMutable(); + auth_.add(value); + onChanged(); + return this; + } + /** + * repeated uint32 auth = 2; + */ + public Builder addAllAuth( + java.lang.Iterable values) { + ensureAuthIsMutable(); + super.addAll(values, auth_); + onChanged(); + return this; + } + /** + * repeated uint32 auth = 2; + */ + public Builder clearAuth() { + auth_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:UserAuthorizations) + } + + static { + defaultInstance = new UserAuthorizations(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UserAuthorizations) + } + + public interface MultiUserAuthorizationsOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .UserAuthorizations userAuths = 1; + /** + * repeated .UserAuthorizations userAuths = 1; + */ + java.util.List + getUserAuthsList(); + /** + * repeated .UserAuthorizations userAuths = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations getUserAuths(int index); + /** + * repeated .UserAuthorizations userAuths = 1; + */ + int getUserAuthsCount(); + /** + * repeated .UserAuthorizations userAuths = 1; + */ + java.util.List + getUserAuthsOrBuilderList(); + /** + * repeated .UserAuthorizations userAuths = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder getUserAuthsOrBuilder( + int index); + } + /** + * Protobuf type {@code MultiUserAuthorizations} + */ + public static final class MultiUserAuthorizations extends + com.google.protobuf.GeneratedMessage + implements MultiUserAuthorizationsOrBuilder { + // Use MultiUserAuthorizations.newBuilder() to construct. 
+ private MultiUserAuthorizations(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private MultiUserAuthorizations(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final MultiUserAuthorizations defaultInstance; + public static MultiUserAuthorizations getDefaultInstance() { + return defaultInstance; + } + + public MultiUserAuthorizations getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiUserAuthorizations( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + userAuths_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + userAuths_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { + userAuths_ = java.util.Collections.unmodifiableList(userAuths_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_MultiUserAuthorizations_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_MultiUserAuthorizations_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiUserAuthorizations parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiUserAuthorizations(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .UserAuthorizations userAuths = 1; + public static final int USERAUTHS_FIELD_NUMBER = 1; + private java.util.List userAuths_; + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public java.util.List getUserAuthsList() { + return userAuths_; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public java.util.List + getUserAuthsOrBuilderList() { + return userAuths_; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public int getUserAuthsCount() { + return userAuths_.size(); + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations getUserAuths(int index) { + return userAuths_.get(index); + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder getUserAuthsOrBuilder( + int index) { + return userAuths_.get(index); + } + + private void initFields() { + userAuths_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getUserAuthsCount(); i++) { + if (!getUserAuths(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < userAuths_.size(); i++) { + output.writeMessage(1, userAuths_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < userAuths_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, userAuths_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations) obj; + + boolean result = true; + result = result && getUserAuthsList() + .equals(other.getUserAuthsList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getUserAuthsCount() > 0) { + hash = (37 * hash) + USERAUTHS_FIELD_NUMBER; + hash = (53 * hash) + getUserAuthsList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + 
public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + 
protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code MultiUserAuthorizations} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizationsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_MultiUserAuthorizations_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_MultiUserAuthorizations_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getUserAuthsFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (userAuthsBuilder_ == null) { + userAuths_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + userAuthsBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + 
+ public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_MultiUserAuthorizations_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations(this); + int from_bitField0_ = bitField0_; + if (userAuthsBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + userAuths_ = java.util.Collections.unmodifiableList(userAuths_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.userAuths_ = userAuths_; + } else { + result.userAuths_ = userAuthsBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.getDefaultInstance()) return this; + if (userAuthsBuilder_ == null) { + if (!other.userAuths_.isEmpty()) { + if (userAuths_.isEmpty()) { + userAuths_ = other.userAuths_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureUserAuthsIsMutable(); + userAuths_.addAll(other.userAuths_); + } + onChanged(); + } + } else { + if (!other.userAuths_.isEmpty()) { + if (userAuthsBuilder_.isEmpty()) { + userAuthsBuilder_.dispose(); + userAuthsBuilder_ = null; + userAuths_ = other.userAuths_; + bitField0_ = (bitField0_ & ~0x00000001); + userAuthsBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getUserAuthsFieldBuilder() : null; + } else { + userAuthsBuilder_.addAllMessages(other.userAuths_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getUserAuthsCount(); i++) { + if (!getUserAuths(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .UserAuthorizations userAuths = 1; + private java.util.List userAuths_ = + java.util.Collections.emptyList(); + private void ensureUserAuthsIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) 
{ + userAuths_ = new java.util.ArrayList(userAuths_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder> userAuthsBuilder_; + + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public java.util.List getUserAuthsList() { + if (userAuthsBuilder_ == null) { + return java.util.Collections.unmodifiableList(userAuths_); + } else { + return userAuthsBuilder_.getMessageList(); + } + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public int getUserAuthsCount() { + if (userAuthsBuilder_ == null) { + return userAuths_.size(); + } else { + return userAuthsBuilder_.getCount(); + } + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations getUserAuths(int index) { + if (userAuthsBuilder_ == null) { + return userAuths_.get(index); + } else { + return userAuthsBuilder_.getMessage(index); + } + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder setUserAuths( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations value) { + if (userAuthsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureUserAuthsIsMutable(); + userAuths_.set(index, value); + onChanged(); + } else { + userAuthsBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder setUserAuths( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder builderForValue) { + if (userAuthsBuilder_ == null) { + ensureUserAuthsIsMutable(); + userAuths_.set(index, 
builderForValue.build()); + onChanged(); + } else { + userAuthsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder addUserAuths(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations value) { + if (userAuthsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureUserAuthsIsMutable(); + userAuths_.add(value); + onChanged(); + } else { + userAuthsBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder addUserAuths( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations value) { + if (userAuthsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureUserAuthsIsMutable(); + userAuths_.add(index, value); + onChanged(); + } else { + userAuthsBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder addUserAuths( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder builderForValue) { + if (userAuthsBuilder_ == null) { + ensureUserAuthsIsMutable(); + userAuths_.add(builderForValue.build()); + onChanged(); + } else { + userAuthsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder addUserAuths( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder builderForValue) { + if (userAuthsBuilder_ == null) { + ensureUserAuthsIsMutable(); + userAuths_.add(index, builderForValue.build()); + onChanged(); + } else { + userAuthsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder addAllUserAuths( + java.lang.Iterable values) { 
+ if (userAuthsBuilder_ == null) { + ensureUserAuthsIsMutable(); + super.addAll(values, userAuths_); + onChanged(); + } else { + userAuthsBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder clearUserAuths() { + if (userAuthsBuilder_ == null) { + userAuths_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + userAuthsBuilder_.clear(); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder removeUserAuths(int index) { + if (userAuthsBuilder_ == null) { + ensureUserAuthsIsMutable(); + userAuths_.remove(index); + onChanged(); + } else { + userAuthsBuilder_.remove(index); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder getUserAuthsBuilder( + int index) { + return getUserAuthsFieldBuilder().getBuilder(index); + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder getUserAuthsOrBuilder( + int index) { + if (userAuthsBuilder_ == null) { + return userAuths_.get(index); } else { + return userAuthsBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public java.util.List + getUserAuthsOrBuilderList() { + if (userAuthsBuilder_ != null) { + return userAuthsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(userAuths_); + } + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder addUserAuthsBuilder() { + return getUserAuthsFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.getDefaultInstance()); 
+ } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder addUserAuthsBuilder( + int index) { + return getUserAuthsFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.getDefaultInstance()); + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public java.util.List + getUserAuthsBuilderList() { + return getUserAuthsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder> + getUserAuthsFieldBuilder() { + if (userAuthsBuilder_ == null) { + userAuthsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder>( + userAuths_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + userAuths_ = null; + } + return userAuthsBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MultiUserAuthorizations) + } + + static { + defaultInstance = new MultiUserAuthorizations(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiUserAuthorizations) + } + + public interface GetAuthsRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes user = 1; + /** + * required bytes user = 1; + */ + boolean hasUser(); + /** + * required bytes user = 1; + */ + com.google.protobuf.ByteString getUser(); + } + /** + * Protobuf type {@code 
GetAuthsRequest} + */ + public static final class GetAuthsRequest extends + com.google.protobuf.GeneratedMessage + implements GetAuthsRequestOrBuilder { + // Use GetAuthsRequest.newBuilder() to construct. + private GetAuthsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private GetAuthsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final GetAuthsRequest defaultInstance; + public static GetAuthsRequest getDefaultInstance() { + return defaultInstance; + } + + public GetAuthsRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetAuthsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + user_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final 
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetAuthsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetAuthsRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes user = 1; + public static final int USER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString user_; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + + private void initFields() { + user_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasUser()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws 
java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, user_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, user_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest) obj; + + boolean result = true; + result = result && (hasUser() == other.hasUser()); + if (hasUser()) { + result = result && getUser() + .equals(other.getUser()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasUser()) { + hash = (37 * hash) + USER_FIELD_NUMBER; + hash = (53 * hash) + getUser().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest 
parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code GetAuthsRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.class, 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + user_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 
0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.user_ = user_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.getDefaultInstance()) return this; + if (other.hasUser()) { + setUser(other.getUser()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasUser()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes user = 1; + private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString 
getUser() { + return user_; + } + /** + * required bytes user = 1; + */ + public Builder setUser(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + user_ = value; + onChanged(); + return this; + } + /** + * required bytes user = 1; + */ + public Builder clearUser() { + bitField0_ = (bitField0_ & ~0x00000001); + user_ = getDefaultInstance().getUser(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetAuthsRequest) + } + + static { + defaultInstance = new GetAuthsRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetAuthsRequest) + } + + public interface GetAuthsResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes user = 1; + /** + * required bytes user = 1; + */ + boolean hasUser(); + /** + * required bytes user = 1; + */ + com.google.protobuf.ByteString getUser(); + + // repeated bytes auth = 2; + /** + * repeated bytes auth = 2; + */ + java.util.List getAuthList(); + /** + * repeated bytes auth = 2; + */ + int getAuthCount(); + /** + * repeated bytes auth = 2; + */ + com.google.protobuf.ByteString getAuth(int index); + } + /** + * Protobuf type {@code GetAuthsResponse} + */ + public static final class GetAuthsResponse extends + com.google.protobuf.GeneratedMessage + implements GetAuthsResponseOrBuilder { + // Use GetAuthsResponse.newBuilder() to construct. 
+ private GetAuthsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private GetAuthsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final GetAuthsResponse defaultInstance; + public static GetAuthsResponse getDefaultInstance() { + return defaultInstance; + } + + public GetAuthsResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetAuthsResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + user_ = input.readBytes(); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + auth_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = java.util.Collections.unmodifiableList(auth_); + } + 
this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetAuthsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetAuthsResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes user = 1; + public static final int USER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString user_; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + + // repeated bytes auth = 2; + public static final int AUTH_FIELD_NUMBER = 2; + private java.util.List auth_; + /** + * repeated bytes auth = 2; + */ + public java.util.List + getAuthList() { + return auth_; + } + /** + * repeated bytes auth = 2; + */ + public int getAuthCount() { + return auth_.size(); + } + /** + * repeated bytes auth = 2; + */ + public 
com.google.protobuf.ByteString getAuth(int index) { + return auth_.get(index); + } + + private void initFields() { + user_ = com.google.protobuf.ByteString.EMPTY; + auth_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasUser()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, user_); + } + for (int i = 0; i < auth_.size(); i++) { + output.writeBytes(2, auth_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, user_); + } + { + int dataSize = 0; + for (int i = 0; i < auth_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(auth_.get(i)); + } + size += dataSize; + size += 1 * getAuthList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse) obj; + + boolean result = true; + result = result && (hasUser() == other.hasUser()); + if (hasUser()) { + result = result && getUser() + .equals(other.getUser()); + } + result = result && getAuthList() + .equals(other.getAuthList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasUser()) { + hash = (37 * hash) + USER_FIELD_NUMBER; + hash = (53 * hash) + getUser().hashCode(); + } + if (getAuthCount() > 0) { + hash = (37 * hash) + AUTH_FIELD_NUMBER; + hash = (53 * hash) + getAuthList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse prototype) { + 
return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code GetAuthsResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + user_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + auth_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder 
clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.user_ = user_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = java.util.Collections.unmodifiableList(auth_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.auth_ = auth_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance()) return this; + if (other.hasUser()) { + setUser(other.getUser()); + } + if (!other.auth_.isEmpty()) { + if (auth_.isEmpty()) { + auth_ = other.auth_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureAuthIsMutable(); + auth_.addAll(other.auth_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasUser()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes user = 1; + private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + /** + * required bytes user = 1; + */ + public Builder setUser(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + user_ = value; + onChanged(); + return this; + } + /** + * required bytes user 
= 1; + */ + public Builder clearUser() { + bitField0_ = (bitField0_ & ~0x00000001); + user_ = getDefaultInstance().getUser(); + onChanged(); + return this; + } + + // repeated bytes auth = 2; + private java.util.List auth_ = java.util.Collections.emptyList(); + private void ensureAuthIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = new java.util.ArrayList(auth_); + bitField0_ |= 0x00000002; + } + } + /** + * repeated bytes auth = 2; + */ + public java.util.List + getAuthList() { + return java.util.Collections.unmodifiableList(auth_); + } + /** + * repeated bytes auth = 2; + */ + public int getAuthCount() { + return auth_.size(); + } + /** + * repeated bytes auth = 2; + */ + public com.google.protobuf.ByteString getAuth(int index) { + return auth_.get(index); + } + /** + * repeated bytes auth = 2; + */ + public Builder setAuth( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureAuthIsMutable(); + auth_.set(index, value); + onChanged(); + return this; + } + /** + * repeated bytes auth = 2; + */ + public Builder addAuth(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureAuthIsMutable(); + auth_.add(value); + onChanged(); + return this; + } + /** + * repeated bytes auth = 2; + */ + public Builder addAllAuth( + java.lang.Iterable values) { + ensureAuthIsMutable(); + super.addAll(values, auth_); + onChanged(); + return this; + } + /** + * repeated bytes auth = 2; + */ + public Builder clearAuth() { + auth_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetAuthsResponse) + } + + static { + defaultInstance = new GetAuthsResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetAuthsResponse) + } + + /** + * Protobuf service {@code VisibilityLabelsService} + */ + 
public static abstract class VisibilityLabelsService + implements com.google.protobuf.Service { + protected VisibilityLabelsService() {} + + public interface Interface { + /** + * rpc addLabels(.VisibilityLabelsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc setAuths(.SetAuthsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void setAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc clearAuths(.SetAuthsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void clearAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc getAuths(.GetAuthsRequest) returns (.GetAuthsResponse); + */ + public abstract void getAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new VisibilityLabelsService() { + @java.lang.Override + public void addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request, + com.google.protobuf.RpcCallback done) { + impl.addLabels(controller, request, done); + } + + @java.lang.Override + public void setAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, 
+ com.google.protobuf.RpcCallback done) { + impl.setAuths(controller, request, done); + } + + @java.lang.Override + public void clearAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done) { + impl.clearAuths(controller, request, done); + } + + @java.lang.Override + public void getAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest request, + com.google.protobuf.RpcCallback done) { + impl.getAuths(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.addLabels(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest)request); + case 1: + return impl.setAuths(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest)request); + case 2: + return impl.clearAuths(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest)request); + case 3: + return impl.getAuths(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest)request); + 
default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + /** + * rpc 
addLabels(.VisibilityLabelsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc setAuths(.SetAuthsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void setAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc clearAuths(.SetAuthsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void clearAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc getAuths(.GetAuthsRequest) returns (.GetAuthsResponse); + */ + public abstract void getAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { 
+ case 0: + this.addLabels(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.setAuths(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.clearAuths(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: + this.getAuths(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw 
new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance())); + } + + public void setAuths( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance())); + } + + public void clearAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance())); + } + + public void getAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.class, + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse setAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse clearAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse getAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse addLabels( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse setAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse clearAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse getAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest request) + throws com.google.protobuf.ServiceException { + 
return (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance()); + } + + } + + // @@protoc_insertion_point(class_scope:VisibilityLabelsService) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_VisibilityLabelsRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_VisibilityLabelsRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_VisibilityLabel_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_VisibilityLabel_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_VisibilityLabelsResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_VisibilityLabelsResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SetAuthsRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SetAuthsRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UserAuthorizations_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UserAuthorizations_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiUserAuthorizations_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiUserAuthorizations_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetAuthsRequest_descriptor; + private static + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetAuthsRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetAuthsResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetAuthsResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\026VisibilityLabels.proto\032\014Client.proto\"=" + + "\n\027VisibilityLabelsRequest\022\"\n\010visLabel\030\001 " + + "\003(\0132\020.VisibilityLabel\"1\n\017VisibilityLabel" + + "\022\r\n\005label\030\001 \002(\014\022\017\n\007ordinal\030\002 \001(\r\"?\n\030Visi" + + "bilityLabelsResponse\022#\n\006result\030\001 \003(\0132\023.R" + + "egionActionResult\"-\n\017SetAuthsRequest\022\014\n\004" + + "user\030\001 \002(\014\022\014\n\004auth\030\002 \003(\014\"0\n\022UserAuthoriz" + + "ations\022\014\n\004user\030\001 \002(\014\022\014\n\004auth\030\002 \003(\r\"A\n\027Mu" + + "ltiUserAuthorizations\022&\n\tuserAuths\030\001 \003(\013" + + "2\023.UserAuthorizations\"\037\n\017GetAuthsRequest", + "\022\014\n\004user\030\001 \002(\014\".\n\020GetAuthsResponse\022\014\n\004us" + + "er\030\001 \002(\014\022\014\n\004auth\030\002 \003(\0142\200\002\n\027VisibilityLab" + + "elsService\022@\n\taddLabels\022\030.VisibilityLabe" + + "lsRequest\032\031.VisibilityLabelsResponse\0227\n\010" + + "setAuths\022\020.SetAuthsRequest\032\031.VisibilityL" + + "abelsResponse\0229\n\nclearAuths\022\020.SetAuthsRe" + + "quest\032\031.VisibilityLabelsResponse\022/\n\010getA" + + "uths\022\020.GetAuthsRequest\032\021.GetAuthsRespons" + + "eBL\n*org.apache.hadoop.hbase.protobuf.ge" + + "neratedB\026VisibilityLabelsProtosH\001\210\001\001\240\001\001" + }; + 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_VisibilityLabelsRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_VisibilityLabelsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_VisibilityLabelsRequest_descriptor, + new java.lang.String[] { "VisLabel", }); + internal_static_VisibilityLabel_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_VisibilityLabel_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_VisibilityLabel_descriptor, + new java.lang.String[] { "Label", "Ordinal", }); + internal_static_VisibilityLabelsResponse_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_VisibilityLabelsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_VisibilityLabelsResponse_descriptor, + new java.lang.String[] { "Result", }); + internal_static_SetAuthsRequest_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_SetAuthsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SetAuthsRequest_descriptor, + new java.lang.String[] { "User", "Auth", }); + internal_static_UserAuthorizations_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_UserAuthorizations_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UserAuthorizations_descriptor, + new java.lang.String[] { "User", "Auth", }); + internal_static_MultiUserAuthorizations_descriptor = + getDescriptor().getMessageTypes().get(5); + 
internal_static_MultiUserAuthorizations_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiUserAuthorizations_descriptor, + new java.lang.String[] { "UserAuths", }); + internal_static_GetAuthsRequest_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_GetAuthsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetAuthsRequest_descriptor, + new java.lang.String[] { "User", }); + internal_static_GetAuthsResponse_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_GetAuthsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetAuthsResponse_descriptor, + new java.lang.String[] { "User", "Auth", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} Index: hbase-protocol/src/main/protobuf/Client.proto =================================================================== --- hbase-protocol/src/main/protobuf/Client.proto (revision 1542892) +++ hbase-protocol/src/main/protobuf/Client.proto (working copy) @@ -30,6 +30,20 @@ import "Comparator.proto"; /** + * The protocol buffer version of Authorizations. + */ +message Authorizations { + repeated string label = 1; +} + +/** + * The protocol buffer version of CellVisibility. + */ +message CellVisibility { + required string expression = 1; +} + +/** * Container for a list of column qualifier names of a family. 
*/ message Column { Index: hbase-protocol/src/main/protobuf/VisibilityLabels.proto =================================================================== --- hbase-protocol/src/main/protobuf/VisibilityLabels.proto (revision 0) +++ hbase-protocol/src/main/protobuf/VisibilityLabels.proto (working copy) @@ -0,0 +1,72 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "VisibilityLabelsProtos";
option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;

import "Client.proto";

// Batch of visibility labels to add to the system label set.
message VisibilityLabelsRequest {
  repeated VisibilityLabel visLabel = 1;
}

// One visibility label and its numeric ordinal (optional on request).
message VisibilityLabel {
  required bytes label = 1;
  optional uint32 ordinal = 2;
}

// Per-label results for addLabels/setAuths/clearAuths calls.
message VisibilityLabelsResponse {
  repeated RegionActionResult result = 1;
}

// Auth labels to set or clear for a single user.
message SetAuthsRequest {
  required bytes user = 1;
  repeated bytes auth = 2;
}

// A user's auth set; auths are stored as label ordinals (uint32).
message UserAuthorizations {
  required bytes user = 1;
  repeated uint32 auth = 2;
}

// Auth sets for several users.
message MultiUserAuthorizations {
  repeated UserAuthorizations userAuths = 1;
}

message GetAuthsRequest {
  required bytes user = 1;
}

message GetAuthsResponse {
  required bytes user = 1;
  repeated bytes auth = 2;
}

// Coprocessor endpoint for managing visibility labels and user auths.
service VisibilityLabelsService {
  rpc addLabels(VisibilityLabelsRequest)
    returns (VisibilityLabelsResponse);
  rpc setAuths(SetAuthsRequest)
    returns (VisibilityLabelsResponse);
  rpc clearAuths(SetAuthsRequest)
    returns (VisibilityLabelsResponse);
  rpc getAuths(GetAuthsRequest)
    returns (GetAuthsResponse);
}
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultScanLabelGenerator.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultScanLabelGenerator.java (revision 0)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultScanLabelGenerator.java (working copy)
@@ -0,0 +1,87 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.security.User; + +/** + * This is the default implementation for ScanLabelGenerator. It will extract labels passed via + * Scan#authorizations and cross check against the global auths set for the user. The labels for which + * user is not authenticated will be dropped even if it is passed via Scan Authorizations. 
+ */ +@InterfaceAudience.Private +public class DefaultScanLabelGenerator implements ScanLabelGenerator { + + private static final Log LOG = LogFactory.getLog(DefaultScanLabelGenerator.class); + + private Configuration conf; + + private VisibilityLabelsManager labelsManager; + + public DefaultScanLabelGenerator() { + this.labelsManager = VisibilityLabelsManager.get(); + } + + @Override + public void setConf(Configuration conf) { + this.conf = conf; + } + + @Override + public Configuration getConf() { + return this.conf; + } + + @Override + public List getLabels(User user, Authorizations authorizations) { + if (authorizations != null) { + List labels = authorizations.getLabels(); + String userName = user.getName(); + List auths = this.labelsManager.getAuths(userName); + return dropLabelsNotInUserAuths(labels, auths, userName); + } + return null; + } + + private List dropLabelsNotInUserAuths(List labels, List auths, + String userName) { + List droppedLabels = new ArrayList(); + List passedLabels = new ArrayList(labels.size()); + for (String label : labels) { + if (auths.contains(label)) { + passedLabels.add(label); + } else { + droppedLabels.add(label); + } + } + if (!droppedLabels.isEmpty()) { + if (LOG.isDebugEnabled()) { + LOG.debug("Labels " + droppedLabels + " in Scan/Get visibility attributes dropped as user " + + userName + " having no auth set for those."); + } + } + return passedLabels; + } +} Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/ExpressionNode.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/ExpressionNode.java (revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/ExpressionNode.java (working copy) @@ -0,0 +1,27 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security.visibility.expression;

import org.apache.hadoop.classification.InterfaceAudience;

/**
 * A node in the parse tree of a cell-visibility expression. Implementations are
 * LeafExpressionNode (a single label identifier) and NonLeafExpressionNode (an operator
 * applied to child nodes).
 */
@InterfaceAudience.Private
public interface ExpressionNode {
  /**
   * @return true when this node can be treated as a single operand: a leaf label, or a node
   *         whose operator is NOT (which wraps exactly one child).
   */
  boolean isSingleNode();

  /**
   * @return a deep copy of this node; child nodes are cloned recursively so the copy shares no
   *         mutable state with the original.
   */
  ExpressionNode deepClone();
}
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java (revision 0)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java (working copy)
@@ -0,0 +1,65 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.
You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security.visibility.expression;

import org.apache.hadoop.classification.InterfaceAudience;

/**
 * A terminal node of a visibility-expression tree holding a single label identifier. The two
 * shared constants stand for the "(" and ")" marker tokens used while parsing.
 */
@InterfaceAudience.Private
public class LeafExpressionNode implements ExpressionNode {
  public static final LeafExpressionNode OPEN_PARAN_NODE = new LeafExpressionNode("(");
  public static final LeafExpressionNode CLOSE_PARAN_NODE = new LeafExpressionNode(")");

  private String identifier;

  public LeafExpressionNode(String identifier) {
    this.identifier = identifier;
  }

  public String getIdentifier() {
    return this.identifier;
  }

  @Override
  public int hashCode() {
    // Delegates to the identifier so equal labels hash equally, matching equals().
    return this.identifier.hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    // Two leaf nodes are equal exactly when their identifiers match.
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof LeafExpressionNode)) {
      return false;
    }
    return this.identifier.equals(((LeafExpressionNode) obj).identifier);
  }

  @Override
  public String toString() {
    return this.identifier;
  }

  @Override
  public boolean isSingleNode() {
    // A leaf is always a complete operand on its own.
    return true;
  }

  @Override
  public LeafExpressionNode deepClone() {
    // The identifier is an immutable String, so a fresh node around it is a full deep copy.
    return new LeafExpressionNode(this.identifier);
  }
}
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java (revision 0)
+++
hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java (working copy) @@ -0,0 +1,102 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility.expression; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; + +@InterfaceAudience.Private +public class NonLeafExpressionNode implements ExpressionNode { + private Operator op; + private List childExps = new ArrayList(2); + + public NonLeafExpressionNode() { + + } + + public NonLeafExpressionNode(Operator op) { + this.op = op; + } + + public NonLeafExpressionNode(Operator op, List exps) { + this.op = op; + if (op == Operator.NOT && exps.size() > 1) { + throw new IllegalArgumentException(Operator.NOT + " should be on 1 child expression"); + } + this.childExps = exps; + } + + public NonLeafExpressionNode(Operator op, ExpressionNode... 
exps) { + this.op = op; + List expLst = new ArrayList(); + for (ExpressionNode exp : exps) { + expLst.add(exp); + } + this.childExps = expLst; + } + + public Operator getOperator() { + return op; + } + + public List getChildExps() { + return childExps; + } + + public void addChildExp(ExpressionNode exp) { + if (op == Operator.NOT && this.childExps.size() == 1) { + throw new IllegalStateException(Operator.NOT + " should be on 1 child expression"); + } + this.childExps.add(exp); + } + + public void addChildExps(List exps) { + this.childExps.addAll(exps); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("("); + if (this.op == Operator.NOT) { + sb.append(this.op); + } + for (int i = 0; i < this.childExps.size(); i++) { + sb.append(childExps.get(i)); + if (i < this.childExps.size() - 1) { + sb.append(" " + this.op + " "); + } + } + sb.append(")"); + return sb.toString(); + } + + @Override + public boolean isSingleNode() { + return this.op == Operator.NOT; + } + + public NonLeafExpressionNode deepClone() { + NonLeafExpressionNode clone = new NonLeafExpressionNode(this.op); + for (ExpressionNode exp : this.childExps) { + clone.addChildExp(exp.deepClone()); + } + return clone; + } +} Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java (revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java (working copy) @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility.expression; + +public enum Operator { + AND('&'), OR('|'), NOT('!'); + + private char rep; + + private Operator(char rep) { + this.rep = rep; + } + + public String toString() { + return String.valueOf(this.rep); + }; +} Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionExpander.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionExpander.java (revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionExpander.java (working copy) @@ -0,0 +1,184 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security.visibility;

import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.Operator;

/**
 * Rewrites a parsed visibility-expression tree toward an OR-of-ANDs form: AND is distributed
 * over OR across brackets (e.g. (a | b) & c becomes (a & c) | (b & c)) and NOT over a compound
 * child is pushed inward by swapping AND/OR over negated children (De Morgan).
 * NOTE(review): generic type parameters on List reconstructed — the extracted source showed
 * raw List, which would not compile with the typed usage below.
 */
@InterfaceAudience.Private
public class ExpressionExpander {

  /**
   * Recursively expands the given expression tree and returns the expanded root.
   * A "single" node (leaf label or NOT node) is returned unchanged except that a NOT over a
   * compound child is negated inward.
   */
  public ExpressionNode expand(ExpressionNode src) {
    if (!src.isSingleNode()) {
      // src is an AND/OR node; expand each compound child first, then merge across brackets.
      NonLeafExpressionNode nlExp = (NonLeafExpressionNode) src;
      List<ExpressionNode> childExps = nlExp.getChildExps();
      Operator outerOp = nlExp.getOperator();
      if (isToBeExpanded(childExps)) {
        // Any of the child exp is a non leaf exp with & or | operator
        NonLeafExpressionNode newNode = new NonLeafExpressionNode(nlExp.getOperator());
        for (ExpressionNode exp : childExps) {
          if (exp.isSingleNode()) {
            newNode.addChildExp(exp);
          } else {
            newNode.addChildExp(expand(exp));
          }
        }
        nlExp = expandNonLeaf(newNode, outerOp);
      }
      return nlExp;
    }
    if (src instanceof NonLeafExpressionNode
        && ((NonLeafExpressionNode) src).getOperator() == Operator.NOT) {
      // Negate the exp
      return negate((NonLeafExpressionNode) src);
    }
    return src;
  }

  /**
   * Pushes a NOT inward: !(!x) collapses to x, and !(a op b) becomes (!a) negOp (!b) with the
   * operator flipped (De Morgan). A NOT over a plain leaf is left as-is.
   */
  private ExpressionNode negate(NonLeafExpressionNode nlExp) {
    ExpressionNode notChild = nlExp.getChildExps().get(0);
    if (notChild instanceof LeafExpressionNode) {
      // !label is already in simplest form.
      return nlExp;
    }
    NonLeafExpressionNode nlNotChild = (NonLeafExpressionNode) notChild;
    if (nlNotChild.getOperator() == Operator.NOT) {
      // negate the negate
      return nlNotChild.getChildExps().get(0);
    }
    Operator negateOp = nlNotChild.getOperator() == Operator.AND ? Operator.OR : Operator.AND;
    NonLeafExpressionNode newNode = new NonLeafExpressionNode(negateOp);
    for (ExpressionNode expNode : nlNotChild.getChildExps()) {
      NonLeafExpressionNode negateNode = new NonLeafExpressionNode(Operator.NOT);
      negateNode.addChildExp(expNode.deepClone());
      newNode.addChildExp(expand(negateNode));
    }
    return newNode;
  }

  // True when at least one child is a compound (AND/OR) subexpression that needs merging.
  private boolean isToBeExpanded(List<ExpressionNode> childExps) {
    for (ExpressionNode exp : childExps) {
      if (!exp.isSingleNode()) {
        return true;
      }
    }
    return false;
  }

  /**
   * Merges/distributes a two-child node whose children have already been expanded.
   * Exactly one of the case arms below applies, depending on which children are compound and
   * which operators are involved.
   */
  private NonLeafExpressionNode expandNonLeaf(NonLeafExpressionNode newNode, Operator outerOp) {
    // Now go for the merge or expansion across brackets
    List<ExpressionNode> newChildExps = newNode.getChildExps();
    assert newChildExps.size() == 2;
    ExpressionNode leftChild = newChildExps.get(0);
    ExpressionNode rightChild = newChildExps.get(1);
    if (rightChild.isSingleNode()) {
      // Merge the single right node into the left side
      assert leftChild instanceof NonLeafExpressionNode;
      newNode = mergeChildNodes(newNode, outerOp, rightChild, (NonLeafExpressionNode) leftChild);
    } else if (leftChild.isSingleNode()) {
      // Merge the single left node into the right side
      assert rightChild instanceof NonLeafExpressionNode;
      newNode = mergeChildNodes(newNode, outerOp, leftChild, (NonLeafExpressionNode) rightChild);
    } else {
      // Both the child exp nodes are non single.
      NonLeafExpressionNode leftChildNLE = (NonLeafExpressionNode) leftChild;
      NonLeafExpressionNode rightChildNLE = (NonLeafExpressionNode) rightChild;
      if (outerOp == leftChildNLE.getOperator() && outerOp == rightChildNLE.getOperator()) {
        // Merge: all three operators agree, so flatten into one n-ary node.
        NonLeafExpressionNode leftChildNLEClone = leftChildNLE.deepClone();
        leftChildNLEClone.addChildExps(rightChildNLE.getChildExps());
        newNode = leftChildNLEClone;
      } else {
        // (a | b) & (c & d) ...
        if (outerOp == Operator.OR) {
          // (a | b) | (c & d)
          if (leftChildNLE.getOperator() == Operator.OR
              && rightChildNLE.getOperator() == Operator.AND) {
            leftChildNLE.addChildExp(rightChildNLE);
            newNode = leftChildNLE;
          } else if (leftChildNLE.getOperator() == Operator.AND
              && rightChildNLE.getOperator() == Operator.OR) {
            // (a & b) | (c | d)
            rightChildNLE.addChildExp(leftChildNLE);
            newNode = rightChildNLE;
          }
          // (a & b) | (c & d)
          // This case no need to do any thing
        } else {
          // outer op is &
          // (a | b) & (c & d) => (a & c & d) | (b & c & d)
          if (leftChildNLE.getOperator() == Operator.OR
              && rightChildNLE.getOperator() == Operator.AND) {
            newNode = new NonLeafExpressionNode(Operator.OR);
            for (ExpressionNode exp : leftChildNLE.getChildExps()) {
              NonLeafExpressionNode rightChildNLEClone = rightChildNLE.deepClone();
              rightChildNLEClone.addChildExp(exp);
              newNode.addChildExp(rightChildNLEClone);
            }
          } else if (leftChildNLE.getOperator() == Operator.AND
              && rightChildNLE.getOperator() == Operator.OR) {
            // (a & b) & (c | d) => (a & b & c) | (a & b & d)
            newNode = new NonLeafExpressionNode(Operator.OR);
            for (ExpressionNode exp : rightChildNLE.getChildExps()) {
              NonLeafExpressionNode leftChildNLEClone = leftChildNLE.deepClone();
              leftChildNLEClone.addChildExp(exp);
              newNode.addChildExp(leftChildNLEClone);
            }
          } else {
            // (a | b) & (c | d) => (a & c) | (a & d) | (b & c) | (b & d)
            newNode = new NonLeafExpressionNode(Operator.OR);
            for (ExpressionNode leftExp : leftChildNLE.getChildExps()) {
              for (ExpressionNode rightExp : rightChildNLE.getChildExps()) {
                NonLeafExpressionNode newChild = new NonLeafExpressionNode(Operator.AND);
                newChild.addChildExp(leftExp.deepClone());
                newChild.addChildExp(rightExp.deepClone());
                newNode.addChildExp(newChild);
              }
            }
          }
        }
      }
    }
    return newNode;
  }

  /**
   * Folds a single operand (leaf or NOT node) into a compound sibling: appended directly when
   * the sibling's operator matches the outer operator, otherwise (outer AND over an OR child)
   * distributed: (a | b) & c -> (a & c) | (b & c).
   */
  private NonLeafExpressionNode mergeChildNodes(NonLeafExpressionNode newOuterNode,
      Operator outerOp, ExpressionNode lChild, NonLeafExpressionNode nlChild) {
    // Merge the single right/left node into the other side
    if (nlChild.getOperator() == outerOp) {
      NonLeafExpressionNode leftChildNLEClone = nlChild.deepClone();
      leftChildNLEClone.addChildExp(lChild);
      newOuterNode = leftChildNLEClone;
    } else if (outerOp == Operator.AND) {
      assert nlChild.getOperator() == Operator.OR;
      // outerOp is & here. We need to expand the node here
      // (a | b) & c -> (a & c) | (b & c)
      // OR
      // c & (a | b) -> (c & a) | (c & b)
      newOuterNode = new NonLeafExpressionNode(Operator.OR);
      for (ExpressionNode exp : nlChild.getChildExps()) {
        newOuterNode.addChildExp(new NonLeafExpressionNode(Operator.AND, exp, lChild));
      }
    }
    return newOuterNode;
  }
}
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java (revision 0)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java (working copy)
@@ -0,0 +1,273 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.Stack; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import org.apache.hadoop.hbase.util.Bytes; + +@InterfaceAudience.Private +public class ExpressionParser { + + private static final char CLOSE_PARAN = ')'; + private static final char OPEN_PARAN = '('; + private static final char OR = '|'; + private static final char AND = '&'; + private static final char NOT = '!'; + private static final char SPACE = ' '; + + public ExpressionNode parse(String expS) throws ParseException { + expS = expS.trim(); + Stack expStack = new Stack(); + int index = 0; + int endPos = expS.length(); + byte[] exp = Bytes.toBytes(expS); + while (index < endPos) { + byte b = exp[index]; + switch (b) { + case OPEN_PARAN: + processOpenParan(expStack, expS, index); + index = skipSpaces(exp, index); + break; + case CLOSE_PARAN: + processCloseParan(expStack, expS, index); + index = skipSpaces(exp, index); + break; + case AND: + case OR: + processANDorOROp(getOperator(b), expStack, expS, index); + index = skipSpaces(exp, index); + break; + case NOT: + processNOTOp(expStack, expS, index); + break; + default: + int labelOffset = index; + do { + if (!VisibilityLabelsValidator.isValidAuthChar(exp[index])) { + throw new ParseException("Error parsing expression " + expS + " at column : " + + index); + } + index++; + } while (index < endPos && !isEndOfLabel(exp[index])); + String leafExp = new String(exp, labelOffset, index - labelOffset).trim(); + if (leafExp.isEmpty()) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + processLabelExpNode(new 
LeafExpressionNode(leafExp), expStack, expS, index); + // We already crossed the label node index. So need to reduce 1 here. + index--; + index = skipSpaces(exp, index); + } + index++; + } + if (expStack.size() != 1) { + throw new ParseException("Error parsing expression " + expS); + } + ExpressionNode top = expStack.pop(); + if (top == LeafExpressionNode.OPEN_PARAN_NODE) { + throw new ParseException("Error parsing expression " + expS); + } + if (top instanceof NonLeafExpressionNode) { + NonLeafExpressionNode nlTop = (NonLeafExpressionNode) top; + if (nlTop.getOperator() == Operator.NOT) { + if (nlTop.getChildExps().size() != 1) { + throw new ParseException("Error parsing expression " + expS); + } + } else if (nlTop.getChildExps().size() != 2) { + throw new ParseException("Error parsing expression " + expS); + } + } + return top; + } + + private int skipSpaces(byte[] exp, int index) { + while (index < exp.length -1 && exp[index+1] == SPACE) { + index++; + } + return index; + } + + private void processCloseParan(Stack expStack, String expS, int index) + throws ParseException { + if (expStack.size() < 2) { + // When ) comes we expect atleast a ( node and another leaf/non leaf node + // in stack. + throw new ParseException(); + } else { + ExpressionNode top = expStack.pop(); + ExpressionNode secondTop = expStack.pop(); + // The second top must be a ( node and top should not be a ). Top can be + // any thing else + if (top == LeafExpressionNode.OPEN_PARAN_NODE + || secondTop != LeafExpressionNode.OPEN_PARAN_NODE) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + // a&(b|) is not valid. + // The top can be a ! node but with exactly child nodes. !).. is invalid + // Other NonLeafExpressionNode , then there should be exactly 2 child. + // (a&) is not valid. 
+ if (top instanceof NonLeafExpressionNode) { + NonLeafExpressionNode nlTop = (NonLeafExpressionNode) top; + if ((nlTop.getOperator() == Operator.NOT && nlTop.getChildExps().size() != 1) + || (nlTop.getOperator() != Operator.NOT && nlTop.getChildExps().size() != 2)) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + } + // When (a|b)&(c|d) comes while processing the second ) there will be + // already (a|b)& node + // avail in the stack. The top will be c|d node. We need to take it out + // and combine as one + // node. + if (!expStack.isEmpty()) { + ExpressionNode thirdTop = expStack.peek(); + if (thirdTop instanceof NonLeafExpressionNode) { + NonLeafExpressionNode nlThirdTop = (NonLeafExpressionNode) expStack.pop(); + nlThirdTop.addChildExp(top); + if (nlThirdTop.getOperator() == Operator.NOT) { + // It is a NOT node. So there may be a NonLeafExpressionNode below + // it to which the + // completed NOT can be added now. + if (!expStack.isEmpty()) { + ExpressionNode fourthTop = expStack.peek(); + if (fourthTop instanceof NonLeafExpressionNode) { + // Its Operator will be OR or AND + NonLeafExpressionNode nlFourthTop = (NonLeafExpressionNode) fourthTop; + assert nlFourthTop.getOperator() != Operator.NOT; + // Also for sure its number of children will be 1 + assert nlFourthTop.getChildExps().size() == 1; + nlFourthTop.addChildExp(nlThirdTop); + return;// This case no need to add back the nlThirdTop. + } + } + } + top = nlThirdTop; + } + } + expStack.push(top); + } + } + + private void processOpenParan(Stack expStack, String expS, int index) + throws ParseException { + if (!expStack.isEmpty()) { + ExpressionNode top = expStack.peek(); + // Top can not be a Label Node. a(.. is not valid. but ((a.. is fine. 
+ if (top instanceof LeafExpressionNode && top != LeafExpressionNode.OPEN_PARAN_NODE) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } else if (top instanceof NonLeafExpressionNode) { + // Top is non leaf. + // It can be ! node but with out any child nodes. !a(.. is invalid + // Other NonLeafExpressionNode , then there should be exactly 1 child. + // a&b( is not valid. + // a&( is valid though. Also !( is valid + NonLeafExpressionNode nlTop = (NonLeafExpressionNode) top; + if ((nlTop.getOperator() == Operator.NOT && nlTop.getChildExps().size() != 0) + || (nlTop.getOperator() != Operator.NOT && nlTop.getChildExps().size() != 1)) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + } + } + expStack.push(LeafExpressionNode.OPEN_PARAN_NODE); + } + + private void processLabelExpNode(LeafExpressionNode node, Stack expStack, + String expS, int index) throws ParseException { + if (expStack.isEmpty()) { + expStack.push(node); + } else { + ExpressionNode top = expStack.peek(); + if (top == LeafExpressionNode.OPEN_PARAN_NODE) { + expStack.push(node); + } else if (top instanceof NonLeafExpressionNode) { + NonLeafExpressionNode nlTop = (NonLeafExpressionNode) expStack.pop(); + nlTop.addChildExp(node); + if (nlTop.getOperator() == Operator.NOT && !expStack.isEmpty()) { + ExpressionNode secondTop = expStack.peek(); + if (secondTop == LeafExpressionNode.OPEN_PARAN_NODE) { + expStack.push(nlTop); + } else if (secondTop instanceof NonLeafExpressionNode) { + ((NonLeafExpressionNode) secondTop).addChildExp(nlTop); + } + } else { + expStack.push(nlTop); + } + } else { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + } + } + + private void processANDorOROp(Operator op, Stack expStack, String expS, int index) + throws ParseException { + if (expStack.isEmpty()) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); 
+ } + ExpressionNode top = expStack.pop(); + if (top.isSingleNode()) { + if (top == LeafExpressionNode.OPEN_PARAN_NODE) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + expStack.push(new NonLeafExpressionNode(op, top)); + } else { + NonLeafExpressionNode nlTop = (NonLeafExpressionNode) top; + if (nlTop.getChildExps().size() != 2) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + expStack.push(new NonLeafExpressionNode(op, nlTop)); + } + } + + private void processNOTOp(Stack expStack, String expS, int index) + throws ParseException { + // When ! comes, the stack can be empty or top ( or top can be some exp like + // a& + // !!.., a!, a&b!, !a! are invalid + if (!expStack.isEmpty()) { + ExpressionNode top = expStack.peek(); + if (top.isSingleNode() && top != LeafExpressionNode.OPEN_PARAN_NODE) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + if (!top.isSingleNode() && ((NonLeafExpressionNode) top).getChildExps().size() != 1) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + } + expStack.push(new NonLeafExpressionNode(Operator.NOT)); + } + + private static boolean isEndOfLabel(byte b) { + return (b == OPEN_PARAN || b == CLOSE_PARAN || b == OR || b == AND || b == NOT || b == SPACE); + } + + private static Operator getOperator(byte op) { + switch (op) { + case AND: + return Operator.AND; + case OR: + return Operator.OR; + case NOT: + return Operator.NOT; + } + return null; + } +} Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ParseException.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ParseException.java (revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ParseException.java (working copy) @@ -0,0 +1,43 @@ +/** + 
* Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; + +@InterfaceAudience.Private +public class ParseException extends Exception { + + private static final long serialVersionUID = 1725986524206989173L; + + public ParseException() { + + } + + public ParseException(String msg) { + super(msg); + } + + public ParseException(Throwable t) { + super(t); + } + + public ParseException(String msg, Throwable t) { + super(msg, t); + } + +} Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java (revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java (working copy) @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configurable; +import org.apache.hadoop.hbase.security.User; + +/** + * This would be the interface which would be used to add labels to the RPC context + * and this would be stored against the UGI. + * + */ +@InterfaceAudience.Public +@InterfaceStability.Evolving +public interface ScanLabelGenerator extends Configurable { + + /** + * Helps to get a list of labels associated with a UGI + * @param user + * @param authorizations + * @return The labels + */ + public List getLabels(User user, Authorizations authorizations); +} Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/SimpleScanLabelGenerator.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/SimpleScanLabelGenerator.java (revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/SimpleScanLabelGenerator.java (working copy) @@ -0,0 +1,52 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.security.User; + +/** + * This is a simple implementation for ScanLabelGenerator. It will just extract labels passed via + * Scan#Authorizations. 
+ */ +@InterfaceAudience.Private +public class SimpleScanLabelGenerator implements ScanLabelGenerator { + + private Configuration conf; + + @Override + public void setConf(Configuration conf) { + this.conf = conf; + } + + @Override + public Configuration getConf() { + return this.conf; + } + + @Override + public List getLabels(User user, Authorizations authorizations) { + if (authorizations != null) { + return authorizations.getLabels(); + } + return null; + } +} Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java (revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java (working copy) @@ -0,0 +1,1262 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.security.visibility; + +import static org.apache.hadoop.hbase.HConstants.OperationStatusCode.SANITY_CHECK_FAILURE; +import static org.apache.hadoop.hbase.HConstants.OperationStatusCode.SUCCESS; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_FAMILY; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABEL_QUALIFIER; +import static org.apache.hadoop.hbase.security.visibility.VisibilityUtils.SYSTEM_LABEL; + +import java.io.ByteArrayOutputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.BitSet; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.KeyValue.Type; +import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.catalog.MetaReader; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Mutation; +import 
org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; +import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; +import org.apache.hadoop.hbase.coprocessor.CoprocessorService; +import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; +import org.apache.hadoop.hbase.coprocessor.MasterObserver; +import org.apache.hadoop.hbase.coprocessor.ObserverContext; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.coprocessor.RegionObserver; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.io.util.StreamUtils; +import org.apache.hadoop.hbase.ipc.RequestContext; +import org.apache.hadoop.hbase.master.MasterServices; +import org.apache.hadoop.hbase.master.RegionPlan; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService; +import 
org.apache.hadoop.hbase.regionserver.BloomType; +import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy; +import org.apache.hadoop.hbase.regionserver.HRegion; +import org.apache.hadoop.hbase.regionserver.InternalScanner; +import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress; +import org.apache.hadoop.hbase.regionserver.OperationStatus; +import org.apache.hadoop.hbase.regionserver.RegionScanner; +import org.apache.hadoop.hbase.security.AccessDeniedException; +import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.security.access.AccessControlLists; +import org.apache.hadoop.hbase.security.access.AccessController; +import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; + +import com.google.common.collect.Lists; +import com.google.common.collect.MapMaker; +import com.google.protobuf.ByteString; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; + +/** + * Coprocessor that has both the MasterObserver and RegionObserver implemented that supports in + * visibility labels + */ +@InterfaceAudience.Private +public class VisibilityController extends BaseRegionObserver implements MasterObserver, + RegionObserver, VisibilityLabelsService.Interface, CoprocessorService { + + private static final Log LOG = LogFactory.getLog(VisibilityController.class); + private static final byte[] DUMMY_VALUE = new byte[0]; + // "system" label is having an ordinal value 1. 
+ private static final int SYSTEM_LABEL_ORDINAL = 1; + private static final Tag[] LABELS_TABLE_TAGS = new Tag[1]; + + private final ExpressionParser expressionParser = new ExpressionParser(); + private final ExpressionExpander expressionExpander = new ExpressionExpander(); + private VisibilityLabelsManager visibilityManager; + // defined only for Endpoint implementation, so it can have way to access region services. + private RegionCoprocessorEnvironment regionEnv; + private ScanLabelGenerator scanLabelGenerator; + + private volatile int ordinalCounter = -1; + // flags if we are running on a region of the 'labels' table + private boolean labelsRegion = false; + // Flag denoting whether AcessController is available or not. + private boolean acOn = false; + private Configuration conf; + + /** Mapping of scanner instances to the user who created them */ + private Map scannerOwners = + new MapMaker().weakKeys().makeMap(); + + static { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + DataOutputStream dos = new DataOutputStream(baos); + try { + StreamUtils.writeRawVInt32(dos, SYSTEM_LABEL_ORDINAL); + } catch (IOException e) { + // We write to a byte array. No Exception can happen. + } + LABELS_TABLE_TAGS[0] = new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray()); + } + + @Override + public void start(CoprocessorEnvironment env) throws IOException { + this.conf = env.getConfiguration(); + ZooKeeperWatcher zk = null; + if (env instanceof MasterCoprocessorEnvironment) { + // if running on HMaster + MasterCoprocessorEnvironment mEnv = (MasterCoprocessorEnvironment) env; + zk = mEnv.getMasterServices().getZooKeeper(); + } else if (env instanceof RegionCoprocessorEnvironment) { + // if running at region + regionEnv = (RegionCoprocessorEnvironment) env; + zk = regionEnv.getRegionServerServices().getZooKeeper(); + } + + // If zk is null or IOException while obtaining auth manager, + // throw RuntimeException so that the coprocessor is unloaded. 
+ if (zk == null) { + throw new RuntimeException("Error obtaining VisibilityLabelsManager, zk found null."); + } + try { + this.visibilityManager = VisibilityLabelsManager.get(zk, this.conf); + } catch (IOException ioe) { + throw new RuntimeException("Error obtaining VisibilityLabelsManager", ioe); + } + if (env instanceof RegionCoprocessorEnvironment) { + // ScanLabelGenerator to be instantiated only with Region Observer. + scanLabelGenerator = VisibilityUtils.getScanLabelGenerator(this.conf); + } + } + + @Override + public void stop(CoprocessorEnvironment env) throws IOException { + + } + + /********************************* Master related hooks **********************************/ + + @Override + public void postStartMaster(ObserverContext ctx) throws IOException { + // Need to create the new system table for labels here + MasterServices master = ctx.getEnvironment().getMasterServices(); + if (!MetaReader.tableExists(master.getCatalogTracker(), LABELS_TABLE_NAME)) { + HTableDescriptor labelsTable = new HTableDescriptor(LABELS_TABLE_NAME); + HColumnDescriptor labelsColumn = new HColumnDescriptor(LABELS_TABLE_FAMILY); + labelsColumn.setBloomFilterType(BloomType.NONE); + labelsColumn.setBlockCacheEnabled(false); // We will cache all the labels. No need of normal + // table block cache. + labelsTable.addFamily(labelsColumn); + // Let the "labels" table having only one region always. We are not expecting too many labels in + // the system. 
+ labelsTable.setValue(HTableDescriptor.SPLIT_POLICY, + DisabledRegionSplitPolicy.class.getName()); + master.createTable(labelsTable, null); + } + } + + @Override + public void preCreateTable(ObserverContext ctx, + HTableDescriptor desc, HRegionInfo[] regions) throws IOException { + } + + @Override + public void postCreateTable(ObserverContext ctx, + HTableDescriptor desc, HRegionInfo[] regions) throws IOException { + } + + @Override + public void preCreateTableHandler(ObserverContext ctx, + HTableDescriptor desc, HRegionInfo[] regions) throws IOException { + } + + @Override + public void postCreateTableHandler(ObserverContext ctx, + HTableDescriptor desc, HRegionInfo[] regions) throws IOException { + } + + @Override + public void preDeleteTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void postDeleteTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void preDeleteTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void postDeleteTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void preModifyTable(ObserverContext ctx, + TableName tableName, HTableDescriptor htd) throws IOException { + } + + @Override + public void postModifyTable(ObserverContext ctx, + TableName tableName, HTableDescriptor htd) throws IOException { + } + + @Override + public void preModifyTableHandler(ObserverContext ctx, + TableName tableName, HTableDescriptor htd) throws IOException { + } + + @Override + public void postModifyTableHandler(ObserverContext ctx, + TableName tableName, HTableDescriptor htd) throws IOException { + } + + @Override + public void preAddColumn(ObserverContext ctx, TableName tableName, + HColumnDescriptor column) throws IOException { + } + + @Override + public void postAddColumn(ObserverContext ctx, TableName tableName, + HColumnDescriptor column) throws IOException { + 
} + + @Override + public void preAddColumnHandler(ObserverContext ctx, + TableName tableName, HColumnDescriptor column) throws IOException { + } + + @Override + public void postAddColumnHandler(ObserverContext ctx, + TableName tableName, HColumnDescriptor column) throws IOException { + } + + @Override + public void preModifyColumn(ObserverContext ctx, + TableName tableName, HColumnDescriptor descriptor) throws IOException { + } + + @Override + public void postModifyColumn(ObserverContext ctx, + TableName tableName, HColumnDescriptor descriptor) throws IOException { + } + + @Override + public void preModifyColumnHandler(ObserverContext ctx, + TableName tableName, HColumnDescriptor descriptor) throws IOException { + } + + @Override + public void postModifyColumnHandler(ObserverContext ctx, + TableName tableName, HColumnDescriptor descriptor) throws IOException { + } + + @Override + public void preDeleteColumn(ObserverContext ctx, + TableName tableName, byte[] c) throws IOException { + } + + @Override + public void postDeleteColumn(ObserverContext ctx, + TableName tableName, byte[] c) throws IOException { + } + + @Override + public void preDeleteColumnHandler(ObserverContext ctx, + TableName tableName, byte[] c) throws IOException { + } + + @Override + public void postDeleteColumnHandler(ObserverContext ctx, + TableName tableName, byte[] c) throws IOException { + } + + @Override + public void preEnableTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void postEnableTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void preEnableTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void postEnableTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void preDisableTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void 
postDisableTable(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void preDisableTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void postDisableTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void preMove(ObserverContext ctx, HRegionInfo region, + ServerName srcServer, ServerName destServer) throws IOException { + } + + @Override + public void postMove(ObserverContext ctx, HRegionInfo region, + ServerName srcServer, ServerName destServer) throws IOException { + } + + @Override + public void preAssign(ObserverContext ctx, HRegionInfo regionInfo) + throws IOException { + } + + @Override + public void postAssign(ObserverContext ctx, HRegionInfo regionInfo) + throws IOException { + } + + @Override + public void preUnassign(ObserverContext ctx, + HRegionInfo regionInfo, boolean force) throws IOException { + } + + @Override + public void postUnassign(ObserverContext ctx, + HRegionInfo regionInfo, boolean force) throws IOException { + } + + @Override + public void preRegionOffline(ObserverContext ctx, + HRegionInfo regionInfo) throws IOException { + } + + @Override + public void postRegionOffline(ObserverContext ctx, + HRegionInfo regionInfo) throws IOException { + } + + @Override + public void preBalance(ObserverContext ctx) throws IOException { + } + + @Override + public void postBalance(ObserverContext ctx, List plans) + throws IOException { + } + + @Override + public boolean preBalanceSwitch(ObserverContext ctx, + boolean newValue) throws IOException { + return false; + } + + @Override + public void postBalanceSwitch(ObserverContext ctx, + boolean oldValue, boolean newValue) throws IOException { + } + + @Override + public void preShutdown(ObserverContext ctx) throws IOException { + } + + @Override + public void preStopMaster(ObserverContext ctx) throws IOException { + } + + @Override + public void 
preSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void postSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void preCloneSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void postCloneSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void preRestoreSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void postRestoreSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void preDeleteSnapshot(ObserverContext ctx, + SnapshotDescription snapshot) throws IOException { + } + + @Override + public void postDeleteSnapshot(ObserverContext ctx, + SnapshotDescription snapshot) throws IOException { + } + + @Override + public void preGetTableDescriptors(ObserverContext ctx, + List tableNamesList, List descriptors) throws IOException { + } + + @Override + public void postGetTableDescriptors(ObserverContext ctx, + List descriptors) throws IOException { + } + + @Override + public void preCreateNamespace(ObserverContext ctx, + NamespaceDescriptor ns) throws IOException { + } + + @Override + public void postCreateNamespace(ObserverContext ctx, + NamespaceDescriptor ns) throws IOException { + } + + @Override + public void preDeleteNamespace(ObserverContext ctx, + String namespace) throws IOException { + } + + @Override + public void postDeleteNamespace(ObserverContext ctx, + String namespace) throws IOException { + } + + @Override + public void preModifyNamespace(ObserverContext ctx, + NamespaceDescriptor ns) throws 
IOException { + } + + @Override + public void postModifyNamespace(ObserverContext ctx, + NamespaceDescriptor ns) throws IOException { + } + + @Override + public void preMasterInitialization(ObserverContext ctx) + throws IOException { + + } + + /****************************** Region related hooks ******************************/ + + @Override + public void postOpen(ObserverContext e) { + // Read the entire labels table and populate the zk + if (e.getEnvironment().getRegion().getRegionInfo().getTable().equals(LABELS_TABLE_NAME)) { + this.labelsRegion = true; + this.acOn = CoprocessorHost.getLoadedCoprocessors().contains(AccessController.class.getName()); + try { + Pair, Map>> labelsAndUserAuths = + extractLabelsAndAuths(getExistingLabelsWithAuths()); + Map labels = labelsAndUserAuths.getFirst(); + Map> userAuths = labelsAndUserAuths.getSecond(); + // Add the "system" label if it is not added into the system yet + addSystemLabel(e.getEnvironment().getRegion(), labels, userAuths); + int ordinal = 1; // Ordinal 1 is reserved for "system" label. 
+ for (Integer i : labels.values()) { + if (i > ordinal) { + ordinal = i; + } + } + this.ordinalCounter = ordinal + 1; + if (labels.size() > 0) { + // If there is no data need not write to zk + byte[] serialized = VisibilityUtils.getDataToWriteToZooKeeper(labels); + this.visibilityManager.writeToZookeeper(serialized, true); + } + if (userAuths.size() > 0) { + byte[] serialized = VisibilityUtils.getUserAuthsDataToWriteToZooKeeper(userAuths); + this.visibilityManager.writeToZookeeper(serialized, false); + } + } catch (IOException ioe) { + LOG.error("Error while updating the zk with the exisiting labels data", ioe); + } + } + } + + private void addSystemLabel(HRegion region, Map labels, + Map> userAuths) throws IOException { + if (!labels.containsKey(SYSTEM_LABEL)) { + Put p = new Put(Bytes.toBytes(SYSTEM_LABEL_ORDINAL)); + p.add(LABELS_TABLE_FAMILY, LABEL_QUALIFIER, Bytes.toBytes(SYSTEM_LABEL)); + // Set auth for "system" label for all super users. + List superUsers = getSystemAndSuperUsers(); + for (String superUser : superUsers) { + p.add(LABELS_TABLE_FAMILY, Bytes.toBytes(superUser), DUMMY_VALUE, LABELS_TABLE_TAGS); + } + region.put(p); + labels.put(SYSTEM_LABEL, SYSTEM_LABEL_ORDINAL); + for (String superUser : superUsers) { + List auths = userAuths.get(superUser); + if (auths == null) { + auths = new ArrayList(1); + userAuths.put(superUser, auths); + } + auths.add(SYSTEM_LABEL_ORDINAL); + } + } + } + + @Override + public void preBatchMutate(ObserverContext c, + MiniBatchOperationInProgress miniBatchOp) throws IOException { + if (c.getEnvironment().getRegion().getRegionInfo().getTable().isSystemTable()) { + return; + } + // TODO this can be made as a global LRU cache at HRS level? 
+ Map> labelCache = new HashMap>(); + for (int i = 0; i < miniBatchOp.size(); i++) { + Mutation m = miniBatchOp.getOperation(i); + CellVisibility cellVisibility = null; + try { + cellVisibility = m.getCellVisibility(); + } catch (DeserializationException de) { + miniBatchOp.setOperationStatus(i, + new OperationStatus(SANITY_CHECK_FAILURE, de.getMessage())); + continue; + } + if (m instanceof Put) { + Put p = (Put) m; + boolean sanityFailure = false; + for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) { + if (!checkForReservedVisibilityTagPresence(cellScanner.current())) { + miniBatchOp.setOperationStatus(i, new OperationStatus(SANITY_CHECK_FAILURE, + "Mutation contains cell with reserved type tag")); + sanityFailure = true; + break; + } + } + if (!sanityFailure) { + if (cellVisibility != null) { + String labelsExp = cellVisibility.getExpression(); + List visibilityTags = labelCache.get(labelsExp); + if (visibilityTags == null) { + try { + visibilityTags = createVisibilityTags(labelsExp); + } catch (ParseException e) { + miniBatchOp.setOperationStatus(i, + new OperationStatus(SANITY_CHECK_FAILURE, e.getMessage())); + } catch (InvalidLabelException e) { + miniBatchOp.setOperationStatus(i, + new OperationStatus(SANITY_CHECK_FAILURE, e.getMessage())); + } + } + if (visibilityTags != null) { + labelCache.put(labelsExp, visibilityTags); + List updatedCells = new ArrayList(); + for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) { + Cell cell = cellScanner.current(); + List tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), + cell.getTagsLength()); + tags.addAll(visibilityTags); + Cell updatedCell = new KeyValue(cell.getRowArray(), cell.getRowOffset(), + cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(), + cell.getFamilyLength(), cell.getQualifierArray(), cell.getQualifierOffset(), + cell.getQualifierLength(), cell.getTimestamp(), Type.codeToType(cell + .getTypeByte()), cell.getValueArray(), 
cell.getValueOffset(), + cell.getValueLength(), tags); + updatedCells.add(updatedCell); + } + p.getFamilyCellMap().clear(); + // Clear and add new Cells to the Mutation. + for (Cell cell : updatedCells) { + p.add(cell); + } + } + } + } + } else { + // CellVisibility in a Delete is not legal! Fail the operation + miniBatchOp.setOperationStatus(i, new OperationStatus(SANITY_CHECK_FAILURE, + "CellVisibility cannot be set on Delete mutation")); + } + } + } + + @Override + public void postBatchMutate(ObserverContext c, + MiniBatchOperationInProgress miniBatchOp) throws IOException { + if (this.labelsRegion) { + // We will add to zookeeper here. + Pair, Map>> labelsAndUserAuths = + extractLabelsAndAuths(getExistingLabelsWithAuths()); + Map existingLabels = labelsAndUserAuths.getFirst(); + Map> userAuths = labelsAndUserAuths.getSecond(); + boolean isNewLabels = false; + boolean isUserAuthsChange = false; + for (int i = 0; i < miniBatchOp.size(); i++) { + Mutation m = miniBatchOp.getOperation(i); + if (miniBatchOp.getOperationStatus(i).getOperationStatusCode() == SUCCESS) { + for (List cells : m.getFamilyCellMap().values()) { + for (Cell cell : cells) { + int labelOrdinal = Bytes.toInt(cell.getRowArray(), cell.getRowOffset()); + if (Bytes.equals(cell.getQualifierArray(), cell.getQualifierOffset(), + cell.getQualifierLength(), LABEL_QUALIFIER, 0, + LABEL_QUALIFIER.length)) { + if (m instanceof Put) { + existingLabels.put( + Bytes.toString(cell.getValueArray(), cell.getValueOffset(), + cell.getValueLength()), labelOrdinal); + isNewLabels = true; + } + } else { + String user = Bytes.toString(cell.getQualifierArray(), + cell.getQualifierOffset(), cell.getQualifierLength()); + List auths = userAuths.get(user); + if (auths == null) { + auths = new ArrayList(); + userAuths.put(user, auths); + } + if (m instanceof Delete) { + auths.remove(Integer.valueOf(labelOrdinal)); + } else { + auths.add(labelOrdinal); + } + isUserAuthsChange = true; + } + } + } + } + } + if (isNewLabels) { + 
byte[] serialized = VisibilityUtils.getDataToWriteToZooKeeper(existingLabels); + this.visibilityManager.writeToZookeeper(serialized, true); + } + if (isUserAuthsChange) { + byte[] serialized = VisibilityUtils.getUserAuthsDataToWriteToZooKeeper(userAuths); + this.visibilityManager.writeToZookeeper(serialized, false); + } + } + } + + private Pair, Map>> extractLabelsAndAuths( + List> labelDetails) { + Map labels = new HashMap(); + Map> userAuths = new HashMap>(); + for (List cells : labelDetails) { + for (Cell cell : cells) { + if (Bytes.equals(cell.getQualifierArray(), cell.getQualifierOffset(), + cell.getQualifierLength(), LABEL_QUALIFIER, 0, LABEL_QUALIFIER.length)) { + labels.put( + Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()), + Bytes.toInt(cell.getRowArray(), cell.getRowOffset())); + } else { + // These are user cells who has authorization for this label + String user = Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), + cell.getQualifierLength()); + List auths = userAuths.get(user); + if (auths == null) { + auths = new ArrayList(); + userAuths.put(user, auths); + } + auths.add(Bytes.toInt(cell.getRowArray(), cell.getRowOffset())); + } + } + } + return new Pair, Map>>(labels, userAuths); + } + + // Checks whether cell contains any tag with type as VISIBILITY_TAG_TYPE. + // This tag type is reserved and should not be explicitly set by user. 
+ private boolean checkForReservedVisibilityTagPresence(Cell cell) throws IOException { + if (cell.getTagsLength() > 0) { + Iterator tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), + cell.getTagsLength()); + while (tagsItr.hasNext()) { + if (tagsItr.next().getType() == VisibilityUtils.VISIBILITY_TAG_TYPE) { + return false; + } + } + } + return true; + } + + private List createVisibilityTags(String visibilityLabelsExp) throws IOException, + ParseException, InvalidLabelException { + ExpressionNode node = null; + node = this.expressionParser.parse(visibilityLabelsExp); + node = this.expressionExpander.expand(node); + List tags = new ArrayList(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + DataOutputStream dos = new DataOutputStream(baos); + if (node.isSingleNode()) { + writeLabelOrdinalsToStream(node, dos); + tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray())); + baos.reset(); + } else { + NonLeafExpressionNode nlNode = (NonLeafExpressionNode) node; + if (nlNode.getOperator() == Operator.OR) { + for (ExpressionNode child : nlNode.getChildExps()) { + writeLabelOrdinalsToStream(child, dos); + tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray())); + baos.reset(); + } + } else { + writeLabelOrdinalsToStream(nlNode, dos); + tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray())); + baos.reset(); + } + } + return tags; + } + + private void writeLabelOrdinalsToStream(ExpressionNode node, DataOutputStream dos) + throws IOException, InvalidLabelException { + if (node.isSingleNode()) { + String identifier = null; + int labelOrdinal = 0; + if (node instanceof LeafExpressionNode) { + identifier = ((LeafExpressionNode) node) + .getIdentifier(); + labelOrdinal = this.visibilityManager.getLabelOrdinal(identifier); + } else { + // This is a NOT node. 
+ LeafExpressionNode lNode = (LeafExpressionNode) ((NonLeafExpressionNode) node) + .getChildExps().get(0); + identifier = lNode.getIdentifier(); + labelOrdinal = this.visibilityManager.getLabelOrdinal(identifier); + labelOrdinal = -1 * labelOrdinal; // Store NOT node as -ve ordinal. + } + if (labelOrdinal == 0) { + throw new InvalidLabelException("Invalid visibility label " + identifier); + } + StreamUtils.writeRawVInt32(dos, labelOrdinal); + } else { + List childExps = ((NonLeafExpressionNode) node).getChildExps(); + for (ExpressionNode child : childExps) { + writeLabelOrdinalsToStream(child, dos); + } + } + } + + @Override + public RegionScanner preScannerOpen(ObserverContext e, Scan scan, + RegionScanner s) throws IOException { + HRegion region = e.getEnvironment().getRegion(); + Authorizations authorizations = null; + try { + authorizations = scan.getAuthorizations(); + } catch (DeserializationException de) { + throw new IOException(de); + } + Filter visibilityLabelFilter = createVisibilityLabelFilter(region, authorizations); + if (visibilityLabelFilter != null) { + Filter filter = scan.getFilter(); + if (filter != null) { + scan.setFilter(new FilterList(filter, visibilityLabelFilter)); + } else { + scan.setFilter(visibilityLabelFilter); + } + } + return s; + } + + @Override + public RegionScanner postScannerOpen(final ObserverContext c, + final Scan scan, final RegionScanner s) throws IOException { + User user = getActiveUser(); + if (user != null && user.getShortName() != null) { + scannerOwners.put(s, user.getShortName()); + } + return s; + } + + @Override + public boolean preScannerNext(final ObserverContext c, + final InternalScanner s, final List result, final int limit, final boolean hasNext) + throws IOException { + requireScannerOwner(s); + return hasNext; + } + + @Override + public void preScannerClose(final ObserverContext c, + final InternalScanner s) throws IOException { + requireScannerOwner(s); + } + + @Override + public void 
postScannerClose(final ObserverContext c, + final InternalScanner s) throws IOException { + // clean up any associated owner mapping + scannerOwners.remove(s); + } + + /** + * Verify, when servicing an RPC, that the caller is the scanner owner. If so, we assume that + * access control is correctly enforced based on the checks performed in preScannerOpen() + */ + private void requireScannerOwner(InternalScanner s) throws AccessDeniedException { + if (RequestContext.isInRequestContext()) { + String requestUName = RequestContext.getRequestUserName(); + String owner = scannerOwners.get(s); + if (owner != null && !owner.equals(requestUName)) { + throw new AccessDeniedException("User '" + requestUName + "' is not the scanner owner!"); + } + } + } + + @Override + public void preGetOp(ObserverContext e, Get get, List results) + throws IOException { + Authorizations authorizations = null; + try { + authorizations = get.getAuthorizations(); + } catch (DeserializationException de) { + throw new IOException(de); + } + Filter visibilityLabelFilter = createVisibilityLabelFilter(e.getEnvironment().getRegion(), + authorizations); + if (visibilityLabelFilter != null) { + Filter filter = get.getFilter(); + if (filter != null) { + get.setFilter(new FilterList(filter, visibilityLabelFilter)); + } else { + get.setFilter(visibilityLabelFilter); + } + } + } + + private Filter createVisibilityLabelFilter(HRegion region, Authorizations authorizations) { + if (authorizations == null) { + // No Authorizations present for this scan/Get! + // In case of "labels" table and user tables, create an empty auth set. In other system tables + // just scan with out visibility check and filtering. Checking visibility labels for META and + // NAMESPACE table is not needed. 
+ TableName table = region.getRegionInfo().getTable(); + if (table.isSystemTable() && !table.equals(LABELS_TABLE_NAME)) { + return null; + } + return new VisibilityLabelFilter(new BitSet(0)); + } + Filter visibilityLabelFilter = null; + if (this.scanLabelGenerator != null) { + List labels = null; + try { + labels = this.scanLabelGenerator.getLabels(getActiveUser(), authorizations); + } catch (Throwable t) { + LOG.error(t); + } + int labelsCount = this.visibilityManager.getLabelsCount(); + BitSet bs = new BitSet(labelsCount + 1); // ordinal is index 1 based + if (labels != null) { + for (String label : labels) { + int labelOrdinal = this.visibilityManager.getLabelOrdinal(label); + if (labelOrdinal != 0) { + bs.set(labelOrdinal); + } + } + } + visibilityLabelFilter = new VisibilityLabelFilter(bs); + } + return visibilityLabelFilter; + } + + private User getActiveUser() throws IOException { + User user = RequestContext.getRequestUser(); + if (!RequestContext.isInRequestContext()) { + // for non-rpc handling, fallback to system user + user = User.getCurrent(); + } + return user; + } + + private List getSystemAndSuperUsers() throws IOException { + User user = User.getCurrent(); + if (user == null) { + throw new IOException("Unable to obtain the current user, " + + "authorization checks for internal operations will not work correctly!"); + } + String currentUser = user.getShortName(); + List superUsers = Lists.asList(currentUser, + this.conf.getStrings(AccessControlLists.SUPERUSER_CONF_KEY, new String[0])); + return superUsers; + } + + private boolean isSystemOrSuperUser() throws IOException { + List superUsers = getSystemAndSuperUsers(); + User activeUser = getActiveUser(); + return superUsers.contains(activeUser.getShortName()); + } + + @Override + public Cell postMutationBeforeWAL(ObserverContext ctx, + MutationType opType, Mutation mutation, Cell oldCell, Cell newCell) throws IOException { + List tags = Lists.newArrayList(); + CellVisibility cellVisibility = null; + 
try { + cellVisibility = mutation.getCellVisibility(); + } catch (DeserializationException e) { + throw new IOException(e); + } + if (cellVisibility == null) { + return newCell; + } + // Adding all other tags + Iterator tagsItr = CellUtil.tagsIterator(newCell.getTagsArray(), newCell.getTagsOffset(), + newCell.getTagsLength()); + while (tagsItr.hasNext()) { + Tag tag = tagsItr.next(); + if (tag.getType() != VisibilityUtils.VISIBILITY_TAG_TYPE) { + tags.add(tag); + } + } + try { + tags.addAll(createVisibilityTags(cellVisibility.getExpression())); + } catch (ParseException e) { + throw new IOException(e); + } + + // We need to create another KV, unfortunately, because the current new KV + // has no space for tags + KeyValue newKv = KeyValueUtil.ensureKeyValue(newCell); + byte[] bytes = newKv.getBuffer(); + KeyValue rewriteKv = new KeyValue(bytes, newKv.getRowOffset(), newKv.getRowLength(), bytes, + newKv.getFamilyOffset(), newKv.getFamilyLength(), bytes, newKv.getQualifierOffset(), + newKv.getQualifierLength(), newKv.getTimestamp(), KeyValue.Type.codeToType(newKv + .getTypeByte()), bytes, newKv.getValueOffset(), newKv.getValueLength(), tags); + // Preserve mvcc data + rewriteKv.setMvccVersion(newKv.getMvccVersion()); + return rewriteKv; + } + + @Override + public Service getService() { + return VisibilityLabelsProtos.VisibilityLabelsService.newReflectiveService(this); + } + + /****************************** VisibilityEndpoint service related methods ******************************/ + @Override + public synchronized void addLabels(RpcController controller, VisibilityLabelsRequest request, + RpcCallback done) { + VisibilityLabelsResponse.Builder response = VisibilityLabelsResponse.newBuilder(); + List labels = request.getVisLabelList(); + try { + checkCallingUserAuth(); + List puts = new ArrayList(labels.size()); + RegionActionResult successResult = RegionActionResult.newBuilder().build(); + for (VisibilityLabel visLabel : labels) { + byte[] label = 
visLabel.getLabel().toByteArray(); + String labelStr = Bytes.toString(label); + if (VisibilityLabelsValidator.isValidLabel(label)) { + if (this.visibilityManager.getLabelOrdinal(labelStr) > 0) { + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new LabelAlreadyExistsException("Label '" + labelStr + + "' already exists"))); + response.addResult(failureResultBuilder.build()); + } else { + Put p = new Put(Bytes.toBytes(ordinalCounter)); + p.add(LABELS_TABLE_FAMILY, LABEL_QUALIFIER, label, LABELS_TABLE_TAGS); + puts.add(p); + ordinalCounter++; + response.addResult(successResult); + } + } else { + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new InvalidLabelException("Invalid visibility label '" + labelStr + + "'"))); + response.addResult(failureResultBuilder.build()); + } + } + OperationStatus[] opStatus = this.regionEnv.getRegion().batchMutate( + puts.toArray(new Mutation[puts.size()])); + int i = 0; + for (OperationStatus status : opStatus) { + if (status.getOperationStatusCode() != SUCCESS) { + while (response.getResult(i) != successResult) + i++; + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new DoNotRetryIOException(status.getExceptionMsg()))); + response.setResult(i, failureResultBuilder.build()); + } + i++; + } + } catch (IOException e) { + LOG.error(e); + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter.buildException(e)); + RegionActionResult failureResult = failureResultBuilder.build(); + for (int i = 0; i < labels.size(); i++) { + response.addResult(i, failureResult); + } + } + done.run(response.build()); + } + + private void performACLCheck() + 
throws IOException { + // Do ACL check only when the security is enabled. + if (this.acOn && !isSystemOrSuperUser()) { + User user = getActiveUser(); + throw new AccessDeniedException("User '" + (user != null ? user.getShortName() : "null") + + " is not authorized to perform this action."); + } + } + + private List> getExistingLabelsWithAuths() throws IOException { + Scan scan = new Scan(); + RegionScanner scanner = this.regionEnv.getRegion().getScanner(scan); + List> existingLabels = new ArrayList>(); + try { + while (true) { + List cells = new ArrayList(); + scanner.next(cells); + if (cells.isEmpty()) { + break; + } + existingLabels.add(cells); + } + } finally { + scanner.close(); + } + return existingLabels; + } + + @Override + public synchronized void setAuths(RpcController controller, SetAuthsRequest request, + RpcCallback done) { + VisibilityLabelsResponse.Builder response = VisibilityLabelsResponse.newBuilder(); + List auths = request.getAuthList(); + byte[] user = request.getUser().toByteArray(); + try { + checkCallingUserAuth(); + List puts = new ArrayList(auths.size()); + RegionActionResult successResult = RegionActionResult.newBuilder().build(); + for (ByteString authBS : auths) { + byte[] auth = authBS.toByteArray(); + String authStr = Bytes.toString(auth); + int labelOrdinal = this.visibilityManager.getLabelOrdinal(authStr); + if (labelOrdinal == 0) { + // This label is not yet added. 
1st this should be added to the system + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new InvalidLabelException("Label '" + authStr + "' doesn't exist"))); + response.addResult(failureResultBuilder.build()); + } else { + Put p = new Put(Bytes.toBytes(labelOrdinal)); + p.add(LABELS_TABLE_FAMILY, user, DUMMY_VALUE, LABELS_TABLE_TAGS); + puts.add(p); + response.addResult(successResult); + } + } + OperationStatus[] opStatus = this.regionEnv.getRegion().batchMutate( + puts.toArray(new Mutation[puts.size()])); + int i = 0; + for (OperationStatus status : opStatus) { + if (status.getOperationStatusCode() != SUCCESS) { + while (response.getResult(i) != successResult) i++; + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new DoNotRetryIOException(status.getExceptionMsg()))); + response.setResult(i, failureResultBuilder.build()); + } + i++; + } + } catch (IOException e) { + LOG.error(e); + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter.buildException(e)); + RegionActionResult failureResult = failureResultBuilder.build(); + for (int i = 0; i < auths.size(); i++) { + response.addResult(i, failureResult); + } + } + done.run(response.build()); + } + + @Override + public synchronized void getAuths(RpcController controller, GetAuthsRequest request, + RpcCallback done) { + byte[] user = request.getUser().toByteArray(); + GetAuthsResponse.Builder response = GetAuthsResponse.newBuilder(); + response.setUser(request.getUser()); + + Scan s = new Scan(); + s.addColumn(LABELS_TABLE_FAMILY, user); + Filter filter = createVisibilityLabelFilter(this.regionEnv.getRegion(), new Authorizations( + SYSTEM_LABEL)); + s.setFilter(filter); + try { + // We do ACL check here as we create scanner 
directly on region. It will not make calls to + // AccessController CP methods. + performACLCheck(); + RegionScanner scanner = this.regionEnv.getRegion().getScanner(s); + List results = new ArrayList(1); + while (true) { + scanner.next(results); + if (results.isEmpty()) break; + Cell cell = results.get(0); + int ordinal = Bytes.toInt(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); + String label = this.visibilityManager.getLabel(ordinal); + if (label != null) { + response.addAuth(ByteString.copyFrom(Bytes.toBytes(label))); + } + results.clear(); + } + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); + } + done.run(response.build()); + } + + @Override + public synchronized void clearAuths(RpcController controller, SetAuthsRequest request, + RpcCallback done) { + VisibilityLabelsResponse.Builder response = VisibilityLabelsResponse.newBuilder(); + List auths = request.getAuthList(); + byte[] user = request.getUser().toByteArray(); + try { + checkCallingUserAuth(); + List currentAuths = this.visibilityManager.getAuths(Bytes.toString(user)); + List deletes = new ArrayList(auths.size()); + RegionActionResult successResult = RegionActionResult.newBuilder().build(); + for (ByteString authBS : auths) { + byte[] auth = authBS.toByteArray(); + String authStr = Bytes.toString(auth); + if (currentAuths.contains(authStr)) { + int labelOrdinal = this.visibilityManager.getLabelOrdinal(authStr); + assert labelOrdinal > 0; + Delete d = new Delete(Bytes.toBytes(labelOrdinal)); + d.deleteColumns(LABELS_TABLE_FAMILY, user); + deletes.add(d); + response.addResult(successResult); + } else { + // This label is not set for the user. 
+ RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new InvalidLabelException("Label '" + authStr + + "' is not set for the user " + Bytes.toString(user)))); + response.addResult(failureResultBuilder.build()); + } + } + OperationStatus[] opStatus = this.regionEnv.getRegion().batchMutate( + deletes.toArray(new Mutation[deletes.size()])); + int i = 0; + for (OperationStatus status : opStatus) { + if (status.getOperationStatusCode() != SUCCESS) { + while (response.getResult(i) != successResult) i++; + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new DoNotRetryIOException(status.getExceptionMsg()))); + response.setResult(i, failureResultBuilder.build()); + } + i++; + } + } catch (IOException e) { + LOG.error(e); + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter.buildException(e)); + RegionActionResult failureResult = failureResultBuilder.build(); + for (int i = 0; i < auths.size(); i++) { + response.addResult(i, failureResult); + } + } + done.run(response.build()); + } + + private void checkCallingUserAuth() throws IOException { + if (!this.acOn) { + User user = getActiveUser(); + if (user == null) { + throw new IOException("Unable to retrieve calling user"); + } + List auths = this.visibilityManager.getAuths(user.getShortName()); + if (!auths.contains(SYSTEM_LABEL)) { + throw new AccessDeniedException("User '" + user.getShortName() + + "' is not authorized to perform this action."); + } + } + } +} Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java 
(revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java (working copy) @@ -0,0 +1,84 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.io.IOException; +import java.util.BitSet; +import java.util.Iterator; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.filter.FilterBase; +import org.apache.hadoop.hbase.io.util.StreamUtils; +import org.apache.hadoop.hbase.util.Pair; + +/** + * This Filter checks the visibility expression with each KV against visibility labels associated + * with the scan. Based on the check the KV is included in the scan result or gets filtered out. 
+ */ +@InterfaceAudience.Private +class VisibilityLabelFilter extends FilterBase { + + private BitSet authLabels; + + public VisibilityLabelFilter(BitSet authLabels) { + this.authLabels = authLabels; + } + + @Override + public ReturnCode filterKeyValue(Cell cell) throws IOException { + Iterator tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), + cell.getTagsLength()); + while (tagsItr.hasNext()) { + boolean includeKV = true; + Tag tag = tagsItr.next(); + if (tag.getType() == VisibilityUtils.VISIBILITY_TAG_TYPE) { + int offset = tag.getTagOffset(); + int endOffset = offset + tag.getTagLength(); + while (offset < endOffset) { + Pair result = StreamUtils.readRawVarint32(tag.getBuffer(), offset); + int currLabelOrdinal = result.getFirst(); + if (currLabelOrdinal < 0) { + // check for the absence of this label in the Scan Auth labels + // ie. to check BitSet corresponding bit is 0 + int temp = -currLabelOrdinal; + if (this.authLabels.get(temp)) { + includeKV = false; + break; + } + } else { + if (!this.authLabels.get(currLabelOrdinal)) { + includeKV = false; + break; + } + } + offset += result.getSecond(); + } + if (includeKV) { + // We got one visibility expression getting evaluated to true. Good to include this KV in + // the result then. + return ReturnCode.INCLUDE; + } + return ReturnCode.SKIP; + } + } + return ReturnCode.INCLUDE; + } +} \ No newline at end of file Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsManager.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsManager.java (revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsManager.java (working copy) @@ -0,0 +1,183 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.zookeeper.KeeperException; + +/** + * Maintains the cache for visibility labels and also uses the zookeeper to update the labels in the + * system. 
The cache updation happens based on the data change event that happens on the zookeeper + * znode for labels table + */ +@InterfaceAudience.Private +public class VisibilityLabelsManager { + + private static final Log LOG = LogFactory.getLog(VisibilityLabelsManager.class); + private static final List EMPTY_LIST = new ArrayList(0); + private static VisibilityLabelsManager instance; + + private ZKVisibilityLabelWatcher zkVisibilityWatcher; + private Map labels = new HashMap(); + private Map ordinalVsLabels = new HashMap(); + private Map> userAuths = new HashMap>(); + private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); + + private VisibilityLabelsManager(ZooKeeperWatcher watcher, Configuration conf) throws IOException { + zkVisibilityWatcher = new ZKVisibilityLabelWatcher(watcher, this, conf); + try { + zkVisibilityWatcher.start(); + } catch (KeeperException ke) { + LOG.error("ZooKeeper initialization failed", ke); + throw new IOException(ke); + } + } + + public synchronized static VisibilityLabelsManager get(ZooKeeperWatcher watcher, + Configuration conf) throws IOException { + if (instance == null) { + instance = new VisibilityLabelsManager(watcher, conf); + } + return instance; + } + + public static VisibilityLabelsManager get() { + return instance; + } + + public void refreshLabelsCache(byte[] data) throws IOException { + List visibilityLabels = null; + try { + visibilityLabels = VisibilityUtils.readLabelsFromZKData(data); + } catch (DeserializationException dse) { + throw new IOException(dse); + } + this.lock.writeLock().lock(); + try { + for (VisibilityLabel visLabel : visibilityLabels) { + String label = Bytes.toString(visLabel.getLabel().toByteArray()); + labels.put(label, visLabel.getOrdinal()); + ordinalVsLabels.put(visLabel.getOrdinal(), label); + } + } finally { + this.lock.writeLock().unlock(); + } + } + + public void refreshUserAuthsCache(byte[] data) throws IOException { + MultiUserAuthorizations multiUserAuths = null; + try { + 
multiUserAuths = VisibilityUtils.readUserAuthsFromZKData(data); + } catch (DeserializationException dse) { + throw new IOException(dse); + } + this.lock.writeLock().lock(); + try { + for (UserAuthorizations userAuths : multiUserAuths.getUserAuthsList()) { + String user = Bytes.toString(userAuths.getUser().toByteArray()); + this.userAuths.put(user, new HashSet(userAuths.getAuthList())); + } + } finally { + this.lock.writeLock().unlock(); + } + } + + /** + * @param label + * @return The ordinal for the label. The ordinal starts from 1. Returns 0 when the passed a non + * existing label. + */ + public int getLabelOrdinal(String label) { + Integer ordinal = null; + this.lock.readLock().lock(); + try { + ordinal = labels.get(label); + } finally { + this.lock.readLock().unlock(); + } + if (ordinal != null) { + return ordinal.intValue(); + } + // 0 denotes not available + return 0; + } + + public String getLabel(int ordinal) { + this.lock.readLock().lock(); + try { + return this.ordinalVsLabels.get(ordinal); + } finally { + this.lock.readLock().unlock(); + } + } + + /** + * @return The total number of visibility labels. + */ + public int getLabelsCount(){ + return this.labels.size(); + } + + /** + * @param user + * @return The labels that the given user is authorized for. + */ + public List getAuths(String user) { + List auths = EMPTY_LIST; + this.lock.readLock().lock(); + try { + Set authOrdinals = userAuths.get(user); + if (authOrdinals != null) { + auths = new ArrayList(authOrdinals.size()); + for (Integer authOrdinal : authOrdinals) { + auths.add(ordinalVsLabels.get(authOrdinal)); + } + } + } finally { + this.lock.readLock().unlock(); + } + return auths; + } + + /** + * Writes the labels data to zookeeper node. + * @param data + * @param labelsOrUserAuths true for writing labels and false for user auths. 
+ */ + public void writeToZookeeper(byte[] data, boolean labelsOrUserAuths) { + this.zkVisibilityWatcher.writeToZookeeper(data, labelsOrUserAuths); + } +} Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java (revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java (working copy) @@ -0,0 +1,133 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.util.ReflectionUtils; + +import com.google.protobuf.ByteString; +import com.google.protobuf.InvalidProtocolBufferException; + +/** + * Utility method to support visibility + */ +@InterfaceAudience.Private +public class VisibilityUtils { + + public static final String VISIBILITY_LABEL_GENERATOR_CLASS = + "hbase.regionserver.scan.visibility.label.generator.class"; + public static final byte VISIBILITY_TAG_TYPE = (byte) 2; + public static final String SYSTEM_LABEL = "system"; + + /** + * Creates the labels data to be written to zookeeper. + * @param existingLabels + * @return Bytes form of labels and their ordinal details to be written to zookeeper. 
+ */ + public static byte[] getDataToWriteToZooKeeper(Map existingLabels) { + VisibilityLabelsRequest.Builder visReqBuilder = VisibilityLabelsRequest.newBuilder(); + for (Entry entry : existingLabels.entrySet()) { + VisibilityLabel.Builder visLabBuilder = VisibilityLabel.newBuilder(); + visLabBuilder.setLabel(ByteString.copyFrom(Bytes.toBytes(entry.getKey()))); + visLabBuilder.setOrdinal(entry.getValue()); + visReqBuilder.addVisLabel(visLabBuilder.build()); + } + return ProtobufUtil.prependPBMagic(visReqBuilder.build().toByteArray()); + } + + /** + * Creates the user auth data to be written to zookeeper. + * @param userAuths + * @return Bytes form of user auths details to be written to zookeeper. + */ + public static byte[] getUserAuthsDataToWriteToZooKeeper(Map> userAuths) { + MultiUserAuthorizations.Builder builder = MultiUserAuthorizations.newBuilder(); + for (Entry> entry : userAuths.entrySet()) { + UserAuthorizations.Builder userAuthsBuilder = UserAuthorizations.newBuilder(); + userAuthsBuilder.setUser(ByteString.copyFrom(Bytes.toBytes(entry.getKey()))); + for (Integer label : entry.getValue()) { + userAuthsBuilder.addAuth(label); + } + builder.addUserAuths(userAuthsBuilder.build()); + } + return ProtobufUtil.prependPBMagic(builder.build().toByteArray()); + } + + /** + * Reads back from the zookeeper. The data read here is of the form written by + * writeToZooKeeper(Map entries). 
+ * + * @param data + * @return Labels and their ordinal details + * @throws DeserializationException + */ + public static List readLabelsFromZKData(byte[] data) + throws DeserializationException { + if (ProtobufUtil.isPBMagicPrefix(data)) { + int pblen = ProtobufUtil.lengthOfPBMagic(); + try { + VisibilityLabelsRequest request = VisibilityLabelsRequest.newBuilder() + .mergeFrom(data, pblen, data.length - pblen).build(); + return request.getVisLabelList(); + } catch (InvalidProtocolBufferException e) { + throw new DeserializationException(e); + } + } + return null; + } + + /** + * Reads back User auth data written to zookeeper. + * @param data + * @return User auth details + * @throws DeserializationException + */ + public static MultiUserAuthorizations readUserAuthsFromZKData(byte[] data) + throws DeserializationException { + if (ProtobufUtil.isPBMagicPrefix(data)) { + int pblen = ProtobufUtil.lengthOfPBMagic(); + try { + MultiUserAuthorizations multiUserAuths = MultiUserAuthorizations.newBuilder() + .mergeFrom(data, pblen, data.length - pblen).build(); + return multiUserAuths; + } catch (InvalidProtocolBufferException e) { + throw new DeserializationException(e); + } + } + return null; + } + + public static ScanLabelGenerator getScanLabelGenerator(Configuration conf) { + Class scanLabelGeneratorKlass = conf + .getClass(VISIBILITY_LABEL_GENERATOR_CLASS, DefaultScanLabelGenerator.class, + ScanLabelGenerator.class); + return ReflectionUtils.newInstance(scanLabelGeneratorKlass, conf); + } +} Index: hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java =================================================================== --- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java (revision 0) +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java (working copy) @@ -0,0 +1,143 @@ +/** + * Licensed to the Apache Software Foundation 
(ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.io.IOException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.zookeeper.ZKUtil; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperListener; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.zookeeper.KeeperException; + +/** + * A zk watcher that watches the labels table znode. This would create a znode + * /hbase/visibility_labels and will have a serialized form of a set of labels in the system. 
+ */ +@InterfaceAudience.Private +public class ZKVisibilityLabelWatcher extends ZooKeeperListener { + + private static final Log LOG = LogFactory.getLog(ZKVisibilityLabelWatcher.class); + private static final String VISIBILITY_LABEL_ZK_PATH = "zookeeper.znode.visibility.label.parent"; + private static final String DEFAULT_VISIBILITY_LABEL_NODE = "visibility/labels"; + private static final String VISIBILITY_USER_AUTHS_ZK_PATH = + "zookeeper.znode.visibility.user.auths.parent"; + private static final String DEFAULT_VISIBILITY_USER_AUTHS_NODE = "visibility/user_auths"; + + private VisibilityLabelsManager labelsManager; + private String labelZnode; + private String userAuthsZnode; + + public ZKVisibilityLabelWatcher(ZooKeeperWatcher watcher, VisibilityLabelsManager labelsManager, + Configuration conf) { + super(watcher); + this.labelsManager = labelsManager; + String labelZnodeParent = conf.get(VISIBILITY_LABEL_ZK_PATH, DEFAULT_VISIBILITY_LABEL_NODE); + String userAuthsZnodeParent = conf.get(VISIBILITY_USER_AUTHS_ZK_PATH, + DEFAULT_VISIBILITY_USER_AUTHS_NODE); + this.labelZnode = ZKUtil.joinZNode(watcher.baseZNode, labelZnodeParent); + this.userAuthsZnode = ZKUtil.joinZNode(watcher.baseZNode, userAuthsZnodeParent); + } + + public void start() throws KeeperException { + watcher.registerListener(this); + ZKUtil.watchAndCheckExists(watcher, labelZnode); + ZKUtil.watchAndCheckExists(watcher, userAuthsZnode); + } + + private void refreshVisibilityLabelsCache(byte[] data) { + try { + this.labelsManager.refreshLabelsCache(data); + } catch (IOException ioe) { + LOG.error("Failed parsing data from labels table " + " from zk", ioe); + } + } + + private void refreshUserAuthsCache(byte[] data) { + try { + this.labelsManager.refreshUserAuthsCache(data); + } catch (IOException ioe) { + LOG.error("Failed parsing data from labels table " + " from zk", ioe); + } + } + + @Override + public void nodeCreated(String path) { + if (path.equals(labelZnode) || path.equals(userAuthsZnode)) { + 
try { + ZKUtil.watchAndCheckExists(watcher, path); + } catch (KeeperException ke) { + LOG.error("Error setting watcher on node " + path, ke); + // only option is to abort + watcher.abort("Zookeeper error obtaining label node children", ke); + } + } + } + + @Override + public void nodeDeleted(String path) { + // There is no case of visibility labels path to get deleted. + } + + @Override + public void nodeDataChanged(String path) { + if (path.equals(labelZnode) || path.equals(userAuthsZnode)) { + try { + watcher.sync(path); + byte[] data = ZKUtil.getDataAndWatch(watcher, path); + if (path.equals(labelZnode)) { + refreshVisibilityLabelsCache(data); + } else { + refreshUserAuthsCache(data); + } + } catch (KeeperException ke) { + LOG.error("Error reading data from zookeeper for node " + path, ke); + // only option is to abort + watcher.abort("Zookeeper error getting data for node " + path, ke); + } + } + } + + @Override + public void nodeChildrenChanged(String path) { + // We are not dealing with child nodes under the label znode or userauths znode. + } + + /** + * Write a labels mirror or user auths mirror into zookeeper + * + * @param data + * @param labelsOrUserAuths true for writing labels and false for user auths. 
+ */ + public void writeToZookeeper(byte[] data, boolean labelsOrUserAuths) { + String znode = this.labelZnode; + if (!labelsOrUserAuths) { + znode = this.userAuthsZnode; + } + try { + ZKUtil.createWithParents(watcher, znode); + ZKUtil.updateExistingNodeData(watcher, znode, data, -1); + } catch (KeeperException e) { + LOG.error("Failed writing to " + znode, e); + watcher.abort("Failed writing node " + znode + " to zookeeper", e); + } + } +} Index: hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java =================================================================== --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java (revision 0) +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java (working copy) @@ -0,0 +1,393 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import org.apache.hadoop.hbase.SmallTests; +import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(SmallTests.class) +public class TestExpressionExpander { + + @Test + public void testPositiveCases() throws Exception { + ExpressionExpander expander = new ExpressionExpander(); + + // (!a) -> (!a) + NonLeafExpressionNode exp1 = new NonLeafExpressionNode(Operator.NOT, + new LeafExpressionNode("a")); + ExpressionNode result = expander.expand(exp1); + assertTrue(result instanceof NonLeafExpressionNode); + NonLeafExpressionNode nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.NOT, nlResult.getOperator()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + + // (a | b) -> (a | b) + NonLeafExpressionNode exp2 = new NonLeafExpressionNode(Operator.OR, + new LeafExpressionNode("a"), new LeafExpressionNode("b")); + result = expander.expand(exp2); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + + // (a & b) -> (a & b) + NonLeafExpressionNode exp3 = new NonLeafExpressionNode(Operator.AND, + new LeafExpressionNode("a"), new LeafExpressionNode("b")); + result = expander.expand(exp3); + 
assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + + // ((a | b) | c) -> (a | b | c) + NonLeafExpressionNode exp4 = new NonLeafExpressionNode(Operator.OR, new NonLeafExpressionNode( + Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode("b")), + new LeafExpressionNode("c")); + result = expander.expand(exp4); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(3, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier()); + + // ((a & b) & c) -> (a & b & c) + NonLeafExpressionNode exp5 = new NonLeafExpressionNode(Operator.AND, new NonLeafExpressionNode( + Operator.AND, new LeafExpressionNode("a"), new LeafExpressionNode("b")), + new LeafExpressionNode("c")); + result = expander.expand(exp5); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(3, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier()); + + // (a | b) & c -> ((a & c) | (b & c)) + NonLeafExpressionNode exp6 = new 
NonLeafExpressionNode(Operator.AND, new NonLeafExpressionNode( + Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode("b")), + new LeafExpressionNode("c")); + result = expander.expand(exp6); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + NonLeafExpressionNode temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // (a & b) | c -> ((a & b) | c) + NonLeafExpressionNode exp7 = new NonLeafExpressionNode(Operator.OR, new NonLeafExpressionNode( + Operator.AND, new LeafExpressionNode("a"), new LeafExpressionNode("b")), + new LeafExpressionNode("c")); + result = expander.expand(exp7); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + nlResult = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) 
nlResult.getChildExps().get(1)).getIdentifier()); + + // ((a & b) | c) & d -> (((a & b) & d) | (c & d)) + NonLeafExpressionNode exp8 = new NonLeafExpressionNode(Operator.AND); + exp8.addChildExp(new NonLeafExpressionNode(Operator.OR, new NonLeafExpressionNode(Operator.AND, + new LeafExpressionNode("a"), new LeafExpressionNode("b")), new LeafExpressionNode("c"))); + exp8.addChildExp(new LeafExpressionNode("d")); + result = expander.expand(exp8); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // (a | b) | (c | d) -> (a | b | c | d) + NonLeafExpressionNode exp9 = new NonLeafExpressionNode(Operator.OR); + exp9.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), + new LeafExpressionNode("b"))); + exp9.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("c"), + new LeafExpressionNode("d"))); + result = expander.expand(exp9); + assertTrue(result instanceof 
NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(4, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) nlResult.getChildExps().get(3)).getIdentifier()); + + // (a & b) & (c & d) -> (a & b & c & d) + NonLeafExpressionNode exp10 = new NonLeafExpressionNode(Operator.AND); + exp10.addChildExp(new NonLeafExpressionNode(Operator.AND, new LeafExpressionNode("a"), + new LeafExpressionNode("b"))); + exp10.addChildExp(new NonLeafExpressionNode(Operator.AND, new LeafExpressionNode("c"), + new LeafExpressionNode("d"))); + result = expander.expand(exp10); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(4, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) nlResult.getChildExps().get(3)).getIdentifier()); + + // (a | b) & (c | d) -> ((a & c) | (a & d) | (b & c) | (b & d)) + NonLeafExpressionNode exp11 = new NonLeafExpressionNode(Operator.AND); + exp11.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), + new LeafExpressionNode("b"))); + exp11.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("c"), + new LeafExpressionNode("d"))); + result = expander.expand(exp11); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = 
(NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(4, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(3); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // (((a | b) | c) | d) & e -> ((a & e) | (b & e) | (c & e) | (d & e)) + NonLeafExpressionNode exp12 = new NonLeafExpressionNode(Operator.AND); + NonLeafExpressionNode tempExp1 = new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode( + "a"), new LeafExpressionNode("b")); + NonLeafExpressionNode tempExp2 = new NonLeafExpressionNode(Operator.OR, tempExp1, + new LeafExpressionNode("c")); + NonLeafExpressionNode tempExp3 = new NonLeafExpressionNode(Operator.OR, tempExp2, + new 
LeafExpressionNode("d")); + exp12.addChildExp(tempExp3); + exp12.addChildExp(new LeafExpressionNode("e")); + result = expander.expand(exp12); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(4, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(3); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // (a | b | c) & d -> ((a & d) | (b & d) | (c & d)) + NonLeafExpressionNode exp13 = new NonLeafExpressionNode(Operator.AND, + new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode( + "b"), new LeafExpressionNode("c")), new LeafExpressionNode("d")); + result = 
expander.expand(exp13); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(3, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // ((a | b) & (c | d)) & (e | f) -> (((a & c) & e) | ((a & c) & f) | ((a & d) & e) | ((a & d) & + // f) | ((b & c) & e) | ((b & c) & f) | ((b & d) & e) | ((b & d) & f)) + NonLeafExpressionNode exp15 = new NonLeafExpressionNode(Operator.AND); + NonLeafExpressionNode temp1 = new NonLeafExpressionNode(Operator.AND); + temp1.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), + new LeafExpressionNode("b"))); + temp1.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("c"), + new LeafExpressionNode("d"))); + exp15.addChildExp(temp1); + exp15.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("e"), + new LeafExpressionNode("f"))); + result = expander.expand(exp15); + 
assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(8, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) 
nlResult.getChildExps().get(3); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(4); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(5); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(6); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = 
(NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(7); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // !(a | b) -> ((!a) & (!b)) + NonLeafExpressionNode exp16 = new NonLeafExpressionNode(Operator.NOT, + new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode( + "b"))); + result = expander.expand(exp16); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.NOT, temp.getOperator()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.NOT, temp.getOperator()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + } +} Index: hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java =================================================================== --- 
hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java (revision 0) +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java (working copy) @@ -0,0 +1,318 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.junit.Assert.fail; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import org.apache.hadoop.hbase.SmallTests; +import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(SmallTests.class) +public class TestExpressionParser { + + private ExpressionParser parser = new ExpressionParser(); + + @Test + public void testPositiveCases() throws Exception { + // abc -> (abc) + ExpressionNode node = parser.parse("abc"); + assertTrue(node instanceof LeafExpressionNode); + assertEquals("abc", ((LeafExpressionNode) node).getIdentifier()); + + // a&b|c&d -> (((a & b) | c) & d) + node = parser.parse("a&b|c&d"); + assertTrue(node instanceof NonLeafExpressionNode); + NonLeafExpressionNode nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("d", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("b", 
((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // (a) -> (a) + node = parser.parse("(a)"); + assertTrue(node instanceof LeafExpressionNode); + assertEquals("a", ((LeafExpressionNode) node).getIdentifier()); + + // (a&b) -> (a & b) + node = parser.parse(" ( a & b )"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // ((((a&b)))) -> (a & b) + node = parser.parse("((((a&b))))"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // (a|b)&(cc|def) -> ((a | b) & (cc | def)) + node = parser.parse("( a | b ) & (cc|def)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + NonLeafExpressionNode nlNodeLeft = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + NonLeafExpressionNode nlNodeRight = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNodeLeft.getOperator()); + assertEquals(2, nlNodeLeft.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) 
nlNodeLeft.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNodeLeft.getChildExps().get(1)).getIdentifier()); + assertEquals(Operator.OR, nlNodeRight.getOperator()); + assertEquals(2, nlNodeRight.getChildExps().size()); + assertEquals("cc", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + assertEquals("def", ((LeafExpressionNode) nlNodeRight.getChildExps().get(1)).getIdentifier()); + + // a&(cc|de) -> (a & (cc | de)) + node = parser.parse("a&(cc|de)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("cc", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("de", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // (a&b)|c -> ((a & b) | c) + node = parser.parse("(a&b)|c"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) 
nlNode.getChildExps().get(1)).getIdentifier()); + + // (a&b&c)|d -> (((a & b) & c) | d) + node = parser.parse("(a&b&c)|d"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("d", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // a&(b|(c|d)) -> (a & (b | (c | d))) + node = parser.parse("a&(b|(c|d))"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) 
nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // (!a) -> (!a) + node = parser.parse("(!a)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // a&(!b) -> (a & (!b)) + node = parser.parse("a&(!b)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // !a&b -> ((!a) & b) + node = parser.parse("!a&b"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // !a&(!b) -> ((!a) & (!b)) + node = 
parser.parse("!a&(!b)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNodeLeft = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + nlNodeRight = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.NOT, nlNodeLeft.getOperator()); + assertEquals(1, nlNodeLeft.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNodeLeft.getChildExps().get(0)).getIdentifier()); + assertEquals(Operator.NOT, nlNodeRight.getOperator()); + assertEquals(1, nlNodeRight.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + + // !a&!b -> ((!a) & (!b)) + node = parser.parse("!a&!b"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNodeLeft = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + nlNodeRight = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.NOT, nlNodeLeft.getOperator()); + assertEquals(1, nlNodeLeft.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNodeLeft.getChildExps().get(0)).getIdentifier()); + assertEquals(Operator.NOT, nlNodeRight.getOperator()); + assertEquals(1, nlNodeRight.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + + // !(a&b) -> (!(a & b)) + node = parser.parse("!(a&b)"); + assertTrue(node instanceof NonLeafExpressionNode); + 
nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // a&!b -> (a & (!b)) + node = parser.parse("a&!b"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // !((a|b)&!(c&!b)) -> (!((a | b) & (!(c & (!b))))) + node = parser.parse("!((a | b) & !(c & !b))"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNodeLeft = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + nlNodeRight = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNodeLeft.getOperator()); + assertEquals("a", 
((LeafExpressionNode) nlNodeLeft.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNodeLeft.getChildExps().get(1)).getIdentifier()); + assertEquals(Operator.NOT, nlNodeRight.getOperator()); + assertEquals(1, nlNodeRight.getChildExps().size()); + nlNodeRight = (NonLeafExpressionNode) nlNodeRight.getChildExps().get(0); + assertEquals(Operator.AND, nlNodeRight.getOperator()); + assertEquals(2, nlNodeRight.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNodeRight.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNodeRight = (NonLeafExpressionNode) nlNodeRight.getChildExps().get(1); + assertEquals(Operator.NOT, nlNodeRight.getOperator()); + assertEquals(1, nlNodeRight.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + } + + @Test + public void testNegativeCases() throws Exception { + executeNegativeCase("("); + executeNegativeCase(")"); + executeNegativeCase("()"); + executeNegativeCase("(a"); + executeNegativeCase("a&"); + executeNegativeCase("a&|b"); + executeNegativeCase("!"); + executeNegativeCase("a!"); + executeNegativeCase("a!&"); + executeNegativeCase("&"); + executeNegativeCase("|"); + executeNegativeCase("!(a|(b&c)&!b"); + executeNegativeCase("!!a"); + executeNegativeCase("( a & b ) | ( c & d e)"); + executeNegativeCase("! 
a"); + } + + private void executeNegativeCase(String exp) { + try { + parser.parse(exp); + fail("Expected ParseException for expression " + exp); + } catch (ParseException e) { + } + } +} Index: hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java =================================================================== --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java (revision 0) +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java (working copy) @@ -0,0 +1,675 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_FAMILY; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABEL_QUALIFIER; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.IOException; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Append; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Increment; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import org.apache.hadoop.hbase.regionserver.HRegion; +import org.apache.hadoop.hbase.regionserver.HRegionServer; +import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; +import org.junit.After; +import 
org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; + +import com.google.protobuf.ByteString; + +/** + * Test class that tests the visibility labels + */ +@Category(MediumTests.class) +public class TestVisibilityLabels { + + private static final String TOPSECRET = "topsecret"; + private static final String PUBLIC = "public"; + private static final String PRIVATE = "private"; + private static final String CONFIDENTIAL = "confidential"; + private static final String SECRET = "secret"; + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + private static final byte[] row1 = Bytes.toBytes("row1"); + private static final byte[] row2 = Bytes.toBytes("row2"); + private static final byte[] row3 = Bytes.toBytes("row3"); + private static final byte[] row4 = Bytes.toBytes("row4"); + private final static byte[] fam = Bytes.toBytes("info"); + private final static byte[] qual = Bytes.toBytes("qual"); + private final static byte[] value = Bytes.toBytes("value"); + private static Configuration conf; + + private volatile boolean killedRS = false; + @Rule + public final TestName TEST_NAME = new TestName(); + private static User SUPERUSER; + + @BeforeClass + public static void setupBeforeClass() throws Exception { + // setup configuration + conf = TEST_UTIL.getConfiguration(); + conf.setInt("hfile.format.version", 3); + conf.set("hbase.coprocessor.master.classes", VisibilityController.class.getName()); + conf.set("hbase.coprocessor.region.classes", VisibilityController.class.getName()); + conf.setClass(VisibilityUtils.VISIBILITY_LABEL_GENERATOR_CLASS, SimpleScanLabelGenerator.class, + ScanLabelGenerator.class); + String currentUser = User.getCurrent().getName(); + conf.set("hbase.superuser", "admin,"+currentUser); + TEST_UTIL.startMiniCluster(2); + SUPERUSER = User.createUserForTesting(conf, 
"admin", new String[] { "supergroup" }); + + // Wait for the labels table to become available + TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000); + addLabels(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + TEST_UTIL.shutdownMiniCluster(); + } + + @After + public void tearDown() throws Exception { + killedRS = false; + } + + @Test + public void testSimpleVisibilityLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "|" + CONFIDENTIAL, + PRIVATE + "|" + CONFIDENTIAL); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL, PRIVATE)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + + assertTrue(next.length == 2); + CellScanner cellScanner = next[0].cellScanner(); + cellScanner.advance(); + Cell current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row1, 0, row1.length)); + cellScanner = next[1].cellScanner(); + cellScanner.advance(); + current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row2, 0, row2.length)); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsWithComplexLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")" + "&" + "!" 
+ TOPSECRET, "(" + PRIVATE + "&" + CONFIDENTIAL + "&" + SECRET + ")", "(" + + PRIVATE + "&" + CONFIDENTIAL + "&" + SECRET + ")", "(" + PRIVATE + "&" + CONFIDENTIAL + + "&" + SECRET + ")"); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(TOPSECRET, CONFIDENTIAL, PRIVATE, PUBLIC, SECRET)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(4); + assertEquals(3, next.length); + CellScanner cellScanner = next[0].cellScanner(); + cellScanner.advance(); + Cell current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row2, 0, row2.length)); + cellScanner = next[1].cellScanner(); + cellScanner.advance(); + current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row3, 0, row3.length)); + cellScanner = next[2].cellScanner(); + cellScanner.advance(); + current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row4, 0, row4.length)); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsThatDoesNotPassTheCriteria() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")", PRIVATE); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(PUBLIC)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertTrue(next.length == 0); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsInPutsThatDoesNotMatchAnyDefinedLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + try { + createTableAndWriteDataWithLabels(tableName, "SAMPLE_LABEL", "TEST"); + fail("Should have failed 
with failed sanity check exception"); + } catch (Exception e) { + } + } + + @Test + public void testVisibilityLabelsInScanThatDoesNotMatchAnyDefinedLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")", PRIVATE); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations("SAMPLE")); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertTrue(next.length == 0); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsWithGet() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL + "&!" + + PRIVATE, SECRET + "&" + CONFIDENTIAL + "&" + PRIVATE); + try { + Get get = new Get(row1); + get.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL)); + Result result = table.get(get); + assertTrue(!result.isEmpty()); + Cell cell = result.getColumnLatestCell(fam, qual); + assertTrue(Bytes.equals(value, 0, value.length, cell.getValueArray(), cell.getValueOffset(), + cell.getValueLength())); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsOnKillingOfRSContainingLabelsTable() throws Exception { + List regionServerThreads = TEST_UTIL.getHBaseCluster() + .getRegionServerThreads(); + int liveRS = 0; + for (RegionServerThread rsThreads : regionServerThreads) { + if (!rsThreads.getRegionServer().isAborted()) { + liveRS++; + } + } + if (liveRS == 1) { + TEST_UTIL.getHBaseCluster().startRegionServer(); + } + Thread t1 = new Thread() { + public void run() { + List regionServerThreads = TEST_UTIL.getHBaseCluster() + .getRegionServerThreads(); + for (RegionServerThread rsThread : regionServerThreads) { + List onlineRegions = 
rsThread.getRegionServer().getOnlineRegions( + LABELS_TABLE_NAME); + if (onlineRegions.size() > 0) { + rsThread.getRegionServer().abort("Aborting "); + killedRS = true; + break; + } + } + } + + }; + t1.start(); + final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + Thread t = new Thread() { + public void run() { + try { + while (!killedRS) { + Thread.sleep(1); + } + createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")", + PRIVATE); + } catch (Exception e) { + } + } + }; + t.start(); + regionServerThreads = TEST_UTIL.getHBaseCluster().getRegionServerThreads(); + while (!killedRS) { + Thread.sleep(10); + } + regionServerThreads = TEST_UTIL.getHBaseCluster().getRegionServerThreads(); + for (RegionServerThread rsThread : regionServerThreads) { + while (true) { + if (!rsThread.getRegionServer().isAborted()) { + List onlineRegions = rsThread.getRegionServer().getOnlineRegions( + LABELS_TABLE_NAME); + if (onlineRegions.size() > 0) { + break; + } else { + Thread.sleep(10); + } + } else { + break; + } + } + } + TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000); + t.join(); + HTable table = null; + try { + table = new HTable(TEST_UTIL.getConfiguration(), tableName); + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(SECRET)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertTrue(next.length == 1); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsOnRSRestart() throws Exception { + final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + List regionServerThreads = TEST_UTIL.getHBaseCluster() + .getRegionServerThreads(); + for (RegionServerThread rsThread : regionServerThreads) { + rsThread.getRegionServer().abort("Aborting "); + } + // Start one new RS + RegionServerThread rs = TEST_UTIL.getHBaseCluster().startRegionServer(); + HRegionServer regionServer = rs.getRegionServer(); + 
while (!regionServer.isOnline()) { + try { + Thread.sleep(10); + } catch (InterruptedException e) { + } + } + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")", PRIVATE); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(SECRET)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertTrue(next.length == 1); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testAddVisibilityLabelsOnRSRestart() throws Exception { + List regionServerThreads = TEST_UTIL.getHBaseCluster() + .getRegionServerThreads(); + for (RegionServerThread rsThread : regionServerThreads) { + rsThread.getRegionServer().abort("Aborting "); + } + // Start one new RS + RegionServerThread rs = TEST_UTIL.getHBaseCluster().startRegionServer(); + HRegionServer regionServer = rs.getRegionServer(); + while (!regionServer.isOnline()) { + try { + Thread.sleep(10); + } catch (InterruptedException e) { + } + } + String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, "ABC", "XYZ" }; + try { + VisibilityClient.addLabels(conf, labels); + } catch (Throwable t) { + throw new IOException(t); + } + // Scan the visibility label + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(VisibilityUtils.SYSTEM_LABEL)); + HTable ht = new HTable(conf, LABELS_TABLE_NAME.getName()); + int i = 0; + try { + ResultScanner scanner = ht.getScanner(s); + while (true) { + Result next = scanner.next(); + if (next == null) { + break; + } + i++; + } + } finally { + if (ht != null) { + ht.close(); + } + } + // One label is the "system" label. 
+ Assert.assertEquals("The count should be 8", 8, i); + } + + @Test + public void testVisibilityLabelsInGetThatDoesNotMatchAnyDefinedLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")", PRIVATE); + try { + Get get = new Get(row1); + get.setAuthorizations(new Authorizations("SAMPLE")); + Result result = table.get(get); + assertTrue(result.isEmpty()); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testAddLabels() throws Throwable { + String[] labels = { "L1", SECRET, "L2", "invalid~", "L3" }; + VisibilityLabelsResponse response = VisibilityClient.addLabels(conf, labels); + List resultList = response.getResultList(); + assertEquals(5, resultList.size()); + assertTrue(resultList.get(0).getException().getValue().isEmpty()); + assertEquals("org.apache.hadoop.hbase.security.visibility.LabelAlreadyExistsException", + resultList.get(1).getException().getName()); + assertTrue(resultList.get(2).getException().getValue().isEmpty()); + assertEquals("org.apache.hadoop.hbase.security.visibility.InvalidLabelException", resultList + .get(3).getException().getName()); + assertTrue(resultList.get(4).getException().getValue().isEmpty()); + } + + @Test + public void testSetAndGetUserAuths() throws Throwable { + String[] auths = { SECRET, CONFIDENTIAL }; + String user = "user1"; + VisibilityClient.setAuths(conf, auths, user); + HTable ht = null; + try { + ht = new HTable(conf, LABELS_TABLE_NAME); + ResultScanner scanner = ht.getScanner(new Scan()); + Result result = null; + while ((result = scanner.next()) != null) { + Cell label = result.getColumnLatestCell(LABELS_TABLE_FAMILY, LABEL_QUALIFIER); + Cell userAuth = result.getColumnLatestCell(LABELS_TABLE_FAMILY, user.getBytes()); + if (Bytes.equals(SECRET.getBytes(), 0, SECRET.getBytes().length, label.getValueArray(), + label.getValueOffset(), 
label.getValueLength()) + || Bytes.equals(CONFIDENTIAL.getBytes(), 0, CONFIDENTIAL.getBytes().length, + label.getValueArray(), label.getValueOffset(), label.getValueLength())) { + assertNotNull(userAuth); + } else { + assertNull(userAuth); + } + } + } finally { + if (ht != null) { + ht.close(); + } + } + GetAuthsResponse authsResponse = VisibilityClient.getAuths(conf, user); + List authsList = new ArrayList(); + for (ByteString authBS : authsResponse.getAuthList()) { + authsList.add(Bytes.toString(authBS.toByteArray())); + } + assertEquals(2, authsList.size()); + assertTrue(authsList.contains(SECRET)); + assertTrue(authsList.contains(CONFIDENTIAL)); + + // Try doing setAuths once again and there should not be any duplicates + String[] auths1 = { SECRET, CONFIDENTIAL }; + user = "user1"; + VisibilityClient.setAuths(conf, auths1, user); + + authsResponse = VisibilityClient.getAuths(conf, user); + authsList = new ArrayList(); + for (ByteString authBS : authsResponse.getAuthList()) { + authsList.add(Bytes.toString(authBS.toByteArray())); + } + assertEquals(2, authsList.size()); + assertTrue(authsList.contains(SECRET)); + assertTrue(authsList.contains(CONFIDENTIAL)); + } + + @Test + public void testClearUserAuths() throws Throwable { + String[] auths = { SECRET, CONFIDENTIAL, PRIVATE }; + String user = "testUser"; + VisibilityClient.setAuths(conf, auths, user); + // Removing the auths for SECRET and CONFIDENTIAL for the user. + // Passing a non existing auth also. 
+ auths = new String[] { SECRET, PUBLIC, CONFIDENTIAL }; + VisibilityLabelsResponse response = VisibilityClient.clearAuths(conf, auths, user); + List resultList = response.getResultList(); + assertEquals(3, resultList.size()); + assertTrue(resultList.get(0).getException().getValue().isEmpty()); + assertEquals("org.apache.hadoop.hbase.security.visibility.InvalidLabelException", + resultList.get(1).getException().getName()); + assertTrue(resultList.get(2).getException().getValue().isEmpty()); + HTable ht = null; + try { + ht = new HTable(conf, LABELS_TABLE_NAME); + ResultScanner scanner = ht.getScanner(new Scan()); + Result result = null; + while ((result = scanner.next()) != null) { + Cell label = result.getColumnLatestCell(LABELS_TABLE_FAMILY, LABEL_QUALIFIER); + Cell userAuth = result.getColumnLatestCell(LABELS_TABLE_FAMILY, user.getBytes()); + if (Bytes.equals(PRIVATE.getBytes(), 0, PRIVATE.getBytes().length, label.getValueArray(), + label.getValueOffset(), label.getValueLength())) { + assertNotNull(userAuth); + } else { + assertNull(userAuth); + } + } + } finally { + if (ht != null) { + ht.close(); + } + } + + GetAuthsResponse authsResponse = VisibilityClient.getAuths(conf, user); + List authsList = new ArrayList(); + for (ByteString authBS : authsResponse.getAuthList()) { + authsList.add(Bytes.toString(authBS.toByteArray())); + } + assertEquals(1, authsList.size()); + assertTrue(authsList.contains(PRIVATE)); + } + + @Test + public void testLablesWithCheckAndPut() throws Throwable { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = null; + try { + table = TEST_UTIL.createTable(tableName, fam); + byte[] row1 = Bytes.toBytes("row1"); + Put put = new Put(row1); + put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value); + put.setCellVisibility(new CellVisibility(SECRET + " & " + CONFIDENTIAL)); + table.checkAndPut(row1, fam, qual, null, put); + byte[] row2 = Bytes.toBytes("row2"); + put = new Put(row2); + put.add(fam, qual, 
HConstants.LATEST_TIMESTAMP, value); + put.setCellVisibility(new CellVisibility(SECRET)); + table.checkAndPut(row2, fam, qual, null, put); + + Scan scan = new Scan(); + scan.setAuthorizations(new Authorizations(SECRET)); + ResultScanner scanner = table.getScanner(scan); + Result result = scanner.next(); + assertTrue(!result.isEmpty()); + assertTrue(Bytes.equals(row2, result.getRow())); + result = scanner.next(); + assertNull(result); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testLablesWithIncrement() throws Throwable { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = null; + try { + table = TEST_UTIL.createTable(tableName, fam); + byte[] row1 = Bytes.toBytes("row1"); + byte[] val = Bytes.toBytes(1L); + Put put = new Put(row1); + put.add(fam, qual, HConstants.LATEST_TIMESTAMP, val); + put.setCellVisibility(new CellVisibility(SECRET + " & " + CONFIDENTIAL)); + table.put(put); + Get get = new Get(row1); + get.setAuthorizations(new Authorizations(SECRET)); + Result result = table.get(get); + assertTrue(result.isEmpty()); + table.incrementColumnValue(row1, fam, qual, 2L); + result = table.get(get); + assertTrue(result.isEmpty()); + Increment increment = new Increment(row1); + increment.addColumn(fam, qual, 2L); + increment.setCellVisibility(new CellVisibility(SECRET)); + table.increment(increment); + result = table.get(get); + assertTrue(!result.isEmpty()); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testLablesWithAppend() throws Throwable { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = null; + try { + table = TEST_UTIL.createTable(tableName, fam); + byte[] row1 = Bytes.toBytes("row1"); + byte[] val = Bytes.toBytes("a"); + Put put = new Put(row1); + put.add(fam, qual, HConstants.LATEST_TIMESTAMP, val); + put.setCellVisibility(new CellVisibility(SECRET + " & " + CONFIDENTIAL)); + table.put(put); + Get 
get = new Get(row1); + get.setAuthorizations(new Authorizations(SECRET)); + Result result = table.get(get); + assertTrue(result.isEmpty()); + Append append = new Append(row1); + append.add(fam, qual, Bytes.toBytes("b")); + table.append(append); + result = table.get(get); + assertTrue(result.isEmpty()); + append = new Append(row1); + append.add(fam, qual, Bytes.toBytes("c")); + append.setCellVisibility(new CellVisibility(SECRET)); + table.append(append); + result = table.get(get); + assertTrue(!result.isEmpty()); + } finally { + if (table != null) { + table.close(); + } + } + } + + private static HTable createTableAndWriteDataWithLabels(TableName tableName, String... labelExps) + throws Exception { + HTable table = null; + try { + table = TEST_UTIL.createTable(tableName, fam); + int i = 1; + List puts = new ArrayList(); + for (String labelExp : labelExps) { + Put put = new Put(Bytes.toBytes("row" + i)); + put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value); + put.setCellVisibility(new CellVisibility(labelExp)); + puts.add(put); + i++; + } + table.put(puts); + } finally { + if (table != null) { + table.close(); + } + } + return table; + } + + private static void addLabels() throws Exception { + PrivilegedExceptionAction action = + new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + String[] labels = { SECRET, TOPSECRET, CONFIDENTIAL, PUBLIC, PRIVATE }; + try { + VisibilityClient.addLabels(conf, labels); + } catch (Throwable t) { + throw new IOException(t); + } + return null; + } + }; + SUPERUSER.runAs(action); + } +} Index: hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java =================================================================== --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java (revision 0) +++ 
hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java (working copy) @@ -0,0 +1,175 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; + +import 
com.google.protobuf.ByteString; + +@Category(MediumTests.class) +public class TestVisibilityLabelsOpWithDifferentUsersNoACL { + private static final String PRIVATE = "private"; + private static final String CONFIDENTIAL = "confidential"; + private static final String SECRET = "secret"; + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + private static Configuration conf; + + @Rule + public final TestName TEST_NAME = new TestName(); + private static User SUPERUSER; + private static User NORMAL_USER; + private static User NORMAL_USER1; + + @BeforeClass + public static void setupBeforeClass() throws Exception { + // setup configuration + conf = TEST_UTIL.getConfiguration(); + conf.setInt("hfile.format.version", 3); + String currentUser = User.getCurrent().getName(); + conf.set("hbase.superuser", "admin,"+currentUser); + conf.set("hbase.coprocessor.master.classes", VisibilityController.class.getName()); + conf.set("hbase.coprocessor.region.classes", VisibilityController.class.getName()); + TEST_UTIL.startMiniCluster(2); + + // Wait for the labels table to become available + TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000); + SUPERUSER = User.createUserForTesting(conf, "admin", new String[] { "supergroup" }); + NORMAL_USER = User.createUserForTesting(conf, "user1", new String[] {}); + NORMAL_USER1 = User.createUserForTesting(conf, "user2", new String[] {}); + addLabels(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + TEST_UTIL.shutdownMiniCluster(); + } + + @Test + public void testLabelsTableOpsWithDifferentUsers() throws Throwable { + PrivilegedExceptionAction action = + new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + try { + return VisibilityClient.setAuths(conf, new String[] { CONFIDENTIAL, PRIVATE }, "user1"); + } catch (Throwable e) { + } + return null; + } + }; + VisibilityLabelsResponse response = SUPERUSER.runAs(action); + 
assertTrue(response.getResult(0).getException().getValue().isEmpty()); + assertTrue(response.getResult(1).getException().getValue().isEmpty()); + + // Ideally this should not be allowed. this operation should fail or do nothing. + action = new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + try { + return VisibilityClient.setAuths(conf, new String[] { CONFIDENTIAL, PRIVATE }, "user3"); + } catch (Throwable e) { + } + return null; + } + }; + response = NORMAL_USER1.runAs(action); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response + .getResult(0).getException().getName()); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response + .getResult(1).getException().getName()); + + PrivilegedExceptionAction action1 = + new PrivilegedExceptionAction() { + public GetAuthsResponse run() throws Exception { + try { + return VisibilityClient.getAuths(conf, "user1"); + } catch (Throwable e) { + } + return null; + } + }; + GetAuthsResponse authsResponse = NORMAL_USER.runAs(action1); + assertTrue(authsResponse.getAuthList().isEmpty()); + authsResponse = NORMAL_USER1.runAs(action1); + assertTrue(authsResponse.getAuthList().isEmpty()); + authsResponse = SUPERUSER.runAs(action1); + List authsList = new ArrayList(); + for (ByteString authBS : authsResponse.getAuthList()) { + authsList.add(Bytes.toString(authBS.toByteArray())); + } + assertEquals(2, authsList.size()); + assertTrue(authsList.contains(CONFIDENTIAL)); + assertTrue(authsList.contains(PRIVATE)); + + PrivilegedExceptionAction action2 = + new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + try { + return VisibilityClient.clearAuths(conf, new String[] { CONFIDENTIAL, PRIVATE }, "user1"); + } catch (Throwable e) { + } + return null; + } + }; + response = NORMAL_USER1.runAs(action2); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response + 
.getResult(0).getException().getName()); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response + .getResult(1).getException().getName()); + response = SUPERUSER.runAs(action2); + assertTrue(response.getResult(0).getException().getValue().isEmpty()); + assertTrue(response.getResult(1).getException().getValue().isEmpty()); + authsResponse = SUPERUSER.runAs(action1); + assertTrue(authsResponse.getAuthList().isEmpty()); + } + + private static void addLabels() throws Exception { + PrivilegedExceptionAction action = + new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + String[] labels = { SECRET, CONFIDENTIAL, PRIVATE }; + try { + VisibilityClient.addLabels(conf, labels); + } catch (Throwable t) { + throw new IOException(t); + } + return null; + } + }; + SUPERUSER.runAs(action); + } +} Index: hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java =================================================================== --- hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java (revision 0) +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java (working copy) @@ -0,0 +1,273 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.security.access.AccessControlLists; +import org.apache.hadoop.hbase.security.access.AccessController; +import org.apache.hadoop.hbase.security.access.SecureTestUtil; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import 
org.junit.experimental.categories.Category; +import org.junit.rules.TestName; + +import com.google.protobuf.ByteString; + +@Category(MediumTests.class) +public class TestVisibilityLabelsWithACL { + + private static final String PRIVATE = "private"; + private static final String CONFIDENTIAL = "confidential"; + private static final String SECRET = "secret"; + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + private static final byte[] row1 = Bytes.toBytes("row1"); + private final static byte[] fam = Bytes.toBytes("info"); + private final static byte[] qual = Bytes.toBytes("qual"); + private final static byte[] value = Bytes.toBytes("value"); + private static Configuration conf; + + @Rule + public final TestName TEST_NAME = new TestName(); + private static User SUPERUSER; + private static User NORMAL_USER; + + @BeforeClass + public static void setupBeforeClass() throws Exception { + // setup configuration + conf = TEST_UTIL.getConfiguration(); + conf.setInt("hfile.format.version", 3); + SecureTestUtil.enableSecurity(conf); + conf.set("hbase.coprocessor.master.classes", AccessController.class.getName() + "," + + VisibilityController.class.getName()); + conf.set("hbase.coprocessor.region.classes", AccessController.class.getName() + "," + + VisibilityController.class.getName()); + TEST_UTIL.startMiniCluster(2); + + TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName(), 50000); + // Wait for the labels table to become available + TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000); + SUPERUSER = User.createUserForTesting(conf, "admin", new String[] { "supergroup" }); + NORMAL_USER = User.createUserForTesting(conf, "user1", new String[] {}); + addLabels(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + TEST_UTIL.shutdownMiniCluster(); + } + + @Test + public void testScanForUserWithFewerLabelAuthsThanLabelsInScanAuthorizations() throws Throwable { + String[] auths = { SECRET }; 
+ String user = "admin"; + VisibilityClient.setAuths(conf, auths, user); + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + final HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL + + "&!" + PRIVATE, SECRET + "&!" + PRIVATE); + PrivilegedExceptionAction scanAction = new PrivilegedExceptionAction() { + public Void run() throws Exception { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL)); + HTable t = new HTable(conf, table.getTableName()); + try { + ResultScanner scanner = t.getScanner(s); + Result result = scanner.next(); + assertTrue(!result.isEmpty()); + assertTrue(Bytes.equals(Bytes.toBytes("row2"), result.getRow())); + result = scanner.next(); + assertNull(result); + } finally { + t.close(); + } + return null; + } + }; + SUPERUSER.runAs(scanAction); + } + + @Test + public void testVisibilityLabelsForUserWithNoAuths() throws Throwable { + String user = "admin"; + String[] auths = { SECRET }; + VisibilityClient.clearAuths(conf, auths, user); // Removing all auths if any. 
+ VisibilityClient.setAuths(conf, auths, "user1"); + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + final HTable table = createTableAndWriteDataWithLabels(tableName, SECRET); + PrivilegedExceptionAction getAction = new PrivilegedExceptionAction() { + public Void run() throws Exception { + Get g = new Get(row1); + g.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL)); + HTable t = new HTable(conf, table.getTableName()); + try { + Result result = t.get(g); + assertTrue(result.isEmpty()); + } finally { + t.close(); + } + return null; + } + }; + SUPERUSER.runAs(getAction); + } + + @Test + public void testLabelsTableOpsWithDifferentUsers() throws Throwable { + PrivilegedExceptionAction action = + new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + try { + return VisibilityClient.addLabels(conf, new String[] { "l1", "l2" }); + } catch (Throwable e) { + } + return null; + } + }; + VisibilityLabelsResponse response = NORMAL_USER.runAs(action); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response + .getResult(0).getException().getName()); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response + .getResult(1).getException().getName()); + + action = new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + try { + return VisibilityClient.setAuths(conf, new String[] { CONFIDENTIAL, PRIVATE }, "user1"); + } catch (Throwable e) { + } + return null; + } + }; + response = NORMAL_USER.runAs(action); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response + .getResult(0).getException().getName()); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response + .getResult(1).getException().getName()); + + action = new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + try { + return VisibilityClient.setAuths(conf, new String[] { 
CONFIDENTIAL, PRIVATE }, "user1"); + } catch (Throwable e) { + } + return null; + } + }; + response = SUPERUSER.runAs(action); + assertTrue(response.getResult(0).getException().getValue().isEmpty()); + assertTrue(response.getResult(1).getException().getValue().isEmpty()); + + action = new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + try { + return VisibilityClient.clearAuths(conf, new String[] { CONFIDENTIAL, PRIVATE }, "user1"); + } catch (Throwable e) { + } + return null; + } + }; + response = NORMAL_USER.runAs(action); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response.getResult(0) + .getException().getName()); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response.getResult(1) + .getException().getName()); + + response = VisibilityClient.clearAuths(conf, new String[] { CONFIDENTIAL, PRIVATE }, "user1"); + assertTrue(response.getResult(0).getException().getValue().isEmpty()); + assertTrue(response.getResult(1).getException().getValue().isEmpty()); + + VisibilityClient.setAuths(conf, new String[] { CONFIDENTIAL, PRIVATE }, "user2"); + PrivilegedExceptionAction action1 = + new PrivilegedExceptionAction() { + public GetAuthsResponse run() throws Exception { + try { + return VisibilityClient.getAuths(conf, "user2"); + } catch (Throwable e) { + } + return null; + } + }; + GetAuthsResponse authsResponse = NORMAL_USER.runAs(action1); + assertNull(authsResponse); + authsResponse = SUPERUSER.runAs(action1); + List authsList = new ArrayList(); + for (ByteString authBS : authsResponse.getAuthList()) { + authsList.add(Bytes.toString(authBS.toByteArray())); + } + assertEquals(2, authsList.size()); + assertTrue(authsList.contains(CONFIDENTIAL)); + assertTrue(authsList.contains(PRIVATE)); + } + + private static HTable createTableAndWriteDataWithLabels(TableName tableName, String... 
labelExps) + throws Exception { + HTable table = null; + try { + table = TEST_UTIL.createTable(tableName, fam); + int i = 1; + List puts = new ArrayList(); + for (String labelExp : labelExps) { + Put put = new Put(Bytes.toBytes("row" + i)); + put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value); + put.setCellVisibility(new CellVisibility(labelExp)); + puts.add(put); + i++; + } + table.put(puts); + } finally { + if (table != null) { + table.close(); + } + } + return table; + } + + private static void addLabels() throws IOException { + String[] labels = { SECRET, CONFIDENTIAL, PRIVATE }; + try { + VisibilityClient.addLabels(conf, labels); + } catch (Throwable t) { + throw new IOException(t); + } + } +} Index: hbase-shell/src/main/ruby/hbase.rb =================================================================== --- hbase-shell/src/main/ruby/hbase.rb (revision 1542892) +++ hbase-shell/src/main/ruby/hbase.rb (working copy) @@ -79,3 +79,4 @@ require 'hbase/table' require 'hbase/replication_admin' require 'hbase/security' +require 'hbase/visibility_labels' Index: hbase-shell/src/main/ruby/hbase/hbase.rb =================================================================== --- hbase-shell/src/main/ruby/hbase/hbase.rb (revision 1542892) +++ hbase-shell/src/main/ruby/hbase/hbase.rb (working copy) @@ -22,6 +22,7 @@ require 'hbase/admin' require 'hbase/table' require 'hbase/security' +require 'hbase/visibility_labels' module Hbase class Hbase @@ -55,5 +56,9 @@ def security_admin(formatter) ::Hbase::SecurityAdmin.new(configuration, formatter) end + + def visibility_labels_admin(formatter) + ::Hbase::VisibilityLabelsAdmin.new(configuration, formatter) + end end end Index: hbase-shell/src/main/ruby/hbase/visibility_labels.rb =================================================================== --- hbase-shell/src/main/ruby/hbase/visibility_labels.rb (revision 0) +++ hbase-shell/src/main/ruby/hbase/visibility_labels.rb (working copy) @@ -0,0 +1,134 @@ +# +# Licensed to the Apache 
Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +include Java +java_import org.apache.hadoop.hbase.security.visibility.VisibilityClient +java_import org.apache.hadoop.hbase.security.visibility.VisibilityConstants +java_import org.apache.hadoop.hbase.util.Bytes + +module Hbase + class VisibilityLabelsAdmin + + def initialize(configuration, formatter) + @config = configuration + @formatter = formatter + @admin = org.apache.hadoop.hbase.client.HBaseAdmin.new(configuration) + end + + def add_labels(*args) + lables_table_available? + + # Normalize args + if args.kind_of?(Array) + labels = [ args ].flatten.compact + end + + begin + response = VisibilityClient.addLabels(@config, labels.to_java(:string)) + if response.nil? + raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + end + labelsWithException = "" + list = response.getResultList() + list.each do |result| + if result.hasException() + labelsWithException += Bytes.toString(result.getException().getValue().toByteArray()) + end + end + if labelsWithException.length > 0 + raise(ArgumentError, labelsWithException) + end + end + end + + def set_auths(user, *args) + lables_table_available? 
+ # Normalize args + if args.kind_of?(Array) + auths = [ args ].flatten.compact + end + + begin + response = VisibilityClient.setAuths(@config, auths.to_java(:string), user) + if response.nil? + raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + end + labelsWithException = "" + list = response.getResultList() + list.each do |result| + if result.hasException() + labelsWithException += Bytes.toString(result.getException().getValue().toByteArray()) + end + end + if labelsWithException.length > 0 + raise(ArgumentError, labelsWithException) + end + end + end + + def get_auths(user) + lables_table_available? + begin + response = VisibilityClient.getAuths(@config, user) + if response.nil? + raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + end + if response.getAuthList.empty? + raise(ArgumentError, "No authentication set for the given user " + user) + end + return response.getAuthList + end + end + + def clear_auths(user, *args) + lables_table_available? + # Normalize args + if args.kind_of?(Array) + auths = [ args ].flatten.compact + end + + begin + response = VisibilityClient.clearAuths(@config, auths.to_java(:string), user) + if response.nil? + raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + end + labelsWithException = "" + list = response.getResultList() + list.each do |result| + if result.hasException() + labelsWithException += Bytes.toString(result.getException().getValue().toByteArray()) + end + end + if labelsWithException.length > 0 + raise(ArgumentError, labelsWithException) + end + end + end + + # Make sure that lables table is available + def lables_table_available?() + raise(ArgumentError, "DISABLED: Visibility labels feature is not available") \ + unless exists?(VisibilityConstants::LABELS_TABLE_NAME) + end + + # Does table exist? 
+ def exists?(table_name) + @admin.tableExists(table_name) + end + end +end \ No newline at end of file Index: hbase-shell/src/main/ruby/shell.rb =================================================================== --- hbase-shell/src/main/ruby/shell.rb (revision 1542892) +++ hbase-shell/src/main/ruby/shell.rb (working copy) @@ -90,6 +90,10 @@ @hbase_security_admin ||= hbase.security_admin(formatter) end + def hbase_visibility_labels_admin + @hbase_visibility_labels_admin ||= hbase.visibility_labels_admin(formatter) + end + def export_commands(where) ::Shell.commands.keys.each do |cmd| # here where is the IRB namespace @@ -345,3 +349,14 @@ ] ) +Shell.load_command_group( + 'visibility labels', + :full_name => 'VISIBILITY LABEL TOOLS', + :comment => "NOTE: Above commands are only applicable if running with the VisibilityController coprocessor", + :commands => %w[ + add_labels + set_auths + get_auths + clear_auths + ] +) \ No newline at end of file Index: hbase-shell/src/main/ruby/shell/commands.rb =================================================================== --- hbase-shell/src/main/ruby/shell/commands.rb (revision 1542892) +++ hbase-shell/src/main/ruby/shell/commands.rb (working copy) @@ -62,6 +62,10 @@ @shell.hbase_security_admin end + def visibility_labels_admin + @shell.hbase_visibility_labels_admin + end + #---------------------------------------------------------------------- def formatter Index: hbase-shell/src/main/ruby/shell/commands/add_labels.rb =================================================================== --- hbase-shell/src/main/ruby/shell/commands/add_labels.rb (revision 0) +++ hbase-shell/src/main/ruby/shell/commands/add_labels.rb (working copy) @@ -0,0 +1,40 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +module Shell + module Commands + class AddLabels < Command + def help + return <<-EOF +Add a set of visibility labels. +Syntax : add_labels [label1, label2] + +For example: + + hbase> add_labels ['SECRET','PRIVATE'] +EOF + end + + def command(*args) + format_simple_command do + visibility_labels_admin.add_labels(args) + end + end + end + end +end Index: hbase-shell/src/main/ruby/shell/commands/clear_auths.rb =================================================================== --- hbase-shell/src/main/ruby/shell/commands/clear_auths.rb (revision 0) +++ hbase-shell/src/main/ruby/shell/commands/clear_auths.rb (working copy) @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +module Shell + module Commands + class ClearAuths < Command + def help + return <<-EOF +Clear a set of visibility labels for a user +Syntax : clear_auths 'user1',[label1, label2] + +For example: + + hbase> clear_auths 'user1', ['SECRET','PRIVATE'] +EOF + end + + def command(user, *args) + format_simple_command do + visibility_labels_admin.clear_auths(user, args) + end + end + end + end +end Index: hbase-shell/src/main/ruby/shell/commands/get_auths.rb =================================================================== --- hbase-shell/src/main/ruby/shell/commands/get_auths.rb (revision 0) +++ hbase-shell/src/main/ruby/shell/commands/get_auths.rb (working copy) @@ -0,0 +1,42 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +module Shell + module Commands + class GetAuths < Command + def help + return <<-EOF +Get the visibility labels set for a particular user +Syntax : get_auths 'user1' + +For example: + + hbase> get_auths 'user1' +EOF + end + + def command(user) + format_simple_command do + list = visibility_labels_admin.get_auths(user) + list.each do |auths| + formatter.row([org.apache.hadoop.hbase.util.Bytes::toStringBinary(auths.toByteArray)]) + end + end + end + end + end +end Index: hbase-shell/src/main/ruby/shell/commands/set_auths.rb =================================================================== --- hbase-shell/src/main/ruby/shell/commands/set_auths.rb (revision 0) +++ hbase-shell/src/main/ruby/shell/commands/set_auths.rb (working copy) @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +module Shell + module Commands + class SetAuths < Command + def help + return <<-EOF +Add a set of visibility labels for a user +Syntax : set_auths 'user1',[label1, label2] + +For example: + + hbase> set_auths 'user1', ['SECRET','PRIVATE'] +EOF + end + + def command(user, *args) + format_simple_command do + visibility_labels_admin.set_auths(user, args) + end + end + end + end +end