diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java index 3eee907..d8c6a12 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java @@ -34,6 +34,8 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.io.TimeRange; +import org.apache.hadoop.hbase.security.visibility.Authorizations; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; import org.apache.hadoop.hbase.util.Bytes; /** @@ -246,6 +248,17 @@ public class Get extends OperationWithAttributes } /** + * Sets the authorizations to be used by this Get + * + * @param authorizations + * @return this + */ + public Get setAuthorizations(Authorizations authorizations) { + this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, authorizations.toBytes()); + return this; + } + + /** * Get whether blocks should be cached for this Get. 
* @return true if default caching should be used, false if blocks should not * be cached diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java index 41ec446..87b893b 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java @@ -33,6 +33,8 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.io.HeapSize; +import org.apache.hadoop.hbase.security.visibility.CellVisibility; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; import org.apache.hadoop.hbase.util.Bytes; /** @@ -308,4 +310,13 @@ public class Put extends Mutation implements HeapSize, Comparable { } return filteredList; } + + /** + * Sets the visibility expression associated with cells in this Put. + * + * @param expression + */ + public void setCellVisibility(CellVisibility expression) { + this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, expression.toBytes()); + } } diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java index 0c5565b..b287af2 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java @@ -25,6 +25,8 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.IncompatibleFilterException; import org.apache.hadoop.hbase.io.TimeRange; +import org.apache.hadoop.hbase.security.visibility.Authorizations; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; import org.apache.hadoop.hbase.util.Bytes; import java.io.IOException; @@ -752,4 +754,15 @@ public class Scan extends OperationWithAttributes { public boolean 
isSmall() { return small; } + + /** + * Sets the authorizations to be used by this Scan + * + * @param authorizations + * @return this + */ + public Scan setAuthorizations(Authorizations authorizations) { + this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, authorizations.toBytes()); + return this; + } } diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java new file mode 100644 index 0000000..9bbb645 --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java @@ -0,0 +1,102 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.util.Bytes; + +/** + * This class contains visibility labels associated with a Scan/Get deciding which all labeled data + * current scan can access. 
+ */ +@InterfaceAudience.Public +@InterfaceStability.Stable +public class Authorizations { + + private List<String> labels; + + public Authorizations(String... labels) { + this.labels = new ArrayList<String>(labels.length); + for (String label : labels) { + this.labels.add(label); + } + } + + public Authorizations(List<String> labels) { + this.labels = labels; + } + + public List<String> getLabels() { + return this.labels; + } + + public byte[] toBytes() { + int length = 0; + for (String label : labels) { + length += label.length(); + } + ByteArrayOutputStream baos = new ByteArrayOutputStream(length + + (labels.size() * Bytes.SIZEOF_BYTE)); + for (String label : labels) { + baos.write(label.length()); + byte[] labelBytes = Bytes.toBytes(label); + for (byte b : labelBytes) { + if (!VisibilityLabelsValidator.isValidAuthChar(b)) { + throw new IllegalArgumentException("Invalid character found in visibility labels " + b); + } + } + try { + baos.write(labelBytes); + } catch (IOException e) { + // We use ByteArrayOutputStream. So IOE won't occur here. + } + } + return baos.toByteArray(); + } + + public static Authorizations fromBytes(byte[] b) { + List<String> labels = new ArrayList<String>(); + int index = 0; + while (index < b.length) { + int labelLen = b[index++]; + // TODO handle negative cases. 
+ labels.add(new String(b, index, labelLen)); + index += labelLen; + } + return new Authorizations(labels); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("["); + for (int i = 0; i < this.labels.size(); i++) { + if (i != 0) { + sb.append(", "); + } + sb.append(this.labels.get(i)); + } + sb.append("]"); + return sb.toString(); + } +} \ No newline at end of file diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java new file mode 100644 index 0000000..dca1ead --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java @@ -0,0 +1,41 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.util.Bytes; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class CellVisibility { + + private String expression; + + public CellVisibility(String expression) { + this.expression = expression; + } + + public byte[] toBytes() { + return Bytes.toBytes(this.expression); + } + + public String getExpression() { + return this.expression; + } +} diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java new file mode 100644 index 0000000..126c3c8 --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.DoNotRetryIOException; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class InvalidLabelException extends DoNotRetryIOException { + private static final long serialVersionUID = 1L; + + public InvalidLabelException(String msg) { + super(msg); + } +} + diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java new file mode 100644 index 0000000..fd07f75 --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.DoNotRetryIOException; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class LabelAlreadyExistsException extends DoNotRetryIOException { + private static final long serialVersionUID = 1L; + + public LabelAlreadyExistsException(String msg) { + super(msg); + } + +} diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java new file mode 100644 index 0000000..24eac4d --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java @@ -0,0 +1,205 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; + +import java.io.IOException; +import java.util.Map; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService; +import org.apache.hadoop.hbase.util.Bytes; + +import com.google.protobuf.ByteString; +import com.google.protobuf.ServiceException; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class VisibilityClient { + + /** + * Utility method for adding labels to the system. + * + * @param conf + * @param label + * @return VisibilityLabelsResponse + * @throws Throwable + */ + public static VisibilityLabelsResponse addLabel(Configuration conf, final String label) + throws Throwable { + VisibilityLabelsResponse addLabels = addLabels(conf, new String[] { label }); + return addLabels; + } + + /** + * Utility method for adding labels to the system. 
+ * + * @param conf + * @param labels + * @return VisibilityLabelsResponse + * @throws Throwable + */ + public static VisibilityLabelsResponse addLabels(Configuration conf, final String[] labels) + throws Throwable { + HTable ht = null; + try { + ht = new HTable(conf, LABELS_TABLE_NAME.getName()); + Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse> callable = + new Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse>() { + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback<VisibilityLabelsResponse> rpcCallback = + new BlockingRpcCallback<VisibilityLabelsResponse>(); + + public VisibilityLabelsResponse call(VisibilityLabelsService service) throws IOException { + VisibilityLabelsRequest.Builder builder = VisibilityLabelsRequest.newBuilder(); + for (String label : labels) { + if (label.length() > 0) { + VisibilityLabel.Builder newBuilder = VisibilityLabel.newBuilder(); + newBuilder.setLabel(ByteString.copyFrom(Bytes.toBytes(label))); + builder.addVisLabel(newBuilder.build()); + } + } + service.addLabels(controller, builder.build(), rpcCallback); + return rpcCallback.get(); + } + }; + Map<byte[], VisibilityLabelsResponse> result = ht.coprocessorService( + VisibilityLabelsService.class, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, + callable); + return result.values().iterator().next(); // There will be exactly one region for labels + // table and so one entry in result Map. + } finally { + if (ht != null) { + ht.close(); + } + } + } + + /** + * Sets given labels globally authorized for the user. + * @param conf + * @param auths + * @param user + * @return + * @throws Throwable + */ + public static VisibilityLabelsResponse setAuths(Configuration conf, final String[] auths, + final String user) throws Throwable { + return setOrClearAuths(conf, auths, user, true); + } + + /** + * @param conf + * @param user + * @return labels, the given user is globally authorized for. 
+ * @throws Throwable + */ + public static GetAuthsResponse getAuths(Configuration conf, final String user) throws Throwable { + HTable ht = null; + try { + ht = new HTable(conf, LABELS_TABLE_NAME.getName()); + Batch.Call<VisibilityLabelsService, GetAuthsResponse> callable = + new Batch.Call<VisibilityLabelsService, GetAuthsResponse>() { + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback<GetAuthsResponse> rpcCallback = + new BlockingRpcCallback<GetAuthsResponse>(); + + public GetAuthsResponse call(VisibilityLabelsService service) throws IOException { + GetAuthsRequest.Builder getAuthReqBuilder = GetAuthsRequest.newBuilder(); + getAuthReqBuilder.setUser(ByteString.copyFrom(Bytes.toBytes(user))); + service.getAuths(controller, getAuthReqBuilder.build(), rpcCallback); + return rpcCallback.get(); + } + }; + Map<byte[], GetAuthsResponse> result = ht.coprocessorService(VisibilityLabelsService.class, + HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, callable); + return result.values().iterator().next(); // There will be exactly one region for labels + // table and so one entry in result Map. + } finally { + if (ht != null) { + ht.close(); + } + } + } + + /** + * Removes given labels from user's globally authorized list of labels. 
+ * @param conf + * @param auths + * @param user + * @return + * @throws Throwable + */ + public static VisibilityLabelsResponse clearAuths(Configuration conf, final String[] auths, + final String user) throws Throwable { + return setOrClearAuths(conf, auths, user, false); + } + + private static VisibilityLabelsResponse setOrClearAuths(Configuration conf, final String[] auths, + final String user, final boolean setOrClear) throws IOException, ServiceException, Throwable { + HTable ht = null; + try { + ht = new HTable(conf, LABELS_TABLE_NAME.getName()); + Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse> callable = + new Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse>() { + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback<VisibilityLabelsResponse> rpcCallback = + new BlockingRpcCallback<VisibilityLabelsResponse>(); + + public VisibilityLabelsResponse call(VisibilityLabelsService service) throws IOException { + SetAuthsRequest.Builder setAuthReqBuilder = SetAuthsRequest.newBuilder(); + setAuthReqBuilder.setUser(ByteString.copyFrom(Bytes.toBytes(user))); + for (String auth : auths) { + if (auth.length() > 0) { + setAuthReqBuilder.addAuth(ByteString.copyFrom(Bytes.toBytes(auth))); + } + } + if (setOrClear) { + service.setAuths(controller, setAuthReqBuilder.build(), rpcCallback); + } else { + service.clearAuths(controller, setAuthReqBuilder.build(), rpcCallback); + } + return rpcCallback.get(); + } + }; + Map<byte[], VisibilityLabelsResponse> result = ht.coprocessorService( + VisibilityLabelsService.class, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, + callable); + return result.values().iterator().next(); // There will be exactly one region for labels + // table and so one entry in result Map. 
+ } finally { + if (ht != null) { + ht.close(); + } + } + } +} diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java new file mode 100644 index 0000000..301161c --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.util.Bytes; + +@InterfaceAudience.Private +public final class VisibilityConstants { + + /** + * The string that is used as key in setting the Operation attributes for visibility labels + */ + public static final String VISIBILITY_LABELS_ATTR_KEY = "VISIBILITY"; + + /** Internal storage table for visibility labels */ + public static final TableName LABELS_TABLE_NAME = TableName.valueOf( + NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "labels"); + + /** Family for the internal storage table for visibility labels */ + public static final byte[] LABELS_TABLE_FAMILY = Bytes.toBytes("f"); + + /** Qualifier for the internal storage table for visibility labels */ + public static final byte[] LABEL_QUALIFIER = new byte[1]; + +} diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsValidator.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsValidator.java new file mode 100644 index 0000000..bcc5df3 --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsValidator.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; + +/** + * A simple validator that validates the labels passed + */ +@InterfaceAudience.Private +public class VisibilityLabelsValidator { + private static final boolean[] validAuthChars = new boolean[256]; + + static { + for (int i = 0; i < 256; i++) { + validAuthChars[i] = false; + } + + for (int i = 'a'; i <= 'z'; i++) { + validAuthChars[i] = true; + } + + for (int i = 'A'; i <= 'Z'; i++) { + validAuthChars[i] = true; + } + + for (int i = '0'; i <= '9'; i++) { + validAuthChars[i] = true; + } + + validAuthChars['_'] = true; + validAuthChars['-'] = true; + validAuthChars[':'] = true; + validAuthChars['.'] = true; + validAuthChars['/'] = true; + } + + static final boolean isValidAuthChar(byte b) { + return validAuthChars[0xff & b]; + } + + static final boolean isValidLabel(byte[] label) { + for (int i = 0; i < label.length; i++) { + if (!isValidAuthChar(label[i])) { + return false; + } + } + return true; + } +} diff --git hbase-client/src/test/java/org/apache/hadoop/hbase/security/visibility/TestAuthorizations.java hbase-client/src/test/java/org/apache/hadoop/hbase/security/visibility/TestAuthorizations.java new file mode 100644 index 0000000..f107fec --- /dev/null +++ hbase-client/src/test/java/org/apache/hadoop/hbase/security/visibility/TestAuthorizations.java @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.util.List; + +import org.apache.hadoop.hbase.SmallTests; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(SmallTests.class) +public class TestAuthorizations { + + @Test + public void testVisibilityLabels() throws Exception { + Authorizations authorizations = new Authorizations("secret", "private", "public", "confidential"); + byte[] authBytes = authorizations.toBytes(); + Authorizations authorizations2 = Authorizations.fromBytes(authBytes); + List labels = authorizations2.getLabels(); + assertEquals(4, labels.size()); + assertTrue(labels.contains("secret")); + assertTrue(labels.contains("private")); + assertTrue(labels.contains("public")); + assertTrue(labels.contains("confidential")); + } + +} diff --git hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/VisibilityLabelsProtos.java hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/VisibilityLabelsProtos.java new file mode 100644 index 0000000..38f83a1 --- /dev/null +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/VisibilityLabelsProtos.java @@ -0,0 +1,5543 
@@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: VisibilityLabels.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class VisibilityLabelsProtos { + private VisibilityLabelsProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface VisibilityLabelsRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .VisibilityLabel visLabel = 1; + /** + * repeated .VisibilityLabel visLabel = 1; + */ + java.util.List + getVisLabelList(); + /** + * repeated .VisibilityLabel visLabel = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel getVisLabel(int index); + /** + * repeated .VisibilityLabel visLabel = 1; + */ + int getVisLabelCount(); + /** + * repeated .VisibilityLabel visLabel = 1; + */ + java.util.List + getVisLabelOrBuilderList(); + /** + * repeated .VisibilityLabel visLabel = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder getVisLabelOrBuilder( + int index); + } + /** + * Protobuf type {@code VisibilityLabelsRequest} + */ + public static final class VisibilityLabelsRequest extends + com.google.protobuf.GeneratedMessage + implements VisibilityLabelsRequestOrBuilder { + // Use VisibilityLabelsRequest.newBuilder() to construct. 
+ private VisibilityLabelsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private VisibilityLabelsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final VisibilityLabelsRequest defaultInstance; + public static VisibilityLabelsRequest getDefaultInstance() { + return defaultInstance; + } + + public VisibilityLabelsRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private VisibilityLabelsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + visLabel_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + visLabel_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { + visLabel_ = java.util.Collections.unmodifiableList(visLabel_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public VisibilityLabelsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new VisibilityLabelsRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .VisibilityLabel visLabel = 1; + public static final int VISLABEL_FIELD_NUMBER = 1; + private java.util.List visLabel_; + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List getVisLabelList() { + return visLabel_; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List + getVisLabelOrBuilderList() { + return visLabel_; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public int getVisLabelCount() { + return visLabel_.size(); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel getVisLabel(int index) { + return visLabel_.get(index); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder getVisLabelOrBuilder( + int index) { + return visLabel_.get(index); + } + + private void initFields() { + visLabel_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getVisLabelCount(); i++) { + if (!getVisLabel(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < visLabel_.size(); i++) { + output.writeMessage(1, visLabel_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < visLabel_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, visLabel_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest) obj; + + boolean result = true; + result = result && getVisLabelList() + .equals(other.getVisLabelList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getVisLabelCount() > 0) { + hash = (37 * hash) + VISLABEL_FIELD_NUMBER; + hash = (53 * hash) + getVisLabelList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public 
static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + 
protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code VisibilityLabelsRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getVisLabelFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (visLabelBuilder_ == null) { + visLabel_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + visLabelBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + 
public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest(this); + int from_bitField0_ = bitField0_; + if (visLabelBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + visLabel_ = java.util.Collections.unmodifiableList(visLabel_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.visLabel_ = visLabel_; + } else { + result.visLabel_ = visLabelBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.getDefaultInstance()) return this; + if (visLabelBuilder_ == null) { + if (!other.visLabel_.isEmpty()) { + if (visLabel_.isEmpty()) { + visLabel_ = other.visLabel_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureVisLabelIsMutable(); + visLabel_.addAll(other.visLabel_); + } + onChanged(); + } + } else { + if (!other.visLabel_.isEmpty()) { + if (visLabelBuilder_.isEmpty()) { + visLabelBuilder_.dispose(); + visLabelBuilder_ = null; + visLabel_ = other.visLabel_; + bitField0_ = (bitField0_ & ~0x00000001); + visLabelBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getVisLabelFieldBuilder() : null; + } else { + visLabelBuilder_.addAllMessages(other.visLabel_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getVisLabelCount(); i++) { + if (!getVisLabel(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .VisibilityLabel visLabel = 1; + private java.util.List visLabel_ = + java.util.Collections.emptyList(); + private void ensureVisLabelIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + visLabel_ = new 
java.util.ArrayList(visLabel_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder> visLabelBuilder_; + + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List getVisLabelList() { + if (visLabelBuilder_ == null) { + return java.util.Collections.unmodifiableList(visLabel_); + } else { + return visLabelBuilder_.getMessageList(); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public int getVisLabelCount() { + if (visLabelBuilder_ == null) { + return visLabel_.size(); + } else { + return visLabelBuilder_.getCount(); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel getVisLabel(int index) { + if (visLabelBuilder_ == null) { + return visLabel_.get(index); + } else { + return visLabelBuilder_.getMessage(index); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder setVisLabel( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel value) { + if (visLabelBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureVisLabelIsMutable(); + visLabel_.set(index, value); + onChanged(); + } else { + visLabelBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder setVisLabel( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder builderForValue) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + visLabel_.set(index, builderForValue.build()); + onChanged(); + } else { + visLabelBuilder_.setMessage(index, 
builderForValue.build()); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addVisLabel(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel value) { + if (visLabelBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureVisLabelIsMutable(); + visLabel_.add(value); + onChanged(); + } else { + visLabelBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addVisLabel( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel value) { + if (visLabelBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureVisLabelIsMutable(); + visLabel_.add(index, value); + onChanged(); + } else { + visLabelBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addVisLabel( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder builderForValue) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + visLabel_.add(builderForValue.build()); + onChanged(); + } else { + visLabelBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addVisLabel( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder builderForValue) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + visLabel_.add(index, builderForValue.build()); + onChanged(); + } else { + visLabelBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addAllVisLabel( + java.lang.Iterable values) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + super.addAll(values, visLabel_); + onChanged(); + } else { + 
visLabelBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder clearVisLabel() { + if (visLabelBuilder_ == null) { + visLabel_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + visLabelBuilder_.clear(); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder removeVisLabel(int index) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + visLabel_.remove(index); + onChanged(); + } else { + visLabelBuilder_.remove(index); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder getVisLabelBuilder( + int index) { + return getVisLabelFieldBuilder().getBuilder(index); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder getVisLabelOrBuilder( + int index) { + if (visLabelBuilder_ == null) { + return visLabel_.get(index); } else { + return visLabelBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List + getVisLabelOrBuilderList() { + if (visLabelBuilder_ != null) { + return visLabelBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(visLabel_); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder addVisLabelBuilder() { + return getVisLabelFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.getDefaultInstance()); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder addVisLabelBuilder( + int 
index) { + return getVisLabelFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.getDefaultInstance()); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List + getVisLabelBuilderList() { + return getVisLabelFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder> + getVisLabelFieldBuilder() { + if (visLabelBuilder_ == null) { + visLabelBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder>( + visLabel_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + visLabel_ = null; + } + return visLabelBuilder_; + } + + // @@protoc_insertion_point(builder_scope:VisibilityLabelsRequest) + } + + static { + defaultInstance = new VisibilityLabelsRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:VisibilityLabelsRequest) + } + + public interface VisibilityLabelOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes label = 1; + /** + * required bytes label = 1; + */ + boolean hasLabel(); + /** + * required bytes label = 1; + */ + com.google.protobuf.ByteString getLabel(); + + // optional uint32 ordinal = 2; + /** + * optional uint32 ordinal = 2; + */ + boolean hasOrdinal(); + /** + * optional uint32 ordinal = 2; + */ + int getOrdinal(); + } + /** + * Protobuf type {@code VisibilityLabel} + */ + public static final class 
VisibilityLabel extends + com.google.protobuf.GeneratedMessage + implements VisibilityLabelOrBuilder { + // Use VisibilityLabel.newBuilder() to construct. + private VisibilityLabel(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private VisibilityLabel(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final VisibilityLabel defaultInstance; + public static VisibilityLabel getDefaultInstance() { + return defaultInstance; + } + + public VisibilityLabel getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private VisibilityLabel( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + label_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + ordinal_ = input.readUInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } 
+ public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public VisibilityLabel parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new VisibilityLabel(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes label = 1; + public static final int LABEL_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString label_; + /** + * required bytes label = 1; + */ + public boolean hasLabel() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes label = 1; + */ + public com.google.protobuf.ByteString getLabel() { + return label_; + } + + // optional uint32 ordinal = 2; + public static final int ORDINAL_FIELD_NUMBER = 2; + private int ordinal_; + /** + * optional uint32 ordinal = 2; + */ + public boolean hasOrdinal() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional uint32 ordinal = 2; + */ + public int getOrdinal() { + return ordinal_; + } + + private void initFields() { + label_ = com.google.protobuf.ByteString.EMPTY; + ordinal_ = 0; + } + private 
byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLabel()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, label_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, ordinal_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, label_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, ordinal_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel) obj; + + boolean result = true; + result = result && (hasLabel() == other.hasLabel()); + if (hasLabel()) { + result = result && getLabel() + .equals(other.getLabel()); + } + result = result && 
(hasOrdinal() == other.hasOrdinal()); + if (hasOrdinal()) { + result = result && (getOrdinal() + == other.getOrdinal()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLabel()) { + hash = (37 * hash) + LABEL_FIELD_NUMBER; + hash = (53 * hash) + getLabel().hashCode(); + } + if (hasOrdinal()) { + hash = (37 * hash) + ORDINAL_FIELD_NUMBER; + hash = (53 * hash) + getOrdinal(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom(java.io.InputStream 
input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf 
type {@code VisibilityLabel} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + label_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + ordinal_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_descriptor; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.label_ = label_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.ordinal_ = ordinal_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.getDefaultInstance()) return this; + if (other.hasLabel()) { + setLabel(other.getLabel()); + } + if (other.hasOrdinal()) { + setOrdinal(other.getOrdinal()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return 
this; + } + + public final boolean isInitialized() { + if (!hasLabel()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes label = 1; + private com.google.protobuf.ByteString label_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes label = 1; + */ + public boolean hasLabel() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes label = 1; + */ + public com.google.protobuf.ByteString getLabel() { + return label_; + } + /** + * required bytes label = 1; + */ + public Builder setLabel(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + label_ = value; + onChanged(); + return this; + } + /** + * required bytes label = 1; + */ + public Builder clearLabel() { + bitField0_ = (bitField0_ & ~0x00000001); + label_ = getDefaultInstance().getLabel(); + onChanged(); + return this; + } + + // optional uint32 ordinal = 2; + private int ordinal_ ; + /** + * optional uint32 ordinal = 2; + */ + public boolean hasOrdinal() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional uint32 ordinal = 2; + */ + public int getOrdinal() { + return ordinal_; + } + /** + * optional uint32 ordinal = 2; + */ + public Builder setOrdinal(int value) { + bitField0_ |= 
0x00000002; + ordinal_ = value; + onChanged(); + return this; + } + /** + * optional uint32 ordinal = 2; + */ + public Builder clearOrdinal() { + bitField0_ = (bitField0_ & ~0x00000002); + ordinal_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:VisibilityLabel) + } + + static { + defaultInstance = new VisibilityLabel(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:VisibilityLabel) + } + + public interface VisibilityLabelsResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .RegionActionResult result = 1; + /** + * repeated .RegionActionResult result = 1; + */ + java.util.List + getResultList(); + /** + * repeated .RegionActionResult result = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getResult(int index); + /** + * repeated .RegionActionResult result = 1; + */ + int getResultCount(); + /** + * repeated .RegionActionResult result = 1; + */ + java.util.List + getResultOrBuilderList(); + /** + * repeated .RegionActionResult result = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getResultOrBuilder( + int index); + } + /** + * Protobuf type {@code VisibilityLabelsResponse} + */ + public static final class VisibilityLabelsResponse extends + com.google.protobuf.GeneratedMessage + implements VisibilityLabelsResponseOrBuilder { + // Use VisibilityLabelsResponse.newBuilder() to construct. 
+ private VisibilityLabelsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private VisibilityLabelsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final VisibilityLabelsResponse defaultInstance; + public static VisibilityLabelsResponse getDefaultInstance() { + return defaultInstance; + } + + public VisibilityLabelsResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private VisibilityLabelsResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + result_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public VisibilityLabelsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new VisibilityLabelsResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .RegionActionResult result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private java.util.List result_; + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List getResultList() { + return result_; + } + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List + getResultOrBuilderList() { + return result_; + } + /** + * repeated .RegionActionResult result = 1; + */ + public int getResultCount() { + return result_.size(); + } + /** + * repeated .RegionActionResult result = 1; + */ + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getResult(int index) { + return result_.get(index); + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getResultOrBuilder( + int index) { + return result_.get(index); + } + + private void initFields() { + result_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < result_.size(); i++) { + output.writeMessage(1, result_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < result_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) obj; + + boolean result = true; + result = result && getResultList() + .equals(other.getResultList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getResultCount() > 0) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResultList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + 
protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code VisibilityLabelsResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + resultBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + 
public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse(this); + int from_bitField0_ = bitField0_; + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance()) return this; + if (resultBuilder_ == null) { + if (!other.result_.isEmpty()) { + if (result_.isEmpty()) { + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResultIsMutable(); + result_.addAll(other.result_); + } + onChanged(); + } + } else { + if (!other.result_.isEmpty()) { + if (resultBuilder_.isEmpty()) { + resultBuilder_.dispose(); + resultBuilder_ = null; + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + resultBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getResultFieldBuilder() : null; + } else { + resultBuilder_.addAllMessages(other.result_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .RegionActionResult result = 1; + private java.util.List result_ = + java.util.Collections.emptyList(); + private void ensureResultIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(result_); + bitField0_ |= 
0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> resultBuilder_; + + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List getResultList() { + if (resultBuilder_ == null) { + return java.util.Collections.unmodifiableList(result_); + } else { + return resultBuilder_.getMessageList(); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public int getResultCount() { + if (resultBuilder_ == null) { + return result_.size(); + } else { + return resultBuilder_.getCount(); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getResult(int index) { + if (resultBuilder_ == null) { + return result_.get(index); + } else { + return resultBuilder_.getMessage(index); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.set(index, value); + onChanged(); + } else { + resultBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.set(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder 
addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(value); + onChanged(); + } else { + resultBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(index, value); + onChanged(); + } else { + resultBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder addResult( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder addAllResult( + java.lang.Iterable values) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + super.addAll(values, result_); + onChanged(); + } else { + resultBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = 
java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + resultBuilder_.clear(); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder removeResult(int index) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.remove(index); + onChanged(); + } else { + resultBuilder_.remove(index); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder getResultBuilder( + int index) { + return getResultFieldBuilder().getBuilder(index); + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getResultOrBuilder( + int index) { + if (resultBuilder_ == null) { + return result_.get(index); } else { + return resultBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List + getResultOrBuilderList() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(result_); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addResultBuilder() { + return getResultFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()); + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addResultBuilder( + int index) { + return getResultFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()); + } + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List + getResultBuilderList() { 
+ return getResultFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>( + result_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // @@protoc_insertion_point(builder_scope:VisibilityLabelsResponse) + } + + static { + defaultInstance = new VisibilityLabelsResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:VisibilityLabelsResponse) + } + + public interface SetAuthsRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes user = 1; + /** + * required bytes user = 1; + */ + boolean hasUser(); + /** + * required bytes user = 1; + */ + com.google.protobuf.ByteString getUser(); + + // repeated bytes auth = 2; + /** + * repeated bytes auth = 2; + */ + java.util.List getAuthList(); + /** + * repeated bytes auth = 2; + */ + int getAuthCount(); + /** + * repeated bytes auth = 2; + */ + com.google.protobuf.ByteString getAuth(int index); + } + /** + * Protobuf type {@code SetAuthsRequest} + */ + public static final class SetAuthsRequest extends + com.google.protobuf.GeneratedMessage + implements SetAuthsRequestOrBuilder { + // Use SetAuthsRequest.newBuilder() to construct. 
+ private SetAuthsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private SetAuthsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final SetAuthsRequest defaultInstance; + public static SetAuthsRequest getDefaultInstance() { + return defaultInstance; + } + + public SetAuthsRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SetAuthsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + user_ = input.readBytes(); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + auth_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = java.util.Collections.unmodifiableList(auth_); + } + 
this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_SetAuthsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_SetAuthsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SetAuthsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SetAuthsRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes user = 1; + public static final int USER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString user_; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + + // repeated bytes auth = 2; + public static final int AUTH_FIELD_NUMBER = 2; + private java.util.List auth_; + /** + * repeated bytes auth = 2; + */ + public java.util.List + getAuthList() { + return auth_; + } + /** + * repeated bytes auth = 2; + */ + public int getAuthCount() { + return auth_.size(); + } + /** + * repeated bytes auth = 2; + */ + public 
com.google.protobuf.ByteString getAuth(int index) { + return auth_.get(index); + } + + private void initFields() { + user_ = com.google.protobuf.ByteString.EMPTY; + auth_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasUser()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, user_); + } + for (int i = 0; i < auth_.size(); i++) { + output.writeBytes(2, auth_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, user_); + } + { + int dataSize = 0; + for (int i = 0; i < auth_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(auth_.get(i)); + } + size += dataSize; + size += 1 * getAuthList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest) obj; + + boolean result = true; + result = result && (hasUser() == other.hasUser()); + if (hasUser()) { + result = result && getUser() + .equals(other.getUser()); + } + result = result && getAuthList() + .equals(other.getAuthList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasUser()) { + hash = (37 * hash) + USER_FIELD_NUMBER; + hash = (53 * hash) + getUser().hashCode(); + } + if (getAuthCount() > 0) { + hash = (37 * hash) + AUTH_FIELD_NUMBER; + hash = (53 * hash) + getAuthList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest prototype) { + return 
newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code SetAuthsRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_SetAuthsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_SetAuthsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + user_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + auth_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + 
return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_SetAuthsRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.user_ = user_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = java.util.Collections.unmodifiableList(auth_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.auth_ = auth_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest other) { + 
if (other == org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.getDefaultInstance()) return this; + if (other.hasUser()) { + setUser(other.getUser()); + } + if (!other.auth_.isEmpty()) { + if (auth_.isEmpty()) { + auth_ = other.auth_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureAuthIsMutable(); + auth_.addAll(other.auth_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasUser()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes user = 1; + private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + /** + * required bytes user = 1; + */ + public Builder setUser(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + user_ = value; + onChanged(); + return this; + } + /** + * required bytes user = 1; + */ + public Builder clearUser() { + bitField0_ = (bitField0_ & ~0x00000001); + user_ = 
getDefaultInstance().getUser(); + onChanged(); + return this; + } + + // repeated bytes auth = 2; + private java.util.List auth_ = java.util.Collections.emptyList(); + private void ensureAuthIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = new java.util.ArrayList(auth_); + bitField0_ |= 0x00000002; + } + } + /** + * repeated bytes auth = 2; + */ + public java.util.List + getAuthList() { + return java.util.Collections.unmodifiableList(auth_); + } + /** + * repeated bytes auth = 2; + */ + public int getAuthCount() { + return auth_.size(); + } + /** + * repeated bytes auth = 2; + */ + public com.google.protobuf.ByteString getAuth(int index) { + return auth_.get(index); + } + /** + * repeated bytes auth = 2; + */ + public Builder setAuth( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureAuthIsMutable(); + auth_.set(index, value); + onChanged(); + return this; + } + /** + * repeated bytes auth = 2; + */ + public Builder addAuth(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureAuthIsMutable(); + auth_.add(value); + onChanged(); + return this; + } + /** + * repeated bytes auth = 2; + */ + public Builder addAllAuth( + java.lang.Iterable values) { + ensureAuthIsMutable(); + super.addAll(values, auth_); + onChanged(); + return this; + } + /** + * repeated bytes auth = 2; + */ + public Builder clearAuth() { + auth_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SetAuthsRequest) + } + + static { + defaultInstance = new SetAuthsRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SetAuthsRequest) + } + + public interface UserAuthorizationsOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes user = 1; + /** + * required bytes user = 1; + 
*/ + boolean hasUser(); + /** + * required bytes user = 1; + */ + com.google.protobuf.ByteString getUser(); + + // repeated uint32 auth = 2; + /** + * repeated uint32 auth = 2; + */ + java.util.List getAuthList(); + /** + * repeated uint32 auth = 2; + */ + int getAuthCount(); + /** + * repeated uint32 auth = 2; + */ + int getAuth(int index); + } + /** + * Protobuf type {@code UserAuthorizations} + */ + public static final class UserAuthorizations extends + com.google.protobuf.GeneratedMessage + implements UserAuthorizationsOrBuilder { + // Use UserAuthorizations.newBuilder() to construct. + private UserAuthorizations(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private UserAuthorizations(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final UserAuthorizations defaultInstance; + public static UserAuthorizations getDefaultInstance() { + return defaultInstance; + } + + public UserAuthorizations getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private UserAuthorizations( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + user_ = 
input.readBytes(); + break; + } + case 16: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + auth_.add(input.readUInt32()); + break; + } + case 18: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002) && input.getBytesUntilLimit() > 0) { + auth_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + while (input.getBytesUntilLimit() > 0) { + auth_.add(input.readUInt32()); + } + input.popLimit(limit); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = java.util.Collections.unmodifiableList(auth_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_UserAuthorizations_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_UserAuthorizations_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public UserAuthorizations parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new UserAuthorizations(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes user = 1; + public static final int USER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString user_; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + + // repeated uint32 auth = 2; + public static final int AUTH_FIELD_NUMBER = 2; + private java.util.List auth_; + /** + * repeated uint32 auth = 2; + */ + public java.util.List + getAuthList() { + return auth_; + } + /** + * repeated uint32 auth = 2; + */ + public int getAuthCount() { + return auth_.size(); + } + /** + * repeated uint32 auth = 2; + */ + public int getAuth(int index) { + return auth_.get(index); + } + + private void initFields() { + user_ = com.google.protobuf.ByteString.EMPTY; + auth_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasUser()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, user_); + } + for (int i = 0; i < auth_.size(); i++) { + output.writeUInt32(2, auth_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if 
(size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, user_); + } + { + int dataSize = 0; + for (int i = 0; i < auth_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeUInt32SizeNoTag(auth_.get(i)); + } + size += dataSize; + size += 1 * getAuthList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations) obj; + + boolean result = true; + result = result && (hasUser() == other.hasUser()); + if (hasUser()) { + result = result && getUser() + .equals(other.getUser()); + } + result = result && getAuthList() + .equals(other.getAuthList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasUser()) { + hash = (37 * hash) + USER_FIELD_NUMBER; + hash = (53 * hash) + getUser().hashCode(); + } + if (getAuthCount() > 0) { + hash = (37 * hash) + AUTH_FIELD_NUMBER; + hash = (53 * hash) + getAuthList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode 
= hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseDelimitedFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code UserAuthorizations} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_UserAuthorizations_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_UserAuthorizations_fieldAccessorTable + 
.ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + user_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + auth_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_UserAuthorizations_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations buildPartial() { + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.user_ = user_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = java.util.Collections.unmodifiableList(auth_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.auth_ = auth_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.getDefaultInstance()) return this; + if (other.hasUser()) { + setUser(other.getUser()); + } + if (!other.auth_.isEmpty()) { + if (auth_.isEmpty()) { + auth_ = other.auth_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureAuthIsMutable(); + auth_.addAll(other.auth_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasUser()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations parsedMessage = null; + try { + parsedMessage = 
PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes user = 1; + private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + /** + * required bytes user = 1; + */ + public Builder setUser(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + user_ = value; + onChanged(); + return this; + } + /** + * required bytes user = 1; + */ + public Builder clearUser() { + bitField0_ = (bitField0_ & ~0x00000001); + user_ = getDefaultInstance().getUser(); + onChanged(); + return this; + } + + // repeated uint32 auth = 2; + private java.util.List auth_ = java.util.Collections.emptyList(); + private void ensureAuthIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = new java.util.ArrayList(auth_); + bitField0_ |= 0x00000002; + } + } + /** + * repeated uint32 auth = 2; + */ + public java.util.List + getAuthList() { + return java.util.Collections.unmodifiableList(auth_); + } + /** + * repeated uint32 auth = 2; + */ + public int getAuthCount() { + return auth_.size(); + } + /** + * repeated uint32 auth = 2; + */ + public int getAuth(int index) { + return auth_.get(index); + } + /** + * repeated uint32 auth = 2; + */ + public Builder setAuth( + int index, int value) { + ensureAuthIsMutable(); + auth_.set(index, value); + onChanged(); + return this; + } + /** + * repeated 
uint32 auth = 2; + */ + public Builder addAuth(int value) { + ensureAuthIsMutable(); + auth_.add(value); + onChanged(); + return this; + } + /** + * repeated uint32 auth = 2; + */ + public Builder addAllAuth( + java.lang.Iterable values) { + ensureAuthIsMutable(); + super.addAll(values, auth_); + onChanged(); + return this; + } + /** + * repeated uint32 auth = 2; + */ + public Builder clearAuth() { + auth_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:UserAuthorizations) + } + + static { + defaultInstance = new UserAuthorizations(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UserAuthorizations) + } + + public interface MultiUserAuthorizationsOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .UserAuthorizations userAuths = 1; + /** + * repeated .UserAuthorizations userAuths = 1; + */ + java.util.List + getUserAuthsList(); + /** + * repeated .UserAuthorizations userAuths = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations getUserAuths(int index); + /** + * repeated .UserAuthorizations userAuths = 1; + */ + int getUserAuthsCount(); + /** + * repeated .UserAuthorizations userAuths = 1; + */ + java.util.List + getUserAuthsOrBuilderList(); + /** + * repeated .UserAuthorizations userAuths = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder getUserAuthsOrBuilder( + int index); + } + /** + * Protobuf type {@code MultiUserAuthorizations} + */ + public static final class MultiUserAuthorizations extends + com.google.protobuf.GeneratedMessage + implements MultiUserAuthorizationsOrBuilder { + // Use MultiUserAuthorizations.newBuilder() to construct. 
+ private MultiUserAuthorizations(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private MultiUserAuthorizations(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final MultiUserAuthorizations defaultInstance; + public static MultiUserAuthorizations getDefaultInstance() { + return defaultInstance; + } + + public MultiUserAuthorizations getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiUserAuthorizations( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + userAuths_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + userAuths_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { + userAuths_ = java.util.Collections.unmodifiableList(userAuths_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_MultiUserAuthorizations_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_MultiUserAuthorizations_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiUserAuthorizations parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiUserAuthorizations(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .UserAuthorizations userAuths = 1; + public static final int USERAUTHS_FIELD_NUMBER = 1; + private java.util.List userAuths_; + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public java.util.List getUserAuthsList() { + return userAuths_; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public java.util.List + getUserAuthsOrBuilderList() { + return userAuths_; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public int getUserAuthsCount() { + return userAuths_.size(); + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations getUserAuths(int index) { + return userAuths_.get(index); + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder getUserAuthsOrBuilder( + int index) { + return userAuths_.get(index); + } + + private void initFields() { + userAuths_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getUserAuthsCount(); i++) { + if (!getUserAuths(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < userAuths_.size(); i++) { + output.writeMessage(1, userAuths_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < userAuths_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, userAuths_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations) obj; + + boolean result = true; + result = result && getUserAuthsList() + .equals(other.getUserAuthsList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getUserAuthsCount() > 0) { + hash = (37 * hash) + USERAUTHS_FIELD_NUMBER; + hash = (53 * hash) + getUserAuthsList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + 
public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + 
protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code MultiUserAuthorizations} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizationsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_MultiUserAuthorizations_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_MultiUserAuthorizations_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getUserAuthsFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (userAuthsBuilder_ == null) { + userAuths_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + userAuthsBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + 
+ public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_MultiUserAuthorizations_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations(this); + int from_bitField0_ = bitField0_; + if (userAuthsBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + userAuths_ = java.util.Collections.unmodifiableList(userAuths_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.userAuths_ = userAuths_; + } else { + result.userAuths_ = userAuthsBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations.getDefaultInstance()) return this; + if (userAuthsBuilder_ == null) { + if (!other.userAuths_.isEmpty()) { + if (userAuths_.isEmpty()) { + userAuths_ = other.userAuths_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureUserAuthsIsMutable(); + userAuths_.addAll(other.userAuths_); + } + onChanged(); + } + } else { + if (!other.userAuths_.isEmpty()) { + if (userAuthsBuilder_.isEmpty()) { + userAuthsBuilder_.dispose(); + userAuthsBuilder_ = null; + userAuths_ = other.userAuths_; + bitField0_ = (bitField0_ & ~0x00000001); + userAuthsBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getUserAuthsFieldBuilder() : null; + } else { + userAuthsBuilder_.addAllMessages(other.userAuths_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getUserAuthsCount(); i++) { + if (!getUserAuths(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .UserAuthorizations userAuths = 1; + private java.util.List userAuths_ = + java.util.Collections.emptyList(); + private void ensureUserAuthsIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) 
{ + userAuths_ = new java.util.ArrayList(userAuths_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder> userAuthsBuilder_; + + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public java.util.List getUserAuthsList() { + if (userAuthsBuilder_ == null) { + return java.util.Collections.unmodifiableList(userAuths_); + } else { + return userAuthsBuilder_.getMessageList(); + } + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public int getUserAuthsCount() { + if (userAuthsBuilder_ == null) { + return userAuths_.size(); + } else { + return userAuthsBuilder_.getCount(); + } + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations getUserAuths(int index) { + if (userAuthsBuilder_ == null) { + return userAuths_.get(index); + } else { + return userAuthsBuilder_.getMessage(index); + } + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder setUserAuths( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations value) { + if (userAuthsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureUserAuthsIsMutable(); + userAuths_.set(index, value); + onChanged(); + } else { + userAuthsBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder setUserAuths( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder builderForValue) { + if (userAuthsBuilder_ == null) { + ensureUserAuthsIsMutable(); + userAuths_.set(index, 
builderForValue.build()); + onChanged(); + } else { + userAuthsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder addUserAuths(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations value) { + if (userAuthsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureUserAuthsIsMutable(); + userAuths_.add(value); + onChanged(); + } else { + userAuthsBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder addUserAuths( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations value) { + if (userAuthsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureUserAuthsIsMutable(); + userAuths_.add(index, value); + onChanged(); + } else { + userAuthsBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder addUserAuths( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder builderForValue) { + if (userAuthsBuilder_ == null) { + ensureUserAuthsIsMutable(); + userAuths_.add(builderForValue.build()); + onChanged(); + } else { + userAuthsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder addUserAuths( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder builderForValue) { + if (userAuthsBuilder_ == null) { + ensureUserAuthsIsMutable(); + userAuths_.add(index, builderForValue.build()); + onChanged(); + } else { + userAuthsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder addAllUserAuths( + java.lang.Iterable values) { 
+ if (userAuthsBuilder_ == null) { + ensureUserAuthsIsMutable(); + super.addAll(values, userAuths_); + onChanged(); + } else { + userAuthsBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder clearUserAuths() { + if (userAuthsBuilder_ == null) { + userAuths_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + userAuthsBuilder_.clear(); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public Builder removeUserAuths(int index) { + if (userAuthsBuilder_ == null) { + ensureUserAuthsIsMutable(); + userAuths_.remove(index); + onChanged(); + } else { + userAuthsBuilder_.remove(index); + } + return this; + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder getUserAuthsBuilder( + int index) { + return getUserAuthsFieldBuilder().getBuilder(index); + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder getUserAuthsOrBuilder( + int index) { + if (userAuthsBuilder_ == null) { + return userAuths_.get(index); } else { + return userAuthsBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public java.util.List + getUserAuthsOrBuilderList() { + if (userAuthsBuilder_ != null) { + return userAuthsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(userAuths_); + } + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder addUserAuthsBuilder() { + return getUserAuthsFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.getDefaultInstance()); 
+ } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder addUserAuthsBuilder( + int index) { + return getUserAuthsFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.getDefaultInstance()); + } + /** + * repeated .UserAuthorizations userAuths = 1; + */ + public java.util.List + getUserAuthsBuilderList() { + return getUserAuthsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder> + getUserAuthsFieldBuilder() { + if (userAuthsBuilder_ == null) { + userAuthsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizationsOrBuilder>( + userAuths_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + userAuths_ = null; + } + return userAuthsBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MultiUserAuthorizations) + } + + static { + defaultInstance = new MultiUserAuthorizations(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiUserAuthorizations) + } + + public interface GetAuthsRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes user = 1; + /** + * required bytes user = 1; + */ + boolean hasUser(); + /** + * required bytes user = 1; + */ + com.google.protobuf.ByteString getUser(); + } + /** + * Protobuf type {@code 
GetAuthsRequest} + */ + public static final class GetAuthsRequest extends + com.google.protobuf.GeneratedMessage + implements GetAuthsRequestOrBuilder { + // Use GetAuthsRequest.newBuilder() to construct. + private GetAuthsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private GetAuthsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final GetAuthsRequest defaultInstance; + public static GetAuthsRequest getDefaultInstance() { + return defaultInstance; + } + + public GetAuthsRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetAuthsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + user_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final 
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetAuthsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetAuthsRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes user = 1; + public static final int USER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString user_; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + + private void initFields() { + user_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasUser()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws 
java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, user_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, user_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest) obj; + + boolean result = true; + result = result && (hasUser() == other.hasUser()); + if (hasUser()) { + result = result && getUser() + .equals(other.getUser()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasUser()) { + hash = (37 * hash) + USER_FIELD_NUMBER; + hash = (53 * hash) + getUser().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest 
parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code GetAuthsRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.class, 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + user_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 
0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.user_ = user_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.getDefaultInstance()) return this; + if (other.hasUser()) { + setUser(other.getUser()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasUser()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes user = 1; + private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString 
getUser() { + return user_; + } + /** + * required bytes user = 1; + */ + public Builder setUser(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + user_ = value; + onChanged(); + return this; + } + /** + * required bytes user = 1; + */ + public Builder clearUser() { + bitField0_ = (bitField0_ & ~0x00000001); + user_ = getDefaultInstance().getUser(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetAuthsRequest) + } + + static { + defaultInstance = new GetAuthsRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetAuthsRequest) + } + + public interface GetAuthsResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes user = 1; + /** + * required bytes user = 1; + */ + boolean hasUser(); + /** + * required bytes user = 1; + */ + com.google.protobuf.ByteString getUser(); + + // repeated bytes auth = 2; + /** + * repeated bytes auth = 2; + */ + java.util.List getAuthList(); + /** + * repeated bytes auth = 2; + */ + int getAuthCount(); + /** + * repeated bytes auth = 2; + */ + com.google.protobuf.ByteString getAuth(int index); + } + /** + * Protobuf type {@code GetAuthsResponse} + */ + public static final class GetAuthsResponse extends + com.google.protobuf.GeneratedMessage + implements GetAuthsResponseOrBuilder { + // Use GetAuthsResponse.newBuilder() to construct. 
+ private GetAuthsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private GetAuthsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final GetAuthsResponse defaultInstance; + public static GetAuthsResponse getDefaultInstance() { + return defaultInstance; + } + + public GetAuthsResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetAuthsResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + user_ = input.readBytes(); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + auth_.add(input.readBytes()); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = java.util.Collections.unmodifiableList(auth_); + } + 
this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public GetAuthsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetAuthsResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes user = 1; + public static final int USER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString user_; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + + // repeated bytes auth = 2; + public static final int AUTH_FIELD_NUMBER = 2; + private java.util.List auth_; + /** + * repeated bytes auth = 2; + */ + public java.util.List + getAuthList() { + return auth_; + } + /** + * repeated bytes auth = 2; + */ + public int getAuthCount() { + return auth_.size(); + } + /** + * repeated bytes auth = 2; + */ + public 
com.google.protobuf.ByteString getAuth(int index) { + return auth_.get(index); + } + + private void initFields() { + user_ = com.google.protobuf.ByteString.EMPTY; + auth_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasUser()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, user_); + } + for (int i = 0; i < auth_.size(); i++) { + output.writeBytes(2, auth_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, user_); + } + { + int dataSize = 0; + for (int i = 0; i < auth_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(auth_.get(i)); + } + size += dataSize; + size += 1 * getAuthList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse) obj; + + boolean result = true; + result = result && (hasUser() == other.hasUser()); + if (hasUser()) { + result = result && getUser() + .equals(other.getUser()); + } + result = result && getAuthList() + .equals(other.getAuthList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasUser()) { + hash = (37 * hash) + USER_FIELD_NUMBER; + hash = (53 * hash) + getUser().hashCode(); + } + if (getAuthCount() > 0) { + hash = (37 * hash) + AUTH_FIELD_NUMBER; + hash = (53 * hash) + getAuthList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse prototype) { + 
return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code GetAuthsResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + user_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + auth_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder 
clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_GetAuthsResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.user_ = user_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = java.util.Collections.unmodifiableList(auth_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.auth_ = auth_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance()) return this; + if (other.hasUser()) { + setUser(other.getUser()); + } + if (!other.auth_.isEmpty()) { + if (auth_.isEmpty()) { + auth_ = other.auth_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureAuthIsMutable(); + auth_.addAll(other.auth_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasUser()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes user = 1; + private com.google.protobuf.ByteString user_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes user = 1; + */ + public boolean hasUser() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes user = 1; + */ + public com.google.protobuf.ByteString getUser() { + return user_; + } + /** + * required bytes user = 1; + */ + public Builder setUser(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + user_ = value; + onChanged(); + return this; + } + /** + * required bytes user 
= 1; + */ + public Builder clearUser() { + bitField0_ = (bitField0_ & ~0x00000001); + user_ = getDefaultInstance().getUser(); + onChanged(); + return this; + } + + // repeated bytes auth = 2; + private java.util.List auth_ = java.util.Collections.emptyList(); + private void ensureAuthIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + auth_ = new java.util.ArrayList(auth_); + bitField0_ |= 0x00000002; + } + } + /** + * repeated bytes auth = 2; + */ + public java.util.List + getAuthList() { + return java.util.Collections.unmodifiableList(auth_); + } + /** + * repeated bytes auth = 2; + */ + public int getAuthCount() { + return auth_.size(); + } + /** + * repeated bytes auth = 2; + */ + public com.google.protobuf.ByteString getAuth(int index) { + return auth_.get(index); + } + /** + * repeated bytes auth = 2; + */ + public Builder setAuth( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureAuthIsMutable(); + auth_.set(index, value); + onChanged(); + return this; + } + /** + * repeated bytes auth = 2; + */ + public Builder addAuth(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureAuthIsMutable(); + auth_.add(value); + onChanged(); + return this; + } + /** + * repeated bytes auth = 2; + */ + public Builder addAllAuth( + java.lang.Iterable values) { + ensureAuthIsMutable(); + super.addAll(values, auth_); + onChanged(); + return this; + } + /** + * repeated bytes auth = 2; + */ + public Builder clearAuth() { + auth_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetAuthsResponse) + } + + static { + defaultInstance = new GetAuthsResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetAuthsResponse) + } + + /** + * Protobuf service {@code VisibilityLabelsService} + */ + 
public static abstract class VisibilityLabelsService + implements com.google.protobuf.Service { + protected VisibilityLabelsService() {} + + public interface Interface { + /** + * rpc addLabels(.VisibilityLabelsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc setAuths(.SetAuthsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void setAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc clearAuths(.SetAuthsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void clearAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc getAuths(.GetAuthsRequest) returns (.GetAuthsResponse); + */ + public abstract void getAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new VisibilityLabelsService() { + @java.lang.Override + public void addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request, + com.google.protobuf.RpcCallback done) { + impl.addLabels(controller, request, done); + } + + @java.lang.Override + public void setAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, 
+ com.google.protobuf.RpcCallback done) { + impl.setAuths(controller, request, done); + } + + @java.lang.Override + public void clearAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done) { + impl.clearAuths(controller, request, done); + } + + @java.lang.Override + public void getAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest request, + com.google.protobuf.RpcCallback done) { + impl.getAuths(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.addLabels(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest)request); + case 1: + return impl.setAuths(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest)request); + case 2: + return impl.clearAuths(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest)request); + case 3: + return impl.getAuths(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest)request); + 
default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + /** + * rpc 
addLabels(.VisibilityLabelsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc setAuths(.SetAuthsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void setAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc clearAuths(.SetAuthsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void clearAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc getAuths(.GetAuthsRequest) returns (.GetAuthsResponse); + */ + public abstract void getAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { 
+ case 0: + this.addLabels(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.setAuths(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.clearAuths(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: + this.getAuths(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw 
new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance())); + } + + public void setAuths( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance())); + } + + public void clearAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance())); + } + + public void getAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.class, + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse setAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse clearAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse getAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse addLabels( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse setAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse clearAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse getAuths( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest request) + throws com.google.protobuf.ServiceException { + 
return (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse.getDefaultInstance()); + } + + } + + // @@protoc_insertion_point(class_scope:VisibilityLabelsService) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_VisibilityLabelsRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_VisibilityLabelsRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_VisibilityLabel_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_VisibilityLabel_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_VisibilityLabelsResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_VisibilityLabelsResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SetAuthsRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SetAuthsRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UserAuthorizations_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UserAuthorizations_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiUserAuthorizations_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiUserAuthorizations_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetAuthsRequest_descriptor; + private static + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetAuthsRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetAuthsResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetAuthsResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\026VisibilityLabels.proto\032\014Client.proto\"=" + + "\n\027VisibilityLabelsRequest\022\"\n\010visLabel\030\001 " + + "\003(\0132\020.VisibilityLabel\"1\n\017VisibilityLabel" + + "\022\r\n\005label\030\001 \002(\014\022\017\n\007ordinal\030\002 \001(\r\"?\n\030Visi" + + "bilityLabelsResponse\022#\n\006result\030\001 \003(\0132\023.R" + + "egionActionResult\"-\n\017SetAuthsRequest\022\014\n\004" + + "user\030\001 \002(\014\022\014\n\004auth\030\002 \003(\014\"0\n\022UserAuthoriz" + + "ations\022\014\n\004user\030\001 \002(\014\022\014\n\004auth\030\002 \003(\r\"A\n\027Mu" + + "ltiUserAuthorizations\022&\n\tuserAuths\030\001 \003(\013" + + "2\023.UserAuthorizations\"\037\n\017GetAuthsRequest", + "\022\014\n\004user\030\001 \002(\014\".\n\020GetAuthsResponse\022\014\n\004us" + + "er\030\001 \002(\014\022\014\n\004auth\030\002 \003(\0142\200\002\n\027VisibilityLab" + + "elsService\022@\n\taddLabels\022\030.VisibilityLabe" + + "lsRequest\032\031.VisibilityLabelsResponse\0227\n\010" + + "setAuths\022\020.SetAuthsRequest\032\031.VisibilityL" + + "abelsResponse\0229\n\nclearAuths\022\020.SetAuthsRe" + + "quest\032\031.VisibilityLabelsResponse\022/\n\010getA" + + "uths\022\020.GetAuthsRequest\032\021.GetAuthsRespons" + + "eBL\n*org.apache.hadoop.hbase.protobuf.ge" + + "neratedB\026VisibilityLabelsProtosH\001\210\001\001\240\001\001" + }; + 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_VisibilityLabelsRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_VisibilityLabelsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_VisibilityLabelsRequest_descriptor, + new java.lang.String[] { "VisLabel", }); + internal_static_VisibilityLabel_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_VisibilityLabel_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_VisibilityLabel_descriptor, + new java.lang.String[] { "Label", "Ordinal", }); + internal_static_VisibilityLabelsResponse_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_VisibilityLabelsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_VisibilityLabelsResponse_descriptor, + new java.lang.String[] { "Result", }); + internal_static_SetAuthsRequest_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_SetAuthsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SetAuthsRequest_descriptor, + new java.lang.String[] { "User", "Auth", }); + internal_static_UserAuthorizations_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_UserAuthorizations_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UserAuthorizations_descriptor, + new java.lang.String[] { "User", "Auth", }); + internal_static_MultiUserAuthorizations_descriptor = + getDescriptor().getMessageTypes().get(5); + 
internal_static_MultiUserAuthorizations_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiUserAuthorizations_descriptor, + new java.lang.String[] { "UserAuths", }); + internal_static_GetAuthsRequest_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_GetAuthsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetAuthsRequest_descriptor, + new java.lang.String[] { "User", }); + internal_static_GetAuthsResponse_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_GetAuthsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetAuthsResponse_descriptor, + new java.lang.String[] { "User", "Auth", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-protocol/src/main/protobuf/VisibilityLabels.proto hbase-protocol/src/main/protobuf/VisibilityLabels.proto new file mode 100644 index 0000000..f62dfa7 --- /dev/null +++ hbase-protocol/src/main/protobuf/VisibilityLabels.proto @@ -0,0 +1,72 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "VisibilityLabelsProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "Client.proto"; + +message VisibilityLabelsRequest { + repeated VisibilityLabel visLabel = 1; +} + +message VisibilityLabel { + required bytes label = 1; + optional uint32 ordinal = 2; +} + +message VisibilityLabelsResponse { + repeated RegionActionResult result = 1; +} + +message SetAuthsRequest { + required bytes user = 1; + repeated bytes auth = 2; +} + +message UserAuthorizations { + required bytes user = 1; + repeated uint32 auth = 2; +} + +message MultiUserAuthorizations { + repeated UserAuthorizations userAuths = 1; +} + +message GetAuthsRequest { + required bytes user = 1; +} + +message GetAuthsResponse { + required bytes user = 1; + repeated bytes auth = 2; +} + +service VisibilityLabelsService { + rpc addLabels(VisibilityLabelsRequest) + returns (VisibilityLabelsResponse); + rpc setAuths(SetAuthsRequest) + returns (VisibilityLabelsResponse); + rpc clearAuths(SetAuthsRequest) + returns (VisibilityLabelsResponse); + rpc getAuths(GetAuthsRequest) + returns (GetAuthsResponse); +} \ No newline at end of file diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java index 53d6e78..4093c05 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java +++ 
hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java @@ -21,7 +21,9 @@ package org.apache.hadoop.hbase.rest; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; +import java.util.ArrayList; import java.util.Collection; +import java.util.List; import java.util.TreeSet; import org.apache.hadoop.classification.InterfaceAudience; @@ -43,6 +45,7 @@ public class RowSpec { private byte[] endRow = null; private TreeSet columns = new TreeSet(Bytes.BYTES_COMPARATOR); + private List labels = new ArrayList(); private long startTime = DEFAULT_START_TIMESTAMP; private long endTime = DEFAULT_END_TIMESTAMP; private int maxVersions = 1; @@ -277,6 +280,13 @@ public class RowSpec { } public RowSpec(byte[] startRow, byte[] endRow, Collection columns, + long startTime, long endTime, int maxVersions, Collection labels) { + this(startRow, endRow, columns, startTime, endTime, maxVersions); + if(labels != null) { + this.labels.addAll(labels); + } + } + public RowSpec(byte[] startRow, byte[] endRow, Collection columns, long startTime, long endTime, int maxVersions) { this.row = startRow; this.endRow = endRow; @@ -311,6 +321,10 @@ public class RowSpec { public boolean hasColumns() { return !columns.isEmpty(); } + + public boolean hasLabels() { + return !labels.isEmpty(); + } public byte[] getRow() { return row; @@ -335,6 +349,10 @@ public class RowSpec { public byte[][] getColumns() { return columns.toArray(new byte[columns.size()][]); } + + public List getLabels() { + return labels; + } public boolean hasTimestamp() { return (startTime == 0) && (endTime != Long.MAX_VALUE); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java index ae91281..81be6fc 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java @@ -82,9 +82,14 @@ public class 
ScannerResource extends ResourceBase { .build(); } byte[] endRow = model.hasEndRow() ? model.getEndRow() : null; - RowSpec spec = new RowSpec(model.getStartRow(), endRow, - model.getColumns(), model.getStartTime(), model.getEndTime(), - model.getMaxVersions()); + RowSpec spec = null; + if (model.getLabels() != null) { + spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(), + model.getEndTime(), model.getMaxVersions(), model.getLabels()); + } else { + spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(), + model.getEndTime(), model.getMaxVersions()); + } try { Filter filter = ScannerResultGenerator.buildFilterFromModel(model); String tableName = tableResource.getName(); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java index 09fb7d4..aeefb0e 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.rest.model.ScannerModel; +import org.apache.hadoop.hbase.security.visibility.Authorizations; import org.apache.hadoop.util.StringUtils; @InterfaceAudience.Private @@ -95,6 +96,9 @@ public class ScannerResultGenerator extends ResultGenerator { if (caching > 0 ) { scan.setCaching(caching); } + if(rowspec.hasLabels()) { + scan.setAuthorizations(new Authorizations(rowspec.getLabels())); + } scanner = table.getScanner(scan); cached = null; id = Long.toString(System.currentTimeMillis()) + diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java index 
001a64c..17d7b90 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java @@ -35,15 +35,46 @@ import javax.xml.bind.annotation.XmlRootElement; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.filter.*; +import org.apache.hadoop.hbase.filter.BinaryComparator; +import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; +import org.apache.hadoop.hbase.filter.BitComparator; +import org.apache.hadoop.hbase.filter.ByteArrayComparable; +import org.apache.hadoop.hbase.filter.ColumnCountGetFilter; +import org.apache.hadoop.hbase.filter.ColumnPaginationFilter; +import org.apache.hadoop.hbase.filter.ColumnPrefixFilter; +import org.apache.hadoop.hbase.filter.ColumnRangeFilter; +import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; +import org.apache.hadoop.hbase.filter.DependentColumnFilter; +import org.apache.hadoop.hbase.filter.FamilyFilter; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; +import org.apache.hadoop.hbase.filter.InclusiveStopFilter; +import org.apache.hadoop.hbase.filter.KeyOnlyFilter; +import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter; +import org.apache.hadoop.hbase.filter.NullComparator; +import org.apache.hadoop.hbase.filter.PageFilter; +import org.apache.hadoop.hbase.filter.PrefixFilter; +import org.apache.hadoop.hbase.filter.QualifierFilter; +import org.apache.hadoop.hbase.filter.RandomRowFilter; +import org.apache.hadoop.hbase.filter.RegexStringComparator; +import org.apache.hadoop.hbase.filter.RowFilter; +import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter; +import 
org.apache.hadoop.hbase.filter.SingleColumnValueFilter; +import org.apache.hadoop.hbase.filter.SkipFilter; +import org.apache.hadoop.hbase.filter.SubstringComparator; +import org.apache.hadoop.hbase.filter.TimestampsFilter; +import org.apache.hadoop.hbase.filter.ValueFilter; +import org.apache.hadoop.hbase.filter.WhileMatchFilter; import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner; +import org.apache.hadoop.hbase.security.visibility.Authorizations; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import com.google.protobuf.ByteString; - import com.sun.jersey.api.json.JSONConfiguration; import com.sun.jersey.api.json.JSONJAXBContext; import com.sun.jersey.api.json.JSONMarshaller; @@ -83,6 +114,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { private String filter = null; private int maxVersions = Integer.MAX_VALUE; private int caching = -1; + private List labels = new ArrayList(); @XmlRootElement static class FilterModel { @@ -488,6 +520,15 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { if (filter != null) { model.setFilter(stringifyFilter(filter)); } + // Add the visbility labels if found in the attributes + if(scan.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY) != null) { + byte[] b = scan.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY); + Authorizations authorizations = Authorizations.fromBytes(b); + List labels = authorizations.getLabels(); + for(String label : labels) { + model.addLabel(label); + } + } return model; } @@ -556,6 +597,13 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { } /** + * Add a visibility label to the scan + */ + public void addLabel(String label) { + labels.add(label); + } + + /** * @return true if a start row was specified 
*/ public boolean hasStartRow() { @@ -593,6 +641,11 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { return columns; } + @XmlElement(name="label") + public List getLabels() { + return labels; + } + /** * @return the number of cells to return in batch */ @@ -730,6 +783,10 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { if (filter != null) { builder.setFilter(filter); } + if (labels != null && labels.size() > 0) { + for (String label : labels) + builder.addLabels(label); + } return builder.build().toByteArray(); } @@ -765,6 +822,12 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { if (builder.hasFilter()) { filter = builder.getFilter(); } + if(builder.getLabelsList() != null) { + List labels = builder.getLabelsList(); + for(String label : labels) { + addLabel(label); + } + } return this; } diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java index 493463a..b76e4e0 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java @@ -109,6 +109,26 @@ public final class ScannerMessage { * optional int32 caching = 9; */ int getCaching(); + + // repeated string labels = 10; + /** + * repeated string labels = 10; + */ + java.util.List + getLabelsList(); + /** + * repeated string labels = 10; + */ + int getLabelsCount(); + /** + * repeated string labels = 10; + */ + java.lang.String getLabels(int index); + /** + * repeated string labels = 10; + */ + com.google.protobuf.ByteString + getLabelsBytes(int index); } /** * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Scanner} @@ -209,6 +229,14 @@ public final class ScannerMessage { caching_ = input.readInt32(); break; } + case 82: { + 
if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) { + labels_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000200; + } + labels_.add(input.readBytes()); + break; + } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { @@ -220,6 +248,9 @@ public final class ScannerMessage { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { columns_ = java.util.Collections.unmodifiableList(columns_); } + if (((mutable_bitField0_ & 0x00000200) == 0x00000200)) { + labels_ = new com.google.protobuf.UnmodifiableLazyStringList(labels_); + } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } @@ -430,6 +461,36 @@ public final class ScannerMessage { return caching_; } + // repeated string labels = 10; + public static final int LABELS_FIELD_NUMBER = 10; + private com.google.protobuf.LazyStringList labels_; + /** + * repeated string labels = 10; + */ + public java.util.List + getLabelsList() { + return labels_; + } + /** + * repeated string labels = 10; + */ + public int getLabelsCount() { + return labels_.size(); + } + /** + * repeated string labels = 10; + */ + public java.lang.String getLabels(int index) { + return labels_.get(index); + } + /** + * repeated string labels = 10; + */ + public com.google.protobuf.ByteString + getLabelsBytes(int index) { + return labels_.getByteString(index); + } + private void initFields() { startRow_ = com.google.protobuf.ByteString.EMPTY; endRow_ = com.google.protobuf.ByteString.EMPTY; @@ -440,6 +501,7 @@ public final class ScannerMessage { maxVersions_ = 0; filter_ = ""; caching_ = 0; + labels_ = com.google.protobuf.LazyStringArrayList.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -480,6 +542,9 @@ public final class ScannerMessage { if (((bitField0_ & 0x00000080) == 0x00000080)) { output.writeInt32(9, caching_); } + for (int i = 0; i < labels_.size(); i++) { + output.writeBytes(10, labels_.getByteString(i)); + } 
getUnknownFields().writeTo(output); } @@ -530,6 +595,15 @@ public final class ScannerMessage { size += com.google.protobuf.CodedOutputStream .computeInt32Size(9, caching_); } + { + int dataSize = 0; + for (int i = 0; i < labels_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(labels_.getByteString(i)); + } + size += dataSize; + size += 1 * getLabelsList().size(); + } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -664,6 +738,8 @@ public final class ScannerMessage { bitField0_ = (bitField0_ & ~0x00000080); caching_ = 0; bitField0_ = (bitField0_ & ~0x00000100); + labels_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000200); return this; } @@ -729,6 +805,12 @@ public final class ScannerMessage { to_bitField0_ |= 0x00000080; } result.caching_ = caching_; + if (((bitField0_ & 0x00000200) == 0x00000200)) { + labels_ = new com.google.protobuf.UnmodifiableLazyStringList( + labels_); + bitField0_ = (bitField0_ & ~0x00000200); + } + result.labels_ = labels_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -781,6 +863,16 @@ public final class ScannerMessage { if (other.hasCaching()) { setCaching(other.getCaching()); } + if (!other.labels_.isEmpty()) { + if (labels_.isEmpty()) { + labels_ = other.labels_; + bitField0_ = (bitField0_ & ~0x00000200); + } else { + ensureLabelsIsMutable(); + labels_.addAll(other.labels_); + } + onChanged(); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -1113,7 +1205,7 @@ public final class ScannerMessage { getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { - com.google.protobuf.ByteString b = + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); filter_ = b; @@ -1191,6 +1283,99 @@ public final class ScannerMessage { return this; } + // repeated string labels = 10; + private 
com.google.protobuf.LazyStringList labels_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureLabelsIsMutable() { + if (!((bitField0_ & 0x00000200) == 0x00000200)) { + labels_ = new com.google.protobuf.LazyStringArrayList(labels_); + bitField0_ |= 0x00000200; + } + } + /** + * repeated string labels = 10; + */ + public java.util.List + getLabelsList() { + return java.util.Collections.unmodifiableList(labels_); + } + /** + * repeated string labels = 10; + */ + public int getLabelsCount() { + return labels_.size(); + } + /** + * repeated string labels = 10; + */ + public java.lang.String getLabels(int index) { + return labels_.get(index); + } + /** + * repeated string labels = 10; + */ + public com.google.protobuf.ByteString + getLabelsBytes(int index) { + return labels_.getByteString(index); + } + /** + * repeated string labels = 10; + */ + public Builder setLabels( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureLabelsIsMutable(); + labels_.set(index, value); + onChanged(); + return this; + } + /** + * repeated string labels = 10; + */ + public Builder addLabels( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureLabelsIsMutable(); + labels_.add(value); + onChanged(); + return this; + } + /** + * repeated string labels = 10; + */ + public Builder addAllLabels( + java.lang.Iterable values) { + ensureLabelsIsMutable(); + super.addAll(values, labels_); + onChanged(); + return this; + } + /** + * repeated string labels = 10; + */ + public Builder clearLabels() { + labels_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000200); + onChanged(); + return this; + } + /** + * repeated string labels = 10; + */ + public Builder addLabelsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureLabelsIsMutable(); + labels_.add(value); + onChanged(); + 
return this; + } + // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Scanner) } @@ -1217,11 +1402,12 @@ public final class ScannerMessage { static { java.lang.String[] descriptorData = { "\n\024ScannerMessage.proto\022/org.apache.hadoo" + - "p.hbase.rest.protobuf.generated\"\245\001\n\007Scan" + + "p.hbase.rest.protobuf.generated\"\265\001\n\007Scan" + "ner\022\020\n\010startRow\030\001 \001(\014\022\016\n\006endRow\030\002 \001(\014\022\017\n" + "\007columns\030\003 \003(\014\022\r\n\005batch\030\004 \001(\005\022\021\n\tstartTi" + "me\030\005 \001(\003\022\017\n\007endTime\030\006 \001(\003\022\023\n\013maxVersions" + - "\030\007 \001(\005\022\016\n\006filter\030\010 \001(\t\022\017\n\007caching\030\t \001(\005" + "\030\007 \001(\005\022\016\n\006filter\030\010 \001(\t\022\017\n\007caching\030\t \001(\005\022" + + "\016\n\006labels\030\n \003(\t" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -1233,7 +1419,7 @@ public final class ScannerMessage { internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor, - new java.lang.String[] { "StartRow", "EndRow", "Columns", "Batch", "StartTime", "EndTime", "MaxVersions", "Filter", "Caching", }); + new java.lang.String[] { "StartRow", "EndRow", "Columns", "Batch", "StartTime", "EndTime", "MaxVersions", "Filter", "Caching", "Labels", }); return null; } }; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultScanLabelGenerator.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultScanLabelGenerator.java new file mode 100644 index 0000000..4b1c029 --- /dev/null +++ 
hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultScanLabelGenerator.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.security.visibility;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.OperationWithAttributes;
+import org.apache.hadoop.hbase.security.User;
+
+/**
+ * This is the default implementation for ScanLabelGenerator. It extracts the labels passed via
+ * the Scan/Get visibility attribute and cross checks them against the global auths set for the
+ * user. Labels for which the user is not authorized are dropped even if passed in the attribute.
+ */
+@InterfaceAudience.Private
+public class DefaultScanLabelGenerator implements ScanLabelGenerator {
+
+  private static final Log LOG = LogFactory.getLog(DefaultScanLabelGenerator.class);
+
+  private Configuration conf;
+
+  private VisibilityLabelsManager labelsManager;
+
+  public DefaultScanLabelGenerator() {
+    this.labelsManager = VisibilityLabelsManager.get();
+  }
+
+  @Override
+  public void setConf(Configuration conf) {
+    this.conf = conf;
+  }
+
+  @Override
+  public Configuration getConf() {
+    return this.conf;
+  }
+
+  @Override
+  public List<String> getLabels(User user, OperationWithAttributes op) {
+    byte[] authorizations = op.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY);
+    if (authorizations != null && authorizations.length > 0) {
+      List<String> labels = Authorizations.fromBytes(authorizations).getLabels();
+      String userName = user.getName();
+      List<String> auths = this.labelsManager.getAuths(userName);
+      dropLabelsNotInUserAuths(labels, auths, userName);
+      return labels;
+    }
+    return null;
+  }
+
+  /**
+   * Removes from <code>labels</code> every label the given user has no authorization for.
+   */
+  private void dropLabelsNotInUserAuths(List<String> labels, List<String> auths, String userName) {
+    List<String> dropped = new ArrayList<String>();
+    Iterator<String> itr = labels.iterator();
+    while (itr.hasNext()) {
+      String label = itr.next();
+      if (!auths.contains(label)) {
+        itr.remove();
+        dropped.add(label);
+      }
+    }
+    if (!dropped.isEmpty()) {
+      LOG.info("Labels " + dropped + " in Scan/Get visibility attributes dropped as user "
+          + userName + " having no auth set for those.");
+    }
+  }
+}
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionExpander.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionExpander.java
new file mode 100644
index 0000000..445b12f
--- /dev/null
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionExpander.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation
 (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security.visibility;

import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.Operator;

/**
 * Rewrites a parsed visibility expression tree into a flattened form: NOT is pushed down to the
 * leaves (De Morgan-style, see {@link #negate}), and AND is distributed over OR across brackets
 * (see {@link #expandNonLeaf}) — i.e. the tree is expanded toward disjunctive normal form.
 */
@InterfaceAudience.Private
public class ExpressionExpander {

  /**
   * Recursively expands the given expression node.
   *
   * @param src the root of the (sub)expression to expand
   * @return the expanded node; may be <code>src</code> itself when nothing needs expansion
   */
  public ExpressionNode expand(ExpressionNode src) {
    if (!src.isSingleNode()) {
      NonLeafExpressionNode nlExp = (NonLeafExpressionNode) src;
      List<ExpressionNode> childExps = nlExp.getChildExps();
      Operator outerOp = nlExp.getOperator();
      if (isToBeExpanded(childExps)) {
        // Any of the child exp is a non leaf exp with & or | operator
        NonLeafExpressionNode newNode = new NonLeafExpressionNode(nlExp.getOperator());
        for (ExpressionNode exp : childExps) {
          if (exp.isSingleNode()) {
            newNode.addChildExp(exp);
          } else {
            // Expand children bottom-up before merging across this node.
            newNode.addChildExp(expand(exp));
          }
        }
        nlExp = expandNonLeaf(newNode, outerOp);
      }
      return nlExp;
    }
    if (src instanceof NonLeafExpressionNode
        && ((NonLeafExpressionNode) src).getOperator() == Operator.NOT) {
      // Negate the exp
      return negate((NonLeafExpressionNode) src);
    }
    return src;
  }

  /**
   * Pushes a NOT node downward: !(!x) collapses to x, and !(a&b) / !(a|b) become
   * (!a|!b) / (!a&!b) respectively, with each new NOT expanded recursively.
   * A NOT over a plain leaf is returned unchanged.
   */
  private ExpressionNode negate(NonLeafExpressionNode nlExp) {
    ExpressionNode notChild = nlExp.getChildExps().get(0);
    if (notChild instanceof LeafExpressionNode) {
      return nlExp;
    }
    NonLeafExpressionNode nlNotChild = (NonLeafExpressionNode) notChild;
    if (nlNotChild.getOperator() == Operator.NOT) {
      // negate the negate
      return nlNotChild.getChildExps().get(0);
    }
    Operator negateOp = nlNotChild.getOperator() == Operator.AND ? Operator.OR : Operator.AND;
    NonLeafExpressionNode newNode = new NonLeafExpressionNode(negateOp);
    for (ExpressionNode expNode : nlNotChild.getChildExps()) {
      NonLeafExpressionNode negateNode = new NonLeafExpressionNode(Operator.NOT);
      // Deep clone so the original subtree is not shared with the rewritten one.
      negateNode.addChildExp(expNode.deepClone());
      newNode.addChildExp(expand(negateNode));
    }
    return newNode;
  }

  // True when at least one child is itself a non-leaf expression (so brackets must be merged).
  private boolean isToBeExpanded(List<ExpressionNode> childExps) {
    for (ExpressionNode exp : childExps) {
      if (!exp.isSingleNode()) {
        return true;
      }
    }
    return false;
  }

  /**
   * Merges/distributes a two-child node across brackets. The caller guarantees exactly two
   * children (asserted below). Same-operator children are flattened; mixed AND/OR under an
   * outer AND is distributed, e.g. (a|b)&(c|d) => (a&c)|(a&d)|(b&c)|(b&d).
   */
  private NonLeafExpressionNode expandNonLeaf(NonLeafExpressionNode newNode, Operator outerOp) {
    // Now go for the merge or expansion across brackets
    List<ExpressionNode> newChildExps = newNode.getChildExps();
    assert newChildExps.size() == 2;
    ExpressionNode leftChild = newChildExps.get(0);
    ExpressionNode rightChild = newChildExps.get(1);
    if (rightChild.isSingleNode()) {
      // Merge the single right node into the left side
      assert leftChild instanceof NonLeafExpressionNode;
      newNode = mergeChildNodes(newNode, outerOp, rightChild, (NonLeafExpressionNode) leftChild);
    } else if (leftChild.isSingleNode()) {
      // Merge the single left node into the right side
      assert rightChild instanceof NonLeafExpressionNode;
      newNode = mergeChildNodes(newNode, outerOp, leftChild, (NonLeafExpressionNode) rightChild);
    } else {
      // Both the child exp nodes are non single.
      NonLeafExpressionNode leftChildNLE = (NonLeafExpressionNode) leftChild;
      NonLeafExpressionNode rightChildNLE = (NonLeafExpressionNode) rightChild;
      if (outerOp == leftChildNLE.getOperator() && outerOp == rightChildNLE.getOperator()) {
        // Merge
        NonLeafExpressionNode leftChildNLEClone = leftChildNLE.deepClone();
        leftChildNLEClone.addChildExps(rightChildNLE.getChildExps());
        newNode = leftChildNLEClone;
      } else {
        // (a | b) & (c & d) ...
        if (outerOp == Operator.OR) {
          // (a | b) | (c & d)
          if (leftChildNLE.getOperator() == Operator.OR
              && rightChildNLE.getOperator() == Operator.AND) {
            leftChildNLE.addChildExp(rightChildNLE);
            newNode = leftChildNLE;
          } else if (leftChildNLE.getOperator() == Operator.AND
              && rightChildNLE.getOperator() == Operator.OR) {
            // (a & b) | (c | d)
            rightChildNLE.addChildExp(leftChildNLE);
            newNode = rightChildNLE;
          }
          // (a & b) | (c & d)
          // This case no need to do any thing
        } else {
          // outer op is &
          // (a | b) & (c & d) => (a & c & d) | (b & c & d)
          if (leftChildNLE.getOperator() == Operator.OR
              && rightChildNLE.getOperator() == Operator.AND) {
            newNode = new NonLeafExpressionNode(Operator.OR);
            for (ExpressionNode exp : leftChildNLE.getChildExps()) {
              NonLeafExpressionNode rightChildNLEClone = rightChildNLE.deepClone();
              rightChildNLEClone.addChildExp(exp);
              newNode.addChildExp(rightChildNLEClone);
            }
          } else if (leftChildNLE.getOperator() == Operator.AND
              && rightChildNLE.getOperator() == Operator.OR) {
            // (a & b) & (c | d) => (a & b & c) | (a & b & d)
            newNode = new NonLeafExpressionNode(Operator.OR);
            for (ExpressionNode exp : rightChildNLE.getChildExps()) {
              NonLeafExpressionNode leftChildNLEClone = leftChildNLE.deepClone();
              leftChildNLEClone.addChildExp(exp);
              newNode.addChildExp(leftChildNLEClone);
            }
          } else {
            // (a | b) & (c | d) => (a & c) | (a & d) | (b & c) | (b & d)
            newNode = new NonLeafExpressionNode(Operator.OR);
            for (ExpressionNode leftExp : leftChildNLE.getChildExps()) {
              for (ExpressionNode rightExp : rightChildNLE.getChildExps()) {
                NonLeafExpressionNode newChild = new NonLeafExpressionNode(Operator.AND);
                newChild.addChildExp(leftExp.deepClone());
                newChild.addChildExp(rightExp.deepClone());
                newNode.addChildExp(newChild);
              }
            }
          }
        }
      }
    }
    return newNode;
  }

  /**
   * Merges a single (leaf) child into its non-leaf sibling. When the sibling's operator matches
   * the outer operator the leaf is appended; otherwise (outer AND over an OR sibling) the AND is
   * distributed: (a | b) & c -> (a & c) | (b & c).
   */
  private NonLeafExpressionNode mergeChildNodes(NonLeafExpressionNode newOuterNode,
      Operator outerOp, ExpressionNode lChild, NonLeafExpressionNode nlChild) {
    // Merge the single right/left node into the other side
    if (nlChild.getOperator() == outerOp) {
      NonLeafExpressionNode leftChildNLEClone = nlChild.deepClone();
      leftChildNLEClone.addChildExp(lChild);
      newOuterNode = leftChildNLEClone;
    } else if (outerOp == Operator.AND) {
      assert nlChild.getOperator() == Operator.OR;
      // outerOp is & here. We need to expand the node here
      // (a | b) & c -> (a & c) | (b & c)
      // OR
      // c & (a | b) -> (c & a) | (c & b)
      newOuterNode = new NonLeafExpressionNode(Operator.OR);
      for (ExpressionNode exp : nlChild.getChildExps()) {
        newOuterNode.addChildExp(new NonLeafExpressionNode(Operator.AND, exp, lChild));
      }
    }
    return newOuterNode;
  }
}
\ No newline at end of file
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java
new file mode 100644
index 0000000..f6ddf75
--- /dev/null
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java
@@ -0,0 +1,273 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.
 You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security.visibility;

import java.util.Stack;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.Operator;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Parses a visibility expression string (labels combined with <code>&amp;</code>,
 * <code>|</code>, <code>!</code> and parentheses) into an {@link ExpressionNode} tree
 * using a single left-to-right pass over the bytes and an explicit stack.
 */
@InterfaceAudience.Private
public class ExpressionParser {

  private static final char CLOSE_PARAN = ')';
  private static final char OPEN_PARAN = '(';
  private static final char OR = '|';
  private static final char AND = '&';
  private static final char NOT = '!';
  private static final char SPACE = ' ';

  /**
   * Parses the given expression string.
   *
   * @param expS the visibility expression; surrounding whitespace is ignored
   * @return the root node of the parsed expression tree
   * @throws ParseException when the expression is syntactically invalid or contains
   *           characters not allowed in a label
   */
  public ExpressionNode parse(String expS) throws ParseException {
    expS = expS.trim();
    Stack<ExpressionNode> expStack = new Stack<ExpressionNode>();
    int index = 0;
    int endPos = expS.length();
    byte[] exp = Bytes.toBytes(expS);
    while (index < endPos) {
      byte b = exp[index];
      switch (b) {
      case OPEN_PARAN:
        processOpenParan(expStack, expS, index);
        index = skipSpaces(exp, index);
        break;
      case CLOSE_PARAN:
        processCloseParan(expStack, expS, index);
        index = skipSpaces(exp, index);
        break;
      case AND:
      case OR:
        processANDorOROp(getOperator(b), expStack, expS, index);
        index = skipSpaces(exp, index);
        break;
      case NOT:
        processNOTOp(expStack, expS, index);
        break;
      default:
        // A label token: consume valid auth chars up to the next delimiter.
        int labelOffset = index;
        do {
          if (!VisibilityLabelsValidator.isValidAuthChar(exp[index])) {
            throw new ParseException("Error parsing expression " + expS + " at column : "
                + index);
          }
          index++;
        } while (index < endPos && !isEndOfLabel(exp[index]));
        String leafExp = new String(exp, labelOffset, index - labelOffset).trim();
        if (leafExp.isEmpty()) {
          throw new ParseException("Error parsing expression " + expS + " at column : " + index);
        }
        processLabelExpNode(new LeafExpressionNode(leafExp), expStack, expS, index);
        // We already crossed the label node index. So need to reduce 1 here.
        index--;
        index = skipSpaces(exp, index);
      }
      index++;
    }
    // Exactly one fully-built node must remain on the stack.
    if (expStack.size() != 1) {
      throw new ParseException("Error parsing expression " + expS);
    }
    ExpressionNode top = expStack.pop();
    if (top == LeafExpressionNode.OPEN_PARAN_NODE) {
      throw new ParseException("Error parsing expression " + expS);
    }
    if (top instanceof NonLeafExpressionNode) {
      NonLeafExpressionNode nlTop = (NonLeafExpressionNode) top;
      if (nlTop.getOperator() == Operator.NOT) {
        // A NOT node must have exactly one child; other operators exactly two.
        if (nlTop.getChildExps().size() != 1) {
          throw new ParseException("Error parsing expression " + expS);
        }
      } else if (nlTop.getChildExps().size() != 2) {
        throw new ParseException("Error parsing expression " + expS);
      }
    }
    return top;
  }

  // Returns the index of the last consecutive space following 'index' (or 'index' itself),
  // so the main loop's index++ lands on the next meaningful character.
  private int skipSpaces(byte[] exp, int index) {
    while (index < exp.length - 1 && exp[index + 1] == SPACE) {
      index++;
    }
    return index;
  }

  private void processCloseParan(Stack<ExpressionNode> expStack, String expS, int index)
      throws ParseException {
    if (expStack.size() < 2) {
      // When ) comes we expect atleast a ( node and another leaf/non leaf node
      // in stack.
      throw new ParseException();
    } else {
      ExpressionNode top = expStack.pop();
      ExpressionNode secondTop = expStack.pop();
      // The second top must be a ( node and top should not be a ). Top can be
      // any thing else
      if (top == LeafExpressionNode.OPEN_PARAN_NODE
          || secondTop != LeafExpressionNode.OPEN_PARAN_NODE) {
        throw new ParseException("Error parsing expression " + expS + " at column : " + index);
      }
      // a&(b|) is not valid.
      // The top can be a ! node but with exactly 1 child node. !).. is invalid
      // Other NonLeafExpressionNode , then there should be exactly 2 child.
      // (a&) is not valid.
      if (top instanceof NonLeafExpressionNode) {
        NonLeafExpressionNode nlTop = (NonLeafExpressionNode) top;
        if ((nlTop.getOperator() == Operator.NOT && nlTop.getChildExps().size() != 1)
            || (nlTop.getOperator() != Operator.NOT && nlTop.getChildExps().size() != 2)) {
          throw new ParseException("Error parsing expression " + expS + " at column : " + index);
        }
      }
      // When (a|b)&(c|d) comes while processing the second ) there will be
      // already (a|b)& node
      // avail in the stack. The top will be c|d node. We need to take it out
      // and combine as one
      // node.
      if (!expStack.isEmpty()) {
        ExpressionNode thirdTop = expStack.peek();
        if (thirdTop instanceof NonLeafExpressionNode) {
          NonLeafExpressionNode nlThirdTop = (NonLeafExpressionNode) expStack.pop();
          nlThirdTop.addChildExp(top);
          if (nlThirdTop.getOperator() == Operator.NOT) {
            // It is a NOT node. So there may be a NonLeafExpressionNode below
            // it to which the
            // completed NOT can be added now.
            if (!expStack.isEmpty()) {
              ExpressionNode fourthTop = expStack.peek();
              if (fourthTop instanceof NonLeafExpressionNode) {
                // Its Operator will be OR or AND
                NonLeafExpressionNode nlFourthTop = (NonLeafExpressionNode) fourthTop;
                assert nlFourthTop.getOperator() != Operator.NOT;
                // Also for sure its number of children will be 1
                assert nlFourthTop.getChildExps().size() == 1;
                nlFourthTop.addChildExp(nlThirdTop);
                return;// This case no need to add back the nlThirdTop.
              }
            }
          }
          top = nlThirdTop;
        }
      }
      expStack.push(top);
    }
  }

  private void processOpenParan(Stack<ExpressionNode> expStack, String expS, int index)
      throws ParseException {
    if (!expStack.isEmpty()) {
      ExpressionNode top = expStack.peek();
      // Top can not be a Label Node. a(.. is not valid. but ((a.. is fine.
      if (top instanceof LeafExpressionNode && top != LeafExpressionNode.OPEN_PARAN_NODE) {
        throw new ParseException("Error parsing expression " + expS + " at column : " + index);
      } else if (top instanceof NonLeafExpressionNode) {
        // Top is non leaf.
        // It can be ! node but with out any child nodes. !a(.. is invalid
        // Other NonLeafExpressionNode , then there should be exactly 1 child.
        // a&b( is not valid.
        // a&( is valid though. Also !( is valid
        NonLeafExpressionNode nlTop = (NonLeafExpressionNode) top;
        if ((nlTop.getOperator() == Operator.NOT && nlTop.getChildExps().size() != 0)
            || (nlTop.getOperator() != Operator.NOT && nlTop.getChildExps().size() != 1)) {
          throw new ParseException("Error parsing expression " + expS + " at column : " + index);
        }
      }
    }
    expStack.push(LeafExpressionNode.OPEN_PARAN_NODE);
  }

  // Attaches a completed label leaf to whatever is on top of the stack: pushed directly after
  // a '(' or on an empty stack, or added as a child of a pending operator node. A completed
  // NOT node is additionally folded into the operator node beneath it, if any.
  private void processLabelExpNode(LeafExpressionNode node, Stack<ExpressionNode> expStack,
      String expS, int index) throws ParseException {
    if (expStack.isEmpty()) {
      expStack.push(node);
    } else {
      ExpressionNode top = expStack.peek();
      if (top == LeafExpressionNode.OPEN_PARAN_NODE) {
        expStack.push(node);
      } else if (top instanceof NonLeafExpressionNode) {
        NonLeafExpressionNode nlTop = (NonLeafExpressionNode) expStack.pop();
        nlTop.addChildExp(node);
        if (nlTop.getOperator() == Operator.NOT && !expStack.isEmpty()) {
          ExpressionNode secondTop = expStack.peek();
          if (secondTop == LeafExpressionNode.OPEN_PARAN_NODE) {
            expStack.push(nlTop);
          } else if (secondTop instanceof NonLeafExpressionNode) {
            ((NonLeafExpressionNode) secondTop).addChildExp(nlTop);
          }
        } else {
          expStack.push(nlTop);
        }
      } else {
        throw new ParseException("Error parsing expression " + expS + " at column : " + index);
      }
    }
  }

  // An '&' or '|' must follow a completed operand: either a single (leaf) node or a
  // fully-built two-child operator node. The operand becomes the new node's first child.
  private void processANDorOROp(Operator op, Stack<ExpressionNode> expStack, String expS,
      int index) throws ParseException {
    if (expStack.isEmpty()) {
      throw new ParseException("Error parsing expression " + expS + " at column : " + index);
    }
    ExpressionNode top = expStack.pop();
    if (top.isSingleNode()) {
      if (top == LeafExpressionNode.OPEN_PARAN_NODE) {
        throw new ParseException("Error parsing expression " + expS + " at column : " + index);
      }
      expStack.push(new NonLeafExpressionNode(op, top));
    } else {
      NonLeafExpressionNode nlTop = (NonLeafExpressionNode) top;
      if (nlTop.getChildExps().size() != 2) {
        throw new ParseException("Error parsing expression " + expS + " at column : " + index);
      }
      expStack.push(new NonLeafExpressionNode(op, nlTop));
    }
  }

  private void processNOTOp(Stack<ExpressionNode> expStack, String expS, int index)
      throws ParseException {
    // When ! comes, the stack can be empty or top ( or top can be some exp like
    // a&
    // !!.., a!, a&b!, !a! are invalid
    if (!expStack.isEmpty()) {
      ExpressionNode top = expStack.peek();
      if (top.isSingleNode() && top != LeafExpressionNode.OPEN_PARAN_NODE) {
        throw new ParseException("Error parsing expression " + expS + " at column : " + index);
      }
      if (!top.isSingleNode() && ((NonLeafExpressionNode) top).getChildExps().size() != 1) {
        throw new ParseException("Error parsing expression " + expS + " at column : " + index);
      }
    }
    expStack.push(new NonLeafExpressionNode(Operator.NOT));
  }

  // True for any character that terminates a label token.
  private static boolean isEndOfLabel(byte b) {
    return (b == OPEN_PARAN || b == CLOSE_PARAN || b == OR || b == AND || b == NOT || b == SPACE);
  }

  private static Operator getOperator(byte op) {
    switch (op) {
    case AND:
      return Operator.AND;
    case OR:
      return Operator.OR;
    case NOT:
      return Operator.NOT;
    }
    return null;
  }
}
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ParseException.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ParseException.java
new file mode 100644
index 0000000..54a8c5b
--- /dev/null
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ParseException.java
@@ -0,0 +1,43 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; + +@InterfaceAudience.Private +public class ParseException extends Exception { + + private static final long serialVersionUID = 1725986524206989173L; + + public ParseException() { + + } + + public ParseException(String msg) { + super(msg); + } + + public ParseException(Throwable t) { + super(t); + } + + public ParseException(String msg, Throwable t) { + super(msg, t); + } + +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java new file mode 100644 index 0000000..7b54a1a --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java @@ -0,0 +1,44 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */
package org.apache.hadoop.hbase.security.visibility;

import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.hbase.client.OperationWithAttributes;
import org.apache.hadoop.hbase.security.User;

/**
 * Interface for plugging in the strategy that derives the visibility labels to be
 * applied for a user's Scan/Get. Implementations receive the operation (which may
 * carry a visibility-labels attribute) and the requesting user.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface ScanLabelGenerator extends Configurable {

  /**
   * Returns the list of visibility labels to be used for the given user and operation.
   *
   * @param user the user issuing the operation
   * @param op the Scan/Get operation, possibly carrying visibility attributes
   * @return The labels
   */
  public List<String> getLabels(User user, OperationWithAttributes op);
}
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/SimpleScanLabelGenerator.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/SimpleScanLabelGenerator.java
new file mode 100644
index 0000000..be4a435
--- /dev/null
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/SimpleScanLabelGenerator.java
@@ -0,0 +1,55 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.client.OperationWithAttributes; +import org.apache.hadoop.hbase.security.User; + +/** + * This is a simple implementation for ScanLabelGenerator. It will just extract labels passed via + * Scan#OPAttributes. + */ +@InterfaceAudience.Private +public class SimpleScanLabelGenerator implements ScanLabelGenerator { + + private Configuration conf; + + @Override + public void setConf(Configuration conf) { + this.conf = conf; + } + + @Override + public Configuration getConf() { + return this.conf; + } + + @Override + public List getLabels(User user, OperationWithAttributes op) { + byte[] authorizations = op.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY); + if (authorizations != null && authorizations.length > 0) { + List labels = Authorizations.fromBytes(authorizations).getLabels(); + return labels; + } + return null; + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java new file mode 100644 index 0000000..b50a8e0 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -0,0 +1,1127 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.security.visibility; + +import static org.apache.hadoop.hbase.HConstants.OperationStatusCode.SANITY_CHECK_FAILURE; +import static org.apache.hadoop.hbase.HConstants.OperationStatusCode.SUCCESS; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_FAMILY; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABEL_QUALIFIER; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY; +import static org.apache.hadoop.hbase.security.visibility.VisibilityUtils.SYSTEM_LABEL; + +import java.io.ByteArrayOutputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.BitSet; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.KeyValue.Type; +import 
org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.catalog.MetaReader; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Mutation; +import org.apache.hadoop.hbase.client.OperationWithAttributes; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; +import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; +import org.apache.hadoop.hbase.coprocessor.CoprocessorService; +import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; +import org.apache.hadoop.hbase.coprocessor.MasterObserver; +import org.apache.hadoop.hbase.coprocessor.ObserverContext; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.coprocessor.RegionObserver; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.ipc.RequestContext; +import org.apache.hadoop.hbase.master.MasterServices; +import org.apache.hadoop.hbase.master.RegionPlan; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.SetAuthsRequest; +import 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService; +import org.apache.hadoop.hbase.regionserver.BloomType; +import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy; +import org.apache.hadoop.hbase.regionserver.HRegion; +import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress; +import org.apache.hadoop.hbase.regionserver.OperationStatus; +import org.apache.hadoop.hbase.regionserver.RegionScanner; +import org.apache.hadoop.hbase.security.AccessDeniedException; +import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.security.access.AccessControlLists; +import org.apache.hadoop.hbase.security.access.AccessController; +import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.hadoop.io.WritableUtils; + +import com.google.common.collect.Lists; +import com.google.protobuf.ByteString; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; + +/** + * Coprocessor that has both the MasterObserver and RegionObserver implemented that would support in + * visibility labels + */ +@InterfaceAudience.Private +public class VisibilityController extends BaseRegionObserver implements MasterObserver, + RegionObserver, 
VisibilityLabelsService.Interface, CoprocessorService { + + private static final Log LOG = LogFactory.getLog(VisibilityController.class); + private static final byte[] DUMMY_VALUE = new byte[0]; + // "system" label is having an ordinal value 1. + private static final int SYSTEM_LABEL_ORDINAL = 1; + private static final Tag[] LABELS_TABLE_TAGS = new Tag[1]; + + private final ExpressionParser expressionParser = new ExpressionParser(); + private final ExpressionExpander expressionExpander = new ExpressionExpander(); + private VisibilityLabelsManager visibilityManager; + // defined only for Endpoint implementation, so it can have way to access region services. + private RegionCoprocessorEnvironment regionEnv; + private ScanLabelGenerator scanLabelGenerator; + + private int ordinalCounter = -1; + // flags if we are running on a region of the 'labels' table + private boolean labelsRegion = false; + // Flag denoting whether AcessController is available or not. + private boolean acOn = false; + private Configuration conf; + + static { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + DataOutputStream dos = new DataOutputStream(baos); + try { + WritableUtils.writeVInt(dos, SYSTEM_LABEL_ORDINAL); + } catch (IOException e) { + // We write to a byte array. No Exception can happen. 
+ } + LABELS_TABLE_TAGS[0] = new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray()); + } + + @Override + public void start(CoprocessorEnvironment env) throws IOException { + this.conf = env.getConfiguration(); + ZooKeeperWatcher zk = null; + if (env instanceof MasterCoprocessorEnvironment) { + // if running on HMaster + MasterCoprocessorEnvironment mEnv = (MasterCoprocessorEnvironment) env; + zk = mEnv.getMasterServices().getZooKeeper(); + } else if (env instanceof RegionCoprocessorEnvironment) { + // if running at region + regionEnv = (RegionCoprocessorEnvironment) env; + zk = regionEnv.getRegionServerServices().getZooKeeper(); + } + + // If zk is null or IOException while obtaining auth manager, + // throw RuntimeException so that the coprocessor is unloaded. + if (zk == null) { + throw new RuntimeException("Error obtaining VisibilityLabelsManager, zk found null."); + } + try { + this.visibilityManager = VisibilityLabelsManager.get(zk, this.conf); + } catch (IOException ioe) { + throw new RuntimeException("Error obtaining VisibilityLabelsManager", ioe); + } + if (env instanceof RegionCoprocessorEnvironment) { + // ScanLabelGenerator to be instantiated only with Region Observer. 
+ scanLabelGenerator = VisibilityUtils.getScanLabelGenerator(this.conf); + } + // AccessController should be the 1st CP configured in the chain + acOn = CoprocessorHost.getLoadedCoprocessors().contains(AccessController.class.getName()); + } + + @Override + public void stop(CoprocessorEnvironment env) throws IOException { + + } + + /********************************* Master related hooks **********************************/ + + @Override + public void postStartMaster(ObserverContext ctx) throws IOException { + // Need to create the new system table for labels here + MasterServices master = ctx.getEnvironment().getMasterServices(); + if (!MetaReader.tableExists(master.getCatalogTracker(), LABELS_TABLE_NAME)) { + HTableDescriptor labelsTable = new HTableDescriptor(LABELS_TABLE_NAME); + HColumnDescriptor labelsColumn = new HColumnDescriptor(LABELS_TABLE_FAMILY); + labelsColumn.setBloomFilterType(BloomType.NONE); + labelsColumn.setBlockCacheEnabled(false); // We will cache all the labels. No need of normal + // table block cache. + labelsTable.addFamily(labelsColumn); + // Let the "labels" table having only one region always. We are not expecting too many labels in + // the system. 
+ labelsTable.setValue(HTableDescriptor.SPLIT_POLICY, + DisabledRegionSplitPolicy.class.getName()); + master.createTable(labelsTable, null); + } + } + + @Override + public void preCreateTable(ObserverContext ctx, + HTableDescriptor desc, HRegionInfo[] regions) throws IOException { + } + + @Override + public void postCreateTable(ObserverContext ctx, + HTableDescriptor desc, HRegionInfo[] regions) throws IOException { + } + + @Override + public void preCreateTableHandler(ObserverContext ctx, + HTableDescriptor desc, HRegionInfo[] regions) throws IOException { + } + + @Override + public void postCreateTableHandler(ObserverContext ctx, + HTableDescriptor desc, HRegionInfo[] regions) throws IOException { + } + + @Override + public void preDeleteTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void postDeleteTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void preDeleteTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void postDeleteTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void preModifyTable(ObserverContext ctx, + TableName tableName, HTableDescriptor htd) throws IOException { + } + + @Override + public void postModifyTable(ObserverContext ctx, + TableName tableName, HTableDescriptor htd) throws IOException { + } + + @Override + public void preModifyTableHandler(ObserverContext ctx, + TableName tableName, HTableDescriptor htd) throws IOException { + } + + @Override + public void postModifyTableHandler(ObserverContext ctx, + TableName tableName, HTableDescriptor htd) throws IOException { + } + + @Override + public void preAddColumn(ObserverContext ctx, TableName tableName, + HColumnDescriptor column) throws IOException { + } + + @Override + public void postAddColumn(ObserverContext ctx, TableName tableName, + HColumnDescriptor column) throws IOException { + 
} + + @Override + public void preAddColumnHandler(ObserverContext ctx, + TableName tableName, HColumnDescriptor column) throws IOException { + } + + @Override + public void postAddColumnHandler(ObserverContext ctx, + TableName tableName, HColumnDescriptor column) throws IOException { + } + + @Override + public void preModifyColumn(ObserverContext ctx, + TableName tableName, HColumnDescriptor descriptor) throws IOException { + } + + @Override + public void postModifyColumn(ObserverContext ctx, + TableName tableName, HColumnDescriptor descriptor) throws IOException { + } + + @Override + public void preModifyColumnHandler(ObserverContext ctx, + TableName tableName, HColumnDescriptor descriptor) throws IOException { + } + + @Override + public void postModifyColumnHandler(ObserverContext ctx, + TableName tableName, HColumnDescriptor descriptor) throws IOException { + } + + @Override + public void preDeleteColumn(ObserverContext ctx, + TableName tableName, byte[] c) throws IOException { + } + + @Override + public void postDeleteColumn(ObserverContext ctx, + TableName tableName, byte[] c) throws IOException { + } + + @Override + public void preDeleteColumnHandler(ObserverContext ctx, + TableName tableName, byte[] c) throws IOException { + } + + @Override + public void postDeleteColumnHandler(ObserverContext ctx, + TableName tableName, byte[] c) throws IOException { + } + + @Override + public void preEnableTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void postEnableTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void preEnableTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void postEnableTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void preDisableTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void 
postDisableTable(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void preDisableTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void postDisableTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void preMove(ObserverContext ctx, HRegionInfo region, + ServerName srcServer, ServerName destServer) throws IOException { + } + + @Override + public void postMove(ObserverContext ctx, HRegionInfo region, + ServerName srcServer, ServerName destServer) throws IOException { + } + + @Override + public void preAssign(ObserverContext ctx, HRegionInfo regionInfo) + throws IOException { + } + + @Override + public void postAssign(ObserverContext ctx, HRegionInfo regionInfo) + throws IOException { + } + + @Override + public void preUnassign(ObserverContext ctx, + HRegionInfo regionInfo, boolean force) throws IOException { + } + + @Override + public void postUnassign(ObserverContext ctx, + HRegionInfo regionInfo, boolean force) throws IOException { + } + + @Override + public void preRegionOffline(ObserverContext ctx, + HRegionInfo regionInfo) throws IOException { + } + + @Override + public void postRegionOffline(ObserverContext ctx, + HRegionInfo regionInfo) throws IOException { + } + + @Override + public void preBalance(ObserverContext ctx) throws IOException { + } + + @Override + public void postBalance(ObserverContext ctx, List plans) + throws IOException { + } + + @Override + public boolean preBalanceSwitch(ObserverContext ctx, + boolean newValue) throws IOException { + return false; + } + + @Override + public void postBalanceSwitch(ObserverContext ctx, + boolean oldValue, boolean newValue) throws IOException { + } + + @Override + public void preShutdown(ObserverContext ctx) throws IOException { + } + + @Override + public void preStopMaster(ObserverContext ctx) throws IOException { + } + + @Override + public void 
preSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void postSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void preCloneSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void postCloneSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void preRestoreSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void postRestoreSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void preDeleteSnapshot(ObserverContext ctx, + SnapshotDescription snapshot) throws IOException { + } + + @Override + public void postDeleteSnapshot(ObserverContext ctx, + SnapshotDescription snapshot) throws IOException { + } + + @Override + public void preGetTableDescriptors(ObserverContext ctx, + List tableNamesList, List descriptors) throws IOException { + } + + @Override + public void postGetTableDescriptors(ObserverContext ctx, + List descriptors) throws IOException { + } + + @Override + public void preCreateNamespace(ObserverContext ctx, + NamespaceDescriptor ns) throws IOException { + } + + @Override + public void postCreateNamespace(ObserverContext ctx, + NamespaceDescriptor ns) throws IOException { + } + + @Override + public void preDeleteNamespace(ObserverContext ctx, + String namespace) throws IOException { + } + + @Override + public void postDeleteNamespace(ObserverContext ctx, + String namespace) throws IOException { + } + + @Override + public void preModifyNamespace(ObserverContext ctx, + NamespaceDescriptor ns) throws 
IOException { + } + + @Override + public void postModifyNamespace(ObserverContext ctx, + NamespaceDescriptor ns) throws IOException { + } + + @Override + public void preMasterInitialization(ObserverContext ctx) + throws IOException { + + } + + /****************************** Region related hooks ******************************/ + + @Override + public void postOpen(ObserverContext e) { + // Read the entire labels table and populate the zk + if (e.getEnvironment().getRegion().getRegionInfo().getTable().equals(LABELS_TABLE_NAME)) { + this.labelsRegion = true; + try { + Pair, Map>> labelsAndUserAuths = + extractLabelsAndAuths(getExistingLabelsWithAuths()); + Map labels = labelsAndUserAuths.getFirst(); + Map> userAuths = labelsAndUserAuths.getSecond(); + // Add the "system" label if it is not added into the system yet + addSystemLabel(e.getEnvironment().getRegion(), labels, userAuths); + int ordinal = 1; // Ordinal 1 is reserved for "system" label. + for (Integer i : labels.values()) { + if (i > ordinal) { + ordinal = i; + } + } + this.ordinalCounter = ordinal + 1; + if (labels.size() > 0) { + // If there is no data need not write to zk + byte[] serialized = VisibilityUtils.getDataToWriteToZooKeeper(labels); + this.visibilityManager.writeToZookeeper(serialized, true); + } + if (userAuths.size() > 0) { + byte[] serialized = VisibilityUtils.getUserAuthsDataToWriteToZooKeeper(userAuths); + this.visibilityManager.writeToZookeeper(serialized, false); + } + } catch (IOException ioe) { + LOG.error("Error while updating the zk with the exisiting labels data", ioe); + } + } + } + + private void addSystemLabel(HRegion region, Map labels, + Map> userAuths) throws IOException { + if (!labels.containsKey(SYSTEM_LABEL)) { + Put p = new Put(Bytes.toBytes(SYSTEM_LABEL_ORDINAL)); + p.add(LABELS_TABLE_FAMILY, LABEL_QUALIFIER, Bytes.toBytes(SYSTEM_LABEL)); + // Set auth for "system" label for all super users. 
+ List superUsers = getSyetmAndSuperUsers(); + for (String superUser : superUsers) { + p.add(LABELS_TABLE_FAMILY, Bytes.toBytes(superUser), DUMMY_VALUE, LABELS_TABLE_TAGS); + } + region.put(p); + labels.put(SYSTEM_LABEL, SYSTEM_LABEL_ORDINAL); + for (String superUser : superUsers) { + List auths = userAuths.get(superUser); + if (auths == null) { + auths = new ArrayList(1); + userAuths.put(superUser, auths); + } + auths.add(SYSTEM_LABEL_ORDINAL); + } + } + } + + @Override + public void preBatchMutate(ObserverContext c, + MiniBatchOperationInProgress miniBatchOp) throws IOException { + if (c.getEnvironment().getRegion().getRegionInfo().getTable().equals(LABELS_TABLE_NAME)) { + performACLCheck(); + } else if (!c.getEnvironment().getRegion().getRegionInfo().getTable().isSystemTable()) { + // TODO this can be made as a global LRU cache at HRS level? + Map> labelCache = new HashMap>(); + for (int i = 0; i < miniBatchOp.size(); i++) { + Mutation m = miniBatchOp.getOperation(i); + if (m instanceof Put) { + Put p = (Put) m; + boolean sanityFailure = false; + for (List cells : p.getFamilyCellMap().values()) { + for (Cell cell : cells) { + if (!checkForReservedVisibilityTagPresence(cell)) { + miniBatchOp.setOperationStatus(i, new OperationStatus(SANITY_CHECK_FAILURE, + "Mutation contains cell with reserved type tag")); + sanityFailure = true; + break; + } + } + if (sanityFailure) { + break; + } + } + if (!sanityFailure) { + byte[] labelsExp = m.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY); + if (labelsExp != null) { + String labelsExpStr = Bytes.toString(labelsExp); + List visibilityTags = labelCache.get(labelsExpStr); + if (visibilityTags == null) { + try { + visibilityTags = createVisibilityTags(labelsExpStr); + } catch (ParseException e) { + miniBatchOp.setOperationStatus(i, + new OperationStatus(SANITY_CHECK_FAILURE, e.getMessage())); + } catch (InvalidLabelException e) { + miniBatchOp.setOperationStatus(i, + new OperationStatus(SANITY_CHECK_FAILURE, 
e.getMessage())); + } + } + if (visibilityTags != null) { + labelCache.put(labelsExpStr, visibilityTags); + List updatedCells = new ArrayList(); + for (List cells : p.getFamilyCellMap().values()) { + for (Cell cell : cells) { + List tags = Tag.createTags(cell.getTagsArray(), cell.getTagsOffset(), + cell.getTagsLength()); + tags.addAll(visibilityTags); + Cell updatedCell = new KeyValue(cell.getRowArray(), cell.getRowOffset(), + cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(), + cell.getFamilyLength(), cell.getQualifierArray(), + cell.getQualifierOffset(), cell.getQualifierLength(), cell.getTimestamp(), + Type.codeToType(cell.getTypeByte()), cell.getValueArray(), + cell.getValueOffset(), cell.getValueLength(), tags); + updatedCells.add(updatedCell); + } + } + p.getFamilyCellMap().clear(); + // Clear and add new Cells to the Mutation. + for(Cell cell : updatedCells){ + p.add(cell); + } + } + } + } + } + } + } + } + + @Override + public void postBatchMutate(ObserverContext c, + MiniBatchOperationInProgress miniBatchOp) throws IOException { + if (this.labelsRegion) { + // We will add to zookeeper here. 
+ Pair, Map>> labelsAndUserAuths = + extractLabelsAndAuths(getExistingLabelsWithAuths()); + Map existingLabels = labelsAndUserAuths.getFirst(); + Map> userAuths = labelsAndUserAuths.getSecond(); + boolean isNewLabels = false; + boolean isUserAuthsChange = false; + for (int i = 0; i < miniBatchOp.size(); i++) { + Mutation m = miniBatchOp.getOperation(i); + if (miniBatchOp.getOperationStatus(i).getOperationStatusCode() == SUCCESS) { + for (List cells : m.getFamilyCellMap().values()) { + for (Cell cell : cells) { + int labelOrdinal = Bytes.toInt(cell.getRowArray(), cell.getRowOffset()); + if (Bytes.equals(cell.getQualifierArray(), cell.getQualifierOffset(), + cell.getQualifierLength(), LABEL_QUALIFIER, 0, + LABEL_QUALIFIER.length)) { + if (m instanceof Put) { + existingLabels.put( + Bytes.toString(cell.getValueArray(), cell.getValueOffset(), + cell.getValueLength()), labelOrdinal); + isNewLabels = true; + } + } else { + String user = Bytes.toString(cell.getQualifierArray(), + cell.getQualifierOffset(), cell.getQualifierLength()); + List auths = userAuths.get(user); + if (auths == null) { + auths = new ArrayList(); + userAuths.put(user, auths); + } + if (m instanceof Delete) { + auths.remove(Integer.valueOf(labelOrdinal)); + } else { + auths.add(labelOrdinal); + } + isUserAuthsChange = true; + } + } + } + } + } + if (isNewLabels) { + byte[] serialized = VisibilityUtils.getDataToWriteToZooKeeper(existingLabels); + this.visibilityManager.writeToZookeeper(serialized, true); + } + if (isUserAuthsChange) { + byte[] serialized = VisibilityUtils.getUserAuthsDataToWriteToZooKeeper(userAuths); + this.visibilityManager.writeToZookeeper(serialized, false); + } + } + } + + private Pair, Map>> extractLabelsAndAuths( + List> labelDetails) { + Map labels = new HashMap(); + Map> userAuths = new HashMap>(); + for (List cells : labelDetails) { + for (Cell cell : cells) { + if (Bytes.equals(cell.getQualifierArray(), cell.getQualifierOffset(), + cell.getQualifierLength(), LABEL_QUALIFIER, 
0, LABEL_QUALIFIER.length)) {
        // Label cell: row key is the int ordinal, cell value is the label string.
        labels.put(
            Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()),
            Bytes.toInt(cell.getRowArray(), cell.getRowOffset()));
      } else {
        // Any other qualifier is a user name; the cell grants that user auth for
        // the label whose ordinal is this row's key.
        String user = Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(),
            cell.getQualifierLength());
        List auths = userAuths.get(user);
        if (auths == null) {
          auths = new ArrayList();
          userAuths.put(user, auths);
        }
        auths.add(Bytes.toInt(cell.getRowArray(), cell.getRowOffset()));
      }
    }
  }
  return new Pair, Map>>(labels, userAuths);
}

// Checks whether cell contains any tag with type as VISIBILITY_TAG_TYPE.
// This tag type is reserved and should not be explicitly set by user.
// Returns false when such a reserved tag is present (i.e. the cell fails the sanity check),
// true otherwise.
private boolean checkForReservedVisibilityTagPresence(Cell cell) throws IOException {
  if (cell.getTagsLength() > 0) {
    KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
    Iterator tagsIterator = kv.tagsIterator();
    while (tagsIterator.hasNext()) {
      if (tagsIterator.next().getType() == VisibilityUtils.VISIBILITY_TAG_TYPE) {
        return false;
      }
    }
  }
  return true;
}

/**
 * Parses the given visibility expression and converts it into visibility tags.
 * The expression is expanded first; each top-level OR branch then becomes its own
 * tag whose payload is the vint-encoded label ordinals for that branch (NOT nodes
 * are written as negative ordinals by writeLabelOrdinalsToStream).
 *
 * @param visibilityLabelsExp raw expression string taken from the mutation attribute
 * @return list of tags of type VISIBILITY_TAG_TYPE
 * @throws ParseException propagated from the expression parser on a malformed expression
 * @throws InvalidLabelException when a referenced label has no ordinal in the system
 */
private List createVisibilityTags(String visibilityLabelsExp) throws IOException,
    ParseException, InvalidLabelException {
  ExpressionNode node = null;
  node = this.expressionParser.parse(visibilityLabelsExp);
  node = this.expressionExpander.expand(node);
  List tags = new ArrayList();
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream dos = new DataOutputStream(baos);
  if (node.isSingleNode()) {
    writeLabelOrdinalsToStream(node, dos);
    tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray()));
    baos.reset();
  } else {
    NonLeafExpressionNode nlNode = (NonLeafExpressionNode) node;
    if (nlNode.getOperator() == Operator.OR) {
      for (ExpressionNode child : nlNode.getChildExps()) {
        writeLabelOrdinalsToStream(child, dos);
        // One tag per OR branch; matching any single branch is enough to see the cell.
        tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE,
baos.toByteArray())); + baos.reset(); + } + } else { + writeLabelOrdinalsToStream(nlNode, dos); + tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray())); + baos.reset(); + } + } + return tags; + } + + private void writeLabelOrdinalsToStream(ExpressionNode node, DataOutputStream dos) + throws IOException, InvalidLabelException { + if (node.isSingleNode()) { + String identifier = null; + int labelOrdinal = 0; + if (node instanceof LeafExpressionNode) { + identifier = ((LeafExpressionNode) node) + .getIdentifier(); + labelOrdinal = this.visibilityManager.getLabelOrdinal(identifier); + } else { + // This is a NOT node. + LeafExpressionNode lNode = (LeafExpressionNode) ((NonLeafExpressionNode) node) + .getChildExps().get(0); + identifier = lNode.getIdentifier(); + labelOrdinal = this.visibilityManager.getLabelOrdinal(identifier); + labelOrdinal = -1 * labelOrdinal; // Store NOT node as -ve ordinal. + } + if (labelOrdinal == 0) { + throw new InvalidLabelException("Invalid visibility label " + identifier); + } + WritableUtils.writeVInt(dos, labelOrdinal); + } else { + List childExps = ((NonLeafExpressionNode) node).getChildExps(); + for (ExpressionNode child : childExps) { + writeLabelOrdinalsToStream(child, dos); + } + } + } + + @Override + public RegionScanner preScannerOpen(ObserverContext e, Scan scan, + RegionScanner s) throws IOException { + Filter visibilityLabelFilter = createVisibilityLabelFilter(e.getEnvironment().getRegion(), scan); + if (visibilityLabelFilter != null) { + Filter filter = scan.getFilter(); + if (filter != null) { + scan.setFilter(new FilterList(filter, visibilityLabelFilter)); + } else { + scan.setFilter(visibilityLabelFilter); + } + } + return s; + } + + @Override + public void preGetOp(ObserverContext e, Get get, List results) + throws IOException { + Filter visibilityLabelFilter = createVisibilityLabelFilter(e.getEnvironment().getRegion(), get); + if (visibilityLabelFilter != null) { + Filter filter = get.getFilter(); + 
if (filter != null) {
      get.setFilter(new FilterList(filter, visibilityLabelFilter));
    } else {
      get.setFilter(visibilityLabelFilter);
    }
  }
}

/**
 * Builds the VisibilityLabelFilter to be chained onto the given Scan/Get, or returns
 * null when no visibility filtering is needed for this operation.
 *
 * For the labels table itself: an operation carrying no Authorizations attribute gets an
 * empty-BitSet filter (sees nothing protected). For other non-system tables the auth set
 * comes from the configured ScanLabelGenerator, resolved to label ordinals.
 *
 * @param region region being read, used to decide which table policy applies
 * @param op the Scan or Get carrying the (optional) authorizations attribute
 * @return the filter to apply, or null when none applies
 */
private Filter createVisibilityLabelFilter(HRegion region, OperationWithAttributes op) {
  Filter visibilityLabelFilter = null;
  if (region.getRegionInfo().getTable().equals(LABELS_TABLE_NAME)) {
    if (op.getAttribute(VISIBILITY_LABELS_ATTR_KEY) == null) {
      // No Authorizations present for this scan/Get! Create an empty auth set
      visibilityLabelFilter = new VisibilityLabelFilter(new BitSet(0));
    }
  } else if (!region.getRegionInfo().getTable().isSystemTable()) {
    if (this.scanLabelGenerator != null) {
      List labels = null;
      try {
        // Best-effort: a failing label generator must not abort the read, so swallow
        // and log; labels stays null and no filter is applied.
        labels = this.scanLabelGenerator.getLabels(getActiveUser(), op);
      } catch (Throwable t) {
        LOG.error(t);
      }
      if (labels != null) {
        int labelsCount = this.visibilityManager.getLabelsCount();
        BitSet bs = new BitSet(labelsCount + 1); // ordinal is index 1 based
        for (String label : labels) {
          int labelOrdinal = this.visibilityManager.getLabelOrdinal(label);
          if (labelOrdinal != 0) {
            // 0 means the label is unknown to the system; silently drop it.
            bs.set(labelOrdinal);
          }
        }
        visibilityLabelFilter = new VisibilityLabelFilter(bs);
      }
    }
  }
  return visibilityLabelFilter;
}

/**
 * Returns the user for the current RPC request, falling back to the system user
 * when not inside an RPC handler (e.g. internal operations).
 */
private User getActiveUser() throws IOException {
  User user = RequestContext.getRequestUser();
  if (!RequestContext.isInRequestContext()) {
    // for non-rpc handling, fallback to system user
    user = User.getCurrent();
  }
  return user;
}

/**
 * Returns the system (current process) user plus all users configured under
 * AccessControlLists.SUPERUSER_CONF_KEY.
 * NOTE(review): method name typo — "Syetm" should be "System"; renaming requires
 * updating all call sites, so left as-is here.
 *
 * @throws IOException when the current user cannot be determined
 */
private List getSyetmAndSuperUsers() throws IOException {
  User user = User.getCurrent();
  if (user == null) {
    throw new IOException("Unable to obtain the current user, "
        + "authorization checks for internal operations will not work correctly!");
  }

  String currentUser = user.getShortName();
  List superUsers = Lists.asList(currentUser,
      this.conf.getStrings(AccessControlLists.SUPERUSER_CONF_KEY, new String[0]));
  return superUsers;
}

// True when the active user is the process user or a configured superuser.
private boolean isSystemOrSuperUser() throws IOException {
List superUsers = getSyetmAndSuperUsers(); + User activeUser = getActiveUser(); + return superUsers.contains(activeUser.getShortName()); + } + + @Override + public Service getService() { + return VisibilityLabelsProtos.VisibilityLabelsService.newReflectiveService(this); + } + + /****************************** VisibilityEndpoint service related methods ******************************/ + @Override + public synchronized void addLabels(RpcController controller, VisibilityLabelsRequest request, + RpcCallback done) { + VisibilityLabelsResponse.Builder response = VisibilityLabelsResponse.newBuilder(); + List labels = request.getVisLabelList(); + try { + List puts = new ArrayList(labels.size()); + RegionActionResult successResult = RegionActionResult.newBuilder().build(); + for (VisibilityLabel visLabel : labels) { + byte[] label = visLabel.getLabel().toByteArray(); + String labelStr = Bytes.toString(label); + if (VisibilityLabelsValidator.isValidLabel(label)) { + if (this.visibilityManager.getLabelOrdinal(labelStr) > 0) { + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new LabelAlreadyExistsException("Label '" + labelStr + + "' already exists"))); + response.addResult(failureResultBuilder.build()); + } else { + Put p = new Put(Bytes.toBytes(ordinalCounter)); + p.add(LABELS_TABLE_FAMILY, LABEL_QUALIFIER, label, LABELS_TABLE_TAGS); + puts.add(p); + ordinalCounter++; + response.addResult(successResult); + } + } else { + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new InvalidLabelException("Invalid visibility label '" + labelStr + + "'"))); + response.addResult(failureResultBuilder.build()); + } + } + OperationStatus[] opStatus = this.regionEnv.getRegion().batchMutate( + puts.toArray(new Mutation[puts.size()])); + int i = 0; + for (OperationStatus status : 
opStatus) { + if (status.getOperationStatusCode() != SUCCESS) { + while (response.getResult(i) != successResult) + i++; + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new DoNotRetryIOException(status.getExceptionMsg()))); + response.setResult(i, failureResultBuilder.build()); + } + i++; + } + } catch (IOException e) { + LOG.error(e); + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter.buildException(e)); + RegionActionResult failureResult = failureResultBuilder.build(); + for (int i = 0; i < labels.size(); i++) { + response.setResult(i, failureResult); + } + } + done.run(response.build()); + } + + private void performACLCheck() + throws IOException { + // Do ACL check only when the security is enabled. + if (this.acOn && !isSystemOrSuperUser()) { + User user = getActiveUser(); + throw new AccessDeniedException("User '" + (user != null ? 
user.getShortName() : "null") + " is not authorized to perform this action.");
  }
}

/**
 * Scans the entire labels region and returns every row's cells. Each inner List
 * holds the cells of one label row (the label cell plus per-user auth cells);
 * extractLabelsAndAuths splits them apart.
 *
 * @return all rows of the labels table, one cell-list per row
 */
private List> getExistingLabelsWithAuths() throws IOException {
  Scan scan = new Scan();
  RegionScanner scanner = this.regionEnv.getRegion().getScanner(scan);
  List> existingLabels = new ArrayList>();
  try {
    while (true) {
      List cells = new ArrayList();
      scanner.next(cells);
      if (cells.isEmpty()) {
        // Scanner exhausted.
        break;
      }
      existingLabels.add(cells);
    }
  } finally {
    scanner.close();
  }
  return existingLabels;
}

/**
 * Endpoint RPC: grants the requested label authorizations to the given user.
 * For each auth, a Put is written into the label's row (row key = label ordinal,
 * qualifier = user name). Unknown labels get a per-auth failure result; batch
 * failures are mapped back onto the matching success slots below.
 */
@Override
public synchronized void setAuths(RpcController controller, SetAuthsRequest request,
    RpcCallback done) {
  VisibilityLabelsResponse.Builder response = VisibilityLabelsResponse.newBuilder();
  List auths = request.getAuthList();
  byte[] user = request.getUser().toByteArray();
  try {
    List puts = new ArrayList(auths.size());
    RegionActionResult successResult = RegionActionResult.newBuilder().build();
    for (ByteString authBS : auths) {
      byte[] auth = authBS.toByteArray();
      String authStr = Bytes.toString(auth);
      int labelOrdinal = this.visibilityManager.getLabelOrdinal(authStr);
      if (labelOrdinal == 0) {
        // This label is not yet added.
1st this should be added to the system + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new InvalidLabelException("Label '" + authStr + "' doesn't exist"))); + response.addResult(failureResultBuilder.build()); + } else { + Put p = new Put(Bytes.toBytes(labelOrdinal)); + p.add(LABELS_TABLE_FAMILY, user, DUMMY_VALUE, LABELS_TABLE_TAGS); + puts.add(p); + response.addResult(successResult); + } + } + OperationStatus[] opStatus = this.regionEnv.getRegion().batchMutate( + puts.toArray(new Mutation[puts.size()])); + int i = 0; + for (OperationStatus status : opStatus) { + if (status.getOperationStatusCode() != SUCCESS) { + while (response.getResult(i) != successResult) i++; + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new DoNotRetryIOException(status.getExceptionMsg()))); + response.setResult(i, failureResultBuilder.build()); + } + i++; + } + } catch (IOException e) { + LOG.error(e); + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter.buildException(e)); + RegionActionResult failureResult = failureResultBuilder.build(); + for (int i = 0; i < auths.size(); i++) { + response.setResult(i, failureResult); + } + } + done.run(response.build()); + } + + @Override + public synchronized void getAuths(RpcController controller, GetAuthsRequest request, + RpcCallback done) { + byte[] user = request.getUser().toByteArray(); + GetAuthsResponse.Builder response = GetAuthsResponse.newBuilder(); + response.setUser(request.getUser()); + + Scan s = new Scan(); + s.addColumn(LABELS_TABLE_FAMILY, user); + s.setAuthorizations(new Authorizations(SYSTEM_LABEL)); + try { + performACLCheck(); + RegionScanner scanner = this.regionEnv.getRegion().getScanner(s); + List results = new ArrayList(1); + 
while (true) { + scanner.next(results); + if (results.isEmpty()) break; + Cell cell = results.get(0); + int ordinal = Bytes.toInt(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); + String label = this.visibilityManager.getLabel(ordinal); + if (label != null) { + response.addAuth(ByteString.copyFrom(Bytes.toBytes(label))); + } + results.clear(); + } + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); + } + done.run(response.build()); + } + + @Override + public synchronized void clearAuths(RpcController controller, SetAuthsRequest request, + RpcCallback done) { + VisibilityLabelsResponse.Builder response = VisibilityLabelsResponse.newBuilder(); + List auths = request.getAuthList(); + byte[] user = request.getUser().toByteArray(); + List currentAuths = this.visibilityManager.getAuths(Bytes.toString(user)); + try { + List deletes = new ArrayList(auths.size()); + RegionActionResult successResult = RegionActionResult.newBuilder().build(); + for (ByteString authBS : auths) { + byte[] auth = authBS.toByteArray(); + String authStr = Bytes.toString(auth); + if (currentAuths.contains(authStr)) { + int labelOrdinal = this.visibilityManager.getLabelOrdinal(authStr); + assert labelOrdinal > 0; + Delete d = new Delete(Bytes.toBytes(labelOrdinal)); + d.deleteColumns(LABELS_TABLE_FAMILY, user); + deletes.add(d); + response.addResult(successResult); + } else { + // This label is not set for the user. 
+ RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new InvalidLabelException("Label '" + authStr + + "' is not set for the user " + Bytes.toString(user)))); + response.addResult(failureResultBuilder.build()); + } + } + OperationStatus[] opStatus = this.regionEnv.getRegion().batchMutate( + deletes.toArray(new Mutation[deletes.size()])); + int i = 0; + for (OperationStatus status : opStatus) { + if (status.getOperationStatusCode() != SUCCESS) { + while (response.getResult(i) != successResult) i++; + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new DoNotRetryIOException(status.getExceptionMsg()))); + response.setResult(i, failureResultBuilder.build()); + } + i++; + } + } catch (IOException e) { + LOG.error(e); + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter.buildException(e)); + RegionActionResult failureResult = failureResultBuilder.build(); + for (int i = 0; i < auths.size(); i++) { + response.setResult(i, failureResult); + } + } + done.run(response.build()); + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java new file mode 100644 index 0000000..9e84398 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java @@ -0,0 +1,84 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.io.IOException; +import java.util.BitSet; +import java.util.Iterator; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.filter.FilterBase; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.io.WritableUtils; + +/** + * This Filter checks the visibility expression with each KV against visibility labels associated + * with the scan. Based on the check the KV is included in the scan result or gets filtered out. 
 */
@InterfaceAudience.Private
class VisibilityLabelFilter extends FilterBase {

  // Bit i set <=> the scan is authorized for the label whose ordinal is i
  // (ordinals are 1-based; bit 0 is unused).
  private BitSet authLabels;

  public VisibilityLabelFilter(BitSet authLabels) {
    this.authLabels = authLabels;
  }

  /**
   * Evaluates the cell's visibility tags against the scan's auth BitSet.
   * Each VISIBILITY_TAG_TYPE tag holds one AND-clause of vint-encoded label
   * ordinals (negative ordinal = NOT of that label). The cell is included as
   * soon as one such clause is fully satisfied; otherwise it is skipped.
   * Cells with no visibility tag are always included.
   */
  @Override
  public ReturnCode filterKeyValue(Cell cell) throws IOException {
    KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
    Iterator tagsItr = kv.tagsIterator();
    while (tagsItr.hasNext()) {
      boolean includeKV = true;
      Tag tag = tagsItr.next();
      if (tag.getType() == VisibilityUtils.VISIBILITY_TAG_TYPE) {
        int offset = tag.getTagOffset();
        int endOffset = offset + tag.getTagLength();
        while (offset < endOffset) {
          int currLabelOrdinal = (int) Bytes.readVLong(tag.getBuffer(), offset);
          if (currLabelOrdinal < 0) {
            // check for the absence of this label in the Scan Auth labels
            // ie. to check BitSet corresponding bit is 0
            int temp = -currLabelOrdinal;
            if (this.authLabels.get(temp)) {
              includeKV = false;
              break;
            }
          } else {
            if (!this.authLabels.get(currLabelOrdinal)) {
              includeKV = false;
              break;
            }
          }
          offset += WritableUtils.getVIntSize(currLabelOrdinal);
        }
        if (includeKV) {
          // We got one visibility expression getting evaluated to true. Good to include this KV in
          // the result then.
          return ReturnCode.INCLUDE;
        }
        return ReturnCode.SKIP;
      }
    }
    return ReturnCode.INCLUDE;
  }
}
\ No newline at end of file
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsManager.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsManager.java
new file mode 100644
index 0000000..7f1f278
--- /dev/null
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsManager.java
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.zookeeper.KeeperException; + +/** + * Maintains the cache for visibility labels and also uses the zookeeper to update the labels in the + * system. 
The cache updation happens based on the data change event that happens on the zookeeper + * znode for labels table + */ +@InterfaceAudience.Private +public class VisibilityLabelsManager { + + private static final Log LOG = LogFactory.getLog(VisibilityLabelsManager.class); + private static final List EMPTY_LIST = new ArrayList(0); + private static VisibilityLabelsManager instance; + + private ZKVisibilityLabelWatcher zkVisibilityWatcher; + private Map labels = new HashMap(); + private Map ordinalVsLabels = new HashMap(); + private Map> userAuths = new HashMap>(); + private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); + + private VisibilityLabelsManager(ZooKeeperWatcher watcher, Configuration conf) throws IOException { + zkVisibilityWatcher = new ZKVisibilityLabelWatcher(watcher, this, conf); + try { + zkVisibilityWatcher.start(); + } catch (KeeperException ke) { + LOG.error("ZooKeeper initialization failed", ke); + throw new IOException(ke); + } + } + + public synchronized static VisibilityLabelsManager get(ZooKeeperWatcher watcher, + Configuration conf) throws IOException { + if (instance == null) { + instance = new VisibilityLabelsManager(watcher, conf); + } + return instance; + } + + public static VisibilityLabelsManager get() { + return instance; + } + + public void refreshLabelsCache(byte[] data) throws IOException { + List visibilityLabels = null; + try { + visibilityLabels = VisibilityUtils.readLabelsFromZKData(data); + } catch (DeserializationException dse) { + throw new IOException(dse); + } + this.lock.writeLock().lock(); + try { + for (VisibilityLabel visLabel : visibilityLabels) { + String label = Bytes.toString(visLabel.getLabel().toByteArray()); + labels.put(label, visLabel.getOrdinal()); + ordinalVsLabels.put(visLabel.getOrdinal(), label); + } + } finally { + this.lock.writeLock().unlock(); + } + } + + public void refreshUserAuthsCache(byte[] data) throws IOException { + MultiUserAuthorizations multiUserAuths = null; + try { + 
multiUserAuths = VisibilityUtils.readUserAuthsFromZKData(data); + } catch (DeserializationException dse) { + throw new IOException(dse); + } + this.lock.writeLock().lock(); + try { + for (UserAuthorizations userAuths : multiUserAuths.getUserAuthsList()) { + String user = Bytes.toString(userAuths.getUser().toByteArray()); + this.userAuths.put(user, new HashSet(userAuths.getAuthList())); + } + } finally { + this.lock.writeLock().unlock(); + } + } + + /** + * @param label + * @return The ordinal for the label. The ordinal starts from 1. Returns 0 when the passed a non + * existing label. + */ + public int getLabelOrdinal(String label) { + Integer ordinal = null; + this.lock.readLock().lock(); + try { + ordinal = labels.get(label); + } finally { + this.lock.readLock().unlock(); + } + if (ordinal != null) { + return ordinal.intValue(); + } + // 0 denotes not available + return 0; + } + + public String getLabel(int ordinal) { + this.lock.readLock().lock(); + try { + return this.ordinalVsLabels.get(ordinal); + } finally { + this.lock.readLock().unlock(); + } + } + + /** + * @return The total number of visibility labels. + */ + public int getLabelsCount(){ + return this.labels.size(); + } + + /** + * @param user + * @return The labels that the given user is authorized for. + */ + public List getAuths(String user) { + List auths = EMPTY_LIST; + this.lock.readLock().lock(); + try { + Set authOrdinals = userAuths.get(user); + if (authOrdinals != null) { + auths = new ArrayList(authOrdinals.size()); + for (Integer authOrdinal : authOrdinals) { + auths.add(ordinalVsLabels.get(authOrdinal)); + } + } + } finally { + this.lock.readLock().unlock(); + } + return auths; + } + + /** + * Writes the labels data to zookeeper node. + * @param data + * @param labelsOrUserAuths true for writing labels and false for user auths. 
+ */ + public void writeToZookeeper(byte[] data, boolean labelsOrUserAuths) { + this.zkVisibilityWatcher.writeToZookeeper(data, labelsOrUserAuths); + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java new file mode 100644 index 0000000..fab5d54 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java @@ -0,0 +1,133 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.util.ReflectionUtils; + +import com.google.protobuf.ByteString; +import com.google.protobuf.InvalidProtocolBufferException; + +/** + * Utility method to support visibility + */ +@InterfaceAudience.Private +public class VisibilityUtils { + + public static final String VISIBILITY_LABEL_GENERATOR_CLASS = + "hbase.regionserver.scan.visibility.label.generator.class"; + public static final byte VISIBILITY_TAG_TYPE = (byte) 2; + public static final String SYSTEM_LABEL = "system"; + + /** + * Creates the labels data to be written to zookeeper. 
+ * @param existingLabels + * @return + */ + public static byte[] getDataToWriteToZooKeeper(Map existingLabels) { + VisibilityLabelsRequest.Builder visReqBuilder = VisibilityLabelsRequest.newBuilder(); + for (Entry entry : existingLabels.entrySet()) { + VisibilityLabel.Builder visLabBuilder = VisibilityLabel.newBuilder(); + visLabBuilder.setLabel(ByteString.copyFrom(Bytes.toBytes(entry.getKey()))); + visLabBuilder.setOrdinal(entry.getValue()); + visReqBuilder.addVisLabel(visLabBuilder.build()); + } + return ProtobufUtil.prependPBMagic(visReqBuilder.build().toByteArray()); + } + + /** + * Creates the user auth data to be written to zookeeper. + * @param userAuths + * @return + */ + public static byte[] getUserAuthsDataToWriteToZooKeeper(Map> userAuths) { + MultiUserAuthorizations.Builder builder = MultiUserAuthorizations.newBuilder(); + for (Entry> entry : userAuths.entrySet()) { + UserAuthorizations.Builder userAuthsBuilder = UserAuthorizations.newBuilder(); + userAuthsBuilder.setUser(ByteString.copyFrom(Bytes.toBytes(entry.getKey()))); + for (Integer label : entry.getValue()) { + userAuthsBuilder.addAuth(label); + } + builder.addUserAuths(userAuthsBuilder.build()); + } + return ProtobufUtil.prependPBMagic(builder.build().toByteArray()); + } + + /** + * Reads back from the zookeeper. The data read here is of the form written by + * writeToZooKeeper(Map entries). + * + * @param data + * @return + * @throws DeserializationException + */ + public static List readLabelsFromZKData(byte[] data) + throws DeserializationException { + if (ProtobufUtil.isPBMagicPrefix(data)) { + int pblen = ProtobufUtil.lengthOfPBMagic(); + try { + VisibilityLabelsRequest request = VisibilityLabelsRequest.newBuilder() + .mergeFrom(data, pblen, data.length - pblen).build(); + return request.getVisLabelList(); + } catch (InvalidProtocolBufferException e) { + throw new DeserializationException(e); + } + } + return null; + } + + /** + * Reads back User auth data written to zookeeper. 
+ * @param data + * @return + * @throws DeserializationException + */ + public static MultiUserAuthorizations readUserAuthsFromZKData(byte[] data) + throws DeserializationException { + if (ProtobufUtil.isPBMagicPrefix(data)) { + int pblen = ProtobufUtil.lengthOfPBMagic(); + try { + MultiUserAuthorizations multiUserAuths = MultiUserAuthorizations.newBuilder() + .mergeFrom(data, pblen, data.length - pblen).build(); + return multiUserAuths; + } catch (InvalidProtocolBufferException e) { + throw new DeserializationException(e); + } + } + return null; + } + + public static ScanLabelGenerator getScanLabelGenerator(Configuration conf) { + Class scanLabelGeneratorKlass = conf + .getClass(VISIBILITY_LABEL_GENERATOR_CLASS, DefaultScanLabelGenerator.class, + ScanLabelGenerator.class); + return ReflectionUtils.newInstance(scanLabelGeneratorKlass, conf); + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java new file mode 100644 index 0000000..94c2bfe --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java @@ -0,0 +1,142 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.io.IOException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.zookeeper.ZKUtil; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperListener; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.zookeeper.KeeperException; + +/** + * A zk watcher that watches the labels table znode. This would create a znode + * /hbase/visibility_labels and will have a serialized form of a set of labels in the system. + */ +@InterfaceAudience.Private +public class ZKVisibilityLabelWatcher extends ZooKeeperListener { + + private static final Log LOG = LogFactory.getLog(ZKVisibilityLabelWatcher.class); + private static final String VISIBILITY_LABEL_ZK_PATH = "zookeeper.znode.visibility.label.parent"; + private static final String DEFAULT_VISIBILITY_LABEL_NODE = "visibility/labels"; + private static final String VISIBILITY_USER_AUTHS_ZK_PATH = + "zookeeper.znode.visibility.user.auths.parent"; + private static final String DEFAULT_VISIBILITY_USER_AUTHS_NODE = "visibility/user_auths"; + + private VisibilityLabelsManager labelsManager; + private String labelZnode; + private String userAuthsZnode; + + public ZKVisibilityLabelWatcher(ZooKeeperWatcher watcher, VisibilityLabelsManager labelsManager, + Configuration conf) { + super(watcher); + this.labelsManager = labelsManager; + String labelZnodeParent = conf.get(VISIBILITY_LABEL_ZK_PATH, DEFAULT_VISIBILITY_LABEL_NODE); + String userAuthsZnodeParent = conf.get(VISIBILITY_USER_AUTHS_ZK_PATH, + DEFAULT_VISIBILITY_USER_AUTHS_NODE); + this.labelZnode = ZKUtil.joinZNode(watcher.baseZNode, labelZnodeParent); + this.userAuthsZnode = ZKUtil.joinZNode(watcher.baseZNode, 
userAuthsZnodeParent); + } + + public void start() throws KeeperException { + watcher.registerListener(this); + ZKUtil.watchAndCheckExists(watcher, labelZnode); + ZKUtil.watchAndCheckExists(watcher, userAuthsZnode); + } + + private void refreshVisibilityLabelsCache(byte[] data) { + try { + this.labelsManager.refreshLabelsCache(data); + } catch (IOException ioe) { + LOG.error("Failed parsing data from labels table " + " from zk", ioe); + } + } + + private void refreshUserAuthsCache(byte[] data) { + try { + this.labelsManager.refreshUserAuthsCache(data); + } catch (IOException ioe) { + LOG.error("Failed parsing data from labels table " + " from zk", ioe); + } + } + + @Override + public void nodeCreated(String path) { + if (path.equals(labelZnode) || path.equals(userAuthsZnode)) { + try { + ZKUtil.watchAndCheckExists(watcher, path); + } catch (KeeperException ke) { + LOG.error("Error setting watcher on node " + path, ke); + // only option is to abort + watcher.abort("Zookeeper error obtaining label node children", ke); + } + } + } + + @Override + public void nodeDeleted(String path) { + // There is no case of visibility labels path to get deleted. + } + + @Override + public void nodeDataChanged(String path) { + if (path.equals(labelZnode) || path.equals(userAuthsZnode)) { + try { + byte[] data = ZKUtil.getDataAndWatch(watcher, path); + if (path.equals(labelZnode)) { + refreshVisibilityLabelsCache(data); + } else { + refreshUserAuthsCache(data); + } + } catch (KeeperException ke) { + LOG.error("Error reading data from zookeeper for node " + path, ke); + // only option is to abort + watcher.abort("Zookeeper error getting data for node " + path, ke); + } + } + } + + @Override + public void nodeChildrenChanged(String path) { + // We are not dealing with child nodes under the label znode or userauths znode. 
+ } + + /** + * Write a labels mirror or user auths mirror into zookeeper + * + * @param data + * @param labelsOrUserAuths true for writing labels and false for user auths. + */ + public void writeToZookeeper(byte[] data, boolean labelsOrUserAuths) { + String znode = this.labelZnode; + if (!labelsOrUserAuths) { + znode = this.userAuthsZnode; + } + try { + ZKUtil.createWithParents(watcher, znode); + ZKUtil.updateExistingNodeData(watcher, znode, data, -1); + } catch (KeeperException e) { + LOG.error("Failed labels entry '" + data + "'", e); + watcher.abort("Failed writing node " + znode + " to zookeeper", e); + } + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/ExpressionNode.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/ExpressionNode.java new file mode 100644 index 0000000..fb39e2d --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/ExpressionNode.java @@ -0,0 +1,27 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility.expression; + +import org.apache.hadoop.classification.InterfaceAudience; + +@InterfaceAudience.Private +public interface ExpressionNode { + boolean isSingleNode(); + + ExpressionNode deepClone(); +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java new file mode 100644 index 0000000..4e2d351 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java @@ -0,0 +1,65 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility.expression; + +import org.apache.hadoop.classification.InterfaceAudience; + +@InterfaceAudience.Private +public class LeafExpressionNode implements ExpressionNode { + public static final LeafExpressionNode OPEN_PARAN_NODE = new LeafExpressionNode("("); + public static final LeafExpressionNode CLOSE_PARAN_NODE = new LeafExpressionNode(")"); + + private String identifier; + + public LeafExpressionNode(String identifier) { + this.identifier = identifier; + } + + public String getIdentifier() { + return this.identifier; + } + + @Override + public int hashCode() { + return this.identifier.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof LeafExpressionNode) { + LeafExpressionNode that = (LeafExpressionNode) obj; + return this.identifier.equals(that.identifier); + } + return false; + } + + @Override + public String toString() { + return this.identifier; + } + + @Override + public boolean isSingleNode() { + return true; + } + + public LeafExpressionNode deepClone() { + LeafExpressionNode clone = new LeafExpressionNode(this.identifier); + return clone; + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java new file mode 100644 index 0000000..03def94 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java @@ -0,0 +1,102 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility.expression; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; + +@InterfaceAudience.Private +public class NonLeafExpressionNode implements ExpressionNode { + private Operator op; + private List childExps = new ArrayList(2); + + public NonLeafExpressionNode() { + + } + + public NonLeafExpressionNode(Operator op) { + this.op = op; + } + + public NonLeafExpressionNode(Operator op, List exps) { + this.op = op; + if (op == Operator.NOT && exps.size() > 1) { + throw new IllegalArgumentException(Operator.NOT + " should be on 1 child expression"); + } + this.childExps = exps; + } + + public NonLeafExpressionNode(Operator op, ExpressionNode... 
exps) { + this.op = op; + List expLst = new ArrayList(); + for (ExpressionNode exp : exps) { + expLst.add(exp); + } + this.childExps = expLst; + } + + public Operator getOperator() { + return op; + } + + public List getChildExps() { + return childExps; + } + + public void addChildExp(ExpressionNode exp) { + if (op == Operator.NOT && this.childExps.size() == 1) { + throw new IllegalStateException(Operator.NOT + " should be on 1 child expression"); + } + this.childExps.add(exp); + } + + public void addChildExps(List exps) { + this.childExps.addAll(exps); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("("); + if (this.op == Operator.NOT) { + sb.append(this.op); + } + for (int i = 0; i < this.childExps.size(); i++) { + sb.append(childExps.get(i)); + if (i < this.childExps.size() - 1) { + sb.append(" " + this.op + " "); + } + } + sb.append(")"); + return sb.toString(); + } + + @Override + public boolean isSingleNode() { + return this.op == Operator.NOT; + } + + public NonLeafExpressionNode deepClone() { + NonLeafExpressionNode clone = new NonLeafExpressionNode(this.op); + for (ExpressionNode exp : this.childExps) { + clone.addChildExp(exp.deepClone()); + } + return clone; + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java new file mode 100644 index 0000000..6f47b50 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility.expression; + +public enum Operator { + AND('&'), OR('|'), NOT('!'); + + private char rep; + + private Operator(char rep) { + this.rep = rep; + } + + public String toString() { + return String.valueOf(this.rep); + }; +} diff --git hbase-server/src/main/resources/org/apache/hadoop/hbase/rest/protobuf/ScannerMessage.proto hbase-server/src/main/resources/org/apache/hadoop/hbase/rest/protobuf/ScannerMessage.proto index 85d6024..ed8f14a 100644 --- hbase-server/src/main/resources/org/apache/hadoop/hbase/rest/protobuf/ScannerMessage.proto +++ hbase-server/src/main/resources/org/apache/hadoop/hbase/rest/protobuf/ScannerMessage.proto @@ -27,4 +27,5 @@ message Scanner { optional int32 maxVersions = 7; optional string filter = 8; optional int32 caching = 9; + repeated string labels = 10; } diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java new file mode 100644 index 0000000..f629aa4 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java @@ -0,0 +1,210 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements.
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */package org.apache.hadoop.hbase.rest; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.StringWriter; +import java.util.Iterator; + +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBException; +import javax.xml.bind.Marshaller; +import javax.xml.bind.Unmarshaller; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Durability; +import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.rest.client.Client; +import org.apache.hadoop.hbase.rest.client.Cluster; +import org.apache.hadoop.hbase.rest.client.Response; +import org.apache.hadoop.hbase.rest.model.CellModel; +import org.apache.hadoop.hbase.rest.model.CellSetModel; +import org.apache.hadoop.hbase.rest.model.RowModel; +import 
org.apache.hadoop.hbase.rest.model.ScannerModel; +import org.apache.hadoop.hbase.security.visibility.VisibilityClient; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; +import org.apache.hadoop.hbase.security.visibility.VisibilityController; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(MediumTests.class) +public class TestScannersWithLabels { + private static final String TABLE = "TestScannersWithLabels"; + private static final String CFA = "a"; + private static final String CFB = "b"; + private static final String COLUMN_1 = CFA + ":1"; + private static final String COLUMN_2 = CFB + ":2"; + private final static String TOPSECRET = "topsecret"; + private final static String PUBLIC = "public"; + private final static String PRIVATE = "private"; + private final static String CONFIDENTIAL = "confidential"; + private final static String SECRET = "secret"; + + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + private static final HBaseRESTTestingUtility REST_TEST_UTIL = new HBaseRESTTestingUtility(); + private static Client client; + private static JAXBContext context; + private static Marshaller marshaller; + private static Unmarshaller unmarshaller; + private static Configuration conf; + + private static int insertData(String tableName, String column, double prob) throws IOException { + int count = 0; + HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + byte[] k = new byte[3]; + byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column)); + + for (int i = 0; i < 9; i++) { + Put put = new Put(Bytes.toBytes("row" + i)); + put.setDurability(Durability.SKIP_WAL); + put.add(famAndQf[0], famAndQf[1], k); + put.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, + Bytes.toBytes("(" + SECRET + "|" + CONFIDENTIAL + ")" + "&" + "!" 
+ TOPSECRET)); + table.put(put); + count++; + } + table.flushCommits(); + return count; + } + + private static int countCellSet(CellSetModel model) { + int count = 0; + Iterator rows = model.getRows().iterator(); + while (rows.hasNext()) { + RowModel row = rows.next(); + Iterator cells = row.getCells().iterator(); + while (cells.hasNext()) { + cells.next(); + count++; + } + } + return count; + } + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + conf = TEST_UTIL.getConfiguration(); + conf = TEST_UTIL.getConfiguration(); + conf.set("hbase.coprocessor.master.classes", VisibilityController.class.getName()); + conf.set("hbase.coprocessor.region.classes", VisibilityController.class.getName()); + TEST_UTIL.startMiniCluster(1); + // Wait for the labels table to become available + TEST_UTIL.waitTableEnabled(VisibilityConstants.LABELS_TABLE_NAME.getName(), 50000); + createLabels(); + REST_TEST_UTIL.startServletContainer(conf); + client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort())); + context = JAXBContext.newInstance(CellModel.class, CellSetModel.class, RowModel.class, + ScannerModel.class); + marshaller = context.createMarshaller(); + unmarshaller = context.createUnmarshaller(); + HBaseAdmin admin = TEST_UTIL.getHBaseAdmin(); + if (admin.tableExists(TABLE)) { + return; + } + HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE)); + htd.addFamily(new HColumnDescriptor(CFA)); + htd.addFamily(new HColumnDescriptor(CFB)); + admin.createTable(htd); + insertData(TABLE, COLUMN_1, 1.0); + insertData(TABLE, COLUMN_2, 0.5); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + REST_TEST_UTIL.shutdownServletContainer(); + TEST_UTIL.shutdownMiniCluster(); + } + + private static void createLabels() throws IOException { + String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, PUBLIC, TOPSECRET }; + try { + VisibilityClient.addLabels(conf, labels); + } catch (Throwable t) { + throw new 
IOException(t); + } + } + + @Test + public void testSimpleScannerXMLWithLabelsThatReceivesNoData() throws IOException, JAXBException { + final int BATCH_SIZE = 5; + // new scanner + ScannerModel model = new ScannerModel(); + model.setBatch(BATCH_SIZE); + model.addColumn(Bytes.toBytes(COLUMN_1)); + model.addLabel(PUBLIC); + StringWriter writer = new StringWriter(); + marshaller.marshal(model, writer); + byte[] body = Bytes.toBytes(writer.toString()); + // recall previous put operation with read-only off + conf.set("hbase.rest.readonly", "false"); + Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML, body); + assertEquals(response.getCode(), 201); + String scannerURI = response.getLocation(); + assertNotNull(scannerURI); + + // get a cell set + response = client.get(scannerURI, Constants.MIMETYPE_XML); + // Respond with 204 as there are no cells to be retrieved + assertEquals(response.getCode(), 204); + assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); + } + + @Test + public void testSimpleScannerXMLWithLabelsThatReceivesData() throws IOException, JAXBException { + // new scanner + ScannerModel model = new ScannerModel(); + model.setBatch(5); + model.addColumn(Bytes.toBytes(COLUMN_1)); + model.addLabel(SECRET); + StringWriter writer = new StringWriter(); + marshaller.marshal(model, writer); + byte[] body = Bytes.toBytes(writer.toString()); + + // recall previous put operation with read-only off + conf.set("hbase.rest.readonly", "false"); + Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML, body); + assertEquals(response.getCode(), 201); + String scannerURI = response.getLocation(); + assertNotNull(scannerURI); + + // get a cell set + response = client.get(scannerURI, Constants.MIMETYPE_XML); + // Respond with 200 as there are cells to be retrieved + assertEquals(response.getCode(), 200); + assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); + CellSetModel
cellSet = (CellSetModel) unmarshaller.unmarshal(new ByteArrayInputStream(response + .getBody())); + // the SECRET label matches the cells inserted for COLUMN_1, so all 5 cells are retrieved + assertEquals(countCellSet(cellSet), 5); + +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java index 2e2b304..c84a058 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java @@ -19,22 +19,14 @@ package org.apache.hadoop.hbase.rest.model; -import java.io.IOException; -import java.io.StringReader; -import java.io.StringWriter; - -import javax.xml.bind.JAXBContext; -import javax.xml.bind.JAXBException; - import org.apache.hadoop.hbase.SmallTests; -import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; - -import junit.framework.TestCase; import org.junit.experimental.categories.Category; @Category(SmallTests.class) public class TestScannerModel extends TestModelBase { + private static final String PRIVATE = "private"; + private static final String PUBLIC = "public"; private static final byte[] START_ROW = Bytes.toBytes("abracadabra"); private static final byte[] END_ROW = Bytes.toBytes("zzyzx"); private static final byte[] COLUMN1 = Bytes.toBytes("column1"); @@ -46,20 +38,20 @@ public class TestScannerModel extends TestModelBase { public TestScannerModel() throws Exception { super(ScannerModel.class); - AS_XML = - "" + - "" + - "Y29sdW1uMQ==Y29sdW1uMjpmb28="; + AS_XML = "" + + "" + + "Y29sdW1uMQ==Y29sdW1uMjpmb28=" + + ""; - AS_JSON = - "{\"batch\":100,\"caching\":1000,\"endRow\":\"enp5eng=\",\"endTime\":1245393318192,"+ - "\"maxVersions\":2147483647,\"startRow\":\"YWJyYWNhZGFicmE=\",\"startTime\":1245219839331,"+ - "\"column\":[\"Y29sdW1uMQ==\",\"Y29sdW1uMjpmb28=\"]}"; + AS_JSON =
"{\"batch\":100,\"caching\":1000,\"endRow\":\"enp5eng=\",\"endTime\":1245393318192," + + "\"maxVersions\":2147483647,\"startRow\":\"YWJyYWNhZGFicmE=\",\"startTime\":1245219839331," + + "\"column\":[\"Y29sdW1uMQ==\",\"Y29sdW1uMjpmb28=\"]," + +"\"labels\":[\"private\",\"public\"]}"; - AS_PB = - "CgthYnJhY2FkYWJyYRIFenp5engaB2NvbHVtbjEaC2NvbHVtbjI6Zm9vIGQo47qL554kMLDi57mf" + - "JDj/////B0joBw=="; + // TODO + AS_PB = "CgthYnJhY2FkYWJyYRIFenp5engaB2NvbHVtbjEaC2NvbHVtbjI6Zm9vIGQo47qL554kMLDi57mf" + + "JDj/////B0joBw=="; } protected ScannerModel buildTestModel() { @@ -72,6 +64,8 @@ public class TestScannerModel extends TestModelBase { model.setEndTime(END_TIME); model.setBatch(BATCH); model.setCaching(CACHING); + model.addLabel(PRIVATE); + model.addLabel(PUBLIC); return model; } @@ -79,7 +73,7 @@ public class TestScannerModel extends TestModelBase { assertTrue(Bytes.equals(model.getStartRow(), START_ROW)); assertTrue(Bytes.equals(model.getEndRow(), END_ROW)); boolean foundCol1 = false, foundCol2 = false; - for (byte[] column: model.getColumns()) { + for (byte[] column : model.getColumns()) { if (Bytes.equals(column, COLUMN1)) { foundCol1 = true; } else if (Bytes.equals(column, COLUMN2)) { @@ -92,7 +86,19 @@ public class TestScannerModel extends TestModelBase { assertEquals(model.getEndTime(), END_TIME); assertEquals(model.getBatch(), BATCH); assertEquals(model.getCaching(), CACHING); + boolean foundLabel1 = false; + boolean foundLabel2 = false; + if (model.getLabels() != null && model.getLabels().size() > 0) { + for (String label : model.getLabels()) { + if (label.equals(PRIVATE)) { + foundLabel1 = true; + } else if (label.equals(PUBLIC)) { + foundLabel2 = true; + } + } + assertTrue(foundLabel1); + assertTrue(foundLabel2); + } } } - diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java new file mode 100644 index 
0000000..ea3e892 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java @@ -0,0 +1,393 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import org.apache.hadoop.hbase.SmallTests; +import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(SmallTests.class) +public class TestExpressionExpander { + + @Test + public void testPositiveCases() throws Exception { + ExpressionExpander expander = new ExpressionExpander(); + + // (!a) -> (!a) + NonLeafExpressionNode exp1 = new NonLeafExpressionNode(Operator.NOT, + new LeafExpressionNode("a")); + ExpressionNode result = expander.expand(exp1); + assertTrue(result instanceof NonLeafExpressionNode); + NonLeafExpressionNode 
nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.NOT, nlResult.getOperator()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + + // (a | b) -> (a | b) + NonLeafExpressionNode exp2 = new NonLeafExpressionNode(Operator.OR, + new LeafExpressionNode("a"), new LeafExpressionNode("b")); + result = expander.expand(exp2); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + + // (a & b) -> (a & b) + NonLeafExpressionNode exp3 = new NonLeafExpressionNode(Operator.AND, + new LeafExpressionNode("a"), new LeafExpressionNode("b")); + result = expander.expand(exp3); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + + // ((a | b) | c) -> (a | b | c) + NonLeafExpressionNode exp4 = new NonLeafExpressionNode(Operator.OR, new NonLeafExpressionNode( + Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode("b")), + new LeafExpressionNode("c")); + result = expander.expand(exp4); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(3, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) 
nlResult.getChildExps().get(1)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier()); + + // ((a & b) & c) -> (a & b & c) + NonLeafExpressionNode exp5 = new NonLeafExpressionNode(Operator.AND, new NonLeafExpressionNode( + Operator.AND, new LeafExpressionNode("a"), new LeafExpressionNode("b")), + new LeafExpressionNode("c")); + result = expander.expand(exp5); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(3, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier()); + + // (a | b) & c -> ((a & c) | (b & c)) + NonLeafExpressionNode exp6 = new NonLeafExpressionNode(Operator.AND, new NonLeafExpressionNode( + Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode("b")), + new LeafExpressionNode("c")); + result = expander.expand(exp6); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + NonLeafExpressionNode temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + 
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // (a & b) | c -> ((a & b) | c) + NonLeafExpressionNode exp7 = new NonLeafExpressionNode(Operator.OR, new NonLeafExpressionNode( + Operator.AND, new LeafExpressionNode("a"), new LeafExpressionNode("b")), + new LeafExpressionNode("c")); + result = expander.expand(exp7); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + nlResult = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + + // ((a & b) | c) & d -> (((a & b) & d) | (c & d)) + NonLeafExpressionNode exp8 = new NonLeafExpressionNode(Operator.AND); + exp8.addChildExp(new NonLeafExpressionNode(Operator.OR, new NonLeafExpressionNode(Operator.AND, + new LeafExpressionNode("a"), new LeafExpressionNode("b")), new LeafExpressionNode("c"))); + exp8.addChildExp(new LeafExpressionNode("d")); + result = expander.expand(exp8); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) 
nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // (a | b) | (c | d) -> (a | b | c | d) + NonLeafExpressionNode exp9 = new NonLeafExpressionNode(Operator.OR); + exp9.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), + new LeafExpressionNode("b"))); + exp9.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("c"), + new LeafExpressionNode("d"))); + result = expander.expand(exp9); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(4, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) nlResult.getChildExps().get(3)).getIdentifier()); + + // (a & b) & (c & d) -> (a & b & c & d) + NonLeafExpressionNode exp10 = new NonLeafExpressionNode(Operator.AND); + exp10.addChildExp(new NonLeafExpressionNode(Operator.AND, new LeafExpressionNode("a"), + new LeafExpressionNode("b"))); + exp10.addChildExp(new NonLeafExpressionNode(Operator.AND, new LeafExpressionNode("c"), + new LeafExpressionNode("d"))); + result = expander.expand(exp10); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) 
result; + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(4, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) nlResult.getChildExps().get(3)).getIdentifier()); + + // (a | b) & (c | d) -> ((a & c) | (a & d) | (b & c) | (b & d)) + NonLeafExpressionNode exp11 = new NonLeafExpressionNode(Operator.AND); + exp11.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), + new LeafExpressionNode("b"))); + exp11.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("c"), + new LeafExpressionNode("d"))); + result = expander.expand(exp11); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(4, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) 
temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(3); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // (((a | b) | c) | d) & e -> ((a & e) | (b & e) | (c & e) | (d & e)) + NonLeafExpressionNode exp12 = new NonLeafExpressionNode(Operator.AND); + NonLeafExpressionNode tempExp1 = new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode( + "a"), new LeafExpressionNode("b")); + NonLeafExpressionNode tempExp2 = new NonLeafExpressionNode(Operator.OR, tempExp1, + new LeafExpressionNode("c")); + NonLeafExpressionNode tempExp3 = new NonLeafExpressionNode(Operator.OR, tempExp2, + new LeafExpressionNode("d")); + exp12.addChildExp(tempExp3); + exp12.addChildExp(new LeafExpressionNode("e")); + result = expander.expand(exp12); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(4, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = 
(NonLeafExpressionNode) nlResult.getChildExps().get(2); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(3); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // (a | b | c) & d -> ((a & d) | (b & d) | (c & d)) + NonLeafExpressionNode exp13 = new NonLeafExpressionNode(Operator.AND, + new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode( + "b"), new LeafExpressionNode("c")), new LeafExpressionNode("d")); + result = expander.expand(exp13); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(3, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2); + assertEquals(Operator.AND, temp.getOperator()); + 
assertEquals(2, temp.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // ((a | b) & (c | d)) & (e | f) -> (((a & c) & e) | ((a & c) & f) | ((a & d) & e) | ((a & d) & + // f) | ((b & c) & e) | ((b & c) & f) | ((b & d) & e) | ((b & d) & f)) + NonLeafExpressionNode exp15 = new NonLeafExpressionNode(Operator.AND); + NonLeafExpressionNode temp1 = new NonLeafExpressionNode(Operator.AND); + temp1.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), + new LeafExpressionNode("b"))); + temp1.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("c"), + new LeafExpressionNode("d"))); + exp15.addChildExp(temp1); + exp15.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("e"), + new LeafExpressionNode("f"))); + result = expander.expand(exp15); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(8, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("f", ((LeafExpressionNode) 
temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(3); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(4); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", 
((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(5); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(6); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(7); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // !(a | b) -> ((!a) & (!b)) + NonLeafExpressionNode exp16 = new 
NonLeafExpressionNode(Operator.NOT, + new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode( + "b"))); + result = expander.expand(exp16); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.NOT, temp.getOperator()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.NOT, temp.getOperator()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java new file mode 100644 index 0000000..f7a8dfd --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java @@ -0,0 +1,318 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.junit.Assert.fail; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import org.apache.hadoop.hbase.SmallTests; +import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(SmallTests.class) +public class TestExpressionParser { + + private ExpressionParser parser = new ExpressionParser(); + + @Test + public void testPositiveCases() throws Exception { + // abc -> (abc) + ExpressionNode node = parser.parse("abc"); + assertTrue(node instanceof LeafExpressionNode); + assertEquals("abc", ((LeafExpressionNode) node).getIdentifier()); + + // a&b|c&d -> (((a & b) | c) & d) + node = parser.parse("a&b|c&d"); + assertTrue(node instanceof NonLeafExpressionNode); + NonLeafExpressionNode nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("d", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("b",
((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // (a) -> (a) + node = parser.parse("(a)"); + assertTrue(node instanceof LeafExpressionNode); + assertEquals("a", ((LeafExpressionNode) node).getIdentifier()); + + // (a&b) -> (a & b) + node = parser.parse(" ( a & b )"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // ((((a&b)))) -> (a & b) + node = parser.parse("((((a&b))))"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // (a|b)&(cc|def) -> ((a | b) & (cc | def)) + node = parser.parse("( a | b ) & (cc|def)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + NonLeafExpressionNode nlNodeLeft = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + NonLeafExpressionNode nlNodeRight = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNodeLeft.getOperator()); + assertEquals(2, nlNodeLeft.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) 
nlNodeLeft.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNodeLeft.getChildExps().get(1)).getIdentifier()); + assertEquals(Operator.OR, nlNodeRight.getOperator()); + assertEquals(2, nlNodeRight.getChildExps().size()); + assertEquals("cc", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + assertEquals("def", ((LeafExpressionNode) nlNodeRight.getChildExps().get(1)).getIdentifier()); + + // a&(cc|de) -> (a & (cc | de)) + node = parser.parse("a&(cc|de)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("cc", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("de", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // (a&b)|c -> ((a & b) | c) + node = parser.parse("(a&b)|c"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) 
nlNode.getChildExps().get(1)).getIdentifier()); + + // (a&b&c)|d -> (((a & b) & c) | d) + node = parser.parse("(a&b&c)|d"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("d", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // a&(b|(c|d)) -> (a & (b | (c | d))) + node = parser.parse("a&(b|(c|d))"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) 
nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // (!a) -> (!a) + node = parser.parse("(!a)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // a&(!b) -> (a & (!b)) + node = parser.parse("a&(!b)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // !a&b -> ((!a) & b) + node = parser.parse("!a&b"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // !a&(!b) -> ((!a) & (!b)) + node = 
parser.parse("!a&(!b)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNodeLeft = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + nlNodeRight = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.NOT, nlNodeLeft.getOperator()); + assertEquals(1, nlNodeLeft.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNodeLeft.getChildExps().get(0)).getIdentifier()); + assertEquals(Operator.NOT, nlNodeRight.getOperator()); + assertEquals(1, nlNodeRight.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + + // !a&!b -> ((!a) & (!b)) + node = parser.parse("!a&!b"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNodeLeft = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + nlNodeRight = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.NOT, nlNodeLeft.getOperator()); + assertEquals(1, nlNodeLeft.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNodeLeft.getChildExps().get(0)).getIdentifier()); + assertEquals(Operator.NOT, nlNodeRight.getOperator()); + assertEquals(1, nlNodeRight.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + + // !(a&b) -> (!(a & b)) + node = parser.parse("!(a&b)"); + assertTrue(node instanceof NonLeafExpressionNode); + 
nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // a&!b -> (a & (!b)) + node = parser.parse("a&!b"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // !((a|b)&!(c&!b)) -> (!((a | b) & (!(c & (!b))))) + node = parser.parse("!((a | b) & !(c & !b))"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNodeLeft = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + nlNodeRight = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNodeLeft.getOperator()); + assertEquals("a", 
((LeafExpressionNode) nlNodeLeft.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNodeLeft.getChildExps().get(1)).getIdentifier()); + assertEquals(Operator.NOT, nlNodeRight.getOperator()); + assertEquals(1, nlNodeRight.getChildExps().size()); + nlNodeRight = (NonLeafExpressionNode) nlNodeRight.getChildExps().get(0); + assertEquals(Operator.AND, nlNodeRight.getOperator()); + assertEquals(2, nlNodeRight.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNodeRight.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNodeRight = (NonLeafExpressionNode) nlNodeRight.getChildExps().get(1); + assertEquals(Operator.NOT, nlNodeRight.getOperator()); + assertEquals(1, nlNodeRight.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + } + + @Test + public void testNegativeCases() throws Exception { + executeNegativeCase("("); + executeNegativeCase(")"); + executeNegativeCase("()"); + executeNegativeCase("(a"); + executeNegativeCase("a&"); + executeNegativeCase("a&|b"); + executeNegativeCase("!"); + executeNegativeCase("a!"); + executeNegativeCase("a!&"); + executeNegativeCase("&"); + executeNegativeCase("|"); + executeNegativeCase("!(a|(b&c)&!b"); + executeNegativeCase("!!a"); + executeNegativeCase("( a & b ) | ( c & d e)"); + executeNegativeCase("! 
a"); + } + + private void executeNegativeCase(String exp) { + try { + parser.parse(exp); + fail("Expected ParseException for expression " + exp); + } catch (ParseException e) { + } + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java new file mode 100644 index 0000000..5593325 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java @@ -0,0 +1,562 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_FAMILY; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABEL_QUALIFIER; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import org.apache.hadoop.hbase.regionserver.HRegion; +import org.apache.hadoop.hbase.regionserver.HRegionServer; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import 
org.junit.rules.TestName; + +import com.google.protobuf.ByteString; + +/** + * Test class that tests the visibility labels + */ +@Category(MediumTests.class) +public class TestVisibilityLabels { + + private static final String TOPSECRET = "topsecret"; + private static final String PUBLIC = "public"; + private static final String PRIVATE = "private"; + private static final String CONFIDENTIAL = "confidential"; + private static final String SECRET = "secret"; + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + private static final byte[] row1 = Bytes.toBytes("row1"); + private static final byte[] row2 = Bytes.toBytes("row2"); + private static final byte[] row3 = Bytes.toBytes("row3"); + private static final byte[] row4 = Bytes.toBytes("row4"); + private final static byte[] fam = Bytes.toBytes("info"); + private final static byte[] qual = Bytes.toBytes("qual"); + private final static byte[] value = Bytes.toBytes("value"); + private static Configuration conf; + + private volatile boolean killedRS = false; + @Rule + public final TestName TEST_NAME = new TestName(); + + @BeforeClass + public static void setupBeforeClass() throws Exception { + // setup configuration + conf = TEST_UTIL.getConfiguration(); + conf.setInt("hfile.format.version", 3); + conf.set("hbase.coprocessor.master.classes", VisibilityController.class.getName()); + conf.set("hbase.coprocessor.region.classes", VisibilityController.class.getName()); + conf.setClass(VisibilityUtils.VISIBILITY_LABEL_GENERATOR_CLASS, SimpleScanLabelGenerator.class, + ScanLabelGenerator.class); + TEST_UTIL.startMiniCluster(2); + + // Wait for the labels table to become available + TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000); + addLabels(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + TEST_UTIL.shutdownMiniCluster(); + } + + @After + public void tearDown() throws Exception { + killedRS = false; + } + + @Test + public void 
testSimpleVisibilityLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "|" + CONFIDENTIAL, + PRIVATE + "|" + CONFIDENTIAL); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL, PRIVATE)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + + assertTrue(next.length == 2); + CellScanner cellScanner = next[0].cellScanner(); + cellScanner.advance(); + Cell current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row1, 0, row1.length)); + cellScanner = next[1].cellScanner(); + cellScanner.advance(); + current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row2, 0, row2.length)); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsWithComplexLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")" + "&" + "!" 
+ TOPSECRET, "(" + PRIVATE + "&" + CONFIDENTIAL + "&" + SECRET + ")", "(" + + PRIVATE + "&" + CONFIDENTIAL + "&" + SECRET + ")", "(" + PRIVATE + "&" + CONFIDENTIAL + + "&" + SECRET + ")"); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(TOPSECRET, CONFIDENTIAL, PRIVATE, PUBLIC, SECRET)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertEquals(3, next.length); + CellScanner cellScanner = next[0].cellScanner(); + cellScanner.advance(); + Cell current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row2, 0, row2.length)); + cellScanner = next[1].cellScanner(); + cellScanner.advance(); + current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row3, 0, row3.length)); + cellScanner = next[2].cellScanner(); + cellScanner.advance(); + current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row4, 0, row4.length)); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsThatDoesNotPassTheCriteria() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")", PRIVATE); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(PUBLIC)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertTrue(next.length == 0); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsInPutsThatDoesNotMatchAnyDefinedLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + try { + createTableAndWriteDataWithLabels(tableName, "SAMPLE_LABEL", "TEST"); + fail("Should have failed 
with failed sanity check exception"); + } catch (Exception e) { + } + } + + @Test + public void testVisibilityLabelsInScanThatDoesNotMatchAnyDefinedLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")", PRIVATE); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations("SAMPLE")); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertTrue(next.length == 0); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsWithGet() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL + "&!" + + PRIVATE, SECRET + "&" + CONFIDENTIAL + "&" + PRIVATE); + try { + Get get = new Get(row1); + get.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL)); + Result result = table.get(get); + assertTrue(!result.isEmpty()); + Cell cell = result.getColumnLatestCell(fam, qual); + assertTrue(Bytes.equals(value, 0, value.length, cell.getValueArray(), cell.getValueOffset(), + cell.getValueLength())); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsOnKillingOfRSContainingLabelsTable() throws Exception { + List<RegionServerThread> regionServerThreads = TEST_UTIL.getHBaseCluster() + .getRegionServerThreads(); + int liveRS = 0; + for (RegionServerThread rsThreads : regionServerThreads) { + if (!rsThreads.getRegionServer().isAborted()) { + liveRS++; + } + } + if (liveRS == 1) { + TEST_UTIL.getHBaseCluster().startRegionServer(); + } + Thread t1 = new Thread() { + public void run() { + List<RegionServerThread> regionServerThreads = TEST_UTIL.getHBaseCluster() + .getRegionServerThreads(); + for (RegionServerThread rsThread : regionServerThreads) { + List<HRegion> onlineRegions = 
rsThread.getRegionServer().getOnlineRegions( + LABELS_TABLE_NAME); + if (onlineRegions.size() > 0) { + rsThread.getRegionServer().abort("Aborting "); + killedRS = true; + break; + } + } + } + + }; + t1.start(); + final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + Thread t = new Thread() { + public void run() { + try { + while (!killedRS) { + Thread.sleep(1); + } + createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")", + PRIVATE); + } catch (Exception e) { + } + } + }; + t.start(); + regionServerThreads = TEST_UTIL.getHBaseCluster().getRegionServerThreads(); + while (!killedRS) { + Thread.sleep(10); + } + regionServerThreads = TEST_UTIL.getHBaseCluster().getRegionServerThreads(); + for (RegionServerThread rsThread : regionServerThreads) { + while (true) { + if (!rsThread.getRegionServer().isAborted()) { + List<HRegion> onlineRegions = rsThread.getRegionServer().getOnlineRegions( + LABELS_TABLE_NAME); + if (onlineRegions.size() > 0) { + break; + } else { + Thread.sleep(10); + } + } else { + break; + } + } + } + TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000); + t.join(); + HTable table = null; + try { + table = new HTable(TEST_UTIL.getConfiguration(), tableName); + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(SECRET)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertTrue(next.length == 1); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsOnRSRestart() throws Exception { + final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + List<RegionServerThread> regionServerThreads = TEST_UTIL.getHBaseCluster() + .getRegionServerThreads(); + for (RegionServerThread rsThread : regionServerThreads) { + rsThread.getRegionServer().abort("Aborting "); + } + // Start one new RS + RegionServerThread rs = TEST_UTIL.getHBaseCluster().startRegionServer(); + HRegionServer regionServer = rs.getRegionServer(); + 
while (!regionServer.isOnline()) { + try { + Thread.sleep(10); + } catch (InterruptedException e) { + } + } + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")", PRIVATE); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(SECRET)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertTrue(next.length == 1); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testAddVisibilityLabelsOnRSRestart() throws Exception { + List<RegionServerThread> regionServerThreads = TEST_UTIL.getHBaseCluster() + .getRegionServerThreads(); + for (RegionServerThread rsThread : regionServerThreads) { + rsThread.getRegionServer().abort("Aborting "); + } + // Start one new RS + RegionServerThread rs = TEST_UTIL.getHBaseCluster().startRegionServer(); + HRegionServer regionServer = rs.getRegionServer(); + while (!regionServer.isOnline()) { + try { + Thread.sleep(10); + } catch (InterruptedException e) { + } + } + String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, "ABC", "XYZ" }; + try { + VisibilityClient.addLabels(conf, labels); + } catch (Throwable t) { + throw new IOException(t); + } + // Scan the visibility label + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(VisibilityUtils.SYSTEM_LABEL)); + HTable ht = new HTable(conf, LABELS_TABLE_NAME.getName()); + int i = 0; + try { + ResultScanner scanner = ht.getScanner(s); + while (true) { + Result next = scanner.next(); + if (next == null) { + break; + } + i++; + } + } finally { + if (ht != null) { + ht.close(); + } + } + // One label is the "system" label. 
+ Assert.assertEquals("The count should be 8", 8, i); + } + + @Test + public void testVisibilityLabelsInGetThatDoesNotMatchAnyDefinedLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")", PRIVATE); + try { + Get get = new Get(row1); + get.setAuthorizations(new Authorizations("SAMPLE")); + Result result = table.get(get); + assertTrue(result.isEmpty()); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testAddLabels() throws Throwable { + String[] labels = { "L1", SECRET, "L2", "invalid~", "L3" }; + VisibilityLabelsResponse response = VisibilityClient.addLabels(conf, labels); + List<RegionActionResult> resultList = response.getResultList(); + assertEquals(5, resultList.size()); + assertTrue(resultList.get(0).getException().getValue().isEmpty()); + assertEquals("org.apache.hadoop.hbase.security.visibility.LabelAlreadyExistsException", + resultList.get(1).getException().getName()); + assertTrue(resultList.get(2).getException().getValue().isEmpty()); + assertEquals("org.apache.hadoop.hbase.security.visibility.InvalidLabelException", resultList + .get(3).getException().getName()); + assertTrue(resultList.get(4).getException().getValue().isEmpty()); + } + + @Test + public void testSetAndGetUserAuths() throws Throwable { + String[] auths = { SECRET, CONFIDENTIAL }; + String user = "user1"; + VisibilityClient.setAuths(conf, auths, user); + HTable ht = null; + try { + ht = new HTable(conf, LABELS_TABLE_NAME); + ResultScanner scanner = ht.getScanner(new Scan()); + Result result = null; + while ((result = scanner.next()) != null) { + Cell label = result.getColumnLatestCell(LABELS_TABLE_FAMILY, LABEL_QUALIFIER); + Cell userAuth = result.getColumnLatestCell(LABELS_TABLE_FAMILY, user.getBytes()); + if (Bytes.equals(SECRET.getBytes(), 0, SECRET.getBytes().length, label.getValueArray(), + label.getValueOffset(), 
label.getValueLength()) + || Bytes.equals(CONFIDENTIAL.getBytes(), 0, CONFIDENTIAL.getBytes().length, + label.getValueArray(), label.getValueOffset(), label.getValueLength())) { + assertNotNull(userAuth); + } else { + assertNull(userAuth); + } + } + } finally { + if (ht != null) { + ht.close(); + } + } + GetAuthsResponse authsResponse = VisibilityClient.getAuths(conf, user); + List<String> authsList = new ArrayList<String>(); + for (ByteString authBS : authsResponse.getAuthList()) { + authsList.add(Bytes.toString(authBS.toByteArray())); + } + assertEquals(2, authsList.size()); + assertTrue(authsList.contains(SECRET)); + assertTrue(authsList.contains(CONFIDENTIAL)); + + // Try doing setAuths once again and there should not be any duplicates + String[] auths1 = { SECRET, CONFIDENTIAL }; + user = "user1"; + VisibilityClient.setAuths(conf, auths1, user); + + authsResponse = VisibilityClient.getAuths(conf, user); + authsList = new ArrayList<String>(); + for (ByteString authBS : authsResponse.getAuthList()) { + authsList.add(Bytes.toString(authBS.toByteArray())); + } + assertEquals(2, authsList.size()); + assertTrue(authsList.contains(SECRET)); + assertTrue(authsList.contains(CONFIDENTIAL)); + } + + @Test + public void testClearUserAuths() throws Throwable { + String[] auths = { SECRET, CONFIDENTIAL, PRIVATE }; + String user = "testUser"; + VisibilityClient.setAuths(conf, auths, user); + // Removing the auths for SECRET and CONFIDENTIAL for the user. + // Passing a non existing auth also. 
+ auths = new String[] { SECRET, PUBLIC, CONFIDENTIAL }; + VisibilityLabelsResponse response = VisibilityClient.clearAuths(conf, auths, user); + List<RegionActionResult> resultList = response.getResultList(); + assertEquals(3, resultList.size()); + assertTrue(resultList.get(0).getException().getValue().isEmpty()); + assertEquals("org.apache.hadoop.hbase.security.visibility.InvalidLabelException", + resultList.get(1).getException().getName()); + assertTrue(resultList.get(2).getException().getValue().isEmpty()); + HTable ht = null; + try { + ht = new HTable(conf, LABELS_TABLE_NAME); + ResultScanner scanner = ht.getScanner(new Scan()); + Result result = null; + while ((result = scanner.next()) != null) { + Cell label = result.getColumnLatestCell(LABELS_TABLE_FAMILY, LABEL_QUALIFIER); + Cell userAuth = result.getColumnLatestCell(LABELS_TABLE_FAMILY, user.getBytes()); + if (Bytes.equals(PRIVATE.getBytes(), 0, PRIVATE.getBytes().length, label.getValueArray(), + label.getValueOffset(), label.getValueLength())) { + assertNotNull(userAuth); + } else { + assertNull(userAuth); + } + } + } finally { + if (ht != null) { + ht.close(); + } + } + + GetAuthsResponse authsResponse = VisibilityClient.getAuths(conf, user); + List<String> authsList = new ArrayList<String>(); + for (ByteString authBS : authsResponse.getAuthList()) { + authsList.add(Bytes.toString(authBS.toByteArray())); + } + assertEquals(1, authsList.size()); + assertTrue(authsList.contains(PRIVATE)); + } + + private static HTable createTableAndWriteDataWithLabels(TableName tableName, String... 
labelExps) + throws Exception { + HTable table = null; + try { + table = TEST_UTIL.createTable(tableName, fam); + int i = 1; + List<Put> puts = new ArrayList<Put>(); + for (String labelExp : labelExps) { + Put put = new Put(Bytes.toBytes("row" + i)); + put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value); + put.setCellVisibility(new CellVisibility(labelExp)); + puts.add(put); + i++; + } + table.put(puts); + } finally { + if (table != null) { + table.close(); + } + } + return table; + } + + private static void addLabels() throws IOException { + String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, PUBLIC, TOPSECRET }; + try { + VisibilityClient.addLabels(conf, labels); + } catch (Throwable t) { + throw new IOException(t); + } + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java new file mode 100644 index 0000000..ededa6d --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java @@ -0,0 +1,275 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; + +import java.io.IOException; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.security.access.AccessControlLists; +import org.apache.hadoop.hbase.security.access.AccessController; +import org.apache.hadoop.hbase.security.access.SecureTestUtil; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; + +import com.google.protobuf.ByteString; + +@Category(MediumTests.class) +public class TestVisibilityLabelsWithACL { + + private static final String PRIVATE = "private"; + private static final String CONFIDENTIAL = "confidential"; + private static final String SECRET = "secret"; + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + private static 
final byte[] row1 = Bytes.toBytes("row1"); + private final static byte[] fam = Bytes.toBytes("info"); + private final static byte[] qual = Bytes.toBytes("qual"); + private final static byte[] value = Bytes.toBytes("value"); + private static Configuration conf; + + @Rule + public final TestName TEST_NAME = new TestName(); + private static User SUPERUSER; + private static User NORMAL_USER; + + @BeforeClass + public static void setupBeforeClass() throws Exception { + // setup configuration + conf = TEST_UTIL.getConfiguration(); + conf.setInt("hfile.format.version", 3); + SecureTestUtil.enableSecurity(conf); + + conf.set("hbase.coprocessor.master.classes", AccessController.class.getName() + "," + + VisibilityController.class.getName()); + conf.set("hbase.coprocessor.region.classes", AccessController.class.getName() + "," + + VisibilityController.class.getName()); + conf.setClass(VisibilityUtils.VISIBILITY_LABEL_GENERATOR_CLASS, + DefaultScanLabelGenerator.class, ScanLabelGenerator.class); + TEST_UTIL.startMiniCluster(2); + + TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName(), 50000); + // Wait for the labels table to become available + TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000); + SUPERUSER = User.createUserForTesting(conf, "admin", new String[] { "supergroup" }); + NORMAL_USER = User.createUserForTesting(conf, "user1", new String[] {}); + addLabels(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + TEST_UTIL.shutdownMiniCluster(); + } + + @Test + public void testScanForUserWithFewerLabelAuthsThanLabelsInScanAuthorizations() throws Throwable { + String[] auths = { SECRET }; + String user = "admin"; + VisibilityClient.setAuths(conf, auths, user); + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + final HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL + + "&!" + PRIVATE, SECRET + "&!" 
+ PRIVATE); + PrivilegedExceptionAction scanAction = new PrivilegedExceptionAction() { + public Void run() throws Exception { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL)); + HTable t = new HTable(conf, table.getTableName()); + try { + ResultScanner scanner = t.getScanner(s); + Result result = scanner.next(); + assertTrue(!result.isEmpty()); + assertTrue(Bytes.equals(Bytes.toBytes("row2"), result.getRow())); + result = scanner.next(); + assertNull(result); + } finally { + t.close(); + } + return null; + } + }; + SUPERUSER.runAs(scanAction); + } + + @Test + public void testVisibilityLabelsForUserWithNoAuths() throws Throwable { + String user = "admin"; + String[] auths = { SECRET }; + VisibilityClient.clearAuths(conf, auths, user); // Removing all auths if any. + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + final HTable table = createTableAndWriteDataWithLabels(tableName, SECRET); + PrivilegedExceptionAction getAction = new PrivilegedExceptionAction() { + public Void run() throws Exception { + Get g = new Get(row1); + g.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL)); + HTable t = new HTable(conf, table.getTableName()); + try { + Result result = t.get(g); + assertTrue(result.isEmpty()); + } finally { + t.close(); + } + return null; + } + }; + SUPERUSER.runAs(getAction); + } + + @Test + public void testLabelsTableOpsWithDifferentUsers() throws Throwable { + PrivilegedExceptionAction action = + new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + try { + return VisibilityClient.addLabels(conf, new String[] { "l1", "l2" }); + } catch (Throwable e) { + } + return null; + } + }; + VisibilityLabelsResponse response = NORMAL_USER.runAs(action); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response + .getResult(0).getException().getName()); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response + 
.getResult(1).getException().getName()); + + action = new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + try { + return VisibilityClient.setAuths(conf, new String[] { CONFIDENTIAL, PRIVATE }, "user1"); + } catch (Throwable e) { + } + return null; + } + }; + response = NORMAL_USER.runAs(action); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response + .getResult(0).getException().getName()); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response + .getResult(1).getException().getName()); + + action = new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + try { + return VisibilityClient.setAuths(conf, new String[] { CONFIDENTIAL, PRIVATE }, "user1"); + } catch (Throwable e) { + } + return null; + } + }; + response = SUPERUSER.runAs(action); + assertTrue(response.getResult(0).getException().getValue().isEmpty()); + assertTrue(response.getResult(1).getException().getValue().isEmpty()); + + action = new PrivilegedExceptionAction() { + public VisibilityLabelsResponse run() throws Exception { + try { + return VisibilityClient.clearAuths(conf, new String[] { CONFIDENTIAL, PRIVATE }, "user1"); + } catch (Throwable e) { + } + return null; + } + }; + response = NORMAL_USER.runAs(action); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response.getResult(0) + .getException().getName()); + assertEquals("org.apache.hadoop.hbase.security.AccessDeniedException", response.getResult(1) + .getException().getName()); + + response = VisibilityClient.clearAuths(conf, new String[] { CONFIDENTIAL, PRIVATE }, "user1"); + assertTrue(response.getResult(0).getException().getValue().isEmpty()); + assertTrue(response.getResult(1).getException().getValue().isEmpty()); + + VisibilityClient.setAuths(conf, new String[] { CONFIDENTIAL, PRIVATE }, "user2"); + PrivilegedExceptionAction action1 = + new PrivilegedExceptionAction() { + public 
GetAuthsResponse run() throws Exception { + try { + return VisibilityClient.getAuths(conf, "user2"); + } catch (Throwable e) { + } + return null; + } + }; + GetAuthsResponse authsResponse = NORMAL_USER.runAs(action1); + assertNull(authsResponse); + authsResponse = SUPERUSER.runAs(action1); + List authsList = new ArrayList(); + for (ByteString authBS : authsResponse.getAuthList()) { + authsList.add(Bytes.toString(authBS.toByteArray())); + } + assertEquals(2, authsList.size()); + assertTrue(authsList.contains(CONFIDENTIAL)); + assertTrue(authsList.contains(PRIVATE)); + } + + private static HTable createTableAndWriteDataWithLabels(TableName tableName, String... labelExps) + throws Exception { + HTable table = null; + try { + table = TEST_UTIL.createTable(tableName, fam); + int i = 1; + List puts = new ArrayList(); + for (String labelExp : labelExps) { + Put put = new Put(Bytes.toBytes("row" + i)); + put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value); + put.setCellVisibility(new CellVisibility(labelExp)); + puts.add(put); + i++; + } + table.put(puts); + } finally { + if (table != null) { + table.close(); + } + } + return table; + } + + private static void addLabels() throws IOException { + String[] labels = { SECRET, CONFIDENTIAL, PRIVATE }; + try { + VisibilityClient.addLabels(conf, labels); + } catch (Throwable t) { + throw new IOException(t); + } + } +} diff --git hbase-shell/src/main/ruby/hbase.rb hbase-shell/src/main/ruby/hbase.rb index 87512bf..2b32369 100644 --- hbase-shell/src/main/ruby/hbase.rb +++ hbase-shell/src/main/ruby/hbase.rb @@ -78,3 +78,4 @@ require 'hbase/admin' require 'hbase/table' require 'hbase/replication_admin' require 'hbase/security' +require 'hbase/visibility_labels' \ No newline at end of file diff --git hbase-shell/src/main/ruby/hbase/hbase.rb hbase-shell/src/main/ruby/hbase/hbase.rb index 64482c5..8c8d2b1 100644 --- hbase-shell/src/main/ruby/hbase/hbase.rb +++ hbase-shell/src/main/ruby/hbase/hbase.rb @@ -22,6 +22,7 @@ include Java 
require 'hbase/admin' require 'hbase/table' require 'hbase/security' +require 'hbase/visibility_labels' module Hbase class Hbase @@ -55,5 +56,9 @@ module Hbase def security_admin(formatter) ::Hbase::SecurityAdmin.new(configuration, formatter) end + + def visibility_labels_admin(formatter) + ::Hbase::VisibilityLabelsAdmin.new(configuration, formatter) + end end end diff --git hbase-shell/src/main/ruby/hbase/visibility_labels.rb hbase-shell/src/main/ruby/hbase/visibility_labels.rb new file mode 100644 index 0000000..627cdca --- /dev/null +++ hbase-shell/src/main/ruby/hbase/visibility_labels.rb @@ -0,0 +1,134 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +include Java +java_import org.apache.hadoop.hbase.security.visibility.VisibilityClient +java_import org.apache.hadoop.hbase.security.visibility.VisibilityConstants +java_import org.apache.hadoop.hbase.util.Bytes + +module Hbase + class VisibilityLabelsAdmin + + def initialize(configuration, formatter) + @config = configuration + @formatter = formatter + @admin = org.apache.hadoop.hbase.client.HBaseAdmin.new(configuration) + end + + def add_labels(*args) + lables_table_available? 
+ + # Normalize args + if args.kind_of?(Array) + labels = [ args ].flatten.compact + end + + begin + response = VisibilityClient.addLabels(@config, labels.to_java(:string)) + if response.nil? + raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + end + labelsWithException = "" + list = response.getResultList() + list.each do |result| + if result.hasException() + labelsWithException += Bytes.toString(result.getException().getValue().toByteArray()) + end + end + if labelsWithException.length > 0 + raise(ArgumentError, labelsWithException) + end + end + end + + def set_auths(user, *args) + lables_table_available? + # Normalize args + if args.kind_of?(Array) + auths = [ args ].flatten.compact + end + + begin + response = VisibilityClient.setAuths(@config, auths.to_java(:string), user) + if response.nil? + raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + end + labelsWithException = "" + list = response.getResultList() + list.each do |result| + if result.hasException() + labelsWithException += Bytes.toString(result.getException().getValue().toByteArray()) + end + end + if labelsWithException.length > 0 + raise(ArgumentError, labelsWithException) + end + end + end + + def get_auths(user) + lables_table_available? + begin + response = VisibilityClient.getAuths(@config, user) + if response.nil? + raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + end + if response.getAuthList.empty? + raise(ArgumentError, "No authentication set for the given user " + user) + end + return response.getAuthList + end + end + + def clear_auths(user, *args) + lables_table_available? + # Normalize args + if args.kind_of?(Array) + auths = [ args ].flatten.compact + end + + begin + response = VisibilityClient.clearAuths(@config, auths.to_java(:string), user) + if response.nil? 
+ raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + end + labelsWithException = "" + list = response.getResultList() + list.each do |result| + if result.hasException() + labelsWithException += Bytes.toString(result.getException().getValue().toByteArray()) + end + end + if labelsWithException.length > 0 + raise(ArgumentError, labelsWithException) + end + end + end + + # Make sure that lables table is available + def lables_table_available?() + raise(ArgumentError, "DISABLED: Visibility labels feature is not available") \ + unless exists?(VisibilityConstants::LABELS_TABLE_NAME) + end + + # Does table exist? + def exists?(table_name) + @admin.tableExists(table_name) + end + end +end \ No newline at end of file diff --git hbase-shell/src/main/ruby/shell.rb hbase-shell/src/main/ruby/shell.rb index 8576dae..5c02806 100644 --- hbase-shell/src/main/ruby/shell.rb +++ hbase-shell/src/main/ruby/shell.rb @@ -90,6 +90,10 @@ module Shell @hbase_security_admin ||= hbase.security_admin(formatter) end + def hbase_visibility_labels_admin + @hbase_visibility_labels_admin ||= hbase.visibility_labels_admin(formatter) + end + def export_commands(where) ::Shell.commands.keys.each do |cmd| # here where is the IRB namespace @@ -345,3 +349,14 @@ Shell.load_command_group( ] ) +Shell.load_command_group( + 'visibility labels', + :full_name => 'VISIBILITY LABEL TOOLS', + :comment => "NOTE: Above commands are only applicable if running with the VisibilityController coprocessor", + :commands => %w[ + add_labels + set_auths + get_auths + clear_auths + ] +) \ No newline at end of file diff --git hbase-shell/src/main/ruby/shell/commands.rb hbase-shell/src/main/ruby/shell/commands.rb index 72f6eb2..75f4797 100644 --- hbase-shell/src/main/ruby/shell/commands.rb +++ hbase-shell/src/main/ruby/shell/commands.rb @@ -62,6 +62,10 @@ module Shell @shell.hbase_security_admin end + def visibility_labels_admin + @shell.hbase_visibility_labels_admin + end + 
#---------------------------------------------------------------------- def formatter diff --git hbase-shell/src/main/ruby/shell/commands/add_labels.rb hbase-shell/src/main/ruby/shell/commands/add_labels.rb new file mode 100644 index 0000000..65a1140 --- /dev/null +++ hbase-shell/src/main/ruby/shell/commands/add_labels.rb @@ -0,0 +1,40 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +module Shell + module Commands + class AddLabels < Command + def help + return <<-EOF +Add a set of visibility labels. +Syntax : add_labels [label1, label2] + +For example: + + hbase> add_labels ['SECRET','PRIVATE'] +EOF + end + + def command(*args) + format_simple_command do + visibility_labels_admin.add_labels(args) + end + end + end + end +end diff --git hbase-shell/src/main/ruby/shell/commands/clear_auths.rb hbase-shell/src/main/ruby/shell/commands/clear_auths.rb new file mode 100644 index 0000000..7bf4252 --- /dev/null +++ hbase-shell/src/main/ruby/shell/commands/clear_auths.rb @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +module Shell + module Commands + class ClearAuths < Command + def help + return <<-EOF +Add a set of visibility labels for an user that has to removed +Syntax : clear_auths 'user1',[label1, label2] + +For example: + + hbase> clear_auths 'user1', ['SECRET','PRIVATE'] +EOF + end + + def command(user, *args) + format_simple_command do + visibility_labels_admin.clear_auths(user, args) + end + end + end + end +end diff --git hbase-shell/src/main/ruby/shell/commands/get_auths.rb hbase-shell/src/main/ruby/shell/commands/get_auths.rb new file mode 100644 index 0000000..2bc3e09 --- /dev/null +++ hbase-shell/src/main/ruby/shell/commands/get_auths.rb @@ -0,0 +1,42 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +module Shell + module Commands + class GetAuths < Command + def help + return <<-EOF +Get the visibility labels set for a particular user +Syntax : get_auths 'user1' + +For example: + + hbase> get_auths 'user1' +EOF + end + + def command(user) + format_simple_command do + list = visibility_labels_admin.get_auths(user) + list.each do |auths| + formatter.row([org.apache.hadoop.hbase.util.Bytes::toStringBinary(auths.toByteArray)]) + end + end + end + end + end +end diff --git hbase-shell/src/main/ruby/shell/commands/set_auths.rb hbase-shell/src/main/ruby/shell/commands/set_auths.rb new file mode 100644 index 0000000..6679719 --- /dev/null +++ hbase-shell/src/main/ruby/shell/commands/set_auths.rb @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +module Shell + module Commands + class SetAuths < Command + def help + return <<-EOF +Add a set of visibility labels for an user +Syntax : set_auths 'user1',[label1, label2] + +For example: + + hbase> set_auths 'user1', ['SECRET','PRIVATE'] +EOF + end + + def command(user, *args) + format_simple_command do + visibility_labels_admin.set_auths(user, args) + end + end + end + end +end diff --git hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java index 7c47b9f..3090688 100644 --- hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java +++ hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java @@ -22,40 +22,14 @@ import static org.apache.hadoop.hbase.util.Bytes.getBytes; import java.io.IOException; import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; +import java.util.*; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.hadoop.hbase.client.Delete; -import org.apache.hadoop.hbase.client.Durability; -import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.Increment; -import org.apache.hadoop.hbase.client.OperationWithAttributes; -import org.apache.hadoop.hbase.client.Put; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.RowMutations; -import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.filter.ParseFilter; -import org.apache.hadoop.hbase.thrift2.generated.TColumn; -import org.apache.hadoop.hbase.thrift2.generated.TColumnIncrement; -import 
org.apache.hadoop.hbase.thrift2.generated.TColumnValue; -import org.apache.hadoop.hbase.thrift2.generated.TDelete; -import org.apache.hadoop.hbase.thrift2.generated.TDeleteType; -import org.apache.hadoop.hbase.thrift2.generated.TDurability; -import org.apache.hadoop.hbase.thrift2.generated.TGet; -import org.apache.hadoop.hbase.thrift2.generated.TIncrement; -import org.apache.hadoop.hbase.thrift2.generated.TMutation; -import org.apache.hadoop.hbase.thrift2.generated.TPut; -import org.apache.hadoop.hbase.thrift2.generated.TResult; -import org.apache.hadoop.hbase.thrift2.generated.TRowMutations; -import org.apache.hadoop.hbase.thrift2.generated.TScan; -import org.apache.hadoop.hbase.thrift2.generated.TTimeRange; +import org.apache.hadoop.hbase.security.visibility.Authorizations; +import org.apache.hadoop.hbase.thrift2.generated.*; import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Private @@ -111,6 +85,9 @@ public class ThriftUtilities { } } + if (in.isSetAuthorizations()) { + out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels())); + } return out; } @@ -400,6 +377,10 @@ public class ThriftUtilities { addAttributes(out,in.getAttributes()); } + if (in.isSetAuthorizations()) { + out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels())); + } + return out; } diff --git hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAuthorization.java hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAuthorization.java new file mode 100644 index 0000000..3067b59 --- /dev/null +++ hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAuthorization.java @@ -0,0 +1,435 @@ +/** + * Autogenerated by Thrift Compiler (0.9.1) + * + * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING + * @generated + */ +package org.apache.hadoop.hbase.thrift2.generated; + +import org.apache.thrift.scheme.IScheme; +import org.apache.thrift.scheme.SchemeFactory; +import 
org.apache.thrift.scheme.StandardScheme; + +import org.apache.thrift.scheme.TupleScheme; +import org.apache.thrift.protocol.TTupleProtocol; +import org.apache.thrift.protocol.TProtocolException; +import org.apache.thrift.EncodingUtils; +import org.apache.thrift.TException; +import org.apache.thrift.async.AsyncMethodCallback; +import org.apache.thrift.server.AbstractNonblockingServer.*; +import java.util.List; +import java.util.ArrayList; +import java.util.Map; +import java.util.HashMap; +import java.util.EnumMap; +import java.util.Set; +import java.util.HashSet; +import java.util.EnumSet; +import java.util.Collections; +import java.util.BitSet; +import java.nio.ByteBuffer; +import java.util.Arrays; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TAuthorization implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { + private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TAuthorization"); + + private static final org.apache.thrift.protocol.TField LABELS_FIELD_DESC = new org.apache.thrift.protocol.TField("labels", org.apache.thrift.protocol.TType.LIST, (short)1); + + private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); + static { + schemes.put(StandardScheme.class, new TAuthorizationStandardSchemeFactory()); + schemes.put(TupleScheme.class, new TAuthorizationTupleSchemeFactory()); + } + + public List labels; // optional + + /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ + public enum _Fields implements org.apache.thrift.TFieldIdEnum { + LABELS((short)1, "labels"); + + private static final Map byName = new HashMap(); + + static { + for (_Fields field : EnumSet.allOf(_Fields.class)) { + byName.put(field.getFieldName(), field); + } + } + + /** + * Find the _Fields constant that matches fieldId, or null if its not found. 
+ */ + public static _Fields findByThriftId(int fieldId) { + switch(fieldId) { + case 1: // LABELS + return LABELS; + default: + return null; + } + } + + /** + * Find the _Fields constant that matches fieldId, throwing an exception + * if it is not found. + */ + public static _Fields findByThriftIdOrThrow(int fieldId) { + _Fields fields = findByThriftId(fieldId); + if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); + return fields; + } + + /** + * Find the _Fields constant that matches name, or null if its not found. + */ + public static _Fields findByName(String name) { + return byName.get(name); + } + + private final short _thriftId; + private final String _fieldName; + + _Fields(short thriftId, String fieldName) { + _thriftId = thriftId; + _fieldName = fieldName; + } + + public short getThriftFieldId() { + return _thriftId; + } + + public String getFieldName() { + return _fieldName; + } + } + + // isset id assignments + private _Fields optionals[] = {_Fields.LABELS}; + public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; + static { + Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); + tmpMap.put(_Fields.LABELS, new org.apache.thrift.meta_data.FieldMetaData("labels", org.apache.thrift.TFieldRequirementType.OPTIONAL, + new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, + new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)))); + metaDataMap = Collections.unmodifiableMap(tmpMap); + org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TAuthorization.class, metaDataMap); + } + + public TAuthorization() { + } + + /** + * Performs a deep copy on other. 
+ */ + public TAuthorization(TAuthorization other) { + if (other.isSetLabels()) { + List __this__labels = new ArrayList(other.labels); + this.labels = __this__labels; + } + } + + public TAuthorization deepCopy() { + return new TAuthorization(this); + } + + @Override + public void clear() { + this.labels = null; + } + + public int getLabelsSize() { + return (this.labels == null) ? 0 : this.labels.size(); + } + + public java.util.Iterator getLabelsIterator() { + return (this.labels == null) ? null : this.labels.iterator(); + } + + public void addToLabels(String elem) { + if (this.labels == null) { + this.labels = new ArrayList(); + } + this.labels.add(elem); + } + + public List getLabels() { + return this.labels; + } + + public TAuthorization setLabels(List labels) { + this.labels = labels; + return this; + } + + public void unsetLabels() { + this.labels = null; + } + + /** Returns true if field labels is set (has been assigned a value) and false otherwise */ + public boolean isSetLabels() { + return this.labels != null; + } + + public void setLabelsIsSet(boolean value) { + if (!value) { + this.labels = null; + } + } + + public void setFieldValue(_Fields field, Object value) { + switch (field) { + case LABELS: + if (value == null) { + unsetLabels(); + } else { + setLabels((List)value); + } + break; + + } + } + + public Object getFieldValue(_Fields field) { + switch (field) { + case LABELS: + return getLabels(); + + } + throw new IllegalStateException(); + } + + /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ + public boolean isSet(_Fields field) { + if (field == null) { + throw new IllegalArgumentException(); + } + + switch (field) { + case LABELS: + return isSetLabels(); + } + throw new IllegalStateException(); + } + + @Override + public boolean equals(Object that) { + if (that == null) + return false; + if (that instanceof TAuthorization) + return this.equals((TAuthorization)that); + return false; + } + + 
public boolean equals(TAuthorization that) { + if (that == null) + return false; + + boolean this_present_labels = true && this.isSetLabels(); + boolean that_present_labels = true && that.isSetLabels(); + if (this_present_labels || that_present_labels) { + if (!(this_present_labels && that_present_labels)) + return false; + if (!this.labels.equals(that.labels)) + return false; + } + + return true; + } + + @Override + public int hashCode() { + return 0; + } + + @Override + public int compareTo(TAuthorization other) { + if (!getClass().equals(other.getClass())) { + return getClass().getName().compareTo(other.getClass().getName()); + } + + int lastComparison = 0; + + lastComparison = Boolean.valueOf(isSetLabels()).compareTo(other.isSetLabels()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetLabels()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.labels, other.labels); + if (lastComparison != 0) { + return lastComparison; + } + } + return 0; + } + + public _Fields fieldForId(int fieldId) { + return _Fields.findByThriftId(fieldId); + } + + public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { + schemes.get(iprot.getScheme()).getScheme().read(iprot, this); + } + + public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { + schemes.get(oprot.getScheme()).getScheme().write(oprot, this); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("TAuthorization("); + boolean first = true; + + if (isSetLabels()) { + sb.append("labels:"); + if (this.labels == null) { + sb.append("null"); + } else { + sb.append(this.labels); + } + first = false; + } + sb.append(")"); + return sb.toString(); + } + + public void validate() throws org.apache.thrift.TException { + // check for required fields + // check for sub-struct validity + } + + private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { + try { 
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); + } catch (org.apache.thrift.TException te) { + throw new java.io.IOException(te); + } + } + + private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { + try { + read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); + } catch (org.apache.thrift.TException te) { + throw new java.io.IOException(te); + } + } + + private static class TAuthorizationStandardSchemeFactory implements SchemeFactory { + public TAuthorizationStandardScheme getScheme() { + return new TAuthorizationStandardScheme(); + } + } + + private static class TAuthorizationStandardScheme extends StandardScheme { + + public void read(org.apache.thrift.protocol.TProtocol iprot, TAuthorization struct) throws org.apache.thrift.TException { + org.apache.thrift.protocol.TField schemeField; + iprot.readStructBegin(); + while (true) + { + schemeField = iprot.readFieldBegin(); + if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { + break; + } + switch (schemeField.id) { + case 1: // LABELS + if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { + { + org.apache.thrift.protocol.TList _list98 = iprot.readListBegin(); + struct.labels = new ArrayList(_list98.size); + for (int _i99 = 0; _i99 < _list98.size; ++_i99) + { + String _elem100; + _elem100 = iprot.readString(); + struct.labels.add(_elem100); + } + iprot.readListEnd(); + } + struct.setLabelsIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; + default: + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + iprot.readFieldEnd(); + } + iprot.readStructEnd(); + + // check for required fields of primitive type, which can't be checked in the validate method + struct.validate(); + } + + public void write(org.apache.thrift.protocol.TProtocol 
oprot, TAuthorization struct) throws org.apache.thrift.TException { + struct.validate(); + + oprot.writeStructBegin(STRUCT_DESC); + if (struct.labels != null) { + if (struct.isSetLabels()) { + oprot.writeFieldBegin(LABELS_FIELD_DESC); + { + oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.labels.size())); + for (String _iter101 : struct.labels) + { + oprot.writeString(_iter101); + } + oprot.writeListEnd(); + } + oprot.writeFieldEnd(); + } + } + oprot.writeFieldStop(); + oprot.writeStructEnd(); + } + + } + + private static class TAuthorizationTupleSchemeFactory implements SchemeFactory { + public TAuthorizationTupleScheme getScheme() { + return new TAuthorizationTupleScheme(); + } + } + + private static class TAuthorizationTupleScheme extends TupleScheme { + + @Override + public void write(org.apache.thrift.protocol.TProtocol prot, TAuthorization struct) throws org.apache.thrift.TException { + TTupleProtocol oprot = (TTupleProtocol) prot; + BitSet optionals = new BitSet(); + if (struct.isSetLabels()) { + optionals.set(0); + } + oprot.writeBitSet(optionals, 1); + if (struct.isSetLabels()) { + { + oprot.writeI32(struct.labels.size()); + for (String _iter102 : struct.labels) + { + oprot.writeString(_iter102); + } + } + } + } + + @Override + public void read(org.apache.thrift.protocol.TProtocol prot, TAuthorization struct) throws org.apache.thrift.TException { + TTupleProtocol iprot = (TTupleProtocol) prot; + BitSet incoming = iprot.readBitSet(1); + if (incoming.get(0)) { + { + org.apache.thrift.protocol.TList _list103 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); + struct.labels = new ArrayList(_list103.size); + for (int _i104 = 0; _i104 < _list103.size; ++_i104) + { + String _elem105; + _elem105 = iprot.readString(); + struct.labels.add(_elem105); + } + } + struct.setLabelsIsSet(true); + } + } + } + +} + diff --git 
hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java index ba1bcd4..2e6646b 100644 --- hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java +++ hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.0) + * Autogenerated by Thrift Compiler (0.9.1) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -15,6 +15,8 @@ import org.apache.thrift.protocol.TTupleProtocol; import org.apache.thrift.protocol.TProtocolException; import org.apache.thrift.EncodingUtils; import org.apache.thrift.TException; +import org.apache.thrift.async.AsyncMethodCallback; +import org.apache.thrift.server.AbstractNonblockingServer.*; import java.util.List; import java.util.ArrayList; import java.util.Map; @@ -43,7 +45,7 @@ import org.slf4j.LoggerFactory; * If you specify a time range and a timestamp the range is ignored. * Timestamps on TColumns are ignored. 
*/ -public class TGet implements org.apache.thrift.TBase, java.io.Serializable, Cloneable { +public class TGet implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGet"); private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1); @@ -53,6 +55,7 @@ public class TGet implements org.apache.thrift.TBase, java.i private static final org.apache.thrift.protocol.TField MAX_VERSIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("maxVersions", org.apache.thrift.protocol.TType.I32, (short)5); private static final org.apache.thrift.protocol.TField FILTER_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("filterString", org.apache.thrift.protocol.TType.STRING, (short)6); private static final org.apache.thrift.protocol.TField ATTRIBUTES_FIELD_DESC = new org.apache.thrift.protocol.TField("attributes", org.apache.thrift.protocol.TType.MAP, (short)7); + private static final org.apache.thrift.protocol.TField AUTHORIZATIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("authorizations", org.apache.thrift.protocol.TType.STRUCT, (short)8); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { @@ -67,6 +70,7 @@ public class TGet implements org.apache.thrift.TBase, java.i public int maxVersions; // optional public ByteBuffer filterString; // optional public Map attributes; // optional + public TAuthorization authorizations; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ public enum _Fields implements org.apache.thrift.TFieldIdEnum { @@ -76,7 +80,8 @@ public class TGet implements org.apache.thrift.TBase, java.i TIME_RANGE((short)4, "timeRange"), MAX_VERSIONS((short)5, "maxVersions"), FILTER_STRING((short)6, "filterString"), - ATTRIBUTES((short)7, "attributes"); + ATTRIBUTES((short)7, "attributes"), + AUTHORIZATIONS((short)8, "authorizations"); private static final Map byName = new HashMap(); @@ -105,6 +110,8 @@ public class TGet implements org.apache.thrift.TBase, java.i return FILTER_STRING; case 7: // ATTRIBUTES return ATTRIBUTES; + case 8: // AUTHORIZATIONS + return AUTHORIZATIONS; default: return null; } @@ -148,7 +155,7 @@ public class TGet implements org.apache.thrift.TBase, java.i private static final int __TIMESTAMP_ISSET_ID = 0; private static final int __MAXVERSIONS_ISSET_ID = 1; private byte __isset_bitfield = 0; - private _Fields optionals[] = {_Fields.COLUMNS,_Fields.TIMESTAMP,_Fields.TIME_RANGE,_Fields.MAX_VERSIONS,_Fields.FILTER_STRING,_Fields.ATTRIBUTES}; + private _Fields optionals[] = {_Fields.COLUMNS,_Fields.TIMESTAMP,_Fields.TIME_RANGE,_Fields.MAX_VERSIONS,_Fields.FILTER_STRING,_Fields.ATTRIBUTES,_Fields.AUTHORIZATIONS}; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); @@ -169,6 +176,8 @@ public class TGet implements org.apache.thrift.TBase, java.i new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true), new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)))); + tmpMap.put(_Fields.AUTHORIZATIONS, new org.apache.thrift.meta_data.FieldMetaData("authorizations", org.apache.thrift.TFieldRequirementType.OPTIONAL, + new 
org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TAuthorization.class))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TGet.class, metaDataMap); } @@ -193,7 +202,7 @@ public class TGet implements org.apache.thrift.TBase, java.i ; } if (other.isSetColumns()) { - List __this__columns = new ArrayList(); + List __this__columns = new ArrayList(other.columns.size()); for (TColumn other_element : other.columns) { __this__columns.add(new TColumn(other_element)); } @@ -209,22 +218,12 @@ public class TGet implements org.apache.thrift.TBase, java.i ; } if (other.isSetAttributes()) { - Map __this__attributes = new HashMap(); - for (Map.Entry other_element : other.attributes.entrySet()) { - - ByteBuffer other_element_key = other_element.getKey(); - ByteBuffer other_element_value = other_element.getValue(); - - ByteBuffer __this__attributes_copy_key = org.apache.thrift.TBaseHelper.copyBinary(other_element_key); -; - - ByteBuffer __this__attributes_copy_value = org.apache.thrift.TBaseHelper.copyBinary(other_element_value); -; - - __this__attributes.put(__this__attributes_copy_key, __this__attributes_copy_value); - } + Map __this__attributes = new HashMap(other.attributes); this.attributes = __this__attributes; } + if (other.isSetAuthorizations()) { + this.authorizations = new TAuthorization(other.authorizations); + } } public TGet deepCopy() { @@ -242,6 +241,7 @@ public class TGet implements org.apache.thrift.TBase, java.i this.maxVersions = 0; this.filterString = null; this.attributes = null; + this.authorizations = null; } public byte[] getRow() { @@ -456,6 +456,30 @@ public class TGet implements org.apache.thrift.TBase, java.i } } + public TAuthorization getAuthorizations() { + return this.authorizations; + } + + public TGet setAuthorizations(TAuthorization authorizations) { + this.authorizations = authorizations; + return this; + } + + public void unsetAuthorizations() { + 
this.authorizations = null; + } + + /** Returns true if field authorizations is set (has been assigned a value) and false otherwise */ + public boolean isSetAuthorizations() { + return this.authorizations != null; + } + + public void setAuthorizationsIsSet(boolean value) { + if (!value) { + this.authorizations = null; + } + } + public void setFieldValue(_Fields field, Object value) { switch (field) { case ROW: @@ -514,6 +538,14 @@ public class TGet implements org.apache.thrift.TBase, java.i } break; + case AUTHORIZATIONS: + if (value == null) { + unsetAuthorizations(); + } else { + setAuthorizations((TAuthorization)value); + } + break; + } } @@ -540,6 +572,9 @@ public class TGet implements org.apache.thrift.TBase, java.i case ATTRIBUTES: return getAttributes(); + case AUTHORIZATIONS: + return getAuthorizations(); + } throw new IllegalStateException(); } @@ -565,6 +600,8 @@ public class TGet implements org.apache.thrift.TBase, java.i return isSetFilterString(); case ATTRIBUTES: return isSetAttributes(); + case AUTHORIZATIONS: + return isSetAuthorizations(); } throw new IllegalStateException(); } @@ -645,6 +682,15 @@ public class TGet implements org.apache.thrift.TBase, java.i return false; } + boolean this_present_authorizations = true && this.isSetAuthorizations(); + boolean that_present_authorizations = true && that.isSetAuthorizations(); + if (this_present_authorizations || that_present_authorizations) { + if (!(this_present_authorizations && that_present_authorizations)) + return false; + if (!this.authorizations.equals(that.authorizations)) + return false; + } + return true; } @@ -653,80 +699,90 @@ public class TGet implements org.apache.thrift.TBase, java.i return 0; } + @Override public int compareTo(TGet other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; - TGet typedOther = (TGet)other; - lastComparison = 
Boolean.valueOf(isSetRow()).compareTo(typedOther.isSetRow()); + lastComparison = Boolean.valueOf(isSetRow()).compareTo(other.isSetRow()); if (lastComparison != 0) { return lastComparison; } if (isSetRow()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.row, typedOther.row); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.row, other.row); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetColumns()).compareTo(typedOther.isSetColumns()); + lastComparison = Boolean.valueOf(isSetColumns()).compareTo(other.isSetColumns()); if (lastComparison != 0) { return lastComparison; } if (isSetColumns()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.columns, typedOther.columns); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.columns, other.columns); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetTimestamp()).compareTo(typedOther.isSetTimestamp()); + lastComparison = Boolean.valueOf(isSetTimestamp()).compareTo(other.isSetTimestamp()); if (lastComparison != 0) { return lastComparison; } if (isSetTimestamp()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.timestamp, typedOther.timestamp); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.timestamp, other.timestamp); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetTimeRange()).compareTo(typedOther.isSetTimeRange()); + lastComparison = Boolean.valueOf(isSetTimeRange()).compareTo(other.isSetTimeRange()); if (lastComparison != 0) { return lastComparison; } if (isSetTimeRange()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.timeRange, typedOther.timeRange); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.timeRange, other.timeRange); if (lastComparison != 0) { return lastComparison; } } - lastComparison = 
Boolean.valueOf(isSetMaxVersions()).compareTo(typedOther.isSetMaxVersions()); + lastComparison = Boolean.valueOf(isSetMaxVersions()).compareTo(other.isSetMaxVersions()); if (lastComparison != 0) { return lastComparison; } if (isSetMaxVersions()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.maxVersions, typedOther.maxVersions); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.maxVersions, other.maxVersions); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetFilterString()).compareTo(typedOther.isSetFilterString()); + lastComparison = Boolean.valueOf(isSetFilterString()).compareTo(other.isSetFilterString()); if (lastComparison != 0) { return lastComparison; } if (isSetFilterString()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.filterString, typedOther.filterString); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.filterString, other.filterString); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetAttributes()).compareTo(typedOther.isSetAttributes()); + lastComparison = Boolean.valueOf(isSetAttributes()).compareTo(other.isSetAttributes()); if (lastComparison != 0) { return lastComparison; } if (isSetAttributes()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.attributes, typedOther.attributes); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.attributes, other.attributes); + if (lastComparison != 0) { + return lastComparison; + } + } + lastComparison = Boolean.valueOf(isSetAuthorizations()).compareTo(other.isSetAuthorizations()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetAuthorizations()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.authorizations, other.authorizations); if (lastComparison != 0) { return lastComparison; } @@ -810,6 +866,16 @@ public class TGet implements org.apache.thrift.TBase, java.i } first = false; } + if 
(isSetAuthorizations()) { + if (!first) sb.append(", "); + sb.append("authorizations:"); + if (this.authorizations == null) { + sb.append("null"); + } else { + sb.append(this.authorizations); + } + first = false; + } sb.append(")"); return sb.toString(); } @@ -823,6 +889,9 @@ public class TGet implements org.apache.thrift.TBase, java.i if (timeRange != null) { timeRange.validate(); } + if (authorizations != null) { + authorizations.validate(); + } } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { @@ -872,14 +941,14 @@ public class TGet implements org.apache.thrift.TBase, java.i case 2: // COLUMNS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { - org.apache.thrift.protocol.TList _list8 = iprot.readListBegin(); - struct.columns = new ArrayList(_list8.size); - for (int _i9 = 0; _i9 < _list8.size; ++_i9) + org.apache.thrift.protocol.TList _list16 = iprot.readListBegin(); + struct.columns = new ArrayList(_list16.size); + for (int _i17 = 0; _i17 < _list16.size; ++_i17) { - TColumn _elem10; // required - _elem10 = new TColumn(); - _elem10.read(iprot); - struct.columns.add(_elem10); + TColumn _elem18; + _elem18 = new TColumn(); + _elem18.read(iprot); + struct.columns.add(_elem18); } iprot.readListEnd(); } @@ -924,15 +993,15 @@ public class TGet implements org.apache.thrift.TBase, java.i case 7: // ATTRIBUTES if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { - org.apache.thrift.protocol.TMap _map11 = iprot.readMapBegin(); - struct.attributes = new HashMap(2*_map11.size); - for (int _i12 = 0; _i12 < _map11.size; ++_i12) + org.apache.thrift.protocol.TMap _map19 = iprot.readMapBegin(); + struct.attributes = new HashMap(2*_map19.size); + for (int _i20 = 0; _i20 < _map19.size; ++_i20) { - ByteBuffer _key13; // required - ByteBuffer _val14; // required - _key13 = iprot.readBinary(); - _val14 = iprot.readBinary(); - struct.attributes.put(_key13, _val14); + ByteBuffer _key21; + ByteBuffer _val22; + _key21 = 
iprot.readBinary(); + _val22 = iprot.readBinary(); + struct.attributes.put(_key21, _val22); } iprot.readMapEnd(); } @@ -941,6 +1010,15 @@ public class TGet implements org.apache.thrift.TBase, java.i org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; + case 8: // AUTHORIZATIONS + if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { + struct.authorizations = new TAuthorization(); + struct.authorizations.read(iprot); + struct.setAuthorizationsIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } @@ -966,9 +1044,9 @@ public class TGet implements org.apache.thrift.TBase, java.i oprot.writeFieldBegin(COLUMNS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.columns.size())); - for (TColumn _iter15 : struct.columns) + for (TColumn _iter23 : struct.columns) { - _iter15.write(oprot); + _iter23.write(oprot); } oprot.writeListEnd(); } @@ -1004,16 +1082,23 @@ public class TGet implements org.apache.thrift.TBase, java.i oprot.writeFieldBegin(ATTRIBUTES_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.attributes.size())); - for (Map.Entry _iter16 : struct.attributes.entrySet()) + for (Map.Entry _iter24 : struct.attributes.entrySet()) { - oprot.writeBinary(_iter16.getKey()); - oprot.writeBinary(_iter16.getValue()); + oprot.writeBinary(_iter24.getKey()); + oprot.writeBinary(_iter24.getValue()); } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } } + if (struct.authorizations != null) { + if (struct.isSetAuthorizations()) { + oprot.writeFieldBegin(AUTHORIZATIONS_FIELD_DESC); + struct.authorizations.write(oprot); + oprot.writeFieldEnd(); + } + } oprot.writeFieldStop(); oprot.writeStructEnd(); } @@ -1051,13 +1136,16 @@ public 
class TGet implements org.apache.thrift.TBase, java.i if (struct.isSetAttributes()) { optionals.set(5); } - oprot.writeBitSet(optionals, 6); + if (struct.isSetAuthorizations()) { + optionals.set(6); + } + oprot.writeBitSet(optionals, 7); if (struct.isSetColumns()) { { oprot.writeI32(struct.columns.size()); - for (TColumn _iter17 : struct.columns) + for (TColumn _iter25 : struct.columns) { - _iter17.write(oprot); + _iter25.write(oprot); } } } @@ -1076,13 +1164,16 @@ public class TGet implements org.apache.thrift.TBase, java.i if (struct.isSetAttributes()) { { oprot.writeI32(struct.attributes.size()); - for (Map.Entry _iter18 : struct.attributes.entrySet()) + for (Map.Entry _iter26 : struct.attributes.entrySet()) { - oprot.writeBinary(_iter18.getKey()); - oprot.writeBinary(_iter18.getValue()); + oprot.writeBinary(_iter26.getKey()); + oprot.writeBinary(_iter26.getValue()); } } } + if (struct.isSetAuthorizations()) { + struct.authorizations.write(oprot); + } } @Override @@ -1090,17 +1181,17 @@ public class TGet implements org.apache.thrift.TBase, java.i TTupleProtocol iprot = (TTupleProtocol) prot; struct.row = iprot.readBinary(); struct.setRowIsSet(true); - BitSet incoming = iprot.readBitSet(6); + BitSet incoming = iprot.readBitSet(7); if (incoming.get(0)) { { - org.apache.thrift.protocol.TList _list19 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); - struct.columns = new ArrayList(_list19.size); - for (int _i20 = 0; _i20 < _list19.size; ++_i20) + org.apache.thrift.protocol.TList _list27 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); + struct.columns = new ArrayList(_list27.size); + for (int _i28 = 0; _i28 < _list27.size; ++_i28) { - TColumn _elem21; // required - _elem21 = new TColumn(); - _elem21.read(iprot); - struct.columns.add(_elem21); + TColumn _elem29; + _elem29 = new TColumn(); + _elem29.read(iprot); + struct.columns.add(_elem29); } } 
struct.setColumnsIsSet(true); @@ -1124,19 +1215,24 @@ public class TGet implements org.apache.thrift.TBase, java.i } if (incoming.get(5)) { { - org.apache.thrift.protocol.TMap _map22 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32()); - struct.attributes = new HashMap(2*_map22.size); - for (int _i23 = 0; _i23 < _map22.size; ++_i23) + org.apache.thrift.protocol.TMap _map30 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32()); + struct.attributes = new HashMap(2*_map30.size); + for (int _i31 = 0; _i31 < _map30.size; ++_i31) { - ByteBuffer _key24; // required - ByteBuffer _val25; // required - _key24 = iprot.readBinary(); - _val25 = iprot.readBinary(); - struct.attributes.put(_key24, _val25); + ByteBuffer _key32; + ByteBuffer _val33; + _key32 = iprot.readBinary(); + _val33 = iprot.readBinary(); + struct.attributes.put(_key32, _val33); } } struct.setAttributesIsSet(true); } + if (incoming.get(6)) { + struct.authorizations = new TAuthorization(); + struct.authorizations.read(iprot); + struct.setAuthorizationsIsSet(true); + } } } diff --git hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java index 05d01a9..3995493 100644 --- hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java +++ hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java @@ -1,5 +1,5 @@ /** - * Autogenerated by Thrift Compiler (0.9.0) + * Autogenerated by Thrift Compiler (0.9.1) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated @@ -15,6 +15,8 @@ import org.apache.thrift.protocol.TTupleProtocol; import org.apache.thrift.protocol.TProtocolException; import org.apache.thrift.EncodingUtils; import org.apache.thrift.TException; +import 
org.apache.thrift.async.AsyncMethodCallback; +import org.apache.thrift.server.AbstractNonblockingServer.*; import java.util.List; import java.util.ArrayList; import java.util.Map; @@ -34,7 +36,7 @@ import org.slf4j.LoggerFactory; * Any timestamps in the columns are ignored, use timeRange to select by timestamp. * Max versions defaults to 1. */ -public class TScan implements org.apache.thrift.TBase, java.io.Serializable, Cloneable { +public class TScan implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TScan"); private static final org.apache.thrift.protocol.TField START_ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("startRow", org.apache.thrift.protocol.TType.STRING, (short)1); @@ -46,6 +48,7 @@ public class TScan implements org.apache.thrift.TBase, jav private static final org.apache.thrift.protocol.TField FILTER_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("filterString", org.apache.thrift.protocol.TType.STRING, (short)7); private static final org.apache.thrift.protocol.TField BATCH_SIZE_FIELD_DESC = new org.apache.thrift.protocol.TField("batchSize", org.apache.thrift.protocol.TType.I32, (short)8); private static final org.apache.thrift.protocol.TField ATTRIBUTES_FIELD_DESC = new org.apache.thrift.protocol.TField("attributes", org.apache.thrift.protocol.TType.MAP, (short)9); + private static final org.apache.thrift.protocol.TField AUTHORIZATIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("authorizations", org.apache.thrift.protocol.TType.STRUCT, (short)10); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { @@ -62,6 +65,7 @@ public class TScan implements org.apache.thrift.TBase, jav public ByteBuffer filterString; // optional public int batchSize; // optional public Map attributes; // optional + public TAuthorization authorizations; // optional /** The set 
of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { @@ -73,7 +77,8 @@ public class TScan implements org.apache.thrift.TBase, jav TIME_RANGE((short)6, "timeRange"), FILTER_STRING((short)7, "filterString"), BATCH_SIZE((short)8, "batchSize"), - ATTRIBUTES((short)9, "attributes"); + ATTRIBUTES((short)9, "attributes"), + AUTHORIZATIONS((short)10, "authorizations"); private static final Map byName = new HashMap(); @@ -106,6 +111,8 @@ public class TScan implements org.apache.thrift.TBase, jav return BATCH_SIZE; case 9: // ATTRIBUTES return ATTRIBUTES; + case 10: // AUTHORIZATIONS + return AUTHORIZATIONS; default: return null; } @@ -150,7 +157,7 @@ public class TScan implements org.apache.thrift.TBase, jav private static final int __MAXVERSIONS_ISSET_ID = 1; private static final int __BATCHSIZE_ISSET_ID = 2; private byte __isset_bitfield = 0; - private _Fields optionals[] = {_Fields.START_ROW,_Fields.STOP_ROW,_Fields.COLUMNS,_Fields.CACHING,_Fields.MAX_VERSIONS,_Fields.TIME_RANGE,_Fields.FILTER_STRING,_Fields.BATCH_SIZE,_Fields.ATTRIBUTES}; + private _Fields optionals[] = {_Fields.START_ROW,_Fields.STOP_ROW,_Fields.COLUMNS,_Fields.CACHING,_Fields.MAX_VERSIONS,_Fields.TIME_RANGE,_Fields.FILTER_STRING,_Fields.BATCH_SIZE,_Fields.ATTRIBUTES,_Fields.AUTHORIZATIONS}; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); @@ -175,6 +182,8 @@ public class TScan implements org.apache.thrift.TBase, jav new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true), new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)))); + 
tmpMap.put(_Fields.AUTHORIZATIONS, new org.apache.thrift.meta_data.FieldMetaData("authorizations", org.apache.thrift.TFieldRequirementType.OPTIONAL, + new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TAuthorization.class))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TScan.class, metaDataMap); } @@ -198,7 +207,7 @@ public class TScan implements org.apache.thrift.TBase, jav ; } if (other.isSetColumns()) { - List __this__columns = new ArrayList(); + List __this__columns = new ArrayList(other.columns.size()); for (TColumn other_element : other.columns) { __this__columns.add(new TColumn(other_element)); } @@ -215,22 +224,12 @@ public class TScan implements org.apache.thrift.TBase, jav } this.batchSize = other.batchSize; if (other.isSetAttributes()) { - Map __this__attributes = new HashMap(); - for (Map.Entry other_element : other.attributes.entrySet()) { - - ByteBuffer other_element_key = other_element.getKey(); - ByteBuffer other_element_value = other_element.getValue(); - - ByteBuffer __this__attributes_copy_key = org.apache.thrift.TBaseHelper.copyBinary(other_element_key); -; - - ByteBuffer __this__attributes_copy_value = org.apache.thrift.TBaseHelper.copyBinary(other_element_value); -; - - __this__attributes.put(__this__attributes_copy_key, __this__attributes_copy_value); - } + Map __this__attributes = new HashMap(other.attributes); this.attributes = __this__attributes; } + if (other.isSetAuthorizations()) { + this.authorizations = new TAuthorization(other.authorizations); + } } public TScan deepCopy() { @@ -251,6 +250,7 @@ public class TScan implements org.apache.thrift.TBase, jav setBatchSizeIsSet(false); this.batchSize = 0; this.attributes = null; + this.authorizations = null; } public byte[] getStartRow() { @@ -522,6 +522,30 @@ public class TScan implements org.apache.thrift.TBase, jav } } + public TAuthorization getAuthorizations() { + return 
this.authorizations; + } + + public TScan setAuthorizations(TAuthorization authorizations) { + this.authorizations = authorizations; + return this; + } + + public void unsetAuthorizations() { + this.authorizations = null; + } + + /** Returns true if field authorizations is set (has been assigned a value) and false otherwise */ + public boolean isSetAuthorizations() { + return this.authorizations != null; + } + + public void setAuthorizationsIsSet(boolean value) { + if (!value) { + this.authorizations = null; + } + } + public void setFieldValue(_Fields field, Object value) { switch (field) { case START_ROW: @@ -596,6 +620,14 @@ public class TScan implements org.apache.thrift.TBase, jav } break; + case AUTHORIZATIONS: + if (value == null) { + unsetAuthorizations(); + } else { + setAuthorizations((TAuthorization)value); + } + break; + } } @@ -628,6 +660,9 @@ public class TScan implements org.apache.thrift.TBase, jav case ATTRIBUTES: return getAttributes(); + case AUTHORIZATIONS: + return getAuthorizations(); + } throw new IllegalStateException(); } @@ -657,6 +692,8 @@ public class TScan implements org.apache.thrift.TBase, jav return isSetBatchSize(); case ATTRIBUTES: return isSetAttributes(); + case AUTHORIZATIONS: + return isSetAuthorizations(); } throw new IllegalStateException(); } @@ -755,6 +792,15 @@ public class TScan implements org.apache.thrift.TBase, jav return false; } + boolean this_present_authorizations = true && this.isSetAuthorizations(); + boolean that_present_authorizations = true && that.isSetAuthorizations(); + if (this_present_authorizations || that_present_authorizations) { + if (!(this_present_authorizations && that_present_authorizations)) + return false; + if (!this.authorizations.equals(that.authorizations)) + return false; + } + return true; } @@ -763,100 +809,110 @@ public class TScan implements org.apache.thrift.TBase, jav return 0; } + @Override public int compareTo(TScan other) { if (!getClass().equals(other.getClass())) { return 
getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; - TScan typedOther = (TScan)other; - lastComparison = Boolean.valueOf(isSetStartRow()).compareTo(typedOther.isSetStartRow()); + lastComparison = Boolean.valueOf(isSetStartRow()).compareTo(other.isSetStartRow()); if (lastComparison != 0) { return lastComparison; } if (isSetStartRow()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.startRow, typedOther.startRow); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.startRow, other.startRow); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetStopRow()).compareTo(typedOther.isSetStopRow()); + lastComparison = Boolean.valueOf(isSetStopRow()).compareTo(other.isSetStopRow()); if (lastComparison != 0) { return lastComparison; } if (isSetStopRow()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.stopRow, typedOther.stopRow); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.stopRow, other.stopRow); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetColumns()).compareTo(typedOther.isSetColumns()); + lastComparison = Boolean.valueOf(isSetColumns()).compareTo(other.isSetColumns()); if (lastComparison != 0) { return lastComparison; } if (isSetColumns()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.columns, typedOther.columns); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.columns, other.columns); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetCaching()).compareTo(typedOther.isSetCaching()); + lastComparison = Boolean.valueOf(isSetCaching()).compareTo(other.isSetCaching()); if (lastComparison != 0) { return lastComparison; } if (isSetCaching()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.caching, typedOther.caching); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.caching, 
other.caching); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetMaxVersions()).compareTo(typedOther.isSetMaxVersions()); + lastComparison = Boolean.valueOf(isSetMaxVersions()).compareTo(other.isSetMaxVersions()); if (lastComparison != 0) { return lastComparison; } if (isSetMaxVersions()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.maxVersions, typedOther.maxVersions); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.maxVersions, other.maxVersions); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetTimeRange()).compareTo(typedOther.isSetTimeRange()); + lastComparison = Boolean.valueOf(isSetTimeRange()).compareTo(other.isSetTimeRange()); if (lastComparison != 0) { return lastComparison; } if (isSetTimeRange()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.timeRange, typedOther.timeRange); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.timeRange, other.timeRange); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetFilterString()).compareTo(typedOther.isSetFilterString()); + lastComparison = Boolean.valueOf(isSetFilterString()).compareTo(other.isSetFilterString()); if (lastComparison != 0) { return lastComparison; } if (isSetFilterString()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.filterString, typedOther.filterString); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.filterString, other.filterString); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetBatchSize()).compareTo(typedOther.isSetBatchSize()); + lastComparison = Boolean.valueOf(isSetBatchSize()).compareTo(other.isSetBatchSize()); if (lastComparison != 0) { return lastComparison; } if (isSetBatchSize()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.batchSize, typedOther.batchSize); + lastComparison = 
org.apache.thrift.TBaseHelper.compareTo(this.batchSize, other.batchSize); if (lastComparison != 0) { return lastComparison; } } - lastComparison = Boolean.valueOf(isSetAttributes()).compareTo(typedOther.isSetAttributes()); + lastComparison = Boolean.valueOf(isSetAttributes()).compareTo(other.isSetAttributes()); if (lastComparison != 0) { return lastComparison; } if (isSetAttributes()) { - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.attributes, typedOther.attributes); + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.attributes, other.attributes); + if (lastComparison != 0) { + return lastComparison; + } + } + lastComparison = Boolean.valueOf(isSetAuthorizations()).compareTo(other.isSetAuthorizations()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetAuthorizations()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.authorizations, other.authorizations); if (lastComparison != 0) { return lastComparison; } @@ -958,6 +1014,16 @@ public class TScan implements org.apache.thrift.TBase, jav } first = false; } + if (isSetAuthorizations()) { + if (!first) sb.append(", "); + sb.append("authorizations:"); + if (this.authorizations == null) { + sb.append("null"); + } else { + sb.append(this.authorizations); + } + first = false; + } sb.append(")"); return sb.toString(); } @@ -968,6 +1034,9 @@ public class TScan implements org.apache.thrift.TBase, jav if (timeRange != null) { timeRange.validate(); } + if (authorizations != null) { + authorizations.validate(); + } } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { @@ -1025,14 +1094,14 @@ public class TScan implements org.apache.thrift.TBase, jav case 3: // COLUMNS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { - org.apache.thrift.protocol.TList _list80 = iprot.readListBegin(); - struct.columns = new ArrayList(_list80.size); - for (int _i81 = 0; _i81 < _list80.size; ++_i81) + 
org.apache.thrift.protocol.TList _list88 = iprot.readListBegin(); + struct.columns = new ArrayList(_list88.size); + for (int _i89 = 0; _i89 < _list88.size; ++_i89) { - TColumn _elem82; // required - _elem82 = new TColumn(); - _elem82.read(iprot); - struct.columns.add(_elem82); + TColumn _elem90; + _elem90 = new TColumn(); + _elem90.read(iprot); + struct.columns.add(_elem90); } iprot.readListEnd(); } @@ -1085,15 +1154,15 @@ public class TScan implements org.apache.thrift.TBase, jav case 9: // ATTRIBUTES if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { - org.apache.thrift.protocol.TMap _map83 = iprot.readMapBegin(); - struct.attributes = new HashMap(2*_map83.size); - for (int _i84 = 0; _i84 < _map83.size; ++_i84) + org.apache.thrift.protocol.TMap _map91 = iprot.readMapBegin(); + struct.attributes = new HashMap(2*_map91.size); + for (int _i92 = 0; _i92 < _map91.size; ++_i92) { - ByteBuffer _key85; // required - ByteBuffer _val86; // required - _key85 = iprot.readBinary(); - _val86 = iprot.readBinary(); - struct.attributes.put(_key85, _val86); + ByteBuffer _key93; + ByteBuffer _val94; + _key93 = iprot.readBinary(); + _val94 = iprot.readBinary(); + struct.attributes.put(_key93, _val94); } iprot.readMapEnd(); } @@ -1102,6 +1171,15 @@ public class TScan implements org.apache.thrift.TBase, jav org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; + case 10: // AUTHORIZATIONS + if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { + struct.authorizations = new TAuthorization(); + struct.authorizations.read(iprot); + struct.setAuthorizationsIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } @@ -1136,9 +1214,9 @@ public class TScan implements org.apache.thrift.TBase, jav oprot.writeFieldBegin(COLUMNS_FIELD_DESC); { oprot.writeListBegin(new 
org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.columns.size())); - for (TColumn _iter87 : struct.columns) + for (TColumn _iter95 : struct.columns) { - _iter87.write(oprot); + _iter95.write(oprot); } oprot.writeListEnd(); } @@ -1179,16 +1257,23 @@ public class TScan implements org.apache.thrift.TBase, jav oprot.writeFieldBegin(ATTRIBUTES_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.attributes.size())); - for (Map.Entry _iter88 : struct.attributes.entrySet()) + for (Map.Entry _iter96 : struct.attributes.entrySet()) { - oprot.writeBinary(_iter88.getKey()); - oprot.writeBinary(_iter88.getValue()); + oprot.writeBinary(_iter96.getKey()); + oprot.writeBinary(_iter96.getValue()); } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } } + if (struct.authorizations != null) { + if (struct.isSetAuthorizations()) { + oprot.writeFieldBegin(AUTHORIZATIONS_FIELD_DESC); + struct.authorizations.write(oprot); + oprot.writeFieldEnd(); + } + } oprot.writeFieldStop(); oprot.writeStructEnd(); } @@ -1234,7 +1319,10 @@ public class TScan implements org.apache.thrift.TBase, jav if (struct.isSetAttributes()) { optionals.set(8); } - oprot.writeBitSet(optionals, 9); + if (struct.isSetAuthorizations()) { + optionals.set(9); + } + oprot.writeBitSet(optionals, 10); if (struct.isSetStartRow()) { oprot.writeBinary(struct.startRow); } @@ -1244,9 +1332,9 @@ public class TScan implements org.apache.thrift.TBase, jav if (struct.isSetColumns()) { { oprot.writeI32(struct.columns.size()); - for (TColumn _iter89 : struct.columns) + for (TColumn _iter97 : struct.columns) { - _iter89.write(oprot); + _iter97.write(oprot); } } } @@ -1268,19 +1356,22 @@ public class TScan implements org.apache.thrift.TBase, jav if (struct.isSetAttributes()) { { oprot.writeI32(struct.attributes.size()); - for (Map.Entry _iter90 : struct.attributes.entrySet()) + for (Map.Entry _iter98 : 
struct.attributes.entrySet()) { - oprot.writeBinary(_iter90.getKey()); - oprot.writeBinary(_iter90.getValue()); + oprot.writeBinary(_iter98.getKey()); + oprot.writeBinary(_iter98.getValue()); } } } + if (struct.isSetAuthorizations()) { + struct.authorizations.write(oprot); + } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, TScan struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; - BitSet incoming = iprot.readBitSet(9); + BitSet incoming = iprot.readBitSet(10); if (incoming.get(0)) { struct.startRow = iprot.readBinary(); struct.setStartRowIsSet(true); @@ -1291,14 +1382,14 @@ public class TScan implements org.apache.thrift.TBase, jav } if (incoming.get(2)) { { - org.apache.thrift.protocol.TList _list91 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); - struct.columns = new ArrayList(_list91.size); - for (int _i92 = 0; _i92 < _list91.size; ++_i92) + org.apache.thrift.protocol.TList _list99 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); + struct.columns = new ArrayList(_list99.size); + for (int _i100 = 0; _i100 < _list99.size; ++_i100) { - TColumn _elem93; // required - _elem93 = new TColumn(); - _elem93.read(iprot); - struct.columns.add(_elem93); + TColumn _elem101; + _elem101 = new TColumn(); + _elem101.read(iprot); + struct.columns.add(_elem101); } } struct.setColumnsIsSet(true); @@ -1326,19 +1417,24 @@ public class TScan implements org.apache.thrift.TBase, jav } if (incoming.get(8)) { { - org.apache.thrift.protocol.TMap _map94 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32()); - struct.attributes = new HashMap(2*_map94.size); - for (int _i95 = 0; _i95 < _map94.size; ++_i95) + org.apache.thrift.protocol.TMap _map102 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, 
org.apache.thrift.protocol.TType.STRING, iprot.readI32()); + struct.attributes = new HashMap(2*_map102.size); + for (int _i103 = 0; _i103 < _map102.size; ++_i103) { - ByteBuffer _key96; // required - ByteBuffer _val97; // required - _key96 = iprot.readBinary(); - _val97 = iprot.readBinary(); - struct.attributes.put(_key96, _val97); + ByteBuffer _key104; + ByteBuffer _val105; + _key104 = iprot.readBinary(); + _val105 = iprot.readBinary(); + struct.attributes.put(_key104, _val105); } } struct.setAttributesIsSet(true); } + if (incoming.get(9)) { + struct.authorizations = new TAuthorization(); + struct.authorizations.read(iprot); + struct.setAuthorizationsIsSet(true); + } } } diff --git hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift index 4590ec9..0a508a1 100644 --- hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift +++ hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift @@ -92,6 +92,10 @@ enum TDurability { FSYNC_WAL = 4 } +struct TAuthorization { + 1: optional list labels +} + /** * Used to perform Get operations on a single row. 
* @@ -115,6 +119,7 @@ struct TGet { 5: optional i32 maxVersions, 6: optional binary filterString, 7: optional map attributes + 8: optional TAuthorization authorizations } /** @@ -201,6 +206,7 @@ struct TScan { 7: optional binary filterString, 8: optional i32 batchSize, 9: optional map attributes + 10: optional TAuthorization authorizations } /** diff --git hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java new file mode 100644 index 0000000..777a6cb --- /dev/null +++ hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java @@ -0,0 +1,335 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.thrift2; + +import static java.nio.ByteBuffer.wrap; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.Builder; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService; +import org.apache.hadoop.hbase.security.visibility.VisibilityClient; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; +import org.apache.hadoop.hbase.security.visibility.VisibilityController; +import org.apache.hadoop.hbase.security.visibility.VisibilityUtils; +import org.apache.hadoop.hbase.thrift2.generated.TAuthorization; +import org.apache.hadoop.hbase.thrift2.generated.TColumn; +import 
org.apache.hadoop.hbase.thrift2.generated.TColumnValue; +import org.apache.hadoop.hbase.thrift2.generated.TGet; +import org.apache.hadoop.hbase.thrift2.generated.TIllegalArgument; +import org.apache.hadoop.hbase.thrift2.generated.TPut; +import org.apache.hadoop.hbase.thrift2.generated.TResult; +import org.apache.hadoop.hbase.thrift2.generated.TScan; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import com.google.protobuf.ByteString; + +@Category(MediumTests.class) +public class TestThriftHBaseServiceHandlerWithLabels { + + public static final Log LOG = LogFactory.getLog(TestThriftHBaseServiceHandlerWithLabels.class); + private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); + + // Static names for tables, columns, rows, and values + private static byte[] tableAname = Bytes.toBytes("tableA"); + private static byte[] familyAname = Bytes.toBytes("familyA"); + private static byte[] familyBname = Bytes.toBytes("familyB"); + private static byte[] qualifierAname = Bytes.toBytes("qualifierA"); + private static byte[] qualifierBname = Bytes.toBytes("qualifierB"); + private static byte[] valueAname = Bytes.toBytes("valueA"); + private static byte[] valueBname = Bytes.toBytes("valueB"); + private static HColumnDescriptor[] families = new HColumnDescriptor[] { + new HColumnDescriptor(familyAname).setMaxVersions(3), + new HColumnDescriptor(familyBname).setMaxVersions(2) }; + + private final static String TOPSECRET = "topsecret"; + private final static String PUBLIC = "public"; + private final static String PRIVATE = "private"; + private final static String CONFIDENTIAL = "confidential"; + private final static String SECRET = "secret"; + + private static Configuration conf; + + public void assertTColumnValuesEqual(List columnValuesA, + List columnValuesB) { + 
assertEquals(columnValuesA.size(), columnValuesB.size()); + Comparator comparator = new Comparator() { + @Override + public int compare(TColumnValue o1, TColumnValue o2) { + return Bytes.compareTo(Bytes.add(o1.getFamily(), o1.getQualifier()), + Bytes.add(o2.getFamily(), o2.getQualifier())); + } + }; + Collections.sort(columnValuesA, comparator); + Collections.sort(columnValuesB, comparator); + + for (int i = 0; i < columnValuesA.size(); i++) { + TColumnValue a = columnValuesA.get(i); + TColumnValue b = columnValuesB.get(i); + assertArrayEquals(a.getFamily(), b.getFamily()); + assertArrayEquals(a.getQualifier(), b.getQualifier()); + assertArrayEquals(a.getValue(), b.getValue()); + } + } + + @BeforeClass + public static void beforeClass() throws Exception { + conf = UTIL.getConfiguration(); + conf.set("hbase.coprocessor.master.classes", VisibilityController.class.getName()); + conf.set("hbase.coprocessor.region.classes", VisibilityController.class.getName()); + UTIL.startMiniCluster(1); + // Wait for the labels table to become available + UTIL.waitTableEnabled(VisibilityConstants.LABELS_TABLE_NAME.getName(), 50000); + createLabels(); + HBaseAdmin admin = new HBaseAdmin(UTIL.getConfiguration()); + HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(tableAname)); + for (HColumnDescriptor family : families) { + tableDescriptor.addFamily(family); + } + admin.createTable(tableDescriptor); + } + + private static void createLabels() throws IOException { + String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, PUBLIC, TOPSECRET }; + try { + VisibilityClient.addLabels(conf, labels); + } catch (Throwable t) { + throw new IOException(t); + } + } + + @AfterClass + public static void afterClass() throws Exception { + UTIL.shutdownMiniCluster(); + } + + @Before + public void setup() throws Exception { + + } + + private ThriftHBaseServiceHandler createHandler() { + return new ThriftHBaseServiceHandler(UTIL.getConfiguration()); + } + + @Test + public void 
testScanWithVisibilityLabels() throws Exception { + ThriftHBaseServiceHandler handler = createHandler(); + ByteBuffer table = wrap(tableAname); + + // insert data + TColumnValue columnValue = new TColumnValue(wrap(familyAname), wrap(qualifierAname), + wrap(valueAname)); + List columnValues = new ArrayList(); + columnValues.add(columnValue); + for (int i = 0; i < 10; i++) { + TPut put = new TPut(wrap(("testScan" + i).getBytes()), columnValues); + Map attributes = new HashMap(); + if (i == 5) { + attributes.put(wrap(Bytes.toBytes(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY)), + wrap(Bytes.toBytes(PUBLIC))); + } else { + attributes.put(wrap(Bytes.toBytes(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY)), + wrap(Bytes.toBytes("(" + SECRET + "|" + CONFIDENTIAL + ")" + "&" + "!" + TOPSECRET))); + } + + put.setAttributes(attributes); + handler.put(table, put); + } + + // create scan instance + TScan scan = new TScan(); + List columns = new ArrayList(); + TColumn column = new TColumn(); + column.setFamily(familyAname); + column.setQualifier(qualifierAname); + columns.add(column); + scan.setColumns(columns); + scan.setStartRow("testScan".getBytes()); + scan.setStopRow("testScan\uffff".getBytes()); + + TAuthorization tauth = new TAuthorization(); + List labels = new ArrayList(); + labels.add(SECRET); + labels.add(PRIVATE); + tauth.setLabels(labels); + scan.setAuthorizations(tauth); + // get scanner and rows + int scanId = handler.openScanner(table, scan); + List results = handler.getScannerRows(scanId, 10); + assertEquals(9, results.size()); + Assert.assertFalse(Bytes.equals(results.get(5).getRow(), ("testScan" + 5).getBytes())); + for (int i = 0; i < 9; i++) { + if (i < 5) { + assertArrayEquals(("testScan" + i).getBytes(), results.get(i).getRow()); + } else if (i == 5) { + continue; + } else { + assertArrayEquals(("testScan" + (i + 1)).getBytes(), results.get(i).getRow()); + } + } + + // check that we are at the end of the scan + results = handler.getScannerRows(scanId, 
9); + assertEquals(0, results.size()); + + // close scanner and check that it was indeed closed + handler.closeScanner(scanId); + try { + handler.getScannerRows(scanId, 9); + fail("Scanner id should be invalid"); + } catch (TIllegalArgument e) { + } + } + + @Test + public void testGetScannerResultsWithAuthroizations() throws Exception { + ThriftHBaseServiceHandler handler = createHandler(); + ByteBuffer table = wrap(tableAname); + + // insert data + TColumnValue columnValue = new TColumnValue(wrap(familyAname), wrap(qualifierAname), + wrap(valueAname)); + List columnValues = new ArrayList(); + columnValues.add(columnValue); + for (int i = 0; i < 20; i++) { + TPut put = new TPut(wrap(("testGetScannerResults" + pad(i, (byte) 2)).getBytes()), + columnValues); + Map attributes = new HashMap(); + if (i == 3) { + attributes.put(wrap(Bytes.toBytes(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY)), + wrap(Bytes.toBytes(PUBLIC))); + } else { + attributes.put(wrap(Bytes.toBytes(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY)), + wrap(Bytes.toBytes("(" + SECRET + "|" + CONFIDENTIAL + ")" + "&" + "!" 
+ TOPSECRET))); + } + + put.setAttributes(attributes); + handler.put(table, put); + } + + // create scan instance + TScan scan = new TScan(); + List columns = new ArrayList(); + TColumn column = new TColumn(); + column.setFamily(familyAname); + column.setQualifier(qualifierAname); + columns.add(column); + scan.setColumns(columns); + scan.setStartRow("testGetScannerResults".getBytes()); + + // get 5 rows and check the returned results + scan.setStopRow("testGetScannerResults05".getBytes()); + TAuthorization tauth = new TAuthorization(); + List labels = new ArrayList(); + labels.add(SECRET); + labels.add(PRIVATE); + tauth.setLabels(labels); + scan.setAuthorizations(tauth); + List results = handler.getScannerResults(table, scan, 5); + assertEquals(4, results.size()); + for (int i = 0; i < 4; i++) { + if (i < 3) { + assertArrayEquals(("testGetScannerResults" + pad(i, (byte) 2)).getBytes(), results.get(i) + .getRow()); + } else if (i == 3) { + continue; + } else { + assertArrayEquals(("testGetScannerResults" + pad(i + 1, (byte) 2)).getBytes(), + results.get(i).getRow()); + } + } + } + + @Test + public void testGetsWithLabels() throws Exception { + ThriftHBaseServiceHandler handler = createHandler(); + byte[] rowName = "testPutGet".getBytes(); + ByteBuffer table = wrap(tableAname); + + List columnValues = new ArrayList(); + columnValues.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname))); + columnValues.add(new TColumnValue(wrap(familyBname), wrap(qualifierBname), wrap(valueBname))); + TPut put = new TPut(wrap(rowName), columnValues); + + put.setColumnValues(columnValues); + Map attributes = new HashMap(); + handler.put(table, put); + attributes.put(wrap(Bytes.toBytes(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY)), + wrap(Bytes.toBytes("(" + SECRET + "|" + CONFIDENTIAL + ")" + "&" + "!" 
+ TOPSECRET))); + TGet get = new TGet(wrap(rowName)); + TAuthorization tauth = new TAuthorization(); + List labels = new ArrayList(); + labels.add(SECRET); + labels.add(PRIVATE); + tauth.setLabels(labels); + get.setAuthorizations(tauth); + TResult result = handler.get(table, get); + assertArrayEquals(rowName, result.getRow()); + List returnedColumnValues = result.getColumnValues(); + assertTColumnValuesEqual(columnValues, returnedColumnValues); + } + /** + * Padding numbers to make comparison of sort order easier in a for loop + * + * @param n + * The number to pad. + * @param pad + * The length to pad up to. + * @return The padded number as a string. + */ + private String pad(int n, byte pad) { + String res = Integer.toString(n); + while (res.length() < pad) + res = "0" + res; + return res; + } +}