diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java index 3eee907..d8c6a12 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java @@ -34,6 +34,8 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.io.TimeRange; +import org.apache.hadoop.hbase.security.visibility.Authorizations; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; import org.apache.hadoop.hbase.util.Bytes; /** @@ -246,6 +248,17 @@ public class Get extends OperationWithAttributes } /** + * Sets the authorizations to be used by this Get + * + * @param authorizations + * @return this + */ + public Get setAuthorizations(Authorizations authorizations) { + this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, authorizations.toBytes()); + return this; + } + + /** * Get whether blocks should be cached for this Get. 
* @return true if default caching should be used, false if blocks should not * be cached diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java index 41ec446..87b893b 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java @@ -33,6 +33,8 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.io.HeapSize; +import org.apache.hadoop.hbase.security.visibility.CellVisibility; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; import org.apache.hadoop.hbase.util.Bytes; /** @@ -308,4 +310,13 @@ public class Put extends Mutation implements HeapSize, Comparable { } return filteredList; } + + /** + * Sets the visibility expression associated with cells in this Put. + * + * @param expression + */ + public void setCellVisibility(CellVisibility expression) { + this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, expression.toBytes()); + } } diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java index 0c5565b..b287af2 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java @@ -25,6 +25,8 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.IncompatibleFilterException; import org.apache.hadoop.hbase.io.TimeRange; +import org.apache.hadoop.hbase.security.visibility.Authorizations; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; import org.apache.hadoop.hbase.util.Bytes; import java.io.IOException; @@ -752,4 +754,15 @@ public class Scan extends OperationWithAttributes { public boolean 
isSmall() { return small; } + + /** + * Sets the authorizations to be used by this Scan + * + * @param authorizations + * @return this + */ + public Scan setAuthorizations(Authorizations authorizations) { + this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, authorizations.toBytes()); + return this; + } } diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java new file mode 100644 index 0000000..9bbb645 --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java @@ -0,0 +1,102 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.util.Bytes; + +/** + * This class contains visibility labels associated with a Scan/Get deciding which all labeled data + * current scan can access. 
+ */ +@InterfaceAudience.Public +@InterfaceStability.Stable +public class Authorizations { + + private List labels; + + public Authorizations(String... labels) { + this.labels = new ArrayList(labels.length); + for (String label : labels) { + this.labels.add(label); + } + } + + public Authorizations(List labels) { + this.labels = labels; + } + + public List getLabels() { + return this.labels; + } + + public byte[] toBytes() { + int length = 0; + for (String label : labels) { + length += label.length(); + } + ByteArrayOutputStream baos = new ByteArrayOutputStream(length + + (labels.size() * Bytes.SIZEOF_BYTE)); + for (String label : labels) { + baos.write(label.length()); + byte[] labelBytes = Bytes.toBytes(label); + for (byte b : labelBytes) { + if (!VisibilityLabelsValidator.isValidAuthChar(b)) { + throw new IllegalArgumentException("Invalid character found in visibility labels " + b); + } + } + try { + baos.write(labelBytes); + } catch (IOException e) { + // We use ByteArrayOutputStream. So IOE won't occur here. + } + } + return baos.toByteArray(); + } + + public static Authorizations fromBytes(byte[] b) { + List labels = new ArrayList(); + int index = 0; + while (index < b.length) { + int labelLen = b[index++]; + // TODO handle negative cases. 
+ labels.add(new String(b, index, labelLen)); + index += labelLen; + } + return new Authorizations(labels); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("["); + for (int i = 0; i < this.labels.size(); i++) { + if (i != 0) { + sb.append(", "); + } + sb.append(this.labels.get(i)); + } + sb.append("]"); + return sb.toString(); + } +} \ No newline at end of file diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java new file mode 100644 index 0000000..dca1ead --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java @@ -0,0 +1,41 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.util.Bytes; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class CellVisibility { + + private String expression; + + public CellVisibility(String expression) { + this.expression = expression; + } + + public byte[] toBytes() { + return Bytes.toBytes(this.expression); + } + + public String getExpression() { + return this.expression; + } +} diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java new file mode 100644 index 0000000..126c3c8 --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.DoNotRetryIOException; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class InvalidLabelException extends DoNotRetryIOException { + private static final long serialVersionUID = 1L; + + public InvalidLabelException(String msg) { + super(msg); + } +} + diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java new file mode 100644 index 0000000..fd07f75 --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.DoNotRetryIOException; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class LabelAlreadyExistsException extends DoNotRetryIOException { + private static final long serialVersionUID = 1L; + + public LabelAlreadyExistsException(String msg) { + super(msg); + } + +} diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java new file mode 100644 index 0000000..cfdd360 --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java @@ -0,0 +1,103 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; + +import java.io.IOException; +import java.util.Map; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.Builder; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService; +import org.apache.hadoop.hbase.util.Bytes; + +import com.google.protobuf.ByteString; + +@InterfaceAudience.Public +@InterfaceStability.Evolving +public class VisibilityClient { + + /** + * Utility method for adding labels to the system. + * + * @param conf + * @param label + * @return VisibilityLabelsResponse + * @throws Throwable + */ + public static VisibilityLabelsResponse addLabel(Configuration conf, final String label) + throws Throwable { + VisibilityLabelsResponse addLabels = addLabels(conf, new String[] { label }); + return addLabels; + } + + /** + * Utility method for adding labels to the system. 
+ * + * @param conf + * @param labels + * @return VisibilityLabelsResponse + * @throws Throwable + */ + public static VisibilityLabelsResponse addLabels(Configuration conf, final String[] labels) + throws Throwable { + HTable ht = null; + try { + ht = new HTable(conf, LABELS_TABLE_NAME.getName()); + Batch.Call callable = + new Batch.Call() { + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback rpcCallback = + new BlockingRpcCallback(); + + public VisibilityLabelsResponse call(VisibilityLabelsService service) throws IOException { + Builder builder = VisibilityLabelsRequest.newBuilder(); + for (String label : labels) { + if (label.length() > 0) { + VisibilityLabel.Builder newBuilder = VisibilityLabel.newBuilder(); + newBuilder.setLabel(ByteString.copyFrom(Bytes.toBytes(label))); + builder.addVisLabel(newBuilder.build()); + } + } + service.addLabels(controller, builder.build(), rpcCallback); + return rpcCallback.get(); + } + }; + Map result = ht.coprocessorService( + VisibilityLabelsService.class, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, + callable); + return result.values().iterator().next(); // There will be exactly one region for labels + // table and so one entry in result Map. + } finally { + if (ht != null) { + ht.close(); + } + } + } +} diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java new file mode 100644 index 0000000..9be39aa --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java @@ -0,0 +1,42 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.util.Bytes; + +@InterfaceAudience.Private +public final class VisibilityConstants { + + /** + * The string that is used as key in setting the Operation attributes for visibility labels + */ + public static final String VISIBILITY_LABELS_ATTR_KEY = "VISIBILITY"; + + /** Internal storage table for visibility labels */ + public static final TableName LABELS_TABLE_NAME = TableName.valueOf( + NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "labels"); + + /** Family for the internal storage table for visibility labels */ + public static final byte[] LABELS_TABLE_FAMILY = Bytes.toBytes("l"); + + /** Qualifier for the internal storage table for visibility labels */ + public static final byte[] LABELS_TABLE_QUALIFIER = Bytes.toBytes("q"); +} diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsValidator.java hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsValidator.java new file mode 100644 index 0000000..bcc5df3 --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsValidator.java @@ -0,0 +1,65 @@ +/* + * Licensed to the 
Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; + +/** + * A simple validator that validates the labels passed + */ +@InterfaceAudience.Private +public class VisibilityLabelsValidator { + private static final boolean[] validAuthChars = new boolean[256]; + + static { + for (int i = 0; i < 256; i++) { + validAuthChars[i] = false; + } + + for (int i = 'a'; i <= 'z'; i++) { + validAuthChars[i] = true; + } + + for (int i = 'A'; i <= 'Z'; i++) { + validAuthChars[i] = true; + } + + for (int i = '0'; i <= '9'; i++) { + validAuthChars[i] = true; + } + + validAuthChars['_'] = true; + validAuthChars['-'] = true; + validAuthChars[':'] = true; + validAuthChars['.'] = true; + validAuthChars['/'] = true; + } + + static final boolean isValidAuthChar(byte b) { + return validAuthChars[0xff & b]; + } + + static final boolean isValidLabel(byte[] label) { + for (int i = 0; i < label.length; i++) { + if (!isValidAuthChar(label[i])) { + return false; + } + } + return true; + } +} diff --git hbase-client/src/test/java/org/apache/hadoop/hbase/security/visibility/TestAuthorizations.java 
hbase-client/src/test/java/org/apache/hadoop/hbase/security/visibility/TestAuthorizations.java new file mode 100644 index 0000000..f107fec --- /dev/null +++ hbase-client/src/test/java/org/apache/hadoop/hbase/security/visibility/TestAuthorizations.java @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.util.List; + +import org.apache.hadoop.hbase.SmallTests; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(SmallTests.class) +public class TestAuthorizations { + + @Test + public void testVisibilityLabels() throws Exception { + Authorizations authorizations = new Authorizations("secret", "private", "public", "confidential"); + byte[] authBytes = authorizations.toBytes(); + Authorizations authorizations2 = Authorizations.fromBytes(authBytes); + List labels = authorizations2.getLabels(); + assertEquals(4, labels.size()); + assertTrue(labels.contains("secret")); + assertTrue(labels.contains("private")); + assertTrue(labels.contains("public")); + assertTrue(labels.contains("confidential")); + } + +} diff --git hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/VisibilityLabelsProtos.java hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/VisibilityLabelsProtos.java new file mode 100644 index 0000000..34a0fbf --- /dev/null +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/VisibilityLabelsProtos.java @@ -0,0 +1,2289 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: VisibilityLabels.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class VisibilityLabelsProtos { + private VisibilityLabelsProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface VisibilityLabelsRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .VisibilityLabel visLabel = 1; + /** + * repeated .VisibilityLabel visLabel = 1; + */ + java.util.List + getVisLabelList(); + /** + * repeated .VisibilityLabel visLabel = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel getVisLabel(int index); + /** + * repeated .VisibilityLabel visLabel = 1; + */ + int getVisLabelCount(); + /** + * repeated .VisibilityLabel visLabel = 1; + */ + java.util.List + getVisLabelOrBuilderList(); + /** + * repeated .VisibilityLabel visLabel = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder getVisLabelOrBuilder( + int index); + } + /** + * Protobuf type {@code VisibilityLabelsRequest} + */ + public static final class VisibilityLabelsRequest extends + com.google.protobuf.GeneratedMessage + implements VisibilityLabelsRequestOrBuilder { + // Use VisibilityLabelsRequest.newBuilder() to construct. 
+ private VisibilityLabelsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private VisibilityLabelsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final VisibilityLabelsRequest defaultInstance; + public static VisibilityLabelsRequest getDefaultInstance() { + return defaultInstance; + } + + public VisibilityLabelsRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private VisibilityLabelsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + visLabel_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + visLabel_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { + visLabel_ = java.util.Collections.unmodifiableList(visLabel_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public VisibilityLabelsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new VisibilityLabelsRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .VisibilityLabel visLabel = 1; + public static final int VISLABEL_FIELD_NUMBER = 1; + private java.util.List visLabel_; + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List getVisLabelList() { + return visLabel_; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List + getVisLabelOrBuilderList() { + return visLabel_; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public int getVisLabelCount() { + return visLabel_.size(); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel getVisLabel(int index) { + return visLabel_.get(index); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder getVisLabelOrBuilder( + int index) { + return visLabel_.get(index); + } + + private void initFields() { + visLabel_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getVisLabelCount(); i++) { + if (!getVisLabel(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < visLabel_.size(); i++) { + output.writeMessage(1, visLabel_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < visLabel_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, visLabel_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest) obj; + + boolean result = true; + result = result && getVisLabelList() + .equals(other.getVisLabelList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getVisLabelCount() > 0) { + hash = (37 * hash) + VISLABEL_FIELD_NUMBER; + hash = (53 * hash) + getVisLabelList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public 
static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + 
protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code VisibilityLabelsRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getVisLabelFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (visLabelBuilder_ == null) { + visLabel_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + visLabelBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + 
public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest(this); + int from_bitField0_ = bitField0_; + if (visLabelBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + visLabel_ = java.util.Collections.unmodifiableList(visLabel_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.visLabel_ = visLabel_; + } else { + result.visLabel_ = visLabelBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.getDefaultInstance()) return this; + if (visLabelBuilder_ == null) { + if (!other.visLabel_.isEmpty()) { + if (visLabel_.isEmpty()) { + visLabel_ = other.visLabel_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureVisLabelIsMutable(); + visLabel_.addAll(other.visLabel_); + } + onChanged(); + } + } else { + if (!other.visLabel_.isEmpty()) { + if (visLabelBuilder_.isEmpty()) { + visLabelBuilder_.dispose(); + visLabelBuilder_ = null; + visLabel_ = other.visLabel_; + bitField0_ = (bitField0_ & ~0x00000001); + visLabelBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getVisLabelFieldBuilder() : null; + } else { + visLabelBuilder_.addAllMessages(other.visLabel_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getVisLabelCount(); i++) { + if (!getVisLabel(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .VisibilityLabel visLabel = 1; + private java.util.List visLabel_ = + java.util.Collections.emptyList(); + private void ensureVisLabelIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + visLabel_ = new 
java.util.ArrayList(visLabel_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder> visLabelBuilder_; + + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List getVisLabelList() { + if (visLabelBuilder_ == null) { + return java.util.Collections.unmodifiableList(visLabel_); + } else { + return visLabelBuilder_.getMessageList(); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public int getVisLabelCount() { + if (visLabelBuilder_ == null) { + return visLabel_.size(); + } else { + return visLabelBuilder_.getCount(); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel getVisLabel(int index) { + if (visLabelBuilder_ == null) { + return visLabel_.get(index); + } else { + return visLabelBuilder_.getMessage(index); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder setVisLabel( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel value) { + if (visLabelBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureVisLabelIsMutable(); + visLabel_.set(index, value); + onChanged(); + } else { + visLabelBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder setVisLabel( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder builderForValue) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + visLabel_.set(index, builderForValue.build()); + onChanged(); + } else { + visLabelBuilder_.setMessage(index, 
builderForValue.build()); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addVisLabel(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel value) { + if (visLabelBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureVisLabelIsMutable(); + visLabel_.add(value); + onChanged(); + } else { + visLabelBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addVisLabel( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel value) { + if (visLabelBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureVisLabelIsMutable(); + visLabel_.add(index, value); + onChanged(); + } else { + visLabelBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addVisLabel( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder builderForValue) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + visLabel_.add(builderForValue.build()); + onChanged(); + } else { + visLabelBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addVisLabel( + int index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder builderForValue) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + visLabel_.add(index, builderForValue.build()); + onChanged(); + } else { + visLabelBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder addAllVisLabel( + java.lang.Iterable values) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + super.addAll(values, visLabel_); + onChanged(); + } else { + 
visLabelBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder clearVisLabel() { + if (visLabelBuilder_ == null) { + visLabel_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + visLabelBuilder_.clear(); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public Builder removeVisLabel(int index) { + if (visLabelBuilder_ == null) { + ensureVisLabelIsMutable(); + visLabel_.remove(index); + onChanged(); + } else { + visLabelBuilder_.remove(index); + } + return this; + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder getVisLabelBuilder( + int index) { + return getVisLabelFieldBuilder().getBuilder(index); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder getVisLabelOrBuilder( + int index) { + if (visLabelBuilder_ == null) { + return visLabel_.get(index); } else { + return visLabelBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List + getVisLabelOrBuilderList() { + if (visLabelBuilder_ != null) { + return visLabelBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(visLabel_); + } + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder addVisLabelBuilder() { + return getVisLabelFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.getDefaultInstance()); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder addVisLabelBuilder( + int 
index) { + return getVisLabelFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.getDefaultInstance()); + } + /** + * repeated .VisibilityLabel visLabel = 1; + */ + public java.util.List + getVisLabelBuilderList() { + return getVisLabelFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder> + getVisLabelFieldBuilder() { + if (visLabelBuilder_ == null) { + visLabelBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder>( + visLabel_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + visLabel_ = null; + } + return visLabelBuilder_; + } + + // @@protoc_insertion_point(builder_scope:VisibilityLabelsRequest) + } + + static { + defaultInstance = new VisibilityLabelsRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:VisibilityLabelsRequest) + } + + public interface VisibilityLabelOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes label = 1; + /** + * required bytes label = 1; + */ + boolean hasLabel(); + /** + * required bytes label = 1; + */ + com.google.protobuf.ByteString getLabel(); + + // optional uint32 ordinal = 2; + /** + * optional uint32 ordinal = 2; + */ + boolean hasOrdinal(); + /** + * optional uint32 ordinal = 2; + */ + int getOrdinal(); + } + /** + * Protobuf type {@code VisibilityLabel} + */ + public static final class 
VisibilityLabel extends + com.google.protobuf.GeneratedMessage + implements VisibilityLabelOrBuilder { + // Use VisibilityLabel.newBuilder() to construct. + private VisibilityLabel(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private VisibilityLabel(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final VisibilityLabel defaultInstance; + public static VisibilityLabel getDefaultInstance() { + return defaultInstance; + } + + public VisibilityLabel getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private VisibilityLabel( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + label_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + ordinal_ = input.readUInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } 
+ public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public VisibilityLabel parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new VisibilityLabel(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes label = 1; + public static final int LABEL_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString label_; + /** + * required bytes label = 1; + */ + public boolean hasLabel() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes label = 1; + */ + public com.google.protobuf.ByteString getLabel() { + return label_; + } + + // optional uint32 ordinal = 2; + public static final int ORDINAL_FIELD_NUMBER = 2; + private int ordinal_; + /** + * optional uint32 ordinal = 2; + */ + public boolean hasOrdinal() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional uint32 ordinal = 2; + */ + public int getOrdinal() { + return ordinal_; + } + + private void initFields() { + label_ = com.google.protobuf.ByteString.EMPTY; + ordinal_ = 0; + } + private 
byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLabel()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, label_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, ordinal_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, label_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, ordinal_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel) obj; + + boolean result = true; + result = result && (hasLabel() == other.hasLabel()); + if (hasLabel()) { + result = result && getLabel() + .equals(other.getLabel()); + } + result = result && 
(hasOrdinal() == other.hasOrdinal()); + if (hasOrdinal()) { + result = result && (getOrdinal() + == other.getOrdinal()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLabel()) { + hash = (37 * hash) + LABEL_FIELD_NUMBER; + hash = (53 * hash) + getLabel().hashCode(); + } + if (hasOrdinal()) { + hash = (37 * hash) + ORDINAL_FIELD_NUMBER; + hash = (53 * hash) + getOrdinal(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom(java.io.InputStream 
input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf 
type {@code VisibilityLabel} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + label_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + ordinal_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabel_descriptor; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.label_ = label_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.ordinal_ = ordinal_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel.getDefaultInstance()) return this; + if (other.hasLabel()) { + setLabel(other.getLabel()); + } + if (other.hasOrdinal()) { + setOrdinal(other.getOrdinal()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return 
this; + } + + public final boolean isInitialized() { + if (!hasLabel()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes label = 1; + private com.google.protobuf.ByteString label_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes label = 1; + */ + public boolean hasLabel() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes label = 1; + */ + public com.google.protobuf.ByteString getLabel() { + return label_; + } + /** + * required bytes label = 1; + */ + public Builder setLabel(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + label_ = value; + onChanged(); + return this; + } + /** + * required bytes label = 1; + */ + public Builder clearLabel() { + bitField0_ = (bitField0_ & ~0x00000001); + label_ = getDefaultInstance().getLabel(); + onChanged(); + return this; + } + + // optional uint32 ordinal = 2; + private int ordinal_ ; + /** + * optional uint32 ordinal = 2; + */ + public boolean hasOrdinal() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional uint32 ordinal = 2; + */ + public int getOrdinal() { + return ordinal_; + } + /** + * optional uint32 ordinal = 2; + */ + public Builder setOrdinal(int value) { + bitField0_ |= 
0x00000002; + ordinal_ = value; + onChanged(); + return this; + } + /** + * optional uint32 ordinal = 2; + */ + public Builder clearOrdinal() { + bitField0_ = (bitField0_ & ~0x00000002); + ordinal_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:VisibilityLabel) + } + + static { + defaultInstance = new VisibilityLabel(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:VisibilityLabel) + } + + public interface VisibilityLabelsResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .RegionActionResult result = 1; + /** + * repeated .RegionActionResult result = 1; + */ + java.util.List + getResultList(); + /** + * repeated .RegionActionResult result = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getResult(int index); + /** + * repeated .RegionActionResult result = 1; + */ + int getResultCount(); + /** + * repeated .RegionActionResult result = 1; + */ + java.util.List + getResultOrBuilderList(); + /** + * repeated .RegionActionResult result = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getResultOrBuilder( + int index); + } + /** + * Protobuf type {@code VisibilityLabelsResponse} + */ + public static final class VisibilityLabelsResponse extends + com.google.protobuf.GeneratedMessage + implements VisibilityLabelsResponseOrBuilder { + // Use VisibilityLabelsResponse.newBuilder() to construct. 
+ private VisibilityLabelsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private VisibilityLabelsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final VisibilityLabelsResponse defaultInstance; + public static VisibilityLabelsResponse getDefaultInstance() { + return defaultInstance; + } + + public VisibilityLabelsResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private VisibilityLabelsResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + result_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public VisibilityLabelsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new VisibilityLabelsResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .RegionActionResult result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private java.util.List result_; + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List getResultList() { + return result_; + } + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List + getResultOrBuilderList() { + return result_; + } + /** + * repeated .RegionActionResult result = 1; + */ + public int getResultCount() { + return result_.size(); + } + /** + * repeated .RegionActionResult result = 1; + */ + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getResult(int index) { + return result_.get(index); + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getResultOrBuilder( + int index) { + return result_.get(index); + } + + private void initFields() { + result_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < result_.size(); i++) { + output.writeMessage(1, result_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < result_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse other = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) obj; + + boolean result = true; + result = result && getResultList() + .equals(other.getResultList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getResultCount() > 0) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResultList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + 
protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code VisibilityLabelsResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + resultBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + 
public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.internal_static_VisibilityLabelsResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse build() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse result = new org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse(this); + int from_bitField0_ = bitField0_; + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance()) return this; + if (resultBuilder_ == null) { + if (!other.result_.isEmpty()) { + if (result_.isEmpty()) { + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResultIsMutable(); + result_.addAll(other.result_); + } + onChanged(); + } + } else { + if (!other.result_.isEmpty()) { + if (resultBuilder_.isEmpty()) { + resultBuilder_.dispose(); + resultBuilder_ = null; + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + resultBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getResultFieldBuilder() : null; + } else { + resultBuilder_.addAllMessages(other.result_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .RegionActionResult result = 1; + private java.util.List result_ = + java.util.Collections.emptyList(); + private void ensureResultIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(result_); + bitField0_ |= 
0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> resultBuilder_; + + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List getResultList() { + if (resultBuilder_ == null) { + return java.util.Collections.unmodifiableList(result_); + } else { + return resultBuilder_.getMessageList(); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public int getResultCount() { + if (resultBuilder_ == null) { + return result_.size(); + } else { + return resultBuilder_.getCount(); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getResult(int index) { + if (resultBuilder_ == null) { + return result_.get(index); + } else { + return resultBuilder_.getMessage(index); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.set(index, value); + onChanged(); + } else { + resultBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.set(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder 
addResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(value); + onChanged(); + } else { + resultBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(index, value); + onChanged(); + } else { + resultBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder addResult( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder addAllResult( + java.lang.Iterable values) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + super.addAll(values, result_); + onChanged(); + } else { + resultBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = 
java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + resultBuilder_.clear(); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public Builder removeResult(int index) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.remove(index); + onChanged(); + } else { + resultBuilder_.remove(index); + } + return this; + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder getResultBuilder( + int index) { + return getResultFieldBuilder().getBuilder(index); + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getResultOrBuilder( + int index) { + if (resultBuilder_ == null) { + return result_.get(index); } else { + return resultBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List + getResultOrBuilderList() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(result_); + } + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addResultBuilder() { + return getResultFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()); + } + /** + * repeated .RegionActionResult result = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addResultBuilder( + int index) { + return getResultFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()); + } + /** + * repeated .RegionActionResult result = 1; + */ + public java.util.List + getResultBuilderList() { 
+ return getResultFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>( + result_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // @@protoc_insertion_point(builder_scope:VisibilityLabelsResponse) + } + + static { + defaultInstance = new VisibilityLabelsResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:VisibilityLabelsResponse) + } + + /** + * Protobuf service {@code VisibilityLabelsService} + */ + public static abstract class VisibilityLabelsService + implements com.google.protobuf.Service { + protected VisibilityLabelsService() {} + + public interface Interface { + /** + * rpc addLabels(.VisibilityLabelsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new VisibilityLabelsService() { + @java.lang.Override + public void addLabels( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request, + com.google.protobuf.RpcCallback done) { + impl.addLabels(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.addLabels(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + 
"Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + /** + * rpc addLabels(.VisibilityLabelsRequest) returns (.VisibilityLabelsResponse); + */ + public abstract void addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.addLabels(controller, (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + 
"Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.class, + 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse addLabels( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.getDefaultInstance()); + } + + } + + // @@protoc_insertion_point(class_scope:VisibilityLabelsService) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_VisibilityLabelsRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_VisibilityLabelsRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_VisibilityLabel_descriptor; + private static + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_VisibilityLabel_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_VisibilityLabelsResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_VisibilityLabelsResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\026VisibilityLabels.proto\032\014Client.proto\"=" + + "\n\027VisibilityLabelsRequest\022\"\n\010visLabel\030\001 " + + "\003(\0132\020.VisibilityLabel\"1\n\017VisibilityLabel" + + "\022\r\n\005label\030\001 \002(\014\022\017\n\007ordinal\030\002 \001(\r\"?\n\030Visi" + + "bilityLabelsResponse\022#\n\006result\030\001 \003(\0132\023.R" + + "egionActionResult2[\n\027VisibilityLabelsSer" + + "vice\022@\n\taddLabels\022\030.VisibilityLabelsRequ" + + "est\032\031.VisibilityLabelsResponseBL\n*org.ap" + + "ache.hadoop.hbase.protobuf.generatedB\026Vi" + + "sibilityLabelsProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_VisibilityLabelsRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_VisibilityLabelsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_VisibilityLabelsRequest_descriptor, + new java.lang.String[] { "VisLabel", }); + internal_static_VisibilityLabel_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_VisibilityLabel_fieldAccessorTable 
= new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_VisibilityLabel_descriptor, + new java.lang.String[] { "Label", "Ordinal", }); + internal_static_VisibilityLabelsResponse_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_VisibilityLabelsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_VisibilityLabelsResponse_descriptor, + new java.lang.String[] { "Result", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-protocol/src/main/protobuf/VisibilityLabels.proto hbase-protocol/src/main/protobuf/VisibilityLabels.proto new file mode 100644 index 0000000..269acb8 --- /dev/null +++ hbase-protocol/src/main/protobuf/VisibilityLabels.proto @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "VisibilityLabelsProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "Client.proto"; + +message VisibilityLabelsRequest { + repeated VisibilityLabel visLabel = 1; +} + +message VisibilityLabel { + required bytes label = 1; + optional uint32 ordinal = 2; +} + +message VisibilityLabelsResponse { + repeated RegionActionResult result = 1; +} + +service VisibilityLabelsService { + rpc addLabels(VisibilityLabelsRequest) + returns (VisibilityLabelsResponse); +} \ No newline at end of file diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java index 53d6e78..4093c05 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java @@ -21,7 +21,9 @@ package org.apache.hadoop.hbase.rest; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; +import java.util.ArrayList; import java.util.Collection; +import java.util.List; import java.util.TreeSet; import org.apache.hadoop.classification.InterfaceAudience; @@ -43,6 +45,7 @@ public class RowSpec { private byte[] endRow = null; private TreeSet columns = new TreeSet(Bytes.BYTES_COMPARATOR); + private List labels = new ArrayList(); private long startTime = DEFAULT_START_TIMESTAMP; private long endTime = DEFAULT_END_TIMESTAMP; private int maxVersions = 1; @@ -277,6 +280,13 @@ public class RowSpec { } public RowSpec(byte[] startRow, byte[] endRow, Collection columns, + long startTime, long endTime, int maxVersions, Collection labels) { + this(startRow, endRow, columns, startTime, endTime, maxVersions); + if(labels != null) { + this.labels.addAll(labels); + } + } + public RowSpec(byte[] startRow, byte[] endRow, Collection columns, long startTime, long 
endTime, int maxVersions) { this.row = startRow; this.endRow = endRow; @@ -311,6 +321,10 @@ public class RowSpec { public boolean hasColumns() { return !columns.isEmpty(); } + + public boolean hasLabels() { + return !labels.isEmpty(); + } public byte[] getRow() { return row; @@ -335,6 +349,10 @@ public class RowSpec { public byte[][] getColumns() { return columns.toArray(new byte[columns.size()][]); } + + public List getLabels() { + return labels; + } public boolean hasTimestamp() { return (startTime == 0) && (endTime != Long.MAX_VALUE); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java index ae91281..81be6fc 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java @@ -82,9 +82,14 @@ public class ScannerResource extends ResourceBase { .build(); } byte[] endRow = model.hasEndRow() ? 
model.getEndRow() : null; - RowSpec spec = new RowSpec(model.getStartRow(), endRow, - model.getColumns(), model.getStartTime(), model.getEndTime(), - model.getMaxVersions()); + RowSpec spec = null; + if (model.getLabels() != null) { + spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(), + model.getEndTime(), model.getMaxVersions(), model.getLabels()); + } else { + spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(), + model.getEndTime(), model.getMaxVersions()); + } try { Filter filter = ScannerResultGenerator.buildFilterFromModel(model); String tableName = tableResource.getName(); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java index 09fb7d4..aeefb0e 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.rest.model.ScannerModel; +import org.apache.hadoop.hbase.security.visibility.Authorizations; import org.apache.hadoop.util.StringUtils; @InterfaceAudience.Private @@ -95,6 +96,9 @@ public class ScannerResultGenerator extends ResultGenerator { if (caching > 0 ) { scan.setCaching(caching); } + if(rowspec.hasLabels()) { + scan.setAuthorizations(new Authorizations(rowspec.getLabels())); + } scanner = table.getScanner(scan); cached = null; id = Long.toString(System.currentTimeMillis()) + diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java index 001a64c..17d7b90 100644 --- 
hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java @@ -35,15 +35,46 @@ import javax.xml.bind.annotation.XmlRootElement; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.filter.*; +import org.apache.hadoop.hbase.filter.BinaryComparator; +import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; +import org.apache.hadoop.hbase.filter.BitComparator; +import org.apache.hadoop.hbase.filter.ByteArrayComparable; +import org.apache.hadoop.hbase.filter.ColumnCountGetFilter; +import org.apache.hadoop.hbase.filter.ColumnPaginationFilter; +import org.apache.hadoop.hbase.filter.ColumnPrefixFilter; +import org.apache.hadoop.hbase.filter.ColumnRangeFilter; +import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; +import org.apache.hadoop.hbase.filter.DependentColumnFilter; +import org.apache.hadoop.hbase.filter.FamilyFilter; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; +import org.apache.hadoop.hbase.filter.InclusiveStopFilter; +import org.apache.hadoop.hbase.filter.KeyOnlyFilter; +import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter; +import org.apache.hadoop.hbase.filter.NullComparator; +import org.apache.hadoop.hbase.filter.PageFilter; +import org.apache.hadoop.hbase.filter.PrefixFilter; +import org.apache.hadoop.hbase.filter.QualifierFilter; +import org.apache.hadoop.hbase.filter.RandomRowFilter; +import org.apache.hadoop.hbase.filter.RegexStringComparator; +import org.apache.hadoop.hbase.filter.RowFilter; +import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter; +import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; +import 
org.apache.hadoop.hbase.filter.SkipFilter; +import org.apache.hadoop.hbase.filter.SubstringComparator; +import org.apache.hadoop.hbase.filter.TimestampsFilter; +import org.apache.hadoop.hbase.filter.ValueFilter; +import org.apache.hadoop.hbase.filter.WhileMatchFilter; import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner; +import org.apache.hadoop.hbase.security.visibility.Authorizations; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import com.google.protobuf.ByteString; - import com.sun.jersey.api.json.JSONConfiguration; import com.sun.jersey.api.json.JSONJAXBContext; import com.sun.jersey.api.json.JSONMarshaller; @@ -83,6 +114,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { private String filter = null; private int maxVersions = Integer.MAX_VALUE; private int caching = -1; + private List labels = new ArrayList(); @XmlRootElement static class FilterModel { @@ -488,6 +520,15 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { if (filter != null) { model.setFilter(stringifyFilter(filter)); } + // Add the visbility labels if found in the attributes + if(scan.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY) != null) { + byte[] b = scan.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY); + Authorizations authorizations = Authorizations.fromBytes(b); + List labels = authorizations.getLabels(); + for(String label : labels) { + model.addLabel(label); + } + } return model; } @@ -556,6 +597,13 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { } /** + * Add a visibility label to the scan + */ + public void addLabel(String label) { + labels.add(label); + } + + /** * @return true if a start row was specified */ public boolean hasStartRow() { @@ -593,6 +641,11 @@ public 
class ScannerModel implements ProtobufMessageHandler, Serializable { return columns; } + @XmlElement(name="label") + public List getLabels() { + return labels; + } + /** * @return the number of cells to return in batch */ @@ -730,6 +783,10 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { if (filter != null) { builder.setFilter(filter); } + if (labels != null && labels.size() > 0) { + for (String label : labels) + builder.addLabels(label); + } return builder.build().toByteArray(); } @@ -765,6 +822,12 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { if (builder.hasFilter()) { filter = builder.getFilter(); } + if(builder.getLabelsList() != null) { + List labels = builder.getLabelsList(); + for(String label : labels) { + addLabel(label); + } + } return this; } diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java index 493463a..b76e4e0 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java @@ -109,6 +109,26 @@ public final class ScannerMessage { * optional int32 caching = 9; */ int getCaching(); + + // repeated string labels = 10; + /** + * repeated string labels = 10; + */ + java.util.List + getLabelsList(); + /** + * repeated string labels = 10; + */ + int getLabelsCount(); + /** + * repeated string labels = 10; + */ + java.lang.String getLabels(int index); + /** + * repeated string labels = 10; + */ + com.google.protobuf.ByteString + getLabelsBytes(int index); } /** * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Scanner} @@ -209,6 +229,14 @@ public final class ScannerMessage { caching_ = input.readInt32(); break; } + case 82: { + if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) { + 
labels_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000200; + } + labels_.add(input.readBytes()); + break; + } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { @@ -220,6 +248,9 @@ public final class ScannerMessage { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { columns_ = java.util.Collections.unmodifiableList(columns_); } + if (((mutable_bitField0_ & 0x00000200) == 0x00000200)) { + labels_ = new com.google.protobuf.UnmodifiableLazyStringList(labels_); + } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } @@ -430,6 +461,36 @@ public final class ScannerMessage { return caching_; } + // repeated string labels = 10; + public static final int LABELS_FIELD_NUMBER = 10; + private com.google.protobuf.LazyStringList labels_; + /** + * repeated string labels = 10; + */ + public java.util.List + getLabelsList() { + return labels_; + } + /** + * repeated string labels = 10; + */ + public int getLabelsCount() { + return labels_.size(); + } + /** + * repeated string labels = 10; + */ + public java.lang.String getLabels(int index) { + return labels_.get(index); + } + /** + * repeated string labels = 10; + */ + public com.google.protobuf.ByteString + getLabelsBytes(int index) { + return labels_.getByteString(index); + } + private void initFields() { startRow_ = com.google.protobuf.ByteString.EMPTY; endRow_ = com.google.protobuf.ByteString.EMPTY; @@ -440,6 +501,7 @@ public final class ScannerMessage { maxVersions_ = 0; filter_ = ""; caching_ = 0; + labels_ = com.google.protobuf.LazyStringArrayList.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -480,6 +542,9 @@ public final class ScannerMessage { if (((bitField0_ & 0x00000080) == 0x00000080)) { output.writeInt32(9, caching_); } + for (int i = 0; i < labels_.size(); i++) { + output.writeBytes(10, labels_.getByteString(i)); + } getUnknownFields().writeTo(output); } @@ -530,6 +595,15 @@ public final class 
ScannerMessage { size += com.google.protobuf.CodedOutputStream .computeInt32Size(9, caching_); } + { + int dataSize = 0; + for (int i = 0; i < labels_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(labels_.getByteString(i)); + } + size += dataSize; + size += 1 * getLabelsList().size(); + } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -664,6 +738,8 @@ public final class ScannerMessage { bitField0_ = (bitField0_ & ~0x00000080); caching_ = 0; bitField0_ = (bitField0_ & ~0x00000100); + labels_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000200); return this; } @@ -729,6 +805,12 @@ public final class ScannerMessage { to_bitField0_ |= 0x00000080; } result.caching_ = caching_; + if (((bitField0_ & 0x00000200) == 0x00000200)) { + labels_ = new com.google.protobuf.UnmodifiableLazyStringList( + labels_); + bitField0_ = (bitField0_ & ~0x00000200); + } + result.labels_ = labels_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -781,6 +863,16 @@ public final class ScannerMessage { if (other.hasCaching()) { setCaching(other.getCaching()); } + if (!other.labels_.isEmpty()) { + if (labels_.isEmpty()) { + labels_ = other.labels_; + bitField0_ = (bitField0_ & ~0x00000200); + } else { + ensureLabelsIsMutable(); + labels_.addAll(other.labels_); + } + onChanged(); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -1113,7 +1205,7 @@ public final class ScannerMessage { getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { - com.google.protobuf.ByteString b = + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); filter_ = b; @@ -1191,6 +1283,99 @@ public final class ScannerMessage { return this; } + // repeated string labels = 10; + private com.google.protobuf.LazyStringList labels_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private 
void ensureLabelsIsMutable() { + if (!((bitField0_ & 0x00000200) == 0x00000200)) { + labels_ = new com.google.protobuf.LazyStringArrayList(labels_); + bitField0_ |= 0x00000200; + } + } + /** + * repeated string labels = 10; + */ + public java.util.List + getLabelsList() { + return java.util.Collections.unmodifiableList(labels_); + } + /** + * repeated string labels = 10; + */ + public int getLabelsCount() { + return labels_.size(); + } + /** + * repeated string labels = 10; + */ + public java.lang.String getLabels(int index) { + return labels_.get(index); + } + /** + * repeated string labels = 10; + */ + public com.google.protobuf.ByteString + getLabelsBytes(int index) { + return labels_.getByteString(index); + } + /** + * repeated string labels = 10; + */ + public Builder setLabels( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureLabelsIsMutable(); + labels_.set(index, value); + onChanged(); + return this; + } + /** + * repeated string labels = 10; + */ + public Builder addLabels( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureLabelsIsMutable(); + labels_.add(value); + onChanged(); + return this; + } + /** + * repeated string labels = 10; + */ + public Builder addAllLabels( + java.lang.Iterable values) { + ensureLabelsIsMutable(); + super.addAll(values, labels_); + onChanged(); + return this; + } + /** + * repeated string labels = 10; + */ + public Builder clearLabels() { + labels_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000200); + onChanged(); + return this; + } + /** + * repeated string labels = 10; + */ + public Builder addLabelsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureLabelsIsMutable(); + labels_.add(value); + onChanged(); + return this; + } + // 
@@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.Scanner) } @@ -1217,11 +1402,12 @@ public final class ScannerMessage { static { java.lang.String[] descriptorData = { "\n\024ScannerMessage.proto\022/org.apache.hadoo" + - "p.hbase.rest.protobuf.generated\"\245\001\n\007Scan" + + "p.hbase.rest.protobuf.generated\"\265\001\n\007Scan" + "ner\022\020\n\010startRow\030\001 \001(\014\022\016\n\006endRow\030\002 \001(\014\022\017\n" + "\007columns\030\003 \003(\014\022\r\n\005batch\030\004 \001(\005\022\021\n\tstartTi" + "me\030\005 \001(\003\022\017\n\007endTime\030\006 \001(\003\022\023\n\013maxVersions" + - "\030\007 \001(\005\022\016\n\006filter\030\010 \001(\t\022\017\n\007caching\030\t \001(\005" + "\030\007 \001(\005\022\016\n\006filter\030\010 \001(\t\022\017\n\007caching\030\t \001(\005\022" + + "\016\n\006labels\030\n \003(\t" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -1233,7 +1419,7 @@ public final class ScannerMessage { internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor, - new java.lang.String[] { "StartRow", "EndRow", "Columns", "Batch", "StartTime", "EndTime", "MaxVersions", "Filter", "Caching", }); + new java.lang.String[] { "StartRow", "EndRow", "Columns", "Batch", "StartTime", "EndTime", "MaxVersions", "Filter", "Caching", "Labels", }); return null; } }; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultScanLabelGenerator.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultScanLabelGenerator.java new file mode 100644 index 0000000..16b3af9 --- /dev/null +++ 
hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultScanLabelGenerator.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.client.OperationWithAttributes; +import org.apache.hadoop.hbase.security.User; + +/** + * This default implementation for ScanLabelGenerator. It will just extract labels passed via + * Scan#OPAttributes. 
+ */ +@InterfaceAudience.Private +public class DefaultScanLabelGenerator implements ScanLabelGenerator { + + private Configuration conf; + + @Override + public void setConf(Configuration conf) { + this.conf = conf; + } + + @Override + public Configuration getConf() { + return this.conf; + } + + @Override + public List getLabels(User user, OperationWithAttributes op) { + byte[] authorizations = op.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY); + if (authorizations != null && authorizations.length > 0) { + return Authorizations.fromBytes(authorizations).getLabels(); + } + return null; + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionExpander.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionExpander.java new file mode 100644 index 0000000..445b12f --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionExpander.java @@ -0,0 +1,184 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.Operator; + +@InterfaceAudience.Private +public class ExpressionExpander { + + public ExpressionNode expand(ExpressionNode src) { + if (!src.isSingleNode()) { + NonLeafExpressionNode nlExp = (NonLeafExpressionNode) src; + List childExps = nlExp.getChildExps(); + Operator outerOp = nlExp.getOperator(); + if (isToBeExpanded(childExps)) { + // Any of the child exp is a non leaf exp with & or | operator + NonLeafExpressionNode newNode = new NonLeafExpressionNode(nlExp.getOperator()); + for (ExpressionNode exp : childExps) { + if (exp.isSingleNode()) { + newNode.addChildExp(exp); + } else { + newNode.addChildExp(expand(exp)); + } + } + nlExp = expandNonLeaf(newNode, outerOp); + } + return nlExp; + } + if (src instanceof NonLeafExpressionNode + && ((NonLeafExpressionNode) src).getOperator() == Operator.NOT) { + // Negate the exp + return negate((NonLeafExpressionNode) src); + } + return src; + } + + private ExpressionNode negate(NonLeafExpressionNode nlExp) { + ExpressionNode notChild = nlExp.getChildExps().get(0); + if (notChild instanceof LeafExpressionNode) { + return nlExp; + } + NonLeafExpressionNode nlNotChild = (NonLeafExpressionNode) notChild; + if (nlNotChild.getOperator() == Operator.NOT) { + // negate the negate + return nlNotChild.getChildExps().get(0); + } + Operator negateOp = nlNotChild.getOperator() == Operator.AND ? 
Operator.OR : Operator.AND; + NonLeafExpressionNode newNode = new NonLeafExpressionNode(negateOp); + for (ExpressionNode expNode : nlNotChild.getChildExps()) { + NonLeafExpressionNode negateNode = new NonLeafExpressionNode(Operator.NOT); + negateNode.addChildExp(expNode.deepClone()); + newNode.addChildExp(expand(negateNode)); + } + return newNode; + } + + private boolean isToBeExpanded(List childExps) { + for (ExpressionNode exp : childExps) { + if (!exp.isSingleNode()) { + return true; + } + } + return false; + } + + private NonLeafExpressionNode expandNonLeaf(NonLeafExpressionNode newNode, Operator outerOp) { + // Now go for the merge or expansion across brackets + List newChildExps = newNode.getChildExps(); + assert newChildExps.size() == 2; + ExpressionNode leftChild = newChildExps.get(0); + ExpressionNode rightChild = newChildExps.get(1); + if (rightChild.isSingleNode()) { + // Merge the single right node into the left side + assert leftChild instanceof NonLeafExpressionNode; + newNode = mergeChildNodes(newNode, outerOp, rightChild, (NonLeafExpressionNode) leftChild); + } else if (leftChild.isSingleNode()) { + // Merge the single left node into the right side + assert rightChild instanceof NonLeafExpressionNode; + newNode = mergeChildNodes(newNode, outerOp, leftChild, (NonLeafExpressionNode) rightChild); + } else { + // Both the child exp nodes are non single. + NonLeafExpressionNode leftChildNLE = (NonLeafExpressionNode) leftChild; + NonLeafExpressionNode rightChildNLE = (NonLeafExpressionNode) rightChild; + if (outerOp == leftChildNLE.getOperator() && outerOp == rightChildNLE.getOperator()) { + // Merge + NonLeafExpressionNode leftChildNLEClone = leftChildNLE.deepClone(); + leftChildNLEClone.addChildExps(rightChildNLE.getChildExps()); + newNode = leftChildNLEClone; + } else { + // (a | b) & (c & d) ... 
+ if (outerOp == Operator.OR) { + // (a | b) | (c & d) + if (leftChildNLE.getOperator() == Operator.OR + && rightChildNLE.getOperator() == Operator.AND) { + leftChildNLE.addChildExp(rightChildNLE); + newNode = leftChildNLE; + } else if (leftChildNLE.getOperator() == Operator.AND + && rightChildNLE.getOperator() == Operator.OR) { + // (a & b) | (c | d) + rightChildNLE.addChildExp(leftChildNLE); + newNode = rightChildNLE; + } + // (a & b) | (c & d) + // This case no need to do any thing + } else { + // outer op is & + // (a | b) & (c & d) => (a & c & d) | (b & c & d) + if (leftChildNLE.getOperator() == Operator.OR + && rightChildNLE.getOperator() == Operator.AND) { + newNode = new NonLeafExpressionNode(Operator.OR); + for (ExpressionNode exp : leftChildNLE.getChildExps()) { + NonLeafExpressionNode rightChildNLEClone = rightChildNLE.deepClone(); + rightChildNLEClone.addChildExp(exp); + newNode.addChildExp(rightChildNLEClone); + } + } else if (leftChildNLE.getOperator() == Operator.AND + && rightChildNLE.getOperator() == Operator.OR) { + // (a & b) & (c | d) => (a & b & c) | (a & b & d) + newNode = new NonLeafExpressionNode(Operator.OR); + for (ExpressionNode exp : rightChildNLE.getChildExps()) { + NonLeafExpressionNode leftChildNLEClone = leftChildNLE.deepClone(); + leftChildNLEClone.addChildExp(exp); + newNode.addChildExp(leftChildNLEClone); + } + } else { + // (a | b) & (c | d) => (a & c) | (a & d) | (b & c) | (b & d) + newNode = new NonLeafExpressionNode(Operator.OR); + for (ExpressionNode leftExp : leftChildNLE.getChildExps()) { + for (ExpressionNode rightExp : rightChildNLE.getChildExps()) { + NonLeafExpressionNode newChild = new NonLeafExpressionNode(Operator.AND); + newChild.addChildExp(leftExp.deepClone()); + newChild.addChildExp(rightExp.deepClone()); + newNode.addChildExp(newChild); + } + } + } + } + } + } + return newNode; + } + + private NonLeafExpressionNode mergeChildNodes(NonLeafExpressionNode newOuterNode, + Operator outerOp, ExpressionNode lChild, 
NonLeafExpressionNode nlChild) { + // Merge the single right/left node into the other side + if (nlChild.getOperator() == outerOp) { + NonLeafExpressionNode leftChildNLEClone = nlChild.deepClone(); + leftChildNLEClone.addChildExp(lChild); + newOuterNode = leftChildNLEClone; + } else if (outerOp == Operator.AND) { + assert nlChild.getOperator() == Operator.OR; + // outerOp is & here. We need to expand the node here + // (a | b) & c -> (a & c) | (b & c) + // OR + // c & (a | b) -> (c & a) | (c & b) + newOuterNode = new NonLeafExpressionNode(Operator.OR); + for (ExpressionNode exp : nlChild.getChildExps()) { + newOuterNode.addChildExp(new NonLeafExpressionNode(Operator.AND, exp, lChild)); + } + } + return newOuterNode; + } +} \ No newline at end of file diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java new file mode 100644 index 0000000..f6ddf75 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java @@ -0,0 +1,273 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.Stack; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import org.apache.hadoop.hbase.util.Bytes; + +@InterfaceAudience.Private +public class ExpressionParser { + + private static final char CLOSE_PARAN = ')'; + private static final char OPEN_PARAN = '('; + private static final char OR = '|'; + private static final char AND = '&'; + private static final char NOT = '!'; + private static final char SPACE = ' '; + + public ExpressionNode parse(String expS) throws ParseException { + expS = expS.trim(); + Stack expStack = new Stack(); + int index = 0; + int endPos = expS.length(); + byte[] exp = Bytes.toBytes(expS); + while (index < endPos) { + byte b = exp[index]; + switch (b) { + case OPEN_PARAN: + processOpenParan(expStack, expS, index); + index = skipSpaces(exp, index); + break; + case CLOSE_PARAN: + processCloseParan(expStack, expS, index); + index = skipSpaces(exp, index); + break; + case AND: + case OR: + processANDorOROp(getOperator(b), expStack, expS, index); + index = skipSpaces(exp, index); + break; + case NOT: + processNOTOp(expStack, expS, index); + break; + default: + int labelOffset = index; + do { + if (!VisibilityLabelsValidator.isValidAuthChar(exp[index])) { + throw new ParseException("Error parsing expression " + expS + " at column : " + + index); + } + index++; + } while (index < endPos && !isEndOfLabel(exp[index])); + String leafExp = new String(exp, labelOffset, index - labelOffset).trim(); + if (leafExp.isEmpty()) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + processLabelExpNode(new 
LeafExpressionNode(leafExp), expStack, expS, index); + // We already crossed the label node index. So need to reduce 1 here. + index--; + index = skipSpaces(exp, index); + } + index++; + } + if (expStack.size() != 1) { + throw new ParseException("Error parsing expression " + expS); + } + ExpressionNode top = expStack.pop(); + if (top == LeafExpressionNode.OPEN_PARAN_NODE) { + throw new ParseException("Error parsing expression " + expS); + } + if (top instanceof NonLeafExpressionNode) { + NonLeafExpressionNode nlTop = (NonLeafExpressionNode) top; + if (nlTop.getOperator() == Operator.NOT) { + if (nlTop.getChildExps().size() != 1) { + throw new ParseException("Error parsing expression " + expS); + } + } else if (nlTop.getChildExps().size() != 2) { + throw new ParseException("Error parsing expression " + expS); + } + } + return top; + } + + private int skipSpaces(byte[] exp, int index) { + while (index < exp.length -1 && exp[index+1] == SPACE) { + index++; + } + return index; + } + + private void processCloseParan(Stack expStack, String expS, int index) + throws ParseException { + if (expStack.size() < 2) { + // When ) comes we expect atleast a ( node and another leaf/non leaf node + // in stack. + throw new ParseException(); + } else { + ExpressionNode top = expStack.pop(); + ExpressionNode secondTop = expStack.pop(); + // The second top must be a ( node and top should not be a ). Top can be + // any thing else + if (top == LeafExpressionNode.OPEN_PARAN_NODE + || secondTop != LeafExpressionNode.OPEN_PARAN_NODE) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + // a&(b|) is not valid. + // The top can be a ! node but with exactly child nodes. !).. is invalid + // Other NonLeafExpressionNode , then there should be exactly 2 child. + // (a&) is not valid. 
+ if (top instanceof NonLeafExpressionNode) { + NonLeafExpressionNode nlTop = (NonLeafExpressionNode) top; + if ((nlTop.getOperator() == Operator.NOT && nlTop.getChildExps().size() != 1) + || (nlTop.getOperator() != Operator.NOT && nlTop.getChildExps().size() != 2)) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + } + // When (a|b)&(c|d) comes while processing the second ) there will be + // already (a|b)& node + // avail in the stack. The top will be c|d node. We need to take it out + // and combine as one + // node. + if (!expStack.isEmpty()) { + ExpressionNode thirdTop = expStack.peek(); + if (thirdTop instanceof NonLeafExpressionNode) { + NonLeafExpressionNode nlThirdTop = (NonLeafExpressionNode) expStack.pop(); + nlThirdTop.addChildExp(top); + if (nlThirdTop.getOperator() == Operator.NOT) { + // It is a NOT node. So there may be a NonLeafExpressionNode below + // it to which the + // completed NOT can be added now. + if (!expStack.isEmpty()) { + ExpressionNode fourthTop = expStack.peek(); + if (fourthTop instanceof NonLeafExpressionNode) { + // Its Operator will be OR or AND + NonLeafExpressionNode nlFourthTop = (NonLeafExpressionNode) fourthTop; + assert nlFourthTop.getOperator() != Operator.NOT; + // Also for sure its number of children will be 1 + assert nlFourthTop.getChildExps().size() == 1; + nlFourthTop.addChildExp(nlThirdTop); + return;// This case no need to add back the nlThirdTop. + } + } + } + top = nlThirdTop; + } + } + expStack.push(top); + } + } + + private void processOpenParan(Stack expStack, String expS, int index) + throws ParseException { + if (!expStack.isEmpty()) { + ExpressionNode top = expStack.peek(); + // Top can not be a Label Node. a(.. is not valid. but ((a.. is fine. 
+ if (top instanceof LeafExpressionNode && top != LeafExpressionNode.OPEN_PARAN_NODE) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } else if (top instanceof NonLeafExpressionNode) { + // Top is non leaf. + // It can be ! node but with out any child nodes. !a(.. is invalid + // Other NonLeafExpressionNode , then there should be exactly 1 child. + // a&b( is not valid. + // a&( is valid though. Also !( is valid + NonLeafExpressionNode nlTop = (NonLeafExpressionNode) top; + if ((nlTop.getOperator() == Operator.NOT && nlTop.getChildExps().size() != 0) + || (nlTop.getOperator() != Operator.NOT && nlTop.getChildExps().size() != 1)) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + } + } + expStack.push(LeafExpressionNode.OPEN_PARAN_NODE); + } + + private void processLabelExpNode(LeafExpressionNode node, Stack expStack, + String expS, int index) throws ParseException { + if (expStack.isEmpty()) { + expStack.push(node); + } else { + ExpressionNode top = expStack.peek(); + if (top == LeafExpressionNode.OPEN_PARAN_NODE) { + expStack.push(node); + } else if (top instanceof NonLeafExpressionNode) { + NonLeafExpressionNode nlTop = (NonLeafExpressionNode) expStack.pop(); + nlTop.addChildExp(node); + if (nlTop.getOperator() == Operator.NOT && !expStack.isEmpty()) { + ExpressionNode secondTop = expStack.peek(); + if (secondTop == LeafExpressionNode.OPEN_PARAN_NODE) { + expStack.push(nlTop); + } else if (secondTop instanceof NonLeafExpressionNode) { + ((NonLeafExpressionNode) secondTop).addChildExp(nlTop); + } + } else { + expStack.push(nlTop); + } + } else { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + } + } + + private void processANDorOROp(Operator op, Stack expStack, String expS, int index) + throws ParseException { + if (expStack.isEmpty()) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); 
+ } + ExpressionNode top = expStack.pop(); + if (top.isSingleNode()) { + if (top == LeafExpressionNode.OPEN_PARAN_NODE) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + expStack.push(new NonLeafExpressionNode(op, top)); + } else { + NonLeafExpressionNode nlTop = (NonLeafExpressionNode) top; + if (nlTop.getChildExps().size() != 2) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + expStack.push(new NonLeafExpressionNode(op, nlTop)); + } + } + + private void processNOTOp(Stack expStack, String expS, int index) + throws ParseException { + // When ! comes, the stack can be empty or top ( or top can be some exp like + // a& + // !!.., a!, a&b!, !a! are invalid + if (!expStack.isEmpty()) { + ExpressionNode top = expStack.peek(); + if (top.isSingleNode() && top != LeafExpressionNode.OPEN_PARAN_NODE) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + if (!top.isSingleNode() && ((NonLeafExpressionNode) top).getChildExps().size() != 1) { + throw new ParseException("Error parsing expression " + expS + " at column : " + index); + } + } + expStack.push(new NonLeafExpressionNode(Operator.NOT)); + } + + private static boolean isEndOfLabel(byte b) { + return (b == OPEN_PARAN || b == CLOSE_PARAN || b == OR || b == AND || b == NOT || b == SPACE); + } + + private static Operator getOperator(byte op) { + switch (op) { + case AND: + return Operator.AND; + case OR: + return Operator.OR; + case NOT: + return Operator.NOT; + } + return null; + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ParseException.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ParseException.java new file mode 100644 index 0000000..54a8c5b --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ParseException.java @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software 
Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import org.apache.hadoop.classification.InterfaceAudience; + +@InterfaceAudience.Private +public class ParseException extends Exception { + + private static final long serialVersionUID = 1725986524206989173L; + + public ParseException() { + + } + + public ParseException(String msg) { + super(msg); + } + + public ParseException(Throwable t) { + super(t); + } + + public ParseException(String msg, Throwable t) { + super(msg, t); + } + +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java new file mode 100644 index 0000000..7b54a1a --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java @@ -0,0 +1,44 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configurable; +import org.apache.hadoop.hbase.client.OperationWithAttributes; +import org.apache.hadoop.hbase.security.User; + +/** + * This would be the interface which would be used add labels to the RPC context + * and this would be stored against the UGI. + * + */ +@InterfaceAudience.Public +@InterfaceStability.Evolving +public interface ScanLabelGenerator extends Configurable { + + /** + * Helps to get a list of lables associated with an UGI + * @param user + * @param op + * @return The labels + */ + public List getLabels(User user, OperationWithAttributes op); +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java new file mode 100644 index 0000000..e968178 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -0,0 +1,853 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.security.visibility; + +import static org.apache.hadoop.hbase.HConstants.OperationStatusCode.SUCCESS; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_FAMILY; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_QUALIFIER; +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY; + +import java.io.ByteArrayOutputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.BitSet; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants.OperationStatusCode; +import org.apache.hadoop.hbase.HRegionInfo; +import 
org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.KeyValue.Type; +import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.catalog.MetaReader; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Mutation; +import org.apache.hadoop.hbase.client.OperationWithAttributes; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; +import org.apache.hadoop.hbase.coprocessor.CoprocessorService; +import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; +import org.apache.hadoop.hbase.coprocessor.MasterObserver; +import org.apache.hadoop.hbase.coprocessor.ObserverContext; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.coprocessor.RegionObserver; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.ipc.RequestContext; +import org.apache.hadoop.hbase.master.MasterServices; +import org.apache.hadoop.hbase.master.RegionPlan; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import 
org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse.Builder; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService; +import org.apache.hadoop.hbase.regionserver.BloomType; +import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy; +import org.apache.hadoop.hbase.regionserver.HRegion; +import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress; +import org.apache.hadoop.hbase.regionserver.OperationStatus; +import org.apache.hadoop.hbase.regionserver.RegionScanner; +import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.hadoop.io.WritableUtils; + +import com.google.protobuf.ByteString; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; + +/** + * Coprocessor that has both the MasterObserver and RegionObserver implemented that would support + * in visibility labels + */ +@InterfaceAudience.Private +public class VisibilityController extends BaseRegionObserver implements MasterObserver, + RegionObserver, VisibilityLabelsService.Interface, CoprocessorService { + + private static final Log LOG = LogFactory.getLog(VisibilityController.class); + + private final ExpressionParser expressionParser = new ExpressionParser(); + private final ExpressionExpander expressionExpander = new ExpressionExpander(); + private VisibilityLabelsManager visibilityManager; + // defined only for Endpoint implementation, so it can have way to access region services. 
+ private RegionCoprocessorEnvironment regionEnv; + private ScanLabelGenerator scanLabelGenerator; + + private int ordinalCounter = -1; + + // flags if we are running on a region of the 'labels' table + private boolean labelsRegion = false; + + @Override + public void start(CoprocessorEnvironment env) throws IOException { + ZooKeeperWatcher zk = null; + if (env instanceof MasterCoprocessorEnvironment) { + // if running on HMaster + MasterCoprocessorEnvironment mEnv = (MasterCoprocessorEnvironment) env; + zk = mEnv.getMasterServices().getZooKeeper(); + } else if (env instanceof RegionCoprocessorEnvironment) { + // if running at region + regionEnv = (RegionCoprocessorEnvironment) env; + zk = regionEnv.getRegionServerServices().getZooKeeper(); + // ScanLabelGenerator to be instantiated only with Region Observer. + scanLabelGenerator = VisibilityUtils.getScanLabelGenerator(env.getConfiguration()); + } + + // If zk is null or IOException while obtaining auth manager, + // throw RuntimeException so that the coprocessor is unloaded. 
+ if (zk == null) { + throw new RuntimeException("Error obtaining VisibilityLabelsManager, zk found null."); + } + try { + this.visibilityManager = VisibilityLabelsManager.get(zk, env.getConfiguration()); + } catch (IOException ioe) { + throw new RuntimeException("Error obtaining VisibilityLabelsManager", ioe); + } + } + + @Override + public void stop(CoprocessorEnvironment env) throws IOException { + + } + + /********************************* Master related hooks **********************************/ + + @Override + public void postStartMaster(ObserverContext ctx) throws IOException { + // Need to create the new system table for labels here + MasterServices master = ctx.getEnvironment().getMasterServices(); + if (!MetaReader.tableExists(master.getCatalogTracker(), LABELS_TABLE_NAME)) { + HTableDescriptor labelsTable = new HTableDescriptor(LABELS_TABLE_NAME); + HColumnDescriptor labelsColumn = new HColumnDescriptor(LABELS_TABLE_FAMILY); + labelsColumn.setBloomFilterType(BloomType.NONE); + labelsColumn.setBlockCacheEnabled(false); // We will cache all the labels. No need of normal + // table block cache. + labelsTable.addFamily(labelsColumn); + // Let the "labels" table having only one region always. We are not expecting too many labels + // in the system. 
+ labelsTable.setValue(HTableDescriptor.SPLIT_POLICY, + DisabledRegionSplitPolicy.class.getName()); + master.createTable(labelsTable, null); + } + } + + @Override + public void preCreateTable(ObserverContext ctx, + HTableDescriptor desc, HRegionInfo[] regions) throws IOException { + } + + @Override + public void postCreateTable(ObserverContext ctx, + HTableDescriptor desc, HRegionInfo[] regions) throws IOException { + } + + @Override + public void preCreateTableHandler(ObserverContext ctx, + HTableDescriptor desc, HRegionInfo[] regions) throws IOException { + } + + @Override + public void postCreateTableHandler(ObserverContext ctx, + HTableDescriptor desc, HRegionInfo[] regions) throws IOException { + } + + @Override + public void preDeleteTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void postDeleteTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void preDeleteTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void postDeleteTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void preModifyTable(ObserverContext ctx, + TableName tableName, HTableDescriptor htd) throws IOException { + } + + @Override + public void postModifyTable(ObserverContext ctx, + TableName tableName, HTableDescriptor htd) throws IOException { + } + + @Override + public void preModifyTableHandler(ObserverContext ctx, + TableName tableName, HTableDescriptor htd) throws IOException { + } + + @Override + public void postModifyTableHandler(ObserverContext ctx, + TableName tableName, HTableDescriptor htd) throws IOException { + } + + @Override + public void preAddColumn(ObserverContext ctx, TableName tableName, + HColumnDescriptor column) throws IOException { + } + + @Override + public void postAddColumn(ObserverContext ctx, TableName tableName, + HColumnDescriptor column) throws IOException { + 
} + + @Override + public void preAddColumnHandler(ObserverContext ctx, + TableName tableName, HColumnDescriptor column) throws IOException { + } + + @Override + public void postAddColumnHandler(ObserverContext ctx, + TableName tableName, HColumnDescriptor column) throws IOException { + } + + @Override + public void preModifyColumn(ObserverContext ctx, + TableName tableName, HColumnDescriptor descriptor) throws IOException { + } + + @Override + public void postModifyColumn(ObserverContext ctx, + TableName tableName, HColumnDescriptor descriptor) throws IOException { + } + + @Override + public void preModifyColumnHandler(ObserverContext ctx, + TableName tableName, HColumnDescriptor descriptor) throws IOException { + } + + @Override + public void postModifyColumnHandler(ObserverContext ctx, + TableName tableName, HColumnDescriptor descriptor) throws IOException { + } + + @Override + public void preDeleteColumn(ObserverContext ctx, + TableName tableName, byte[] c) throws IOException { + } + + @Override + public void postDeleteColumn(ObserverContext ctx, + TableName tableName, byte[] c) throws IOException { + } + + @Override + public void preDeleteColumnHandler(ObserverContext ctx, + TableName tableName, byte[] c) throws IOException { + } + + @Override + public void postDeleteColumnHandler(ObserverContext ctx, + TableName tableName, byte[] c) throws IOException { + } + + @Override + public void preEnableTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void postEnableTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void preEnableTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void postEnableTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void preDisableTable(ObserverContext ctx, TableName tableName) + throws IOException { + } + + @Override + public void 
postDisableTable(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void preDisableTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void postDisableTableHandler(ObserverContext ctx, + TableName tableName) throws IOException { + } + + @Override + public void preMove(ObserverContext ctx, HRegionInfo region, + ServerName srcServer, ServerName destServer) throws IOException { + } + + @Override + public void postMove(ObserverContext ctx, HRegionInfo region, + ServerName srcServer, ServerName destServer) throws IOException { + } + + @Override + public void preAssign(ObserverContext ctx, HRegionInfo regionInfo) + throws IOException { + } + + @Override + public void postAssign(ObserverContext ctx, HRegionInfo regionInfo) + throws IOException { + } + + @Override + public void preUnassign(ObserverContext ctx, + HRegionInfo regionInfo, boolean force) throws IOException { + } + + @Override + public void postUnassign(ObserverContext ctx, + HRegionInfo regionInfo, boolean force) throws IOException { + } + + @Override + public void preRegionOffline(ObserverContext ctx, + HRegionInfo regionInfo) throws IOException { + } + + @Override + public void postRegionOffline(ObserverContext ctx, + HRegionInfo regionInfo) throws IOException { + } + + @Override + public void preBalance(ObserverContext ctx) throws IOException { + } + + @Override + public void postBalance(ObserverContext ctx, List plans) + throws IOException { + } + + @Override + public boolean preBalanceSwitch(ObserverContext ctx, + boolean newValue) throws IOException { + return false; + } + + @Override + public void postBalanceSwitch(ObserverContext ctx, + boolean oldValue, boolean newValue) throws IOException { + } + + @Override + public void preShutdown(ObserverContext ctx) throws IOException { + } + + @Override + public void preStopMaster(ObserverContext ctx) throws IOException { + } + + @Override + public void 
preSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void postSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void preCloneSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void postCloneSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void preRestoreSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void postRestoreSnapshot(ObserverContext ctx, + SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException { + } + + @Override + public void preDeleteSnapshot(ObserverContext ctx, + SnapshotDescription snapshot) throws IOException { + } + + @Override + public void postDeleteSnapshot(ObserverContext ctx, + SnapshotDescription snapshot) throws IOException { + } + + @Override + public void preGetTableDescriptors(ObserverContext ctx, + List tableNamesList, List descriptors) throws IOException { + } + + @Override + public void postGetTableDescriptors(ObserverContext ctx, + List descriptors) throws IOException { + } + + @Override + public void preCreateNamespace(ObserverContext ctx, + NamespaceDescriptor ns) throws IOException { + } + + @Override + public void postCreateNamespace(ObserverContext ctx, + NamespaceDescriptor ns) throws IOException { + } + + @Override + public void preDeleteNamespace(ObserverContext ctx, + String namespace) throws IOException { + } + + @Override + public void postDeleteNamespace(ObserverContext ctx, + String namespace) throws IOException { + } + + @Override + public void preModifyNamespace(ObserverContext ctx, + NamespaceDescriptor ns) throws 
IOException { + } + + @Override + public void postModifyNamespace(ObserverContext ctx, + NamespaceDescriptor ns) throws IOException { + } + + @Override + public void preMasterInitialization(ObserverContext ctx) + throws IOException { + + } + + /****************************** Region related hooks ******************************/ + + @Override + public void postOpen(ObserverContext e) { + // Read the entire labels table and populate the zk + if (e.getEnvironment().getRegion().getRegionInfo().getTable().equals(LABELS_TABLE_NAME)) { + this.labelsRegion = true; + try { + Map existingLabels = new HashMap(); + int ordinal = 0; + for (Cell cell : getExistingLabels()) { + ordinal = Bytes.toInt(cell.getRowArray(), cell.getRowOffset()); + existingLabels.put( + ByteString.copyFrom(cell.getValueArray(), cell.getValueOffset(), + cell.getValueLength()), ordinal); + } + this.ordinalCounter = ordinal + 1; + if (existingLabels.size() > 0) { + // If there is no data need not write to zk + byte[] serialized = VisibilityUtils.getDataTowriteToZooKeeper(existingLabels); + this.visibilityManager.writeToZookeeper(serialized); + } + } catch (IOException ioe) { + LOG.error("Error while updating the zk with the exisiting labels data", ioe); + } + } + } + @Override + public void preBatchMutate(ObserverContext c, + MiniBatchOperationInProgress miniBatchOp) throws IOException { + if (!c.getEnvironment().getRegion().getRegionInfo().getTable().isSystemTable()) { + // TODO this can be made as a global LRU cache at HRS level? 
+ Map> labelCache = new HashMap>(); + for (int i = 0; i < miniBatchOp.size(); i++) { + Mutation m = miniBatchOp.getOperation(i); + if (m instanceof Put) { + Put p = (Put) m; + boolean sanityFailure = false; + for (List cells : p.getFamilyCellMap().values()) { + for (Cell cell : cells) { + if (!checkForReservedVisibilityTagPresence(cell)) { + miniBatchOp.setOperationStatus(i, new OperationStatus( + OperationStatusCode.SANITY_CHECK_FAILURE, + "Mutation contains cell with reserved type tag")); + sanityFailure = true; + break; + } + } + if (sanityFailure) { + break; + } + } + if (!sanityFailure) { + byte[] labelsExp = m.getAttribute(VISIBILITY_LABELS_ATTR_KEY); + if (labelsExp != null) { + String labelsExpStr = Bytes.toString(labelsExp); + List visibilityTags = labelCache.get(labelsExpStr); + if (visibilityTags == null) { + try { + visibilityTags = createVisibilityTags(labelsExpStr); + } catch (ParseException e) { + miniBatchOp.setOperationStatus(i, new OperationStatus( + OperationStatusCode.SANITY_CHECK_FAILURE, e.getMessage())); + } catch (InvalidLabelException e) { + miniBatchOp.setOperationStatus(i, new OperationStatus( + OperationStatusCode.SANITY_CHECK_FAILURE, e.getMessage())); + } + } + if (visibilityTags != null) { + labelCache.put(labelsExpStr, visibilityTags); + List updatedCells = new ArrayList(); + for (List cells : p.getFamilyCellMap().values()) { + for (Cell cell : cells) { + List tags = Tag.createTags(cell.getTagsArray(), cell.getTagsOffset(), + cell.getTagsLength()); + tags.addAll(visibilityTags); + Cell updatedCell = new KeyValue(cell.getRowArray(), cell.getRowOffset(), + cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(), + cell.getFamilyLength(), cell.getQualifierArray(), + cell.getQualifierOffset(), cell.getQualifierLength(), cell.getTimestamp(), + Type.codeToType(cell.getTypeByte()), cell.getValueArray(), + cell.getValueOffset(), cell.getValueLength(), tags); + updatedCells.add(updatedCell); + } + } + p.getFamilyCellMap().clear(); 
+ // Clear and add new Cells to the Mutation. + for(Cell cell : updatedCells){ + p.add(cell); + } + } + } + } + } + } + } + } + + @Override + public void postBatchMutate(ObserverContext c, + MiniBatchOperationInProgress miniBatchOp) throws IOException { + if (this.labelsRegion) { + // We will add to zookeeper here. + Map existingLabels = new HashMap(); + for (Cell cell : getExistingLabels()) { + existingLabels + .put( + ByteString.copyFrom(cell.getValueArray(), cell.getValueOffset(), + cell.getValueLength()), Bytes.toInt(cell.getRowArray(), cell.getRowOffset())); + } + for (int i = 0; i < miniBatchOp.size(); i++) { + Mutation m = miniBatchOp.getOperation(i); + if (m instanceof Put + && miniBatchOp.getOperationStatus(i).getOperationStatusCode() == SUCCESS) { + for (Map.Entry> f : ((Put) m).getFamilyCellMap().entrySet()) { + List cells = f.getValue(); + for (Cell cell : cells) { + if (Bytes.equals(cell.getFamilyArray(), cell.getFamilyOffset(), + cell.getFamilyLength(), LABELS_TABLE_FAMILY, 0, LABELS_TABLE_FAMILY.length) + && Bytes.equals(cell.getQualifierArray(), cell.getQualifierOffset(), + cell.getQualifierLength(), LABELS_TABLE_QUALIFIER, 0, + LABELS_TABLE_QUALIFIER.length)) { + existingLabels.put( + ByteString.copyFrom(cell.getValueArray(), cell.getValueOffset(), + cell.getValueLength()), + Bytes.toInt(cell.getRowArray(), cell.getRowOffset())); + } + } + } + } + } + byte[] serialized = VisibilityUtils.getDataTowriteToZooKeeper(existingLabels); + this.visibilityManager.writeToZookeeper(serialized); + } + } + + // Checks whether cell contains any tag with type as VISIBILITY_TAG_TYPE. + // This tag type is reserved and should not be explicitly set by user. 
+ private boolean checkForReservedVisibilityTagPresence(Cell cell) throws IOException { + if (cell.getTagsLength() > 0) { + KeyValue kv = KeyValueUtil.ensureKeyValue(cell); + Iterator tagsIterator = kv.tagsIterator(); + while (tagsIterator.hasNext()) { + if (tagsIterator.next().getType() == VisibilityUtils.VISIBILITY_TAG_TYPE) { + return false; + } + } + } + return true; + } + + private List createVisibilityTags(String visibilityLabelsExp) throws IOException, + ParseException, InvalidLabelException { + ExpressionNode node = null; + node = this.expressionParser.parse(visibilityLabelsExp); + node = this.expressionExpander.expand(node); + List tags = new ArrayList(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + DataOutputStream dos = new DataOutputStream(baos); + if (node.isSingleNode()) { + writeLabelOrdinalsToStream(node, dos); + tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray())); + baos.reset(); + } else { + NonLeafExpressionNode nlNode = (NonLeafExpressionNode) node; + if (nlNode.getOperator() == Operator.OR) { + for (ExpressionNode child : nlNode.getChildExps()) { + writeLabelOrdinalsToStream(child, dos); + tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray())); + baos.reset(); + } + } else { + writeLabelOrdinalsToStream(nlNode, dos); + tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray())); + baos.reset(); + } + } + return tags; + } + + private void writeLabelOrdinalsToStream(ExpressionNode node, DataOutputStream dos) + throws IOException, InvalidLabelException { + if (node.isSingleNode()) { + String identifier = null; + int labelOrdinal = 0; + if (node instanceof LeafExpressionNode) { + identifier = ((LeafExpressionNode) node) + .getIdentifier(); + labelOrdinal = this.visibilityManager.getLabelOrdinal(identifier); + } else { + // This is a NOT node. 
+ LeafExpressionNode lNode = (LeafExpressionNode) ((NonLeafExpressionNode) node) + .getChildExps().get(0); + identifier = lNode.getIdentifier(); + labelOrdinal = this.visibilityManager.getLabelOrdinal(identifier); + labelOrdinal = -1 * labelOrdinal; // Store NOT node as -ve ordinal. + } + if (labelOrdinal == 0) { + throw new InvalidLabelException("Invalid visibility label " + identifier); + } + WritableUtils.writeVInt(dos, labelOrdinal); + } else { + List childExps = ((NonLeafExpressionNode) node).getChildExps(); + for (ExpressionNode child : childExps) { + writeLabelOrdinalsToStream(child, dos); + } + } + } + + @Override + public RegionScanner preScannerOpen(ObserverContext e, Scan scan, + RegionScanner s) throws IOException { + HRegion region = e.getEnvironment().getRegion(); + if (region.getRegionInfo().getTable().isSystemTable()) { + return s; + } + Filter visibilityLabelFilter = createVisibilityLabelFilter(scan); + if (visibilityLabelFilter != null) { + Filter filter = scan.getFilter(); + if (filter != null) { + scan.setFilter(new FilterList(filter, visibilityLabelFilter)); + } else { + scan.setFilter(visibilityLabelFilter); + } + } + return s; + } + + @Override + public void preGetOp(ObserverContext e, Get get, List results) + throws IOException { + HRegion region = e.getEnvironment().getRegion(); + if (!region.getRegionInfo().getTable().isSystemTable()) { + Filter visibilityLabelFilter = createVisibilityLabelFilter(get); + if (visibilityLabelFilter != null) { + Filter filter = get.getFilter(); + if (filter != null) { + get.setFilter(new FilterList(filter, visibilityLabelFilter)); + } else { + get.setFilter(visibilityLabelFilter); + } + } + } + } + + private Filter createVisibilityLabelFilter(OperationWithAttributes op) { + Filter visibilityLabelFilter = null; + if (this.scanLabelGenerator != null) { + List labels = null; + try { + labels = this.scanLabelGenerator.getLabels(getActiveUser(), op); + } catch (Throwable t) { + LOG.error(t); + } + if (labels != 
null && !labels.isEmpty()) { + int labelsCount = this.visibilityManager.getLabelsCount(); + BitSet bs = new BitSet(labelsCount + 1); // ordinal is index 1 based + for (String label : labels) { + int labelOrdinal = this.visibilityManager.getLabelOrdinal(label); + if (labelOrdinal != 0) { + bs.set(labelOrdinal); + } + } + visibilityLabelFilter = new VisibilityLabelFilter(bs); + } + } + return visibilityLabelFilter; + } + + private User getActiveUser() throws IOException { + User user = RequestContext.getRequestUser(); + if (!RequestContext.isInRequestContext()) { + // for non-rpc handling, fallback to system user + user = User.getCurrent(); + } + return user; + } + + @Override + public Service getService() { + return VisibilityLabelsProtos.VisibilityLabelsService.newReflectiveService(this); + } + + /********************* VisibilityEndpoint service related methods **********************/ + @Override + public synchronized void addLabels(RpcController controller, VisibilityLabelsRequest request, + RpcCallback done) { + Builder response = VisibilityLabelsResponse.newBuilder(); + List labels = request.getVisLabelList(); + try { + // TODO check in AccessController who can add to this table. Global ADMIN only? 
+ List puts = new ArrayList(labels.size()); + RegionActionResult successResult = RegionActionResult.newBuilder().build(); + for (VisibilityLabel visLabel : labels) { + byte[] label = visLabel.getLabel().toByteArray(); + String labelStr = Bytes.toString(label); + if (VisibilityLabelsValidator.isValidLabel(label)) { + if (this.visibilityManager.getLabelOrdinal(labelStr) > 0) { + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new LabelAlreadyExistsException("Label '" + labelStr + + "' already exists"))); + response.addResult(failureResultBuilder.build()); + } else { + Put p = new Put(Bytes.toBytes(ordinalCounter)); + p.add(LABELS_TABLE_FAMILY, LABELS_TABLE_QUALIFIER, label); + puts.add(p); + ordinalCounter++; + response.addResult(successResult); + } + } else { + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new InvalidLabelException("Invalid visibility label '" + labelStr + + "'"))); + response.addResult(failureResultBuilder.build()); + } + } + OperationStatus[] opStatus = this.regionEnv.getRegion().batchMutate( + puts.toArray(new Mutation[puts.size()])); + int i = 0; + for (OperationStatus status : opStatus) { + if (status.getOperationStatusCode() != SUCCESS) { + while (response.getResult(i) != successResult) i++; + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter + .buildException(new DoNotRetryIOException(status.getExceptionMsg()))); + response.setResult(i, failureResultBuilder.build()); + } + i++; + } + } catch (IOException e) { + LOG.error(e); + RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder(); + failureResultBuilder.setException(ResponseConverter.buildException(e)); + RegionActionResult failureResult = 
failureResultBuilder.build(); + for (int i = 0; i < labels.size(); i++) { + response.setResult(i, failureResult); + } + } + done.run(response.build()); + } + + private List getExistingLabels() throws IOException { + List dummy = new ArrayList(1); + Scan scan = new Scan(); + scan.addColumn(LABELS_TABLE_FAMILY, LABELS_TABLE_QUALIFIER); + RegionScanner scanner = this.regionEnv.getRegion().getScanner(scan); + List existingLabels = new ArrayList(); + try { + while (true) { + scanner.next(dummy); + if (dummy.isEmpty()) { + break; + } + existingLabels.add(dummy.get(0)); + dummy.clear(); + } + } finally { + scanner.close(); + } + return existingLabels; + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java new file mode 100644 index 0000000..c19e172 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java @@ -0,0 +1,83 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import java.io.IOException; +import java.util.BitSet; +import java.util.Iterator; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.filter.FilterBase; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.io.WritableUtils; + +/** + * This Filter checks the visibility expression with each KV against visibility labels associated + * with the scan. Based on the check the KV is included in the scan result or gets filtered out. + */ +@InterfaceAudience.Private +class VisibilityLabelFilter extends FilterBase { + + private BitSet authLabels; + + public VisibilityLabelFilter(BitSet authLabels) { + this.authLabels = authLabels; + } + + @Override + public ReturnCode filterKeyValue(Cell cell) throws IOException { + KeyValue kv = KeyValueUtil.ensureKeyValue(cell); + Iterator tagsItr = kv.tagsIterator(); + while (tagsItr.hasNext()) { + boolean includeKV = true; + Tag tag = tagsItr.next(); + if (tag.getType() == VisibilityUtils.VISIBILITY_TAG_TYPE) { + int offset = tag.getTagOffset(); + int endOffset = offset + tag.getTagLength(); + while (offset < endOffset) { + int currLabelOrdinal = (int) Bytes.readVLong(tag.getBuffer(), offset); + if (currLabelOrdinal < 0) { + // check for the absence of this label in the Scan Auth labels + // ie. to check BitSet corresponding bit is 0 + int temp = -currLabelOrdinal; + if (this.authLabels.get(temp)) { + includeKV = false; + break; + } + } else { + if (!this.authLabels.get(currLabelOrdinal)) { + includeKV = false; + break; + } + } + offset += WritableUtils.getVIntSize(currLabelOrdinal); + } + if (includeKV) { + // We got one visibility expression getting evaluated to true. Good to include this KV in + // the result then. 
+ return ReturnCode.INCLUDE; + } + } + } + return ReturnCode.SKIP; + } +} \ No newline at end of file diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsManager.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsManager.java new file mode 100644 index 0000000..ac9c177 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsManager.java @@ -0,0 +1,119 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.zookeeper.KeeperException; + +/** + * Maintains the cache for visibility labels and also uses the zookeeper to update the labels + * in the system. The cache updation happens based on the data change event that happens + * on the zookeeper znode for labels table + */ +@InterfaceAudience.Private +public class VisibilityLabelsManager { + + private static Log LOG = LogFactory.getLog(VisibilityLabelsManager.class); + private static VisibilityLabelsManager instance; + + private ZKVisibilityLabelWatcher zkVisibilityWatcher; + private Map labels = new HashMap(); + private ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); + + private VisibilityLabelsManager(ZooKeeperWatcher watcher, Configuration conf) { + zkVisibilityWatcher = new ZKVisibilityLabelWatcher(watcher, this, conf); + try { + zkVisibilityWatcher.start(); + } catch (KeeperException ke) { + LOG.error("ZooKeeper initialization failed", ke); + } + } + + public synchronized static VisibilityLabelsManager get(ZooKeeperWatcher watcher, + Configuration conf) throws IOException { + if (instance == null) { + instance = new VisibilityLabelsManager(watcher, conf); + } + return instance; + } + + public void refreshCache(byte[] data) throws IOException { + List visibilityLabels = null; + try { + visibilityLabels = 
VisibilityUtils.readLabelsFromZKData(data); + } catch (DeserializationException dse) { + throw new IOException(dse); + } + this.lock.writeLock().lock(); + try { + for (VisibilityLabel visLabel : visibilityLabels) { + labels.put(Bytes.toString(visLabel.getLabel().toByteArray()), visLabel.getOrdinal()); + } + } finally { + this.lock.writeLock().unlock(); + } + } + + /** + * @param label + * @return The ordinal for the label. The ordinal starts from 1. Returns 0 when the passed a non + * existing label. + */ + public int getLabelOrdinal(String label) { + Integer ordinal = null; + this.lock.readLock().lock(); + try { + ordinal = labels.get(label); + } finally { + this.lock.readLock().unlock(); + } + if (ordinal != null) { + return ordinal.intValue(); + } + // 0 denotes not available + return 0; + } + + /** + * @return The total number of visibility labels. + */ + public int getLabelsCount(){ + return this.labels.size(); + } + + /** + * Writes the labels data to zookeeper node. + * @param serializedData + */ + public void writeToZookeeper(byte[] serializedData) { + this.zkVisibilityWatcher.writeToZookeeper(serializedData); + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java new file mode 100644 index 0000000..d65b171 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java @@ -0,0 +1,88 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest; +import org.apache.hadoop.util.ReflectionUtils; + +import com.google.protobuf.ByteString; +import com.google.protobuf.InvalidProtocolBufferException; + +/** + * Utility method to support visibility + */ +@InterfaceAudience.Private +public class VisibilityUtils { + + public static final byte VISIBILITY_TAG_TYPE = (byte) 2; + + /** + * + * @param existingLabels + * @return + */ + public static byte[] getDataTowriteToZooKeeper(Map existingLabels) { + VisibilityLabelsRequest.Builder visReqBuilder = VisibilityLabelsRequest.newBuilder(); + for (Entry entry : existingLabels.entrySet()) { + VisibilityLabel.Builder visLabBuilder = VisibilityLabel.newBuilder(); + visLabBuilder.setLabel(entry.getKey()); + visLabBuilder.setOrdinal(entry.getValue()); + visReqBuilder.addVisLabel(visLabBuilder.build()); + } + return ProtobufUtil.prependPBMagic(visReqBuilder.build().toByteArray()); + } + + /** + * Reads back from the zookeeper. The data read here is of the form written by + * writeToZooKeeper(Map entries). 
+ * + * @param data + * @return + * @throws DeserializationException + */ + public static List readLabelsFromZKData(byte[] data) + throws DeserializationException { + if (ProtobufUtil.isPBMagicPrefix(data)) { + int pblen = ProtobufUtil.lengthOfPBMagic(); + try { + VisibilityLabelsRequest request = VisibilityLabelsRequest.newBuilder() + .mergeFrom(data, pblen, data.length - pblen).build(); + return request.getVisLabelList(); + } catch (InvalidProtocolBufferException e) { + throw new DeserializationException(e); + } + } + return null; + } + + public static ScanLabelGenerator getScanLabelGenerator(Configuration conf) { + Class scanLabelGeneratorKlass = conf.getClass( + "hbase.regionserver.scan.visibility.label.generator.class", + DefaultScanLabelGenerator.class, ScanLabelGenerator.class); + return ReflectionUtils.newInstance(scanLabelGeneratorKlass, conf); + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java new file mode 100644 index 0000000..697a917 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java @@ -0,0 +1,138 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import java.io.IOException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.zookeeper.ZKUtil; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperListener; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.zookeeper.KeeperException; + +/** + * A zk watcher that watches the labels table znode. This would create a znode + * /hbase/visibility_labels and will have a serialized form of a set of labels in the system. + */ +@InterfaceAudience.Private +public class ZKVisibilityLabelWatcher extends ZooKeeperListener { + + private static final Log LOG = LogFactory.getLog(ZKVisibilityLabelWatcher.class); + private static final String DEFAULT_VISIBILITY_LABEL_NODE = "visibility_labels"; + private static final String VISIBILITY_LABEL_ZK_PATH = "zookeeper.znode.visibility.label.parent"; + + private VisibilityLabelsManager labelsManager; + private String labelZnode; + + public ZKVisibilityLabelWatcher(ZooKeeperWatcher watcher, VisibilityLabelsManager labelsManager, + Configuration conf) { + super(watcher); + this.labelsManager = labelsManager; + String labelZnodeParent = conf.get(VISIBILITY_LABEL_ZK_PATH, DEFAULT_VISIBILITY_LABEL_NODE); + this.labelZnode = ZKUtil.joinZNode(watcher.baseZNode, labelZnodeParent); + } + + public void start() throws KeeperException { + watcher.registerListener(this); + if (ZKUtil.watchAndCheckExists(watcher, labelZnode)) { + byte[] data = ZKUtil.getDataAndWatch(watcher, labelZnode); + if (data != null) { + // TODO better to initialize the cache by reading from the labels table(?) 
+ refreshVisibilityLabelsCache(data); + } + } + } + + private void refreshVisibilityLabelsCache(byte[] data) { + try { + this.labelsManager.refreshCache(data); + } catch (IOException ioe) { + LOG.error("Failed parsing data from labels table " + " from zk", ioe); + } + } + + @Override + public void nodeCreated(String path) { + if (path.equals(labelZnode)) { + try { + byte[] data = ZKUtil.getDataAndWatch(watcher, labelZnode); + if (data.length != 0) { + // TODO : there is a chance that before the data is written this could be called + // we need not refresh cache here + // refreshVisbilityLabelsCache(data); + } + } catch (KeeperException ke) { + LOG.error("Error reading data from zookeeper", ke); + // only option is to abort + watcher.abort("Zookeeper error obtaining label node children", ke); + } + } + } + + @Override + public void nodeDeleted(String path) { + // There is no case of visibility labels path to get deleted. + } + + @Override + public void nodeDataChanged(String path) { + if (path.equals(labelZnode)) { + try { + byte[] data = ZKUtil.getDataAndWatch(watcher, path); + refreshVisibilityLabelsCache(data); + } catch (KeeperException ke) { + LOG.error("Error reading data from zookeeper for node " + path, ke); + // only option is to abort + watcher.abort("Zookeeper error getting data for node " + path, ke); + } + } + } + + @Override + public void nodeChildrenChanged(String path) { + if (path.equals(labelZnode)) { + // table permissions changed + try { + byte[] data = ZKUtil.getDataAndWatch(watcher, labelZnode); + refreshVisibilityLabelsCache(data); + } catch (KeeperException ke) { + LOG.error("Error reading data from zookeeper for path " + path, ke); + watcher.abort("Zookeeper error get node children for path " + path, ke); + } + } + } + + /*** + * Write a table's access controls to the permissions mirror in zookeeper + * + * @param entry + * @param permsData + */ + public void writeToZookeeper(byte[] serializedData) { + try { + 
ZKUtil.createWithParents(watcher, this.labelZnode); + ZKUtil.updateExistingNodeData(watcher, this.labelZnode, serializedData, -1); + } catch (KeeperException e) { + LOG.error("Failed labels entry '" + serializedData + "'", e); + watcher.abort("Failed writing node " + this.labelZnode + " to zookeeper", e); + } + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/ExpressionNode.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/ExpressionNode.java new file mode 100644 index 0000000..fb39e2d --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/ExpressionNode.java @@ -0,0 +1,27 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility.expression; + +import org.apache.hadoop.classification.InterfaceAudience; + +@InterfaceAudience.Private +public interface ExpressionNode { + boolean isSingleNode(); + + ExpressionNode deepClone(); +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java new file mode 100644 index 0000000..4e2d351 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java @@ -0,0 +1,65 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility.expression; + +import org.apache.hadoop.classification.InterfaceAudience; + +@InterfaceAudience.Private +public class LeafExpressionNode implements ExpressionNode { + public static final LeafExpressionNode OPEN_PARAN_NODE = new LeafExpressionNode("("); + public static final LeafExpressionNode CLOSE_PARAN_NODE = new LeafExpressionNode(")"); + + private String identifier; + + public LeafExpressionNode(String identifier) { + this.identifier = identifier; + } + + public String getIdentifier() { + return this.identifier; + } + + @Override + public int hashCode() { + return this.identifier.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof LeafExpressionNode) { + LeafExpressionNode that = (LeafExpressionNode) obj; + return this.identifier.equals(that.identifier); + } + return false; + } + + @Override + public String toString() { + return this.identifier; + } + + @Override + public boolean isSingleNode() { + return true; + } + + public LeafExpressionNode deepClone() { + LeafExpressionNode clone = new LeafExpressionNode(this.identifier); + return clone; + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java new file mode 100644 index 0000000..03def94 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java @@ -0,0 +1,102 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility.expression; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience; + +@InterfaceAudience.Private +public class NonLeafExpressionNode implements ExpressionNode { + private Operator op; + private List childExps = new ArrayList(2); + + public NonLeafExpressionNode() { + + } + + public NonLeafExpressionNode(Operator op) { + this.op = op; + } + + public NonLeafExpressionNode(Operator op, List exps) { + this.op = op; + if (op == Operator.NOT && exps.size() > 1) { + throw new IllegalArgumentException(Operator.NOT + " should be on 1 child expression"); + } + this.childExps = exps; + } + + public NonLeafExpressionNode(Operator op, ExpressionNode... 
exps) { + this.op = op; + List expLst = new ArrayList(); + for (ExpressionNode exp : exps) { + expLst.add(exp); + } + this.childExps = expLst; + } + + public Operator getOperator() { + return op; + } + + public List getChildExps() { + return childExps; + } + + public void addChildExp(ExpressionNode exp) { + if (op == Operator.NOT && this.childExps.size() == 1) { + throw new IllegalStateException(Operator.NOT + " should be on 1 child expression"); + } + this.childExps.add(exp); + } + + public void addChildExps(List exps) { + this.childExps.addAll(exps); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("("); + if (this.op == Operator.NOT) { + sb.append(this.op); + } + for (int i = 0; i < this.childExps.size(); i++) { + sb.append(childExps.get(i)); + if (i < this.childExps.size() - 1) { + sb.append(" " + this.op + " "); + } + } + sb.append(")"); + return sb.toString(); + } + + @Override + public boolean isSingleNode() { + return this.op == Operator.NOT; + } + + public NonLeafExpressionNode deepClone() { + NonLeafExpressionNode clone = new NonLeafExpressionNode(this.op); + for (ExpressionNode exp : this.childExps) { + clone.addChildExp(exp.deepClone()); + } + return clone; + } +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java new file mode 100644 index 0000000..6f47b50 --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility.expression; + +public enum Operator { + AND('&'), OR('|'), NOT('!'); + + private char rep; + + private Operator(char rep) { + this.rep = rep; + } + + public String toString() { + return String.valueOf(this.rep); + }; +} diff --git hbase-server/src/main/resources/org/apache/hadoop/hbase/rest/protobuf/ScannerMessage.proto hbase-server/src/main/resources/org/apache/hadoop/hbase/rest/protobuf/ScannerMessage.proto index 85d6024..ed8f14a 100644 --- hbase-server/src/main/resources/org/apache/hadoop/hbase/rest/protobuf/ScannerMessage.proto +++ hbase-server/src/main/resources/org/apache/hadoop/hbase/rest/protobuf/ScannerMessage.proto @@ -27,4 +27,5 @@ message Scanner { optional int32 maxVersions = 7; optional string filter = 8; optional int32 caching = 9; + optional repeated string labels = 10; } diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java new file mode 100644 index 0000000..f629aa4 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java @@ -0,0 +1,210 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */package org.apache.hadoop.hbase.rest; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.StringWriter; +import java.util.Iterator; + +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBException; +import javax.xml.bind.Marshaller; +import javax.xml.bind.Unmarshaller; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Durability; +import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.rest.client.Client; +import org.apache.hadoop.hbase.rest.client.Cluster; +import org.apache.hadoop.hbase.rest.client.Response; +import org.apache.hadoop.hbase.rest.model.CellModel; +import org.apache.hadoop.hbase.rest.model.CellSetModel; +import org.apache.hadoop.hbase.rest.model.RowModel; +import 
org.apache.hadoop.hbase.rest.model.ScannerModel; +import org.apache.hadoop.hbase.security.visibility.VisibilityClient; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; +import org.apache.hadoop.hbase.security.visibility.VisibilityController; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(MediumTests.class) +public class TestScannersWithLabels { + private static final String TABLE = "TestScannersWithLabels"; + private static final String CFA = "a"; + private static final String CFB = "b"; + private static final String COLUMN_1 = CFA + ":1"; + private static final String COLUMN_2 = CFB + ":2"; + private final static String TOPSECRET = "topsecret"; + private final static String PUBLIC = "public"; + private final static String PRIVATE = "private"; + private final static String CONFIDENTIAL = "confidential"; + private final static String SECRET = "secret"; + + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + private static final HBaseRESTTestingUtility REST_TEST_UTIL = new HBaseRESTTestingUtility(); + private static Client client; + private static JAXBContext context; + private static Marshaller marshaller; + private static Unmarshaller unmarshaller; + private static Configuration conf; + + private static int insertData(String tableName, String column, double prob) throws IOException { + int count = 0; + HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName); + byte[] k = new byte[3]; + byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column)); + + for (int i = 0; i < 9; i++) { + Put put = new Put(Bytes.toBytes("row" + i)); + put.setDurability(Durability.SKIP_WAL); + put.add(famAndQf[0], famAndQf[1], k); + put.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, + Bytes.toBytes("(" + SECRET + "|" + CONFIDENTIAL + ")" + "&" + "!" 
+ TOPSECRET)); + table.put(put); + count++; + } + table.flushCommits(); + return count; + } + + private static int countCellSet(CellSetModel model) { + int count = 0; + Iterator rows = model.getRows().iterator(); + while (rows.hasNext()) { + RowModel row = rows.next(); + Iterator cells = row.getCells().iterator(); + while (cells.hasNext()) { + cells.next(); + count++; + } + } + return count; + } + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + conf = TEST_UTIL.getConfiguration(); + conf = TEST_UTIL.getConfiguration(); + conf.set("hbase.coprocessor.master.classes", VisibilityController.class.getName()); + conf.set("hbase.coprocessor.region.classes", VisibilityController.class.getName()); + TEST_UTIL.startMiniCluster(1); + // Wait for the labels table to become available + TEST_UTIL.waitTableEnabled(VisibilityConstants.LABELS_TABLE_NAME.getName(), 50000); + createLabels(); + REST_TEST_UTIL.startServletContainer(conf); + client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort())); + context = JAXBContext.newInstance(CellModel.class, CellSetModel.class, RowModel.class, + ScannerModel.class); + marshaller = context.createMarshaller(); + unmarshaller = context.createUnmarshaller(); + HBaseAdmin admin = TEST_UTIL.getHBaseAdmin(); + if (admin.tableExists(TABLE)) { + return; + } + HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE)); + htd.addFamily(new HColumnDescriptor(CFA)); + htd.addFamily(new HColumnDescriptor(CFB)); + admin.createTable(htd); + insertData(TABLE, COLUMN_1, 1.0); + insertData(TABLE, COLUMN_2, 0.5); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + REST_TEST_UTIL.shutdownServletContainer(); + TEST_UTIL.shutdownMiniCluster(); + } + + private static void createLabels() throws IOException { + String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, PUBLIC, TOPSECRET }; + try { + VisibilityClient.addLabels(conf, labels); + } catch (Throwable t) { + throw new 
IOException(t); + } + } + + @Test + public void testSimpleScannerXMLWithLabelsThatReceivesNoData() throws IOException, JAXBException { + final int BATCH_SIZE = 5; + // new scanner + ScannerModel model = new ScannerModel(); + model.setBatch(BATCH_SIZE); + model.addColumn(Bytes.toBytes(COLUMN_1)); + model.addLabel(PUBLIC); + StringWriter writer = new StringWriter(); + marshaller.marshal(model, writer); + byte[] body = Bytes.toBytes(writer.toString()); + // recall previous put operation with read-only off + conf.set("hbase.rest.readonly", "false"); + Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML, body); + assertEquals(response.getCode(), 201); + String scannerURI = response.getLocation(); + assertNotNull(scannerURI); + + // get a cell set + response = client.get(scannerURI, Constants.MIMETYPE_XML); + // Respond with 204 as there are no cells to be retrieved + assertEquals(response.getCode(), 204); + assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); + } + + @Test + public void testSimpleScannerXMLWithLabelsThatReceivesData() throws IOException, JAXBException { + // new scanner + ScannerModel model = new ScannerModel(); + model.setBatch(5); + model.addColumn(Bytes.toBytes(COLUMN_1)); + model.addLabel(SECRET); + StringWriter writer = new StringWriter(); + marshaller.marshal(model, writer); + byte[] body = Bytes.toBytes(writer.toString()); + + // recall previous put operation with read-only off + conf.set("hbase.rest.readonly", "false"); + Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML, body); + assertEquals(response.getCode(), 201); + String scannerURI = response.getLocation(); + assertNotNull(scannerURI); + + // get a cell set + response = client.get(scannerURI, Constants.MIMETYPE_XML); + // Respond with 200 as there are cells to be retrieved + assertEquals(response.getCode(), 200); + assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); + CellSetModel
cellSet = (CellSetModel) unmarshaller.unmarshal(new ByteArrayInputStream(response + .getBody())); + // as the label matches, the rows are retrieved (count limited by the batch size) + assertEquals(countCellSet(cellSet), 5); + } + +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java index 2e2b304..c84a058 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java @@ -19,22 +19,14 @@ package org.apache.hadoop.hbase.rest.model; -import java.io.IOException; -import java.io.StringReader; -import java.io.StringWriter; - -import javax.xml.bind.JAXBContext; -import javax.xml.bind.JAXBException; -import javax.xml.bind.Marshaller; -import javax.xml.bind.Unmarshaller; - import org.apache.hadoop.hbase.SmallTests; -import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; - -import junit.framework.TestCase; import org.junit.experimental.categories.Category; @Category(SmallTests.class) public class TestScannerModel extends TestModelBase { + private static final String PRIVATE = "private"; + private static final String PUBLIC = "public"; private static final byte[] START_ROW = Bytes.toBytes("abracadabra"); private static final byte[] END_ROW = Bytes.toBytes("zzyzx"); private static final byte[] COLUMN1 = Bytes.toBytes("column1"); @@ -46,20 +38,20 @@ public class TestScannerModel extends TestModelBase { public TestScannerModel() throws Exception { super(ScannerModel.class); - AS_XML = - "" + - "" + - "Y29sdW1uMQ==Y29sdW1uMjpmb28="; + AS_XML = "" + + "" + + "Y29sdW1uMQ==Y29sdW1uMjpmb28=" + + ""; - AS_JSON = - "{\"batch\":100,\"caching\":1000,\"endRow\":\"enp5eng=\",\"endTime\":1245393318192,"+ - "\"maxVersions\":2147483647,\"startRow\":\"YWJyYWNhZGFicmE=\",\"startTime\":1245219839331,"+ - "\"column\":[\"Y29sdW1uMQ==\",\"Y29sdW1uMjpmb28=\"]}"; + AS_JSON =
"{\"batch\":100,\"caching\":1000,\"endRow\":\"enp5eng=\",\"endTime\":1245393318192," + + "\"maxVersions\":2147483647,\"startRow\":\"YWJyYWNhZGFicmE=\",\"startTime\":1245219839331," + + "\"column\":[\"Y29sdW1uMQ==\",\"Y29sdW1uMjpmb28=\"]," + +"\"labels\":[\"private\",\"public\"]}"; - AS_PB = - "CgthYnJhY2FkYWJyYRIFenp5engaB2NvbHVtbjEaC2NvbHVtbjI6Zm9vIGQo47qL554kMLDi57mf" + - "JDj/////B0joBw=="; + // TODO + AS_PB = "CgthYnJhY2FkYWJyYRIFenp5engaB2NvbHVtbjEaC2NvbHVtbjI6Zm9vIGQo47qL554kMLDi57mf" + + "JDj/////B0joBw=="; } protected ScannerModel buildTestModel() { @@ -72,6 +64,8 @@ public class TestScannerModel extends TestModelBase { model.setEndTime(END_TIME); model.setBatch(BATCH); model.setCaching(CACHING); + model.addLabel(PRIVATE); + model.addLabel(PUBLIC); return model; } @@ -79,7 +73,7 @@ public class TestScannerModel extends TestModelBase { assertTrue(Bytes.equals(model.getStartRow(), START_ROW)); assertTrue(Bytes.equals(model.getEndRow(), END_ROW)); boolean foundCol1 = false, foundCol2 = false; - for (byte[] column: model.getColumns()) { + for (byte[] column : model.getColumns()) { if (Bytes.equals(column, COLUMN1)) { foundCol1 = true; } else if (Bytes.equals(column, COLUMN2)) { @@ -92,7 +86,19 @@ public class TestScannerModel extends TestModelBase { assertEquals(model.getEndTime(), END_TIME); assertEquals(model.getBatch(), BATCH); assertEquals(model.getCaching(), CACHING); + boolean foundLabel1 = false; + boolean foundLabel2 = false; + if (model.getLabels() != null && model.getLabels().size() > 0) { + for (String label : model.getLabels()) { + if (label.equals(PRIVATE)) { + foundLabel1 = true; + } else if (label.equals(PUBLIC)) { + foundLabel2 = true; + } + } + assertTrue(foundLabel1); + assertTrue(foundLabel2); + } } } - diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java new file mode 100644 index 
0000000..ea3e892 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java @@ -0,0 +1,393 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import org.apache.hadoop.hbase.SmallTests; +import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(SmallTests.class) +public class TestExpressionExpander { + + @Test + public void testPositiveCases() throws Exception { + ExpressionExpander expander = new ExpressionExpander(); + + // (!a) -> (!a) + NonLeafExpressionNode exp1 = new NonLeafExpressionNode(Operator.NOT, + new LeafExpressionNode("a")); + ExpressionNode result = expander.expand(exp1); + assertTrue(result instanceof NonLeafExpressionNode); + NonLeafExpressionNode 
nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.NOT, nlResult.getOperator()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + + // (a | b) -> (a | b) + NonLeafExpressionNode exp2 = new NonLeafExpressionNode(Operator.OR, + new LeafExpressionNode("a"), new LeafExpressionNode("b")); + result = expander.expand(exp2); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + + // (a & b) -> (a & b) + NonLeafExpressionNode exp3 = new NonLeafExpressionNode(Operator.AND, + new LeafExpressionNode("a"), new LeafExpressionNode("b")); + result = expander.expand(exp3); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + + // ((a | b) | c) -> (a | b | c) + NonLeafExpressionNode exp4 = new NonLeafExpressionNode(Operator.OR, new NonLeafExpressionNode( + Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode("b")), + new LeafExpressionNode("c")); + result = expander.expand(exp4); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(3, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) 
nlResult.getChildExps().get(1)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier()); + + // ((a & b) & c) -> (a & b & c) + NonLeafExpressionNode exp5 = new NonLeafExpressionNode(Operator.AND, new NonLeafExpressionNode( + Operator.AND, new LeafExpressionNode("a"), new LeafExpressionNode("b")), + new LeafExpressionNode("c")); + result = expander.expand(exp5); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(3, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier()); + + // (a | b) & c -> ((a & c) | (b & c)) + NonLeafExpressionNode exp6 = new NonLeafExpressionNode(Operator.AND, new NonLeafExpressionNode( + Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode("b")), + new LeafExpressionNode("c")); + result = expander.expand(exp6); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + NonLeafExpressionNode temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + 
assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // (a & b) | c -> ((a & b) | c) + NonLeafExpressionNode exp7 = new NonLeafExpressionNode(Operator.OR, new NonLeafExpressionNode( + Operator.AND, new LeafExpressionNode("a"), new LeafExpressionNode("b")), + new LeafExpressionNode("c")); + result = expander.expand(exp7); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + nlResult = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + + // ((a & b) | c) & d -> (((a & b) & d) | (c & d)) + NonLeafExpressionNode exp8 = new NonLeafExpressionNode(Operator.AND); + exp8.addChildExp(new NonLeafExpressionNode(Operator.OR, new NonLeafExpressionNode(Operator.AND, + new LeafExpressionNode("a"), new LeafExpressionNode("b")), new LeafExpressionNode("c"))); + exp8.addChildExp(new LeafExpressionNode("d")); + result = expander.expand(exp8); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) 
nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // (a | b) | (c | d) -> (a | b | c | d) + NonLeafExpressionNode exp9 = new NonLeafExpressionNode(Operator.OR); + exp9.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), + new LeafExpressionNode("b"))); + exp9.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("c"), + new LeafExpressionNode("d"))); + result = expander.expand(exp9); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(4, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) nlResult.getChildExps().get(3)).getIdentifier()); + + // (a & b) & (c & d) -> (a & b & c & d) + NonLeafExpressionNode exp10 = new NonLeafExpressionNode(Operator.AND); + exp10.addChildExp(new NonLeafExpressionNode(Operator.AND, new LeafExpressionNode("a"), + new LeafExpressionNode("b"))); + exp10.addChildExp(new NonLeafExpressionNode(Operator.AND, new LeafExpressionNode("c"), + new LeafExpressionNode("d"))); + result = expander.expand(exp10); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) 
result; + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(4, nlResult.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlResult.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlResult.getChildExps().get(1)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) nlResult.getChildExps().get(2)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) nlResult.getChildExps().get(3)).getIdentifier()); + + // (a | b) & (c | d) -> ((a & c) | (a & d) | (b & c) | (b & d)) + NonLeafExpressionNode exp11 = new NonLeafExpressionNode(Operator.AND); + exp11.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), + new LeafExpressionNode("b"))); + exp11.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("c"), + new LeafExpressionNode("d"))); + result = expander.expand(exp11); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(4, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) 
temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(3); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // (((a | b) | c) | d) & e -> ((a & e) | (b & e) | (c & e) | (d & e)) + NonLeafExpressionNode exp12 = new NonLeafExpressionNode(Operator.AND); + NonLeafExpressionNode tempExp1 = new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode( + "a"), new LeafExpressionNode("b")); + NonLeafExpressionNode tempExp2 = new NonLeafExpressionNode(Operator.OR, tempExp1, + new LeafExpressionNode("c")); + NonLeafExpressionNode tempExp3 = new NonLeafExpressionNode(Operator.OR, tempExp2, + new LeafExpressionNode("d")); + exp12.addChildExp(tempExp3); + exp12.addChildExp(new LeafExpressionNode("e")); + result = expander.expand(exp12); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(4, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = 
(NonLeafExpressionNode) nlResult.getChildExps().get(2); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(3); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // (a | b | c) & d -> ((a & d) | (b & d) | (c & d)) + NonLeafExpressionNode exp13 = new NonLeafExpressionNode(Operator.AND, + new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode( + "b"), new LeafExpressionNode("c")), new LeafExpressionNode("d")); + result = expander.expand(exp13); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(3, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2); + assertEquals(Operator.AND, temp.getOperator()); + 
assertEquals(2, temp.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // ((a | b) & (c | d)) & (e | f) -> (((a & c) & e) | ((a & c) & f) | ((a & d) & e) | ((a & d) & + // f) | ((b & c) & e) | ((b & c) & f) | ((b & d) & e) | ((b & d) & f)) + NonLeafExpressionNode exp15 = new NonLeafExpressionNode(Operator.AND); + NonLeafExpressionNode temp1 = new NonLeafExpressionNode(Operator.AND); + temp1.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), + new LeafExpressionNode("b"))); + temp1.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("c"), + new LeafExpressionNode("d"))); + exp15.addChildExp(temp1); + exp15.addChildExp(new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("e"), + new LeafExpressionNode("f"))); + result = expander.expand(exp15); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.OR, nlResult.getOperator()); + assertEquals(8, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("f", ((LeafExpressionNode) 
temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(2); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(3); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(4); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", 
((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(5); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("c", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(6); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("e", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(7); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("f", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + temp = (NonLeafExpressionNode) temp.getChildExps().get(0); + assertEquals(Operator.AND, temp.getOperator()); + assertEquals(2, temp.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) temp.getChildExps().get(1)).getIdentifier()); + + // !(a | b) -> ((!a) & (!b)) + NonLeafExpressionNode exp16 = new 
NonLeafExpressionNode(Operator.NOT, + new NonLeafExpressionNode(Operator.OR, new LeafExpressionNode("a"), new LeafExpressionNode( + "b"))); + result = expander.expand(exp16); + assertTrue(result instanceof NonLeafExpressionNode); + nlResult = (NonLeafExpressionNode) result; + assertEquals(Operator.AND, nlResult.getOperator()); + assertEquals(2, nlResult.getChildExps().size()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(0); + assertEquals(Operator.NOT, temp.getOperator()); + assertEquals("a", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + temp = (NonLeafExpressionNode) nlResult.getChildExps().get(1); + assertEquals(Operator.NOT, temp.getOperator()); + assertEquals("b", ((LeafExpressionNode) temp.getChildExps().get(0)).getIdentifier()); + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java new file mode 100644 index 0000000..f7a8dfd --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java @@ -0,0 +1,318 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.junit.Assert.fail; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import org.apache.hadoop.hbase.SmallTests; +import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; +import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(SmallTests.class) +public class TestExpressionParser { + + private ExpressionParser parser = new ExpressionParser(); + + @Test + public void testPositiveCases() throws Exception { + // abc -> (abc) + ExpressionNode node = parser.parse("abc"); + assertTrue(node instanceof LeafExpressionNode); + assertEquals("abc", ((LeafExpressionNode) node).getIdentifier()); + + // a&b|c&d -> (((a & b) | c) & d) + node = parser.parse("a&b|c&d"); + assertTrue(node instanceof NonLeafExpressionNode); + NonLeafExpressionNode nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("d", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("b", 
((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // (a) -> (a) + node = parser.parse("(a)"); + assertTrue(node instanceof LeafExpressionNode); + assertEquals("a", ((LeafExpressionNode) node).getIdentifier()); + + // (a&b) -> (a & b) + node = parser.parse(" ( a & b )"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // ((((a&b)))) -> (a & b) + node = parser.parse("((((a&b))))"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // (a|b)&(cc|def) -> ((a | b) & (cc | def)) + node = parser.parse("( a | b ) & (cc|def)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + NonLeafExpressionNode nlNodeLeft = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + NonLeafExpressionNode nlNodeRight = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNodeLeft.getOperator()); + assertEquals(2, nlNodeLeft.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) 
nlNodeLeft.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNodeLeft.getChildExps().get(1)).getIdentifier()); + assertEquals(Operator.OR, nlNodeRight.getOperator()); + assertEquals(2, nlNodeRight.getChildExps().size()); + assertEquals("cc", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + assertEquals("def", ((LeafExpressionNode) nlNodeRight.getChildExps().get(1)).getIdentifier()); + + // a&(cc|de) -> (a & (cc | de)) + node = parser.parse("a&(cc|de)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("cc", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("de", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // (a&b)|c -> ((a & b) | c) + node = parser.parse("(a&b)|c"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) 
nlNode.getChildExps().get(1)).getIdentifier()); + + // (a&b&c)|d -> (((a & b) & c) | d) + node = parser.parse("(a&b&c)|d"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("d", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // a&(b|(c|d)) -> (a & (b | (c | d))) + node = parser.parse("a&(b|(c|d))"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) 
nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("d", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // (!a) -> (!a) + node = parser.parse("(!a)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // a&(!b) -> (a & (!b)) + node = parser.parse("a&(!b)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // !a&b -> ((!a) & b) + node = parser.parse("!a&b"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // !a&(!b) -> ((!a) & (!b)) + node = 
parser.parse("!a&(!b)"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNodeLeft = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + nlNodeRight = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.NOT, nlNodeLeft.getOperator()); + assertEquals(1, nlNodeLeft.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNodeLeft.getChildExps().get(0)).getIdentifier()); + assertEquals(Operator.NOT, nlNodeRight.getOperator()); + assertEquals(1, nlNodeRight.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + + // !a&!b -> ((!a) & (!b)) + node = parser.parse("!a&!b"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNodeLeft = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + nlNodeRight = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.NOT, nlNodeLeft.getOperator()); + assertEquals(1, nlNodeLeft.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNodeLeft.getChildExps().get(0)).getIdentifier()); + assertEquals(Operator.NOT, nlNodeRight.getOperator()); + assertEquals(1, nlNodeRight.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + + // !(a&b) -> (!(a & b)) + node = parser.parse("!(a&b)"); + assertTrue(node instanceof NonLeafExpressionNode); + 
nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(1)).getIdentifier()); + + // a&!b -> (a & (!b)) + node = parser.parse("a&!b"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertEquals("a", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier()); + + // !((a|b)&!(c&!b)) -> (!((a | b) & (!(c & (!b))))) + node = parser.parse("!((a | b) & !(c & !b))"); + assertTrue(node instanceof NonLeafExpressionNode); + nlNode = (NonLeafExpressionNode) node; + assertEquals(Operator.NOT, nlNode.getOperator()); + assertEquals(1, nlNode.getChildExps().size()); + nlNode = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + assertEquals(Operator.AND, nlNode.getOperator()); + assertEquals(2, nlNode.getChildExps().size()); + assertTrue(nlNode.getChildExps().get(0) instanceof NonLeafExpressionNode); + assertTrue(nlNode.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNodeLeft = (NonLeafExpressionNode) nlNode.getChildExps().get(0); + nlNodeRight = (NonLeafExpressionNode) nlNode.getChildExps().get(1); + assertEquals(Operator.OR, nlNodeLeft.getOperator()); + assertEquals("a", 
((LeafExpressionNode) nlNodeLeft.getChildExps().get(0)).getIdentifier()); + assertEquals("b", ((LeafExpressionNode) nlNodeLeft.getChildExps().get(1)).getIdentifier()); + assertEquals(Operator.NOT, nlNodeRight.getOperator()); + assertEquals(1, nlNodeRight.getChildExps().size()); + nlNodeRight = (NonLeafExpressionNode) nlNodeRight.getChildExps().get(0); + assertEquals(Operator.AND, nlNodeRight.getOperator()); + assertEquals(2, nlNodeRight.getChildExps().size()); + assertEquals("c", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + assertTrue(nlNodeRight.getChildExps().get(1) instanceof NonLeafExpressionNode); + nlNodeRight = (NonLeafExpressionNode) nlNodeRight.getChildExps().get(1); + assertEquals(Operator.NOT, nlNodeRight.getOperator()); + assertEquals(1, nlNodeRight.getChildExps().size()); + assertEquals("b", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier()); + } + + @Test + public void testNegativeCases() throws Exception { + executeNegativeCase("("); + executeNegativeCase(")"); + executeNegativeCase("()"); + executeNegativeCase("(a"); + executeNegativeCase("a&"); + executeNegativeCase("a&|b"); + executeNegativeCase("!"); + executeNegativeCase("a!"); + executeNegativeCase("a!&"); + executeNegativeCase("&"); + executeNegativeCase("|"); + executeNegativeCase("!(a|(b&c)&!b"); + executeNegativeCase("!!a"); + executeNegativeCase("( a & b ) | ( c & d e)"); + executeNegativeCase("! 
a"); + } + + private void executeNegativeCase(String exp) { + try { + parser.parse(exp); + fail("Expected ParseException for expression " + exp); + } catch (ParseException e) { + } + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java new file mode 100644 index 0000000..9403826 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java @@ -0,0 +1,475 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.security.visibility; + +import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.IOException; +import java.io.InterruptedIOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; +import org.apache.hadoop.hbase.regionserver.HRegion; +import org.apache.hadoop.hbase.regionserver.HRegionServer; +import org.apache.hadoop.hbase.security.access.AccessControlLists; +import org.apache.hadoop.hbase.security.access.AccessController; +import org.apache.hadoop.hbase.security.access.SecureTestUtil; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; + +/** + * Test class that tests the visibility labels + 
*/ +@Category(MediumTests.class) +public class TestVisibilityLabels { + + private static final String TOPSECRET = "topsecret"; + private static final String PUBLIC = "public"; + private static final String PRIVATE = "private"; + private static final String CONFIDENTIAL = "confidential"; + private static final String SECRET = "secret"; + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + private static final byte[] row1 = Bytes.toBytes("row1"); + private static final byte[] row2 = Bytes.toBytes("row2"); + private static final byte[] row3 = Bytes.toBytes("row3"); + private static final byte[] row4 = Bytes.toBytes("row4"); + private final static byte[] fam = Bytes.toBytes("info"); + private final static byte[] qual = Bytes.toBytes("qual"); + private final static byte[] value = Bytes.toBytes("value"); + private static Configuration conf; + + private volatile boolean killedRS = false; + @Rule + public final TestName TEST_NAME = new TestName(); + + @BeforeClass + public static void setupBeforeClass() throws Exception { + // setup configuration + conf = TEST_UTIL.getConfiguration(); + conf.setInt("hfile.format.version", 3); + SecureTestUtil.enableSecurity(conf); + + conf.set("hbase.coprocessor.master.classes", AccessController.class.getName() + "," + + VisibilityController.class.getName()); + conf.set("hbase.coprocessor.region.classes", AccessController.class.getName() + "," + + VisibilityController.class.getName()); + TEST_UTIL.startMiniCluster(2); + + TEST_UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME.getName(), 50000); + // Wait for the labels table to become available + TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000); + addLabels(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + TEST_UTIL.shutdownMiniCluster(); + } + + @After + public void tearDown() throws Exception { + killedRS = false; + } + + @Test + public void testSimpleVisibilityLabels() throws Exception { + TableName 
tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "|" + CONFIDENTIAL, + PRIVATE + "|" + CONFIDENTIAL); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL, PRIVATE)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + + assertTrue(next.length == 2); + CellScanner cellScanner = next[0].cellScanner(); + cellScanner.advance(); + Cell current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row1, 0, row1.length)); + cellScanner = next[1].cellScanner(); + cellScanner.advance(); + current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row2, 0, row2.length)); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsWithComplexLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")" + "&" + "!" 
+ TOPSECRET, "(" + PRIVATE + "&" + CONFIDENTIAL + "&" + SECRET + ")", "(" + + PRIVATE + "&" + CONFIDENTIAL + "&" + SECRET + ")", "(" + PRIVATE + "&" + CONFIDENTIAL + + "&" + SECRET + ")"); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(TOPSECRET, CONFIDENTIAL, PRIVATE, PUBLIC, SECRET)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertEquals(3, next.length); + CellScanner cellScanner = next[0].cellScanner(); + cellScanner.advance(); + Cell current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row2, 0, row2.length)); + cellScanner = next[1].cellScanner(); + cellScanner.advance(); + current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row3, 0, row3.length)); + cellScanner = next[2].cellScanner(); + cellScanner.advance(); + current = cellScanner.current(); + assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), + current.getRowLength(), row4, 0, row4.length)); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsThatDoesNotPassTheCriteria() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")", PRIVATE); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(PUBLIC)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertTrue(next.length == 0); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsInPutsThatDoesNotMatchAnyDefinedLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + try { + createTableAndWriteDataWithLabels(tableName, "SAMPLE_LABEL", "TEST"); + fail("Should have failed 
with failed sanity check exception"); + } catch (Exception e) { + } + } + + @Test + public void testVisibilityLabelsInScanThatDoesNotMatchAnyDefinedLabels() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")", PRIVATE); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations("SAMPLE")); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertTrue(next.length == 0); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsWithGet() throws Exception { + TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL + "&!" + + PRIVATE, SECRET + "&" + CONFIDENTIAL + "&" + PRIVATE); + try { + Get get = new Get(row1); + get.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL)); + Result result = table.get(get); + assertTrue(!result.isEmpty()); + Cell cell = result.getColumnLatestCell(fam, qual); + assertTrue(Bytes.equals(value, 0, value.length, cell.getValueArray(), cell.getValueOffset(), + cell.getValueLength())); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsOnKillingOfRSContainingLabelsTable() throws Exception { + List regionServerThreads = TEST_UTIL.getHBaseCluster() + .getRegionServerThreads(); + int liveRS = 0; + for (RegionServerThread rsThreads : regionServerThreads) { + if (!rsThreads.getRegionServer().isAborted()) { + liveRS++; + } + } + if (liveRS == 1) { + TEST_UTIL.getHBaseCluster().startRegionServer(); + } + Thread t1 = new Thread() { + public void run() { + List regionServerThreads = TEST_UTIL.getHBaseCluster() + .getRegionServerThreads(); + for (RegionServerThread rsThread : regionServerThreads) { + List onlineRegions = 
rsThread.getRegionServer().getOnlineRegions( + LABELS_TABLE_NAME); + if (onlineRegions.size() > 0) { + rsThread.getRegionServer().abort("Aborting "); + killedRS = true; + break; + } + } + } + + }; + t1.start(); + final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + Thread t = new Thread() { + public void run() { + try { + while (!killedRS) { + Thread.sleep(1); + } + createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")", + PRIVATE); + } catch (RetriesExhaustedWithDetailsException e) { + e.printStackTrace(); + } catch (InterruptedIOException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + }; + t.start(); + regionServerThreads = TEST_UTIL.getHBaseCluster().getRegionServerThreads(); + while (!killedRS) { + Thread.sleep(10); + } + regionServerThreads = TEST_UTIL.getHBaseCluster().getRegionServerThreads(); + for (RegionServerThread rsThread : regionServerThreads) { + while (true) { + if (!rsThread.getRegionServer().isAborted()) { + List onlineRegions = rsThread.getRegionServer().getOnlineRegions( + LABELS_TABLE_NAME); + if (onlineRegions.size() > 0) { + break; + } else { + Thread.sleep(10); + } + } else { + break; + } + } + } + TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000); + t.join(); + HTable table = null; + try { + table = new HTable(TEST_UTIL.getConfiguration(), tableName); + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(SECRET)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertTrue(next.length == 1); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testVisibilityLabelsOnRSRestart() throws Exception { + final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); + List regionServerThreads = TEST_UTIL.getHBaseCluster() + .getRegionServerThreads(); + for (RegionServerThread rsThread : 
regionServerThreads) { + rsThread.getRegionServer().abort("Aborting "); + } + // Start one new RS + RegionServerThread rs = TEST_UTIL.getHBaseCluster().startRegionServer(); + HRegionServer regionServer = rs.getRegionServer(); + while (!regionServer.isOnline()) { + try { + Thread.sleep(10); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")", PRIVATE); + try { + Scan s = new Scan(); + s.setAuthorizations(new Authorizations(SECRET)); + ResultScanner scanner = table.getScanner(s); + Result[] next = scanner.next(3); + assertTrue(next.length == 1); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testAddVisibilityLabelsOnRSRestart() throws Exception { + List regionServerThreads = TEST_UTIL.getHBaseCluster() + .getRegionServerThreads(); + for (RegionServerThread rsThread : regionServerThreads) { + rsThread.getRegionServer().abort("Aborting "); + } + // Start one new RS + RegionServerThread rs = TEST_UTIL.getHBaseCluster().startRegionServer(); + HRegionServer regionServer = rs.getRegionServer(); + while (!regionServer.isOnline()) { + try { + Thread.sleep(10); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, "ABC", "XYZ" }; + try { + VisibilityClient.addLabels(conf, labels); + } catch (Throwable t) { + throw new IOException(t); + } + // Scan the visibility label + Scan s = new Scan(); + HTable ht = new HTable(conf, LABELS_TABLE_NAME.getName()); + int i = 0; + try { + ResultScanner scanner = ht.getScanner(s); + while (true) { + Result next = scanner.next(); + if (next == null) { + break; + } + i++; + } + } finally { + if (ht != null) { + ht.close(); + } + } + Assert.assertEquals("The count should be 7", i, 7); + } + + @Test + public void testVisibilityLabelsInGetThatDoesNotMatchAnyDefinedLabels() throws Exception { + TableName tableName = 
TableName.valueOf(TEST_NAME.getMethodName()); + HTable table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + + ")", PRIVATE); + try { + Get get = new Get(row1); + get.setAuthorizations(new Authorizations("SAMPLE")); + Result result = table.get(get); + assertTrue(result.isEmpty()); + } finally { + if (table != null) { + table.close(); + } + } + } + + @Test + public void testAddLabels() throws Throwable { + String[] labels = { "L1", SECRET, "L2", "invalid~", "L3" }; + VisibilityLabelsResponse response = VisibilityClient.addLabels(conf, labels); + List resultList = response.getResultList(); + assertEquals(5, resultList.size()); + assertTrue(resultList.get(0).getException().getValue().isEmpty()); + assertEquals("org.apache.hadoop.hbase.security.visibility.LabelAlreadyExistsException", + resultList.get(1).getException().getName()); + assertTrue(resultList.get(2).getException().getValue().isEmpty()); + assertEquals("org.apache.hadoop.hbase.security.visibility.InvalidLabelException", resultList + .get(3).getException().getName()); + assertTrue(resultList.get(4).getException().getValue().isEmpty()); + } + + private static HTable createTableAndWriteDataWithLabels(TableName tableName, String... 
labelExps) + throws IOException, InterruptedIOException, RetriesExhaustedWithDetailsException { + HTable table = null; + try { + table = TEST_UTIL.createTable(tableName, fam); + int i = 1; + List puts = new ArrayList(); + for (String labelExp : labelExps) { + Put put = new Put(Bytes.toBytes("row" + i)); + put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value); + put.setCellVisibility(new CellVisibility(labelExp)); + puts.add(put); + i++; + } + table.put(puts); + } finally { + if (table != null) { + table.close(); + } + } + return table; + } + + private static void addLabels() throws IOException { + String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, PUBLIC, TOPSECRET }; + try { + VisibilityClient.addLabels(conf, labels); + VisibilityClient.addLabel(conf, SECRET); + } catch (Throwable t) { + throw new IOException(t); + } + } +} diff --git hbase-shell/src/main/ruby/hbase.rb hbase-shell/src/main/ruby/hbase.rb index 87512bf..2b32369 100644 --- hbase-shell/src/main/ruby/hbase.rb +++ hbase-shell/src/main/ruby/hbase.rb @@ -78,3 +78,4 @@ require 'hbase/admin' require 'hbase/table' require 'hbase/replication_admin' require 'hbase/security' +require 'hbase/visibility_labels' \ No newline at end of file diff --git hbase-shell/src/main/ruby/hbase/hbase.rb hbase-shell/src/main/ruby/hbase/hbase.rb index 64482c5..8c8d2b1 100644 --- hbase-shell/src/main/ruby/hbase/hbase.rb +++ hbase-shell/src/main/ruby/hbase/hbase.rb @@ -22,6 +22,7 @@ include Java require 'hbase/admin' require 'hbase/table' require 'hbase/security' +require 'hbase/visibility_labels' module Hbase class Hbase @@ -55,5 +56,9 @@ module Hbase def security_admin(formatter) ::Hbase::SecurityAdmin.new(configuration, formatter) end + + def visibility_labels_admin(formatter) + ::Hbase::VisibilityLabelsAdmin.new(configuration, formatter) + end end end diff --git hbase-shell/src/main/ruby/hbase/visibility_labels.rb hbase-shell/src/main/ruby/hbase/visibility_labels.rb new file mode 100644 index 0000000..472d567 --- /dev/null 
+++ hbase-shell/src/main/ruby/hbase/visibility_labels.rb @@ -0,0 +1,57 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +include Java +java_import org.apache.hadoop.hbase.security.visibility.VisibilityClient +java_import org.apache.hadoop.hbase.util.Bytes + +module Hbase + class VisibilityLabelsAdmin + + def initialize(configuration, formatter) + @config = configuration + @formatter = formatter + @admin = org.apache.hadoop.hbase.client.HBaseAdmin.new(configuration) + end + + def add_label(label) + lables_table_available? + + begin + response = VisibilityClient.addLabel(@config, label) + if response.nil? + raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + end + if response.getResult(0).hasException() + raise(ArgumentError, Bytes.toString(response.getResult(0).getException().getValue().toByteArray())) + end + end + end + + # Make sure that lables table is available + def lables_table_available?() + raise(ArgumentError, "DISABLED: Visibility labels feature is not available") \ + unless exists?(VisibilityClient::LABELS_TABLE_NAME) + end + + # Does table exist? 
+ def exists?(table_name) + @admin.tableExists(table_name) + end + end +end \ No newline at end of file diff --git hbase-shell/src/main/ruby/shell.rb hbase-shell/src/main/ruby/shell.rb index 8576dae..4bcc4f3 100644 --- hbase-shell/src/main/ruby/shell.rb +++ hbase-shell/src/main/ruby/shell.rb @@ -90,6 +90,10 @@ module Shell @hbase_security_admin ||= hbase.security_admin(formatter) end + def hbase_visibility_labels_admin + @hbase_visibility_labels_admin ||= hbase.visibility_labels_admin(formatter) + end + def export_commands(where) ::Shell.commands.keys.each do |cmd| # here where is the IRB namespace @@ -345,3 +349,11 @@ Shell.load_command_group( ] ) +Shell.load_command_group( + 'visibility labels', + :full_name => 'VISIBILITY LABEL TOOLS', + :comment => "NOTE: Above commands are only applicable if running with the VisibilityController coprocessor", + :commands => %w[ + add_label + ] +) \ No newline at end of file diff --git hbase-shell/src/main/ruby/shell/commands.rb hbase-shell/src/main/ruby/shell/commands.rb index 72f6eb2..75f4797 100644 --- hbase-shell/src/main/ruby/shell/commands.rb +++ hbase-shell/src/main/ruby/shell/commands.rb @@ -62,6 +62,10 @@ module Shell @shell.hbase_security_admin end + def visibility_labels_admin + @shell.hbase_visibility_labels_admin + end + #---------------------------------------------------------------------- def formatter diff --git hbase-shell/src/main/ruby/shell/commands/add_label.rb hbase-shell/src/main/ruby/shell/commands/add_label.rb new file mode 100644 index 0000000..0f39adc --- /dev/null +++ hbase-shell/src/main/ruby/shell/commands/add_label.rb @@ -0,0 +1,40 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +module Shell + module Commands + class AddLabel < Command + def help + return <<-EOF +Add a visibility label. +Syntax : add_label