diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 5bded37..f301f60 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -3058,4 +3058,16 @@ public final class ProtobufUtil { builder.setSrcChecksum(VersionInfo.getSrcChecksum()); return builder.build(); } + + /** + * A utility to refresh the superuser group mappings from the shell. + * + * @param protocol the AccessControlService protocol proxy + * @throws ServiceException if the refresh RPC fails + */ + public static void refreshSuperUserGroupConfiguration( + AccessControlService.BlockingInterface protocol) throws ServiceException { + protocol.refreshSuperUserGroupsConf(null, + AccessControlProtos.RefreshSuperUserGroupsConfRequest.getDefaultInstance()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java index 5b41716..5760926 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java @@ -22,10 +22,13 @@ import java.util.ArrayList; import java.util.List; import java.util.regex.Pattern; +import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -36,6 +39,7 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos; +import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService.BlockingInterface; import org.apache.hadoop.hbase.util.Bytes; @@ -187,4 +191,9 @@ public class AccessControlClient { } return permList; } + + public static void refreshSuperUserGroupConfiguration(Connection connection) throws Throwable { + ProtobufUtil.refreshSuperUserGroupConfiguration( + AccessControlService.newBlockingStub(connection.getAdmin().coprocessorService())); + } } \ No newline at end of file diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java index cd1dda1..804ff3e 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java @@ -9880,6 +9880,682 @@ public final class AccessControlProtos { // @@protoc_insertion_point(class_scope:hbase.pb.CheckPermissionsResponse) } + public interface RefreshSuperUserGroupsConfRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type 
{@code hbase.pb.RefreshSuperUserGroupsConfRequest} + */ + public static final class RefreshSuperUserGroupsConfRequest extends + com.google.protobuf.GeneratedMessage + implements RefreshSuperUserGroupsConfRequestOrBuilder { + // Use RefreshSuperUserGroupsConfRequest.newBuilder() to construct. + private RefreshSuperUserGroupsConfRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private RefreshSuperUserGroupsConfRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final RefreshSuperUserGroupsConfRequest defaultInstance; + public static RefreshSuperUserGroupsConfRequest getDefaultInstance() { + return defaultInstance; + } + + public RefreshSuperUserGroupsConfRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RefreshSuperUserGroupsConfRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_RefreshSuperUserGroupsConfRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_RefreshSuperUserGroupsConfRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RefreshSuperUserGroupsConfRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RefreshSuperUserGroupsConfRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 
1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.RefreshSuperUserGroupsConfRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_RefreshSuperUserGroupsConfRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_RefreshSuperUserGroupsConfRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_RefreshSuperUserGroupsConfRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest build() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.RefreshSuperUserGroupsConfRequest) + } + + static { + defaultInstance = new RefreshSuperUserGroupsConfRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.RefreshSuperUserGroupsConfRequest) + } + + public interface RefreshSuperUserGroupsConfResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code hbase.pb.RefreshSuperUserGroupsConfResponse} + */ + public static final class RefreshSuperUserGroupsConfResponse extends + com.google.protobuf.GeneratedMessage + implements RefreshSuperUserGroupsConfResponseOrBuilder { + // Use RefreshSuperUserGroupsConfResponse.newBuilder() to construct. 
+ private RefreshSuperUserGroupsConfResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private RefreshSuperUserGroupsConfResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final RefreshSuperUserGroupsConfResponse defaultInstance; + public static RefreshSuperUserGroupsConfResponse getDefaultInstance() { + return defaultInstance; + } + + public RefreshSuperUserGroupsConfResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RefreshSuperUserGroupsConfResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_RefreshSuperUserGroupsConfResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_RefreshSuperUserGroupsConfResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RefreshSuperUserGroupsConfResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RefreshSuperUserGroupsConfResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = 
memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse other = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } 
+ public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.RefreshSuperUserGroupsConfResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_RefreshSuperUserGroupsConfResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_RefreshSuperUserGroupsConfResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.internal_static_hbase_pb_RefreshSuperUserGroupsConfResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse build() { + 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse result = new org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.RefreshSuperUserGroupsConfResponse) + } + + static { + defaultInstance = new RefreshSuperUserGroupsConfResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.RefreshSuperUserGroupsConfResponse) + } + /** * Protobuf service {@code hbase.pb.AccessControlService} */ @@ -9920,6 +10596,14 @@ public final class AccessControlProtos { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, com.google.protobuf.RpcCallback done); + /** + * rpc RefreshSuperUserGroupsConf(.hbase.pb.RefreshSuperUserGroupsConfRequest) returns (.hbase.pb.RefreshSuperUserGroupsConfResponse); + */ + public abstract void refreshSuperUserGroupsConf( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest request, + com.google.protobuf.RpcCallback done); + } public static com.google.protobuf.Service newReflectiveService( @@ -9957,6 +10641,14 @@ public final class AccessControlProtos { impl.checkPermissions(controller, request, done); } + @java.lang.Override + public void refreshSuperUserGroupsConf( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest request, + com.google.protobuf.RpcCallback done) { + impl.refreshSuperUserGroupsConf(controller, 
request, done); + } + }; } @@ -9987,6 +10679,8 @@ public final class AccessControlProtos { return impl.getUserPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest)request); case 3: return impl.checkPermissions(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest)request); + case 4: + return impl.refreshSuperUserGroupsConf(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest)request); default: throw new java.lang.AssertionError("Can't get here."); } @@ -10009,6 +10703,8 @@ public final class AccessControlProtos { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -10031,6 +10727,8 @@ public final class AccessControlProtos { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -10071,6 +10769,14 @@ public final class AccessControlProtos { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request, com.google.protobuf.RpcCallback done); + /** + * rpc RefreshSuperUserGroupsConf(.hbase.pb.RefreshSuperUserGroupsConfRequest) returns (.hbase.pb.RefreshSuperUserGroupsConfResponse); + */ + public abstract void refreshSuperUserGroupsConf( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest request, + com.google.protobuf.RpcCallback done); + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -10113,6 +10819,11 @@ public final class AccessControlProtos { com.google.protobuf.RpcUtil.specializeCallback( done)); return; + case 4: + this.refreshSuperUserGroupsConf(controller, (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; default: throw new java.lang.AssertionError("Can't get here."); } @@ -10135,6 +10846,8 @@ public final class AccessControlProtos { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest.getDefaultInstance(); case 3: return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -10157,6 +10870,8 @@ public final class AccessControlProtos { return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse.getDefaultInstance(); case 3: return 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -10237,6 +10952,21 @@ public final class AccessControlProtos { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance())); } + + public void refreshSuperUserGroupsConf( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.class, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.getDefaultInstance())); + } } public static BlockingInterface newBlockingStub( @@ -10264,6 +10994,11 @@ public final class AccessControlProtos { com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest request) throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse refreshSuperUserGroupsConf( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest request) + throws com.google.protobuf.ServiceException; } private static final class BlockingStub implements BlockingInterface { @@ -10320,6 +11055,18 @@ public final class AccessControlProtos { org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsResponse.getDefaultInstance()); } + + public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse refreshSuperUserGroupsConf( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse.getDefaultInstance()); + } + } // @@protoc_insertion_point(class_scope:hbase.pb.AccessControlService) @@ -10400,6 +11147,16 @@ public final class AccessControlProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_CheckPermissionsResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_RefreshSuperUserGroupsConfRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_RefreshSuperUserGroupsConfRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + 
internal_static_hbase_pb_RefreshSuperUserGroupsConfResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_RefreshSuperUserGroupsConfResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -10444,17 +11201,22 @@ public final class AccessControlProtos { "\001 \003(\0132\030.hbase.pb.UserPermission\"C\n\027Check" + "PermissionsRequest\022(\n\npermission\030\001 \003(\0132\024" + ".hbase.pb.Permission\"\032\n\030CheckPermissions" + - "Response2\311\002\n\024AccessControlService\0228\n\005Gra" + - "nt\022\026.hbase.pb.GrantRequest\032\027.hbase.pb.Gr" + - "antResponse\022;\n\006Revoke\022\027.hbase.pb.RevokeR" + - "equest\032\030.hbase.pb.RevokeResponse\022_\n\022GetU" + - "serPermissions\022#.hbase.pb.GetUserPermiss", - "ionsRequest\032$.hbase.pb.GetUserPermission" + - "sResponse\022Y\n\020CheckPermissions\022!.hbase.pb" + - ".CheckPermissionsRequest\032\".hbase.pb.Chec" + - "kPermissionsResponseBI\n*org.apache.hadoo" + - "p.hbase.protobuf.generatedB\023AccessContro" + - "lProtosH\001\210\001\001\240\001\001" + "Response\"#\n!RefreshSuperUserGroupsConfRe" + + "quest\"$\n\"RefreshSuperUserGroupsConfRespo" + + "nse2\302\003\n\024AccessControlService\0228\n\005Grant\022\026." + + "hbase.pb.GrantRequest\032\027.hbase.pb.GrantRe" + + "sponse\022;\n\006Revoke\022\027.hbase.pb.RevokeReques", + "t\032\030.hbase.pb.RevokeResponse\022_\n\022GetUserPe" + + "rmissions\022#.hbase.pb.GetUserPermissionsR" + + "equest\032$.hbase.pb.GetUserPermissionsResp" + + "onse\022Y\n\020CheckPermissions\022!.hbase.pb.Chec" + + "kPermissionsRequest\032\".hbase.pb.CheckPerm" + + "issionsResponse\022w\n\032RefreshSuperUserGroup" + + "sConf\022+.hbase.pb.RefreshSuperUserGroupsC" + + "onfRequest\032,.hbase.pb.RefreshSuperUserGr" + + "oupsConfResponseBI\n*org.apache.hadoop.hb" + + "ase.protobuf.generatedB\023AccessControlPro", + "tosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -10551,6 +11313,18 @@ public final class AccessControlProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_CheckPermissionsResponse_descriptor, new java.lang.String[] { }); + internal_static_hbase_pb_RefreshSuperUserGroupsConfRequest_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_hbase_pb_RefreshSuperUserGroupsConfRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_RefreshSuperUserGroupsConfRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_RefreshSuperUserGroupsConfResponse_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_hbase_pb_RefreshSuperUserGroupsConfResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_RefreshSuperUserGroupsConfResponse_descriptor, + new java.lang.String[] { }); return null; } }; diff --git a/hbase-protocol/src/main/protobuf/AccessControl.proto b/hbase-protocol/src/main/protobuf/AccessControl.proto index e67540b..d24c21a 100644 --- a/hbase-protocol/src/main/protobuf/AccessControl.proto +++ b/hbase-protocol/src/main/protobuf/AccessControl.proto @@ -108,6 +108,12 @@ message CheckPermissionsRequest { message CheckPermissionsResponse { } +message RefreshSuperUserGroupsConfRequest { +} + +message 
RefreshSuperUserGroupsConfResponse { +} + service AccessControlService { rpc Grant(GrantRequest) returns (GrantResponse); @@ -120,4 +126,7 @@ service AccessControlService { rpc CheckPermissions(CheckPermissionsRequest) returns (CheckPermissionsResponse); + + rpc RefreshSuperUserGroupsConf(RefreshSuperUserGroupsConfRequest) + returns (RefreshSuperUserGroupsConfResponse); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index dac744f..19cf622 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -27,6 +27,12 @@ import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; +import com.google.protobuf.Message; +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; +import com.google.protobuf.ServiceException; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -51,6 +57,7 @@ import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagRewriteCell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Append; +import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; @@ -70,11 +77,13 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionServerObserver; +import org.apache.hadoop.hbase.coprocessor.SingletonCoprocessorService; import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterList; import org.apache.hadoop.hbase.io.hfile.HFile; +import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; @@ -82,6 +91,8 @@ import org.apache.hadoop.hbase.protobuf.ResponseConverter; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry; +import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest; +import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas; import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest; @@ -107,6 +118,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.SimpleMutableByteRange; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.hadoop.security.authorize.ProxyUsers; import 
com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableSet; @@ -115,10 +127,6 @@ import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; /** * Provides basic authorization checks for data access and administrative @@ -156,7 +164,8 @@ import com.google.protobuf.Service; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class AccessController extends BaseMasterAndRegionObserver implements RegionServerObserver, - AccessControlService.Interface, CoprocessorService, EndpointObserver, BulkLoadObserver { + AccessControlService.Interface, CoprocessorService, SingletonCoprocessorService, + EndpointObserver, BulkLoadObserver { private static final Log LOG = LogFactory.getLog(AccessController.class); @@ -174,6 +183,8 @@ public class AccessController extends BaseMasterAndRegionObserver /** defined only for Endpoint implementation, so it can have way to access region services */ private RegionCoprocessorEnvironment regionEnv; + private MasterCoprocessorEnvironment mEnv; + private RegionServerCoprocessorEnvironment rsEnv; /** Mapping of scanner instances to the user who created them */ private Map scannerOwners = @@ -937,11 +948,22 @@ public class AccessController extends BaseMasterAndRegionObserver ZooKeeperWatcher zk = null; if (env instanceof MasterCoprocessorEnvironment) { // if running on HMaster - MasterCoprocessorEnvironment mEnv = (MasterCoprocessorEnvironment) env; + mEnv = (MasterCoprocessorEnvironment) env; zk = mEnv.getMasterServices().getZooKeeper(); + // Refresh proxy users on startup: when a backup master + // becomes the active master we want to refresh the proxy + // users, in case the configuration changed while this + // master was still a backup + ProxyUsers.refreshSuperUserGroupsConfiguration(); + LOG.info("Refreshed superuser groups configuration"); } else if (env instanceof RegionServerCoprocessorEnvironment) { - RegionServerCoprocessorEnvironment rsEnv = (RegionServerCoprocessorEnvironment) env; + rsEnv = (RegionServerCoprocessorEnvironment) env; zk = rsEnv.getRegionServerServices().getZooKeeper(); + // Refresh proxy users on startup too, covering the case where + // the superuser file was updated while this region server was + // starting up and had not yet reported for duty + ProxyUsers.refreshSuperUserGroupsConfiguration(); + LOG.info("Refreshed superuser groups configuration"); } else if (env instanceof RegionCoprocessorEnvironment) { // if running at region regionEnv = (RegionCoprocessorEnvironment) env; @@ -2554,4 +2576,36 @@ public class AccessController extends BaseMasterAndRegionObserver public void postReplicateLogEntries(ObserverContext ctx, List entries, CellScanner cells) throws IOException { } + + @Override + public void refreshSuperUserGroupsConf(RpcController controller, + RefreshSuperUserGroupsConfRequest request, + RpcCallback done) { + try { + checkSystemOrSuperUser(); + Configuration conf = rsEnv != null ? 
rsEnv.getConfiguration() : mEnv.getConfiguration(); + LOG.info("Refreshing super user/groups configuration"); + ProxyUsers.refreshSuperUserGroupsConfiguration(conf); + if (mEnv != null) { + ClusterConnection conn = mEnv.getMasterServices().getConnection(); + for (ServerName server : + mEnv.getMasterServices().getServerManager().getOnlineServersList()) { + + if (!server.equals(mEnv.getMasterServices().getServerName())) { + LOG.debug("Sending refreshSuperUserGroupsConfiguration to " + server.getHostAndPort()); + CoprocessorRpcChannel channel = + conn.getAdmin().coprocessorService(server); + AccessControlService.BlockingInterface service = + AccessControlService.newBlockingStub(channel); + service.refreshSuperUserGroupsConf(controller, request); + } + } + } + } catch (ServiceException e) { + ResponseConverter.setControllerException(controller, new IOException(e)); + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); + } + done.run(AccessControlProtos.RefreshSuperUserGroupsConfResponse.getDefaultInstance()); + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java index 003e4ab..7f53091 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java @@ -109,11 +109,11 @@ public class SecureTestUtil { public static void verifyConfiguration(Configuration conf) { if (!(conf.get(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY).contains( - AccessController.class.getName()) + AccessController.class.getSimpleName()) && conf.get(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY).contains( - AccessController.class.getName()) && conf.get( + AccessController.class.getSimpleName()) && conf.get( CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY).contains( - AccessController.class.getName()))) { + AccessController.class.getSimpleName()))) { throw new RuntimeException("AccessController is missing from a system coprocessor list"); } if (conf.getInt(HFile.FORMAT_VERSION_KEY, 2) < HFile.MIN_FORMAT_VERSION_WITH_TAGS) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java index b474dd2..176cc01 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java @@ -29,6 +29,7 @@ import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -95,6 +96,8 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService; import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest; +import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfRequest; +import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.RefreshSuperUserGroupsConfResponse; import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegionServer; @@ -193,9 +196,15 @@ public class TestAccessController extends SecureTestUtil { conf = TEST_UTIL.getConfiguration(); // Enable security enableSecurity(conf); + conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, + InstrumentedAccessController.class.getName() + + "," + MasterSyncObserver.class.getName()); + conf.set(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY, + InstrumentedAccessController.class.getName()); // In this particular test case, we can't use SecureBulkLoadEndpoint because its doAs will fail // to move a file for a random user - conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, AccessController.class.getName()); + conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, + InstrumentedAccessController.class.getName()); // Verify enableSecurity sets up what we require verifyConfiguration(conf); @@ -2629,4 +2638,23 @@ public class TestAccessController extends SecureTestUtil { verifyDenied(replicateLogEntriesAction, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER, USER_GROUP_READ, USER_GROUP_ADMIN, USER_GROUP_CREATE); } + + @Test + public void testRefreshSuperUser() throws Throwable { + InstrumentedAccessController.refreshCallCount.set(0); + AccessControlClient.refreshSuperUserGroupConfiguration(systemUserConnection); + assertEquals( + TEST_UTIL.getMiniHBaseCluster().getLiveRegionServerThreads().size() + 1, + InstrumentedAccessController.refreshCallCount.get()); + } + + public static class InstrumentedAccessController extends AccessController { + public static AtomicInteger refreshCallCount = new AtomicInteger(0); + + @Override + public void refreshSuperUserGroupsConf(RpcController controller, RefreshSuperUserGroupsConfRequest request, RpcCallback done) { + super.refreshSuperUserGroupsConf(controller, request, done); + refreshCallCount.incrementAndGet(); + } + } } diff --git a/hbase-shell/src/main/ruby/hbase/security.rb b/hbase-shell/src/main/ruby/hbase/security.rb index 2aaef02..d33ad5e 100644 --- a/hbase-shell/src/main/ruby/hbase/security.rb +++ b/hbase-shell/src/main/ruby/hbase/security.rb @@ -182,6 +182,11 @@ module Hbase end end + def refreshSuperUserGroups + org.apache.hadoop.hbase.security.access.AccessControlClient. + refreshSuperUserGroupConfiguration(@connection) + end + # Make sure that security tables are available def security_available?() raise(ArgumentError, "DISABLED: Security features are not available") \ diff --git a/hbase-shell/src/main/ruby/shell.rb b/hbase-shell/src/main/ruby/shell.rb index ab5f44d..56864d7 100644 --- a/hbase-shell/src/main/ruby/shell.rb +++ b/hbase-shell/src/main/ruby/shell.rb @@ -399,6 +399,7 @@ Shell.load_command_group( grant revoke user_permission + refresh_super_user_groups ] ) diff --git a/hbase-shell/src/main/ruby/shell/commands/refresh_super_user_groups.rb b/hbase-shell/src/main/ruby/shell/commands/refresh_super_user_groups.rb new file mode 100644 index 0000000..71fb6bb --- /dev/null +++ b/hbase-shell/src/main/ruby/shell/commands/refresh_super_user_groups.rb @@ -0,0 +1,41 @@ +# +# Copyright 2010 The Apache Software Foundation +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +module Shell + module Commands + class RefreshSuperUserGroups < Command + def help + return <<-EOF + Refresh the superuser/group mappings on the master and all online region servers. +EOF + end + + def command() + + format_simple_command do + security_admin.refreshSuperUserGroups + formatter.row([ + "Done." + ]) + end + end + end + end +end
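
For reference (not part of the patch), a minimal sketch of how the new refresh is exercised end to end from Java. It uses only APIs added or touched by this change; the driver class name is hypothetical, and it assumes the patched client jars plus a valid hbase-site.xml are on the classpath:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.security.access.AccessControlClient;

// Hypothetical driver class, for illustration only.
public class RefreshSuperUserGroupsExample {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    // The endpoint runs checkSystemOrSuperUser() before refreshing, so this
    // must be invoked as the system user or a configured superuser.
    try (Connection connection = ConnectionFactory.createConnection(conf)) {
      // Invokes the master's AccessController coprocessor endpoint; per the
      // patch, the master refreshes its own ProxyUsers configuration and then
      // fans the RPC out to every other online region server.
      AccessControlClient.refreshSuperUserGroupConfiguration(connection);
    }
  }
}

From the HBase shell, the equivalent is the new command added by this patch: hbase> refresh_super_user_groups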