diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml index 56ab13a..60ec46f 100644 --- a/hbase-protocol/pom.xml +++ b/hbase-protocol/pom.xml @@ -180,6 +180,7 @@ Comparator.proto Encryption.proto ErrorHandling.proto + Export.proto FS.proto Filter.proto HBase.proto diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ExportProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ExportProtos.java new file mode 100644 index 0000000..4a503fc --- /dev/null +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ExportProtos.java @@ -0,0 +1,1990 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: Export.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class ExportProtos { + private ExportProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface ExportRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .hbase.pb.Scan scan = 1; + /** + * required .hbase.pb.Scan scan = 1; + */ + boolean hasScan(); + /** + * required .hbase.pb.Scan scan = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); + /** + * required .hbase.pb.Scan scan = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); + + // required string outputPath = 2; + /** + * required string outputPath = 2; + */ + boolean hasOutputPath(); + /** + * required string outputPath = 2; + */ + java.lang.String getOutputPath(); + /** + * required string outputPath = 2; + */ + com.google.protobuf.ByteString + getOutputPathBytes(); + + // optional bool compressed = 3 [default = false]; + /** + * optional bool compressed = 3 [default = false]; + */ + boolean hasCompressed(); + /** + * optional bool compressed = 3 [default = false]; + */ + boolean getCompressed(); + + // optional string compressType = 4; + /** + * optional string compressType = 4; + */ + boolean hasCompressType(); + /** + * optional string compressType = 4; + */ + java.lang.String getCompressType(); + /** + * optional string compressType = 4; + */ + com.google.protobuf.ByteString + getCompressTypeBytes(); + + // optional string compressCodec = 5; + /** + * optional string compressCodec = 5; + */ + boolean hasCompressCodec(); + /** + * optional string compressCodec = 5; + */ + java.lang.String getCompressCodec(); + /** + * optional string compressCodec = 5; + */ + com.google.protobuf.ByteString + getCompressCodecBytes(); + } + /** + * Protobuf type {@code hbase.pb.ExportRequest} + */ + public static final class ExportRequest extends + com.google.protobuf.GeneratedMessage + implements ExportRequestOrBuilder { + // Use ExportRequest.newBuilder() to construct. 
+ private ExportRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ExportRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ExportRequest defaultInstance; + public static ExportRequest getDefaultInstance() { + return defaultInstance; + } + + public ExportRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ExportRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = scan_.toBuilder(); + } + scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(scan_); + scan_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + bitField0_ |= 0x00000002; + outputPath_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + compressed_ = input.readBool(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + compressType_ = input.readBytes(); + break; + } + case 42: { + bitField0_ |= 0x00000010; + compressCodec_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.class, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ExportRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExportRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser 
getParserForType() { + return PARSER; + } + + private int bitField0_; + // required .hbase.pb.Scan scan = 1; + public static final int SCAN_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; + /** + * required .hbase.pb.Scan scan = 1; + */ + public boolean hasScan() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + return scan_; + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + return scan_; + } + + // required string outputPath = 2; + public static final int OUTPUTPATH_FIELD_NUMBER = 2; + private java.lang.Object outputPath_; + /** + * required string outputPath = 2; + */ + public boolean hasOutputPath() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string outputPath = 2; + */ + public java.lang.String getOutputPath() { + java.lang.Object ref = outputPath_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + outputPath_ = s; + } + return s; + } + } + /** + * required string outputPath = 2; + */ + public com.google.protobuf.ByteString + getOutputPathBytes() { + java.lang.Object ref = outputPath_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + outputPath_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bool compressed = 3 [default = false]; + public static final int COMPRESSED_FIELD_NUMBER = 3; + private boolean compressed_; + /** + * optional bool compressed = 3 [default = false]; + */ + public boolean hasCompressed() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional bool compressed = 3 [default = false]; + */ + public boolean getCompressed() { + return compressed_; + } + + // optional string compressType = 4; + public static final int COMPRESSTYPE_FIELD_NUMBER = 4; + private java.lang.Object compressType_; + /** + * optional string compressType = 4; + */ + public boolean hasCompressType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional string compressType = 4; + */ + public java.lang.String getCompressType() { + java.lang.Object ref = compressType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + compressType_ = s; + } + return s; + } + } + /** + * optional string compressType = 4; + */ + public com.google.protobuf.ByteString + getCompressTypeBytes() { + java.lang.Object ref = compressType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + compressType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string compressCodec = 5; + public static final int COMPRESSCODEC_FIELD_NUMBER = 5; + private java.lang.Object compressCodec_; + /** + * optional string compressCodec = 5; + */ + public boolean hasCompressCodec() { + return ((bitField0_ & 0x00000010) == 
0x00000010); + } + /** + * optional string compressCodec = 5; + */ + public java.lang.String getCompressCodec() { + java.lang.Object ref = compressCodec_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + compressCodec_ = s; + } + return s; + } + } + /** + * optional string compressCodec = 5; + */ + public com.google.protobuf.ByteString + getCompressCodecBytes() { + java.lang.Object ref = compressCodec_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + compressCodec_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + outputPath_ = ""; + compressed_ = false; + compressType_ = ""; + compressCodec_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasScan()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasOutputPath()) { + memoizedIsInitialized = 0; + return false; + } + if (!getScan().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, scan_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getOutputPathBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBool(3, compressed_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, getCompressTypeBytes()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBytes(5, getCompressCodecBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, scan_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getOutputPathBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, compressed_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, getCompressTypeBytes()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(5, getCompressCodecBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest other = (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest) obj; + + boolean result = true; + result = result && (hasScan() == other.hasScan()); + if (hasScan()) { + result = result && getScan() + .equals(other.getScan()); + } + result = result && (hasOutputPath() == other.hasOutputPath()); + if (hasOutputPath()) { + result = result && getOutputPath() + .equals(other.getOutputPath()); + } + result = result && (hasCompressed() == other.hasCompressed()); + if (hasCompressed()) { + result = result && (getCompressed() + == other.getCompressed()); + } + result = result && (hasCompressType() == other.hasCompressType()); + if (hasCompressType()) { + result = result && getCompressType() + .equals(other.getCompressType()); + } + result = result && (hasCompressCodec() == other.hasCompressCodec()); + if (hasCompressCodec()) { + result = result && getCompressCodec() + .equals(other.getCompressCodec()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasScan()) { + hash = (37 * hash) + SCAN_FIELD_NUMBER; + hash = (53 * hash) + getScan().hashCode(); + } + if (hasOutputPath()) { + hash = (37 * hash) + OUTPUTPATH_FIELD_NUMBER; + hash = (53 * hash) + getOutputPath().hashCode(); + } + if (hasCompressed()) { + hash = (37 * hash) + COMPRESSED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getCompressed()); + } + if (hasCompressType()) { + hash = (37 * hash) + COMPRESSTYPE_FIELD_NUMBER; + hash = (53 * hash) + getCompressType().hashCode(); + } + if (hasCompressCodec()) { + hash = (37 * hash) + COMPRESSCODEC_FIELD_NUMBER; + hash = (53 * hash) + getCompressCodec().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.ExportRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.class, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getScanFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + outputPath_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + compressed_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + compressType_ = ""; + bitField0_ = (bitField0_ & 
~0x00000008); + compressCodec_ = ""; + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest result = new org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (scanBuilder_ == null) { + result.scan_ = scan_; + } else { + result.scan_ = scanBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.outputPath_ = outputPath_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.compressed_ = compressed_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.compressType_ = compressType_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.compressCodec_ = compressCodec_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance()) return this; + if (other.hasScan()) { + mergeScan(other.getScan()); + } + if (other.hasOutputPath()) { + bitField0_ |= 0x00000002; + outputPath_ = other.outputPath_; + onChanged(); + } + if (other.hasCompressed()) { + setCompressed(other.getCompressed()); + } + if (other.hasCompressType()) { + bitField0_ |= 0x00000008; + compressType_ = other.compressType_; + onChanged(); + } + if (other.hasCompressCodec()) { + bitField0_ |= 0x00000010; + compressCodec_ = other.compressCodec_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasScan()) { + return false; + } + if (!hasOutputPath()) { + return false; + } + if (!getScan().isInitialized()) { + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest 
parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required .hbase.pb.Scan scan = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; + /** + * required .hbase.pb.Scan scan = 1; + */ + public boolean hasScan() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + if (scanBuilder_ == null) { + return scan_; + } else { + return scanBuilder_.getMessage(); + } + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + scan_ = value; + onChanged(); + } else { + scanBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public Builder setScan( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { + if (scanBuilder_ == null) { + scan_ = builderForValue.build(); + onChanged(); + } else { + scanBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { + scan_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); + } else { + scan_ = value; + } + onChanged(); + } else { + scanBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public Builder clearScan() { + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + onChanged(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getScanFieldBuilder().getBuilder(); + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + if (scanBuilder_ != null) { + return scanBuilder_.getMessageOrBuilder(); + } else { + return scan_; + } + } + /** + * required .hbase.pb.Scan scan = 1; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> + getScanFieldBuilder() { + if (scanBuilder_ == null) { + scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( + scan_, + getParentForChildren(), + isClean()); + scan_ = null; + } + return scanBuilder_; + } + + // required string outputPath = 2; + private java.lang.Object outputPath_ = ""; + /** + * required string outputPath = 2; + */ + public boolean hasOutputPath() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string outputPath = 2; + */ + public java.lang.String getOutputPath() { + java.lang.Object ref = outputPath_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + outputPath_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string outputPath = 2; + */ + public com.google.protobuf.ByteString + getOutputPathBytes() { + java.lang.Object ref = outputPath_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + outputPath_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string outputPath = 2; + */ + public Builder setOutputPath( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + outputPath_ = value; + onChanged(); + return this; + } + /** + * required string outputPath = 2; + */ + public Builder clearOutputPath() { + bitField0_ = (bitField0_ & ~0x00000002); + outputPath_ = getDefaultInstance().getOutputPath(); + onChanged(); + return this; + } + /** + * required string outputPath = 2; + */ + public Builder setOutputPathBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + outputPath_ = value; + onChanged(); + return this; + } + + // optional bool compressed = 3 [default = false]; + private boolean compressed_ ; + /** + * optional bool compressed = 3 [default = false]; + */ + public boolean hasCompressed() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional bool compressed = 3 [default = false]; + */ + public boolean getCompressed() { + return compressed_; + } + /** + * optional bool compressed = 3 [default = false]; + */ + public Builder setCompressed(boolean value) { + bitField0_ |= 0x00000004; + compressed_ = value; + onChanged(); + return this; + } + /** + * optional bool compressed = 3 [default = false]; + */ + public Builder clearCompressed() { + bitField0_ = (bitField0_ & ~0x00000004); + compressed_ = false; + onChanged(); + return this; + } + + // optional string compressType = 4; + private java.lang.Object compressType_ = ""; + /** + * optional string compressType = 4; + */ + public boolean hasCompressType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional string compressType = 4; + */ + public java.lang.String getCompressType() { + java.lang.Object ref = compressType_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + compressType_ = s; + return s; + } else { + 
return (java.lang.String) ref; + } + } + /** + * optional string compressType = 4; + */ + public com.google.protobuf.ByteString + getCompressTypeBytes() { + java.lang.Object ref = compressType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + compressType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string compressType = 4; + */ + public Builder setCompressType( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + compressType_ = value; + onChanged(); + return this; + } + /** + * optional string compressType = 4; + */ + public Builder clearCompressType() { + bitField0_ = (bitField0_ & ~0x00000008); + compressType_ = getDefaultInstance().getCompressType(); + onChanged(); + return this; + } + /** + * optional string compressType = 4; + */ + public Builder setCompressTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + compressType_ = value; + onChanged(); + return this; + } + + // optional string compressCodec = 5; + private java.lang.Object compressCodec_ = ""; + /** + * optional string compressCodec = 5; + */ + public boolean hasCompressCodec() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional string compressCodec = 5; + */ + public java.lang.String getCompressCodec() { + java.lang.Object ref = compressCodec_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + compressCodec_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string compressCodec = 5; + */ + public com.google.protobuf.ByteString + getCompressCodecBytes() { + java.lang.Object ref = compressCodec_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + compressCodec_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string compressCodec = 5; + */ + public Builder setCompressCodec( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + compressCodec_ = value; + onChanged(); + return this; + } + /** + * optional string compressCodec = 5; + */ + public Builder clearCompressCodec() { + bitField0_ = (bitField0_ & ~0x00000010); + compressCodec_ = getDefaultInstance().getCompressCodec(); + onChanged(); + return this; + } + /** + * optional string compressCodec = 5; + */ + public Builder setCompressCodecBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + compressCodec_ = value; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.ExportRequest) + } + + static { + defaultInstance = new ExportRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.ExportRequest) + } + + public interface ExportResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 rowCount = 1; + /** + * required uint64 rowCount = 1; + */ + boolean hasRowCount(); + /** + * required uint64 rowCount = 1; + */ + long getRowCount(); + + // required uint64 cellCount = 2; + /** + * required uint64 cellCount = 2; + */ + 
boolean hasCellCount(); + /** + * required uint64 cellCount = 2; + */ + long getCellCount(); + } + /** + * Protobuf type {@code hbase.pb.ExportResponse} + */ + public static final class ExportResponse extends + com.google.protobuf.GeneratedMessage + implements ExportResponseOrBuilder { + // Use ExportResponse.newBuilder() to construct. + private ExportResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ExportResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ExportResponse defaultInstance; + public static ExportResponse getDefaultInstance() { + return defaultInstance; + } + + public ExportResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ExportResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + rowCount_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + cellCount_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.class, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ExportResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExportResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required uint64 rowCount = 1; + public static final int ROWCOUNT_FIELD_NUMBER = 1; + private long rowCount_; + /** + * required uint64 rowCount = 1; + */ + public boolean hasRowCount() { + return ((bitField0_ & 
0x00000001) == 0x00000001); + } + /** + * required uint64 rowCount = 1; + */ + public long getRowCount() { + return rowCount_; + } + + // required uint64 cellCount = 2; + public static final int CELLCOUNT_FIELD_NUMBER = 2; + private long cellCount_; + /** + * required uint64 cellCount = 2; + */ + public boolean hasCellCount() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required uint64 cellCount = 2; + */ + public long getCellCount() { + return cellCount_; + } + + private void initFields() { + rowCount_ = 0L; + cellCount_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRowCount()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasCellCount()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, rowCount_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, cellCount_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, rowCount_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, cellCount_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse other = (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse) obj; + + boolean result = true; + result = result && (hasRowCount() == other.hasRowCount()); + if (hasRowCount()) { + result = result && (getRowCount() + == other.getRowCount()); + } + result = result && (hasCellCount() == other.hasCellCount()); + if (hasCellCount()) { + result = result && (getCellCount() + == other.getCellCount()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRowCount()) { + hash = (37 * hash) + ROWCOUNT_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getRowCount()); + } + if (hasCellCount()) { + hash = (37 * hash) + CELLCOUNT_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getCellCount()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom( + 
com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.ExportResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.class, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + rowCount_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + cellCount_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse result = new org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.rowCount_ = rowCount_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.cellCount_ = cellCount_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance()) return this; + if (other.hasRowCount()) { + setRowCount(other.getRowCount()); + } + if (other.hasCellCount()) { + setCellCount(other.getCellCount()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRowCount()) { + return false; + } + if (!hasCellCount()) { + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + 
org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required uint64 rowCount = 1; + private long rowCount_ ; + /** + * required uint64 rowCount = 1; + */ + public boolean hasRowCount() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required uint64 rowCount = 1; + */ + public long getRowCount() { + return rowCount_; + } + /** + * required uint64 rowCount = 1; + */ + public Builder setRowCount(long value) { + bitField0_ |= 0x00000001; + rowCount_ = value; + onChanged(); + return this; + } + /** + * required uint64 rowCount = 1; + */ + public Builder clearRowCount() { + bitField0_ = (bitField0_ & ~0x00000001); + rowCount_ = 0L; + onChanged(); + return this; + } + + // required uint64 cellCount = 2; + private long cellCount_ ; + /** + * required uint64 cellCount = 2; + */ + public boolean hasCellCount() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required uint64 cellCount = 2; + */ + public long getCellCount() { + return cellCount_; + } + /** + * required uint64 cellCount = 2; + */ + public Builder setCellCount(long value) { + bitField0_ |= 0x00000002; + cellCount_ = value; + onChanged(); + return this; + } + /** + * required uint64 cellCount = 2; + */ + public Builder clearCellCount() { + bitField0_ = (bitField0_ & ~0x00000002); + cellCount_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.ExportResponse) + } + + static { + defaultInstance = new ExportResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.ExportResponse) + } + + /** + * Protobuf service {@code hbase.pb.ExportService} + */ + public static abstract class ExportService + implements com.google.protobuf.Service { + protected ExportService() {} + + public interface Interface { + /** + * rpc export(.hbase.pb.ExportRequest) returns (.hbase.pb.ExportResponse); + */ + public abstract void export( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new ExportService() { + @java.lang.Override + public void export( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request, + com.google.protobuf.RpcCallback done) { + impl.export(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new 
java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.export(controller, (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + /** + * rpc export(.hbase.pb.ExportRequest) returns (.hbase.pb.ExportResponse); + */ + public abstract void export( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.export(controller, (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( 
+ "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void export( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse export( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse export( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance()); + } + + } + + // @@protoc_insertion_point(class_scope:hbase.pb.ExportService) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_ExportRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_ExportRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_ExportResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_ExportResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\014Export.proto\022\010hbase.pb\032\014Client.proto\"\211" + + 
"\001\n\rExportRequest\022\034\n\004scan\030\001 \002(\0132\016.hbase.p" + + "b.Scan\022\022\n\noutputPath\030\002 \002(\t\022\031\n\ncompressed" + + "\030\003 \001(\010:\005false\022\024\n\014compressType\030\004 \001(\t\022\025\n\rc" + + "ompressCodec\030\005 \001(\t\"5\n\016ExportResponse\022\020\n\010" + + "rowCount\030\001 \002(\004\022\021\n\tcellCount\030\002 \002(\0042L\n\rExp" + + "ortService\022;\n\006export\022\027.hbase.pb.ExportRe" + + "quest\032\030.hbase.pb.ExportResponseBB\n*org.a" + + "pache.hadoop.hbase.protobuf.generatedB\014E" + + "xportProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_hbase_pb_ExportRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_ExportRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_ExportRequest_descriptor, + new java.lang.String[] { "Scan", "OutputPath", "Compressed", "CompressType", "CompressCodec", }); + internal_static_hbase_pb_ExportResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_ExportResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_ExportResponse_descriptor, + new java.lang.String[] { "RowCount", "CellCount", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hbase-protocol/src/main/protobuf/Export.proto b/hbase-protocol/src/main/protobuf/Export.proto new file mode 100644 index 0000000..0551851 --- /dev/null +++ b/hbase-protocol/src/main/protobuf/Export.proto @@ -0,0 +1,44 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package hbase.pb; + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "ExportProtos"; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; +option java_generic_services = true; + +import "Client.proto"; + +service ExportService { + rpc export (ExportRequest) returns (ExportResponse); +} + +message ExportRequest { + required Scan scan = 1; + required string outputPath = 2; + optional bool compressed = 3 [default = false]; + optional string compressType = 4; + optional string compressCodec = 5; +} +message ExportResponse { + required uint64 rowCount = 1; + required uint64 cellCount = 2; +} + diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ExportEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ExportEndpoint.java new file mode 100644 index 0000000..0467e58 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ExportEndpoint.java @@ -0,0 +1,365 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.hadoop.hbase.coprocessor; + +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; +import com.google.protobuf.ServiceException; +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsAction; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.Coprocessor; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.classification.InterfaceStability; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.mapreduce.ResultSerialization; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ExportProtos; +import org.apache.hadoop.hbase.regionserver.Region; +import org.apache.hadoop.hbase.regionserver.RegionScanner; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.compress.CompressionCodec; +import org.apache.hadoop.io.compress.DefaultCodec; +import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; +import org.apache.hadoop.util.GenericOptionsParser; +import org.apache.hadoop.util.ReflectionUtils; +import org.apache.hadoop.hbase.mapreduce.Export; +import org.apache.hadoop.hbase.mapreduce.Import; +import org.apache.hadoop.hbase.regionserver.InternalScanner; + +/** +* Export an HBase table. +* Writes content to sequence files up in HDFS. Use {@link Import} to read it +* back in again. +* It is implemented by the endpoint technique. 
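+*
+* A rough usage sketch (the table name and output path below are hypothetical;
+* it assumes this endpoint has been loaded on the target table's regions):
+*
+*   Configuration conf = HBaseConfiguration.create();
+*   // writes one SequenceFile per region under /export/myTable
+*   Map<byte[], ExportProtos.ExportResponse> results =
+*       ExportEndpoint.run(conf, new String[] { "myTable", "/export/myTable" });
+*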
+* @see Export
+*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class ExportEndpoint extends ExportProtos.ExportService
+    implements Coprocessor, CoprocessorService {
+  private static final Log LOG = LogFactory.getLog(ExportEndpoint.class);
+  private RegionCoprocessorEnvironment env = null;
+  @Override
+  public void start(CoprocessorEnvironment environment) throws IOException {
+    if (environment instanceof RegionCoprocessorEnvironment) {
+      this.env = (RegionCoprocessorEnvironment) environment;
+    } else {
+      throw new CoprocessorException("Must be loaded on a table region!");
+    }
+  }
+  @Override
+  public void stop(CoprocessorEnvironment env) throws IOException {
+  }
+
+  @Override
+  public Service getService() {
+    return this;
+  }
+  private static boolean getCompression(final ExportProtos.ExportRequest request) {
+    if (request.hasCompressed()) {
+      return request.getCompressed();
+    } else {
+      return false;
+    }
+  }
+  private static SequenceFile.CompressionType getCompressionType(final ExportProtos.ExportRequest request) {
+    if (!request.hasCompressType()) {
+      return null;
+    }
+    return SequenceFile.CompressionType.valueOf(request.getCompressType());
+  }
+  private static CompressionCodec getCompressionCodec(final Configuration conf, final ExportProtos.ExportRequest request) {
+    if (!request.hasCompressCodec()) {
+      return null;
+    }
+    try {
+      Class<? extends CompressionCodec> codecClass = conf.getClassByName(request.getCompressCodec()).asSubclass(CompressionCodec.class);
+      return ReflectionUtils.newInstance(codecClass, conf);
+    } catch (ClassNotFoundException e) {
+      throw new IllegalArgumentException("Compression codec "
+          + request.getCompressCodec() + " was not found.", e);
+    }
+  }
+  private static SequenceFile.Writer.Option getOutputPath(final Configuration conf,
+      final HRegionInfo info, final ExportProtos.ExportRequest request) throws IOException {
+    Path file = new Path(request.getOutputPath(), "export-" + info.getEncodedName());
+    FileSystem fs = FileSystem.get(conf);
+    if (fs.exists(file)) {
+      throw new IOException(file + " exists");
+    }
+    return SequenceFile.Writer.file(file);
+  }
+  private static List<SequenceFile.Writer.Option> getWriterOptions(final Configuration conf,
+      final HRegionInfo info, final ExportProtos.ExportRequest request) throws IOException {
+    List<SequenceFile.Writer.Option> rval = new LinkedList<>();
+    rval.add(SequenceFile.Writer.keyClass(ImmutableBytesWritable.class));
+    rval.add(SequenceFile.Writer.valueClass(Result.class));
+    rval.add(getOutputPath(conf, info, request));
+    boolean compressed = getCompression(request);
+    if (compressed) {
+      SequenceFile.CompressionType type = getCompressionType(request);
+      if (type != null) {
+        CompressionCodec codec = getCompressionCodec(conf, request);
+        rval.add(SequenceFile.Writer.compression(type, codec));
+      }
+    }
+    return rval;
+  }
+  private Scan validateKey(final HRegionInfo region, final ExportProtos.ExportRequest request) throws IOException {
+    Scan scan = ProtobufUtil.toScan(request.getScan());
+    byte[] regionStartKey = region.getStartKey();
+    byte[] originStartKey = scan.getStartRow();
+    if (originStartKey == null
+        || Bytes.compareTo(originStartKey, regionStartKey) < 0) {
+      scan.setStartRow(regionStartKey);
+    }
+    byte[] regionEndKey = region.getEndKey();
+    byte[] originEndKey = scan.getStopRow();
+    if (originEndKey == null
+        || Bytes.compareTo(originEndKey, regionEndKey) > 0) {
+      scan.setStopRow(regionEndKey);
+    }
+    return scan;
+  }
+  @Override
+  public void export(RpcController controller, ExportProtos.ExportRequest request,
+      RpcCallback<ExportProtos.ExportResponse> done) {
+    Region region = env.getRegion();
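+    // Register ResultSerialization so the SequenceFile writer created below can
+    // serialize Result values next to their ImmutableBytesWritable row keys.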
Configuration conf = HBaseConfiguration.create(env.getConfiguration()); + conf.setStrings("io.serializations", conf.get("io.serializations"), ResultSerialization.class.getName()); + try { + Scan scan = validateKey(region.getRegionInfo(), request); + ExportProtos.ExportResponse response = processData(conf, region, scan, + getWriterOptions(conf, region.getRegionInfo(), request)); + done.run(response); + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); + LOG.error(e); + } + } + private static ExportProtos.ExportResponse processData(final Configuration conf, + final Region region, final Scan scan, final List opts) throws IOException { + ScanCoprocessor cp = new ScanCoprocessor(region); + RegionScanner scanner = null; + try (RegionOp regionOp = new RegionOp(region); + SequenceFile.Writer out = SequenceFile.createWriter(conf, + opts.toArray(new SequenceFile.Writer.Option[opts.size()]))) { + scanner = cp.checkScannerOpen(scan); + ImmutableBytesWritable key = new ImmutableBytesWritable(); + long rowCount = 0; + long cellCount = 0; + List results = new ArrayList<>(); + List cells = new ArrayList<>(); + boolean hasMore; + do { + boolean bypass = cp.preScannerNext(scanner, results, scan.getBatch()); + if (bypass) { + hasMore = false; + } else { + hasMore = scanner.nextRaw(cells); + if (cells.isEmpty()) { + continue; + } + Cell firstCell = cells.get(0); + for (Cell cell : cells) { + if (Bytes.compareTo(firstCell.getRowArray(), firstCell.getRowOffset(), firstCell.getRowLength(), + cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()) != 0) { + throw new IOException("Why the RegionScanner#nextRaw returns the data of different rows??"); + } + } + results.add(Result.create(cells)); + cells.clear(); + cp.postScannerNext(scanner, results, scan.getBatch(), hasMore); + } + for (Result r : results) { + key.set(r.getRow()); + out.append(key, r); + ++rowCount; + cellCount += r.size(); + } + results.clear(); + } while (hasMore); + return ExportProtos.ExportResponse.newBuilder() + .setRowCount(rowCount) + .setCellCount(cellCount) + .build(); + } finally { + cp.checkScannerClose(scanner); + } + } + public static void main(String[] args) throws IOException, Throwable { + run(HBaseConfiguration.create(), args); + } + public static Map run(final Configuration conf, + final String[] args) throws ServiceException, IOException, Throwable { + String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs(); + if (!Export.checkArguments(otherArgs)) { + Export.usage("Wrong number of arguments: " + otherArgs.length); + System.exit(-1); + } + TableName tableName = TableName.valueOf(otherArgs[0]); + FileSystem fs = FileSystem.get(conf); + String dir = otherArgs[1]; + checkDir(fs, dir); + Scan scan = Export.getConfiguredScanForJob(conf, otherArgs); + final ExportProtos.ExportRequest request = getConfiguredRequestForJob(conf, otherArgs, scan); + try (Connection con = ConnectionFactory.createConnection(conf); + Table table = con.getTable(tableName)) { + return table.coprocessorService(ExportProtos.ExportService.class, + scan.getStartRow(), + scan.getStopRow(), new Batch.Call() { + @Override + public ExportProtos.ExportResponse call(ExportProtos.ExportService service) throws IOException { + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback rpcCallback = new BlockingRpcCallback<>(); + service.export(controller, request, rpcCallback); + if (controller.failedOnException()) { + throw controller.getFailedOn(); + } + return rpcCallback.get(); + } + 
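+        // Table.coprocessorService(...) above invokes this call() once per region
+        // overlapping [scan.getStartRow(), scan.getStopRow()) and collects one
+        // ExportResponse per region into the returned map.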
}); + } catch (Throwable e) { + fs.delete(new Path(dir), true); + throw e; + } + } + private static void checkDir(final FileSystem fs, final String path) throws IOException { + Path dir = fs.makeQualified(new Path(path)); + if (fs.exists(dir)) { + throw new RuntimeException("The " + path + " exists"); + } + fs.mkdirs(dir); + fs.setPermission(dir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); + } + private static ExportProtos.ExportRequest getConfiguredRequestForJob(Configuration conf, + String[] args, final Scan scan) throws IOException { + String dir = args[1]; + boolean compressed = conf.getBoolean(FileOutputFormat.COMPRESS, true); + String compressionType = conf.get(FileOutputFormat.COMPRESS_TYPE, + SequenceFile.CompressionType.RECORD.toString()); + String compressionCodec = conf.get(FileOutputFormat.COMPRESS_CODEC, + DefaultCodec.class.getName()); + LOG.info("compressed=" + compressed + + ", compression type=" + compressionType + + ", compression codec=" + compressionCodec); + return ExportProtos.ExportRequest.newBuilder() + .setScan(ProtobufUtil.toScan(scan)) + .setOutputPath(dir) + .setCompressed(compressed) + .setCompressCodec(compressionCodec) + .setCompressType(compressionType) + .build(); + } + private static class RegionOp implements Closeable { + private final Region region; + RegionOp(final Region region) throws IOException { + this.region = region; + region.startRegionOperation(); + } + @Override + public void close() throws IOException { + region.closeRegionOperation(); + } + } + private static class ScanCoprocessor { + private final Region region; + ScanCoprocessor(final Region region) { + this.region = region; + } + RegionScanner checkScannerOpen(final Scan scan) throws IOException { + RegionScanner scanner; + if (region.getCoprocessorHost() == null) { + scanner = region.getScanner(scan); + } else { + scanner = region.getCoprocessorHost().preScannerOpen(scan); + if (scanner == null) { + scanner = region.getScanner(scan); + } + scanner = region.getCoprocessorHost().postScannerOpen(scan, scanner); + } + if (scanner == null) { + throw new IOException ("Failed to open region scanner"); + } + return scanner; + } + void checkScannerClose(final InternalScanner s) throws IOException { + if (s == null) { + return; + } + if (region.getCoprocessorHost() == null) { + s.close(); + return; + } + if (region.getCoprocessorHost().preScannerClose(s)) { + return; + } + try { + s.close(); + } finally { + region.getCoprocessorHost().postScannerClose(s); + } + } + boolean preScannerNext(final InternalScanner s, + final List results, final int limit) throws IOException { + if (region.getCoprocessorHost() == null) { + return false; + } else { + Boolean bypass = region.getCoprocessorHost().preScannerNext(s, results, limit); + return bypass == null ? 
false : bypass; + } + } + boolean postScannerNext(final InternalScanner s, + final List results, final int limit, boolean hasMore) + throws IOException { + if (region.getCoprocessorHost() == null) { + return false; + } else { + return region.getCoprocessorHost().postScannerNext(s, results, limit, hasMore); + } + } + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java index 56d229a..dc52aeb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java @@ -19,6 +19,8 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; +import java.util.Arrays; +import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -37,6 +39,7 @@ import org.apache.hadoop.hbase.filter.RegexStringComparator; import org.apache.hadoop.hbase.filter.RowFilter; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.security.visibility.Authorizations; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; @@ -56,7 +59,7 @@ final static String NAME = "export"; final static String RAW_SCAN = "hbase.mapreduce.include.deleted.rows"; final static String EXPORT_BATCHING = "hbase.export.scanner.batch"; - + final static String EXPORT_VISIBILITY_LABELS = "hbase.export.visibility.labels"; private final static String JOB_NAME_CONF_KEY = "mapreduce.job.name"; /** @@ -86,7 +89,7 @@ return job; } - private static Scan getConfiguredScanForJob(Configuration conf, String[] args) throws IOException { + public static Scan getConfiguredScanForJob(Configuration conf, String[] args) throws IOException { Scan s = new Scan(); // Optional arguments. // Set Scan Versions @@ -110,7 +113,6 @@ if (raw) { s.setRaw(raw); } - if (conf.get(TableInputFormat.SCAN_COLUMN_FAMILY) != null) { s.addFamily(Bytes.toBytes(conf.get(TableInputFormat.SCAN_COLUMN_FAMILY))); } @@ -119,6 +121,13 @@ if (exportFilter!= null) { LOG.info("Setting Scan Filter for Export."); s.setFilter(exportFilter); + } + List labels = null; + if (conf.get(EXPORT_VISIBILITY_LABELS) != null) { + labels = Arrays.asList(conf.getStrings(EXPORT_VISIBILITY_LABELS)); + if (!labels.isEmpty()) { + s.setAuthorizations(new Authorizations(labels)); + } } int batching = conf.getInt(EXPORT_BATCHING, -1); @@ -129,13 +138,14 @@ LOG.error("Batching could not be set", e); } } - LOG.info("versions=" + versions + ", starttime=" + startTime + - ", endtime=" + endTime + ", keepDeletedCells=" + raw); + LOG.info("versions=" + versions + ", starttime=" + startTime + + ", endtime=" + endTime + ", keepDeletedCells=" + raw + + ", visibility labels=" + labels); return s; } private static Filter getExportFilter(String[] args) { - Filter exportFilter = null; + Filter exportFilter; String filterCriteria = (args.length > 5) ? args[5]: null; if (filterCriteria == null) return null; if (filterCriteria.startsWith("^")) { @@ -147,10 +157,12 @@ return exportFilter; } - /* + /** + * Common usage for other export tools. + * @see org.apache.hadoop.hbase.coprocessor.ExportEndpoint * @param errorMsg Error message. Can be null. 
*/ - private static void usage(final String errorMsg) { + public static void usage(final String errorMsg) { if (errorMsg != null && errorMsg.length() > 0) { System.err.println("ERROR: " + errorMsg); } @@ -167,21 +179,30 @@ System.err.println(" -D " + RAW_SCAN + "=true"); System.err.println(" -D " + TableInputFormat.SCAN_ROW_START + "="); System.err.println(" -D " + TableInputFormat.SCAN_ROW_STOP + "="); + System.err.println(" -D hbase.client.scanner.caching=100"); + System.err.println(" -D " + EXPORT_VISIBILITY_LABELS + "="); + } + /** + * Extra options for MR-based export. + * @param errorMsg + */ + private static void usageForMr(final String errorMsg) { + usage(errorMsg); System.err.println(" -D " + JOB_NAME_CONF_KEY + "=jobName - use the specified mapreduce job name for the export"); - System.err.println("For performance consider the following properties:\n" - + " -Dhbase.client.scanner.caching=100\n" - + " -Dmapreduce.map.speculative=false\n" - + " -Dmapreduce.reduce.speculative=false"); + System.err.println("For MR performance consider the following properties:"); + System.err.println(" -D mapreduce.map.speculative=false"); + System.err.println(" -D mapreduce.reduce.speculative=false"); System.err.println("For tables with very wide rows consider setting the batch size as below:\n" - + " -D" + EXPORT_BATCHING + "=10"); + + " -D " + EXPORT_BATCHING + "=10"); } - - + public static boolean checkArguments(final String[] args) { + return args.length >= 2; + } @Override public int run(String[] args) throws Exception { - if (args.length < 2) { - usage("Wrong number of arguments: " + args.length); + if (!checkArguments(args)) { + usageForMr("Wrong number of arguments: " + args.length); return -1; } Job job = createSubmittableJob(getConf(), args); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java index 50146fd..28ac397 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java @@ -17,13 +17,9 @@ */ package org.apache.hadoop.hbase.mapreduce; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Matchers.any; +import com.google.common.collect.ListMultimap; +import com.google.protobuf.ServiceException; import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.io.ByteArrayOutputStream; @@ -31,16 +27,20 @@ import java.io.IOException; import java.io.PrintStream; import java.net.URL; +import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; +import java.util.LinkedList; import java.util.List; -import java.util.NavigableMap; - +import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsAction; +import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -51,6 +51,8 @@ import org.apache.hadoop.hbase.KeepDeletedCells; import 
org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; @@ -59,20 +61,41 @@ import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; +import org.apache.hadoop.hbase.coprocessor.ExportEndpoint; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterBase; import org.apache.hadoop.hbase.filter.PrefixFilter; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.mapreduce.Import.KeyValueImporter; +import org.apache.hadoop.hbase.protobuf.generated.ExportProtos; +import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos; import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; +import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.security.access.AccessControlConstants; +import org.apache.hadoop.hbase.security.access.AccessControlLists; +import org.apache.hadoop.hbase.security.access.Permission; +import org.apache.hadoop.hbase.security.access.SecureTestUtil; +import org.apache.hadoop.hbase.security.access.SecureTestUtil.AccessTestAction; +import org.apache.hadoop.hbase.security.access.TablePermission; +import org.apache.hadoop.hbase.security.visibility.Authorizations; +import org.apache.hadoop.hbase.security.visibility.CellVisibility; +import org.apache.hadoop.hbase.security.visibility.VisibilityClient; +import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; +import org.apache.hadoop.hbase.security.visibility.VisibilityTestUtil; import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.LauncherSecurityManager; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.compress.BZip2Codec; import org.apache.hadoop.mapreduce.Mapper.Context; +import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; +import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.ToolRunner; import org.junit.After; import org.junit.AfterClass; @@ -85,6 +108,12 @@ import org.junit.rules.TestName; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; /** * Tests the table import and table export MR job functionality @@ -93,6 +122,7 @@ public class TestImportExport { private static final Log LOG = LogFactory.getLog(TestImportExport.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); + private static final HBaseTestingUtility UTIL_SECURITY = new HBaseTestingUtility(); private static final byte[] ROW1 = Bytes.toBytesBinary("\\x32row1"); private static final byte[] ROW2 = 
Bytes.toBytesBinary("\\x32row2"); private static final byte[] ROW3 = Bytes.toBytesBinary("\\x32row3"); @@ -104,21 +134,90 @@ private static final String OUTPUT_DIR = "outputdir"; private static String FQ_OUTPUT_DIR; private static final String EXPORT_BATCH_SIZE = "100"; - + private static final String PRIVATE = "private"; + private static final String CONFIDENTIAL = "confidential"; + private static final String SECRET = "secret"; + private static final String TOPSECRET = "topsecret"; + // user granted with all global permission + private static User USER_ADMIN; + // user is table owner. will have all permissions on table + private static User USER_OWNER; + // user with rx permissions. + private static User USER_RX; + // user with exe-only permissions. + private static User USER_XO; + // user with read-only permissions. + private static User USER_RO; + // user with no permissions + private static User USER_NONE; private static long now = System.currentTimeMillis(); + private static final Exporter EXPORTER_MR = new Exporter() { + @Override + public boolean runExport(final Configuration conf, String[] args) throws ServiceException, IOException, Throwable { + // need to make a copy of the configuration because to make sure different temp dirs are used. + return ToolRunner.run(new Configuration(conf), new Export(), args) == 0; + } + @Override + public String toString() { + return "MR-based export"; + } + }; + private static final Exporter EXPORTER_ENDPOINT = new Exporter() { + @Override + public boolean runExport(final Configuration conf, String[] args) throws ServiceException, IOException, Throwable { + ExportEndpoint.run(new Configuration(conf), args); + return true; + } + @Override + public String toString() { + return "Endpoint-based export"; + } + }; + private static final List EXPORTERS = Arrays.asList(EXPORTER_MR, EXPORTER_ENDPOINT); + @BeforeClass public static void beforeClass() throws Exception { // Up the handlers; this test needs more than usual. 
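+    // Load ExportEndpoint on every region so the endpoint-based exporter can be
+    // exercised alongside the MR-based one.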
+ UTIL.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, + ExportEndpoint.class.getName()); UTIL.getConfiguration().setInt(HConstants.REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT, 10); UTIL.startMiniCluster(); + FileSystem fs = FileSystem.get(UTIL.getConfiguration()); FQ_OUTPUT_DIR = - new Path(OUTPUT_DIR).makeQualified(FileSystem.get(UTIL.getConfiguration())).toString(); + new Path(OUTPUT_DIR).makeQualified(fs.getUri(), fs.getWorkingDirectory()).toString(); + // config security and visibility + SecureTestUtil.enableSecurity(UTIL_SECURITY.getConfiguration()); + UTIL_SECURITY.getConfiguration().setBoolean(AccessControlConstants.EXEC_PERMISSION_CHECKS_KEY, true); + VisibilityTestUtil.enableVisiblityLabels(UTIL_SECURITY.getConfiguration()); + UTIL_SECURITY.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, UTIL_SECURITY.getConfiguration().get( + CoprocessorHost.REGION_COPROCESSOR_CONF_KEY) + "," + ExportEndpoint.class.getName()); + SecureTestUtil.verifyConfiguration(UTIL_SECURITY.getConfiguration()); + UTIL_SECURITY.startMiniCluster(); + UTIL_SECURITY.waitUntilAllRegionsAssigned(AccessControlLists.ACL_TABLE_NAME); + UTIL_SECURITY.waitUntilAllRegionsAssigned(VisibilityConstants.LABELS_TABLE_NAME); + UTIL_SECURITY.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME, 50000); + UTIL_SECURITY.waitTableEnabled(VisibilityConstants.LABELS_TABLE_NAME, 50000); + // create users + USER_ADMIN = User.createUserForTesting(UTIL_SECURITY.getConfiguration(), "admin2", new String[0]); + USER_OWNER = User.createUserForTesting(UTIL_SECURITY.getConfiguration(), "owner", new String[0]); + USER_RX = User.createUserForTesting(UTIL_SECURITY.getConfiguration(), "rxuser", new String[0]); + USER_XO = User.createUserForTesting(UTIL_SECURITY.getConfiguration(), "xouser", new String[0]); + USER_RO = User.createUserForTesting(UTIL_SECURITY.getConfiguration(), "rouser", new String[0]); + USER_NONE = User.createUserForTesting(UTIL_SECURITY.getConfiguration(), "nouser", new String[0]); + SecureTestUtil.grantGlobal(UTIL_SECURITY, USER_ADMIN.getShortName(), + Permission.Action.ADMIN, + Permission.Action.CREATE, + Permission.Action.EXEC, + Permission.Action.READ, + Permission.Action.WRITE); + addLabels(UTIL_SECURITY.getConfiguration(), Arrays.asList(USER_OWNER), + Arrays.asList(PRIVATE, CONFIDENTIAL, SECRET, TOPSECRET)); } - @AfterClass public static void afterClass() throws Exception { UTIL.shutdownMiniCluster(); + UTIL_SECURITY.shutdownMiniCluster(); } @Rule @@ -129,49 +228,331 @@ LOG.info("Running " + name.getMethodName()); } - @Before + @After - public void cleanup() throws Exception { - FileSystem fs = FileSystem.get(UTIL.getConfiguration()); - fs.delete(new Path(OUTPUT_DIR), true); + public void cleanup() throws IOException { + deleteOutput(UTIL.getConfiguration(), OUTPUT_DIR); } - /** - * Runs an export job with the specified command line args - * @param args - * @return true if job completed successfully - * @throws IOException - * @throws InterruptedException - * @throws ClassNotFoundException - */ - boolean runExport(String[] args) throws Exception { - // need to make a copy of the configuration because to make sure different temp dirs are used. 
- int status = ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Export(), args); - return status == 0; + private static void deleteOutput(final Configuration conf, final String dir) throws IOException { + FileSystem fs = FileSystem.get(conf); + fs.delete(new Path(dir), true); } /** * Runs an import job with the specified command line args * @param args * @return true if job completed successfully - * @throws IOException - * @throws InterruptedException - * @throws ClassNotFoundException - */ - boolean runImport(String[] args) throws Exception { - // need to make a copy of the configuration because to make sure different temp dirs are used. - int status = ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Import(), args); - return status == 0; - } - - /** - * Test simple replication case with column mapping * @throws Exception */ + boolean runImport(final Configuration conf, String[] args) throws Exception { + // need to make a copy of the configuration because to make sure different temp dirs are used. + int status = ToolRunner.run(new Configuration(conf), new Import(), args); + return status == 0; + } + /** + * Test the writer's options. + * @throws IOException + */ @Test - public void testSimpleCase() throws Exception { - String EXPORT_TABLE = "exportSimpleCase"; - try (Table t = UTIL.createTable(TableName.valueOf(EXPORT_TABLE), FAMILYA, 3);) { + public void testOutputFileFormat() throws IOException, Throwable { + String exportTable = "testOutputFileFormat"; + for (Exporter exporter : EXPORTERS) { + testOutputFileFormat(exportTable, exporter); + UTIL.deleteTable(TableName.valueOf(exportTable)); + deleteOutput(UTIL.getConfiguration(), OUTPUT_DIR); + } + } + /** + * Test the writer's options. + * @param exportTable + * @param exporter + * @throws IOException + */ + public void testOutputFileFormat(final String exportTable, final Exporter exporter) throws IOException, Throwable { + String codec = BZip2Codec.class.getName(); + String type = SequenceFile.CompressionType.RECORD.name(); + try (Table t = UTIL.createTable(TableName.valueOf(exportTable), FAMILYA, 3);) { + Put p = new Put(ROW1); + p.addColumn(FAMILYA, QUAL, now, QUAL); + t.put(p); + p = new Put(ROW2); + p.addColumn(FAMILYA, QUAL, now, QUAL); + t.put(p); + p = new Put(ROW3); + p.addColumn(FAMILYA, QUAL, now, QUAL); + t.put(p); + } + //use compress + String[] args = new String[] { + // Only export row1 & row2. 
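+        // compression settings for the exported SequenceFiles; the test later
+        // reads the files back to verify the codec and compression type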
+ "-D" + FileOutputFormat.COMPRESS + "=true", + "-D" + FileOutputFormat.COMPRESS_CODEC + "=" + codec, + "-D" + FileOutputFormat.COMPRESS_TYPE + "=" + type, + exportTable, + FQ_OUTPUT_DIR + }; + assertTrue(exporter.toString(), exporter.runExport(UTIL.getConfiguration(), args)); + FileSystem fs = FileSystem.get(UTIL.getConfiguration()); + List files = Arrays.asList(fs.listStatus(new Path(FQ_OUTPUT_DIR))); + assertEquals(exporter.toString(), false, files.isEmpty()); + Configuration copy = new Configuration(UTIL.getConfiguration()); + //need to make a copy of the configuration because to make sure the Exporter has set the "io.serializations" + copy.setStrings("io.serializations", copy.get("io.serializations"), + ResultSerialization.class.getName()); + for (FileStatus file : files) { + Path path = file.getPath(); + //skips the MR meta output + if (path.getName().equals("_SUCCESS")) { + continue; + } + try (SequenceFile.Reader reader = new SequenceFile.Reader( + copy, SequenceFile.Reader.file(file.getPath()))) { + assertEquals(exporter.toString(), reader.getCompressionCodec().getClass().getName(), codec); + assertEquals(exporter.toString(), reader.getCompressionType().name(), type); + } + } + } + @Test + public void testVisibilityLabels() throws Throwable { + String exportTable = "testVisibilityLabels_output"; + String importTable = "testVisibilityLabels_import"; + for (Exporter exporter : EXPORTERS) { + testVisibilityLabels(exportTable, importTable, exporter); + } + } + void testVisibilityLabels(final String exportTable, final String importTable, + final Exporter exporter) throws IOException, Throwable { + final HTableDescriptor exportHtd = new HTableDescriptor(TableName.valueOf(exportTable)); + exportHtd.addFamily(new HColumnDescriptor(FAMILYA)); + exportHtd.setOwner(USER_OWNER); + SecureTestUtil.createTable(UTIL_SECURITY, exportHtd, new byte[][] { Bytes.toBytes("s") }); + AccessTestAction putAction = new AccessTestAction() { + @Override + public Object run() throws Exception { + Put p1 = new Put(ROW1); + p1.addColumn(FAMILYA, QUAL, now, QUAL); + p1.setCellVisibility(new CellVisibility(SECRET)); + Put p2 = new Put(ROW2); + p2.addColumn(FAMILYA, QUAL, now, QUAL); + p2.setCellVisibility(new CellVisibility(PRIVATE + " & " + CONFIDENTIAL)); + Put p3 = new Put(ROW3); + p3.addColumn(FAMILYA, QUAL, now, QUAL); + p3.setCellVisibility(new CellVisibility("!" + CONFIDENTIAL + " & " + TOPSECRET)); + try(Connection conn = ConnectionFactory.createConnection(UTIL_SECURITY.getConfiguration()); + Table t = conn.getTable(TableName.valueOf(exportTable))) { + t.put(p1); + t.put(p2); + t.put(p3); + } + return null; + } + }; + SecureTestUtil.verifyAllowed(putAction, USER_OWNER); + List, Integer>> labelsAndRowCounts = new LinkedList<>(); + labelsAndRowCounts.add(new Pair<>(Arrays.asList(SECRET), 1)); + labelsAndRowCounts.add(new Pair<>(Arrays.asList(PRIVATE, CONFIDENTIAL), 1)); + labelsAndRowCounts.add(new Pair<>(Arrays.asList(TOPSECRET), 1)); + labelsAndRowCounts.add(new Pair<>(Arrays.asList(TOPSECRET, CONFIDENTIAL), 0)); + labelsAndRowCounts.add(new Pair<>(Arrays.asList(TOPSECRET, CONFIDENTIAL, PRIVATE, SECRET), 2)); + for (final Pair, Integer> labelsAndRowCount : labelsAndRowCounts) { + final List labels = labelsAndRowCount.getFirst(); + final int rowCount = labelsAndRowCount.getSecond(); + //create a open permission directory. 
+ final FileSystem fs = FileSystem.get(UTIL_SECURITY.getConfiguration()); + final Path openDir = new Path("testAccessCase"); + fs.mkdirs(openDir); + fs.setPermission(openDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); + final String FQ_OUTPUT_DIR_SECURITY = new Path(openDir, OUTPUT_DIR).makeQualified( + fs.getUri(), fs.getWorkingDirectory()).toString(); + AccessTestAction exportAction = new AccessTestAction() { + @Override + public Object run() throws Exception { + StringBuilder buf = new StringBuilder(); + for (String label : labels) { + buf.append(label).append(","); + } + buf.deleteCharAt(buf.length() - 1); + try { + String[] args = new String[] { + "-D " + Export.EXPORT_VISIBILITY_LABELS + "=" + buf.toString(), + exportTable, + FQ_OUTPUT_DIR_SECURITY, + }; + assertEquals(true, exporter.runExport(UTIL_SECURITY.getConfiguration(), args)); + return null; + } catch (ServiceException | IOException ex) { + throw ex; + } catch (Throwable ex) { + throw new Exception(ex); + } + } + }; + SecureTestUtil.verifyAllowed(exportAction, USER_OWNER); + final HTableDescriptor importHtd = new HTableDescriptor(TableName.valueOf(importTable)); + importHtd.addFamily(new HColumnDescriptor(FAMILYB)); + importHtd.setOwner(USER_OWNER); + SecureTestUtil.createTable(UTIL_SECURITY, importHtd, new byte[][]{Bytes.toBytes("s")}); + AccessTestAction importAction = new AccessTestAction() { + @Override + public Object run() throws Exception { + String[] args = new String[]{ + "-D" + Import.CF_RENAME_PROP + "=" + FAMILYA_STRING + ":" + FAMILYB_STRING, + importTable, + FQ_OUTPUT_DIR_SECURITY + }; + assertTrue(exporter.toString(), runImport(UTIL_SECURITY.getConfiguration(), args)); + return null; + } + }; + SecureTestUtil.verifyAllowed(importAction, USER_OWNER); + AccessTestAction scanAction = new AccessTestAction() { + @Override + public Object run() throws Exception { + Scan scan = new Scan(); + scan.setAuthorizations(new Authorizations(labels)); + try (Connection conn = ConnectionFactory.createConnection(UTIL_SECURITY.getConfiguration()); + Table table = conn.getTable(importHtd.getTableName()); + ResultScanner scanner = table.getScanner(scan)) { + int count = 0; + for (Result r : scanner) { + ++count; + } + assertEquals(rowCount, count); + } + return null; + } + }; + SecureTestUtil.verifyAllowed(scanAction, USER_OWNER); + AccessTestAction deleteAction = new AccessTestAction() { + @Override + public Object run() throws Exception { + UTIL_SECURITY.deleteTable(importHtd.getTableName()); + return null; + } + }; + SecureTestUtil.verifyAllowed(deleteAction, USER_OWNER); + deleteOutput(UTIL_SECURITY.getConfiguration(), FQ_OUTPUT_DIR_SECURITY); + } + AccessTestAction deleteAction = new AccessTestAction() { + @Override + public Object run() throws Exception { + UTIL_SECURITY.deleteTable(exportHtd.getTableName()); + return null; + } + }; + SecureTestUtil.verifyAllowed(deleteAction, USER_OWNER); + } + /** + * Test the ExportEndpoint's access levels. + * The {@link Export} test is ignored since the access exceptions cannot be + * collected from the mappers. 
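+ * A user needs both READ and EXEC permission on the table to invoke the endpoint;
+ * the read-only, exec-only and no-permission users below are expected to be denied.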
+ * @throws java.io.IOException + */ + @Test + public void testAccessCase() throws IOException, Throwable { + final String exportTable = "testAccessCase"; + HTableDescriptor exportHtd = new HTableDescriptor(TableName.valueOf(exportTable)); + exportHtd.addFamily(new HColumnDescriptor(FAMILYA)); + exportHtd.setOwner(USER_OWNER); + SecureTestUtil.createTable(UTIL_SECURITY, exportHtd, new byte[][] { Bytes.toBytes("s") }); + SecureTestUtil.grantOnTable(UTIL_SECURITY, USER_RO.getShortName(), + TableName.valueOf(exportTable), null, null, + Permission.Action.READ); + SecureTestUtil.grantOnTable(UTIL_SECURITY, USER_RX.getShortName(), + TableName.valueOf(exportTable), null, null, + Permission.Action.READ, + Permission.Action.EXEC); + SecureTestUtil.grantOnTable(UTIL_SECURITY, USER_XO.getShortName(), + TableName.valueOf(exportTable), null, null, + Permission.Action.EXEC); + assertEquals(4, SecureTestUtil.getTablePermissions(UTIL_SECURITY.getConfiguration(), + TableName.valueOf(exportTable)).size()); + ListMultimap permissions = + SecureTestUtil.getTablePermissions(UTIL_SECURITY.getConfiguration(), TableName.valueOf(exportTable)); + AccessTestAction putAction = new AccessTestAction() { + @Override + public Object run() throws Exception { + Put p = new Put(ROW1); + p.addColumn(FAMILYA, QUAL, now, QUAL); + try(Connection conn = ConnectionFactory.createConnection(UTIL_SECURITY.getConfiguration()); + Table t = conn.getTable(TableName.valueOf(exportTable))) { + t.put(p); + } + return null; + } + }; + SecureTestUtil.verifyAllowed(putAction, USER_ADMIN, USER_OWNER); + SecureTestUtil.verifyDenied(putAction, USER_RO, USER_XO, USER_RX, USER_NONE); + + //create a open permission directory. + final FileSystem fs = FileSystem.get(UTIL_SECURITY.getConfiguration()); + final Path openDir = new Path("testAccessCase"); + fs.mkdirs(openDir); + fs.setPermission(openDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); + final String FQ_OUTPUT_DIR_SECURITY = new Path(openDir, OUTPUT_DIR).makeQualified( + fs.getUri(), fs.getWorkingDirectory()).toString(); + AccessTestAction exportAction = new AccessTestAction() { + @Override + public Object run() throws Exception { + try { + String[] args = new String[] {exportTable, FQ_OUTPUT_DIR_SECURITY}; + Map result + = ExportEndpoint.run(new Configuration(UTIL_SECURITY.getConfiguration()), args); + long rowCount = 0; + long cellCount = 0; + for (ExportProtos.ExportResponse r : result.values()) { + rowCount += r.getRowCount(); + cellCount += r.getCellCount(); + } + assertEquals(1, rowCount); + assertEquals(1, cellCount); + return null; + } catch (ServiceException | IOException ex) { + throw ex; + } catch (Throwable ex) { + throw new Exception(ex); + } finally { + deleteOutput(UTIL_SECURITY.getConfiguration(), FQ_OUTPUT_DIR_SECURITY); + } + } + }; + SecureTestUtil.verifyDenied(exportAction, USER_RO, USER_XO, USER_NONE); + SecureTestUtil.verifyAllowed(exportAction, USER_ADMIN, USER_OWNER, USER_RX); + AccessTestAction deleteAction = new AccessTestAction() { + @Override + public Object run() throws Exception { + UTIL_SECURITY.deleteTable(TableName.valueOf(exportTable)); + return null; + } + }; + SecureTestUtil.verifyAllowed(deleteAction, USER_OWNER); + } + /** + * Test simple replication case with column mapping + * @throws IOException + */ + @Test + public void testSimpleCase() throws IOException, Throwable { + String exportTable = "exportSimpleCase"; + String importTable = "importTableSimpleCase"; + for (Exporter exporter : EXPORTERS) { + testSimpleCase(exportTable, 
importTable, exporter); + UTIL.deleteTable(TableName.valueOf(exportTable)); + UTIL.deleteTable(TableName.valueOf(importTable)); + deleteOutput(UTIL.getConfiguration(), OUTPUT_DIR); + } + } + /** + * Test simple replication case with column mapping. + * @param exportTable + * @param importTable + * @param exporter + * @throws java.io.IOException + */ + void testSimpleCase(final String exportTable, final String importTable, + final Exporter exporter) throws IOException, Throwable { + try (Table t = UTIL.createTable(TableName.valueOf(exportTable), FAMILYA, 3);) { Put p = new Put(ROW1); p.addColumn(FAMILYA, QUAL, now, QUAL); p.addColumn(FAMILYA, QUAL, now + 1, QUAL); @@ -189,59 +570,59 @@ t.put(p); } - String[] args = new String[] { - // Only export row1 & row2. - "-D" + TableInputFormat.SCAN_ROW_START + "=\\x32row1", - "-D" + TableInputFormat.SCAN_ROW_STOP + "=\\x32row3", - EXPORT_TABLE, - FQ_OUTPUT_DIR, - "1000", // max number of key versions per key to export + String[] args = new String[] { + // Only export row1 & row2. + "-D" + TableInputFormat.SCAN_ROW_START + "=\\x32row1", + "-D" + TableInputFormat.SCAN_ROW_STOP + "=\\x32row3", + exportTable, + FQ_OUTPUT_DIR, + "1000", // max number of key versions per key to export + }; + assertTrue(exporter.toString(), exporter.runExport(UTIL.getConfiguration(), args)); + + try (Table t = UTIL.createTable(TableName.valueOf(importTable), FAMILYB, 3);) { + args = new String[] { + "-D" + Import.CF_RENAME_PROP + "="+FAMILYA_STRING+":"+FAMILYB_STRING, + importTable, + FQ_OUTPUT_DIR }; - assertTrue(runExport(args)); + assertTrue(exporter.toString(), runImport(UTIL.getConfiguration(), args)); - String IMPORT_TABLE = "importTableSimpleCase"; - try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), FAMILYB, 3);) { - args = new String[] { - "-D" + Import.CF_RENAME_PROP + "="+FAMILYA_STRING+":"+FAMILYB_STRING, - IMPORT_TABLE, - FQ_OUTPUT_DIR - }; - assertTrue(runImport(args)); - - Get g = new Get(ROW1); - g.setMaxVersions(); - Result r = t.get(g); - assertEquals(3, r.size()); - g = new Get(ROW2); - g.setMaxVersions(); - r = t.get(g); - assertEquals(3, r.size()); - g = new Get(ROW3); - r = t.get(g); - assertEquals(0, r.size()); - } + Get g = new Get(ROW1); + g.setMaxVersions(); + Result r = t.get(g); + assertEquals(exporter.toString(), 3, r.size()); + g = new Get(ROW2); + g.setMaxVersions(); + r = t.get(g); + assertEquals(exporter.toString(), 3, r.size()); + g = new Get(ROW3); + r = t.get(g); + assertEquals(exporter.toString(), 0, r.size()); + } } - /** * Test export hbase:meta table * - * @throws Exception + * @throws IOException */ @Test - public void testMetaExport() throws Exception { - String EXPORT_TABLE = TableName.META_TABLE_NAME.getNameAsString(); - String[] args = new String[] { EXPORT_TABLE, FQ_OUTPUT_DIR, "1", "0", "0" }; - assertTrue(runExport(args)); + public void testMetaExport() throws IOException, Throwable { + String exportTable = TableName.META_TABLE_NAME.getNameAsString(); + String[] args = new String[] { exportTable, FQ_OUTPUT_DIR, "1", "0", "0" }; + for (Exporter exporter : EXPORTERS) { + assertTrue(exporter.toString(), exporter.runExport(UTIL.getConfiguration(), args)); + deleteOutput(UTIL.getConfiguration(), OUTPUT_DIR); + } } - /** - * Test import data from 0.94 exported file + * Test import data from 0.94 exported file. 
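+ * The fixture file is in the 0.94 export format and is read back with
+ * -Dhbase.import.version=0.94.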
* @throws Exception */ @Test public void testImport94Table() throws Exception { - final String name = "exportedTableIn94Format"; - URL url = TestImportExport.class.getResource(name); + final String importName = "exportedTableIn94Format"; + URL url = TestImportExport.class.getResource(importName); File f = new File(url.toURI()); if (!f.exists()) { LOG.warn("FAILED TO FIND " + f + "; skipping out on test"); @@ -251,14 +632,14 @@ LOG.info("FILE=" + f); Path importPath = new Path(f.toURI()); FileSystem fs = FileSystem.get(UTIL.getConfiguration()); - fs.copyFromLocalFile(importPath, new Path(FQ_OUTPUT_DIR + Path.SEPARATOR + name)); - String IMPORT_TABLE = name; - try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), Bytes.toBytes("f1"), 3);) { + fs.copyFromLocalFile(importPath, new Path(FQ_OUTPUT_DIR + Path.SEPARATOR + importName)); + String importTable = importName; + try (Table t = UTIL.createTable(TableName.valueOf(importTable), Bytes.toBytes("f1"), 3);) { String[] args = new String[] { "-Dhbase.import.version=0.94" , - IMPORT_TABLE, FQ_OUTPUT_DIR + importTable, FQ_OUTPUT_DIR }; - assertTrue(runImport(args)); + assertTrue(runImport(UTIL.getConfiguration(), args)); /* exportedTableIn94Format contains 5 rows ROW COLUMN+CELL r1 column=f1:c1, timestamp=1383766761171, value=val1 @@ -270,18 +651,32 @@ assertEquals(5, UTIL.countRows(t)); } } - /** * Test export scanner batching + * @throws java.io.IOException + * @throws java.lang.Throwable */ @Test - public void testExportScannerBatching() throws Exception { - String BATCH_TABLE = "exportWithBatch"; - HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(BATCH_TABLE)); + public void testExportScannerBatching() throws IOException, Throwable { + String exportTable = "exportWithBatch"; + for (Exporter exporter : EXPORTERS) { + testExportScannerBatching(exportTable, exporter); + UTIL.deleteTable(TableName.valueOf(exportTable)); + deleteOutput(UTIL.getConfiguration(), OUTPUT_DIR); + } + } + /** + * Test export scanner batching. + * @param exportTable + * @param exporter + * @throws java.io.IOException + */ + public void testExportScannerBatching(final String exportTable, final Exporter exporter) throws IOException, Throwable { + HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(exportTable)); desc.addFamily(new HColumnDescriptor(FAMILYA) .setMaxVersions(1) ); - UTIL.getHBaseAdmin().createTable(desc); + UTIL.getAdmin().createTable(desc); try (Table t = UTIL.getConnection().getTable(desc.getTableName());) { Put p = new Put(ROW1); @@ -294,25 +689,34 @@ String[] args = new String[] { "-D" + Export.EXPORT_BATCHING + "=" + EXPORT_BATCH_SIZE, // added scanner batching arg. 
- BATCH_TABLE, + exportTable, FQ_OUTPUT_DIR }; - assertTrue(runExport(args)); + assertTrue(exporter.toString(), exporter.runExport(UTIL.getConfiguration(), args)); FileSystem fs = FileSystem.get(UTIL.getConfiguration()); fs.delete(new Path(FQ_OUTPUT_DIR), true); } } - @Test - public void testWithDeletes() throws Exception { - String EXPORT_TABLE = "exportWithDeletes"; - HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(EXPORT_TABLE)); + public void testWithDeletes() throws IOException, Throwable { + String exportTable = "exportWithDeletes"; + String importTable = "importWithDeletes"; + for (Exporter exporter : EXPORTERS) { + testWithDeletes(exportTable, importTable, exporter); + UTIL.deleteTable(TableName.valueOf(exportTable)); + UTIL.deleteTable(TableName.valueOf(importTable)); + deleteOutput(UTIL.getConfiguration(), OUTPUT_DIR); + } + } + void testWithDeletes(final String exportTable, final String importTable, + final Exporter exporter) throws IOException, Throwable { + HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(exportTable)); desc.addFamily(new HColumnDescriptor(FAMILYA) .setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE) ); - UTIL.getHBaseAdmin().createTable(desc); + UTIL.getAdmin().createTable(desc); try (Table t = UTIL.getConnection().getTable(desc.getTableName());) { Put p = new Put(ROW1); @@ -332,25 +736,23 @@ String[] args = new String[] { "-D" + Export.RAW_SCAN + "=true", - EXPORT_TABLE, + exportTable, FQ_OUTPUT_DIR, "1000", // max number of key versions per key to export }; - assertTrue(runExport(args)); - - String IMPORT_TABLE = "importWithDeletes"; - desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE)); + assertTrue(exporter.toString(), exporter.runExport(UTIL.getConfiguration(), args)); + desc = new HTableDescriptor(TableName.valueOf(importTable)); desc.addFamily(new HColumnDescriptor(FAMILYA) .setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE) ); - UTIL.getHBaseAdmin().createTable(desc); + UTIL.getAdmin().createTable(desc); try (Table t = UTIL.getConnection().getTable(desc.getTableName());) { args = new String[] { - IMPORT_TABLE, - FQ_OUTPUT_DIR + importTable, + FQ_OUTPUT_DIR }; - assertTrue(runImport(args)); + assertTrue(exporter.toString(), runImport(UTIL.getConfiguration(), args)); Scan s = new Scan(); s.setMaxVersions(); @@ -358,29 +760,36 @@ ResultScanner scanner = t.getScanner(s); Result r = scanner.next(); Cell[] res = r.rawCells(); - assertTrue(CellUtil.isDeleteFamily(res[0])); - assertEquals(now+4, res[1].getTimestamp()); - assertEquals(now+3, res[2].getTimestamp()); - assertTrue(CellUtil.isDelete(res[3])); - assertEquals(now+2, res[4].getTimestamp()); - assertEquals(now+1, res[5].getTimestamp()); - assertEquals(now, res[6].getTimestamp()); + assertTrue(exporter.toString(), CellUtil.isDeleteFamily(res[0])); + assertEquals(exporter.toString(), now+4, res[1].getTimestamp()); + assertEquals(exporter.toString(), now+3, res[2].getTimestamp()); + assertTrue(exporter.toString(), CellUtil.isDelete(res[3])); + assertEquals(exporter.toString(), now+2, res[4].getTimestamp()); + assertEquals(exporter.toString(), now+1, res[5].getTimestamp()); + assertEquals(exporter.toString(), now, res[6].getTimestamp()); } } - - @Test - public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Exception { - TableName EXPORT_TABLE = - TableName.valueOf("exportWithMultipleDeleteFamilyMarkersOfSameRowSameFamily"); - HTableDescriptor desc = new HTableDescriptor(EXPORT_TABLE); + public void 
testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws IOException, Throwable { + String exportTable = "exportWithMultipleDeleteFamilyMarkersOfSameRowSameFamily"; + String importTable = "importWithMultipleDeleteFamilyMarkersOfSameRowSameFamily"; + for (Exporter exporter : EXPORTERS) { + testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily(exportTable, importTable, exporter); + UTIL.deleteTable(TableName.valueOf(exportTable)); + UTIL.deleteTable(TableName.valueOf(importTable)); + deleteOutput(UTIL.getConfiguration(), OUTPUT_DIR); + } + } + void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily(final String exportTable, final String importTable, + final Exporter exporter) throws IOException, Throwable { + HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(exportTable)); desc.addFamily(new HColumnDescriptor(FAMILYA) .setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE) ); - UTIL.getHBaseAdmin().createTable(desc); + UTIL.getAdmin().createTable(desc); - Table exportT = UTIL.getConnection().getTable(EXPORT_TABLE); + Table exportT = UTIL.getConnection().getTable(desc.getTableName()); //Add first version of QUAL Put p = new Put(ROW1); @@ -402,26 +811,24 @@ String[] args = new String[] { - "-D" + Export.RAW_SCAN + "=true", EXPORT_TABLE.getNameAsString(), + "-D" + Export.RAW_SCAN + "=true", exportTable, FQ_OUTPUT_DIR, "1000", // max number of key versions per key to export }; - assertTrue(runExport(args)); - - String IMPORT_TABLE = "importWithMultipleDeleteFamilyMarkersOfSameRowSameFamily"; - desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE)); + assertTrue(exporter.toString(), exporter.runExport(UTIL.getConfiguration(), args)); + desc = new HTableDescriptor(TableName.valueOf(importTable)); desc.addFamily(new HColumnDescriptor(FAMILYA) .setMaxVersions(5) .setKeepDeletedCells(KeepDeletedCells.TRUE) ); - UTIL.getHBaseAdmin().createTable(desc); + UTIL.getAdmin().createTable(desc); - Table importT = UTIL.getConnection().getTable(TableName.valueOf(IMPORT_TABLE)); + Table importT = UTIL.getConnection().getTable(TableName.valueOf(importTable)); args = new String[] { - IMPORT_TABLE, + importTable, FQ_OUTPUT_DIR }; - assertTrue(runImport(args)); + assertTrue(exporter.toString(), runImport(UTIL.getConfiguration(), args)); Scan s = new Scan(); s.setMaxVersions(); @@ -434,75 +841,86 @@ Result exportedTResult = exportedTScanner.next(); try { Result.compareResults(exportedTResult, importedTResult); - } catch (Exception e) { + } catch (IOException e) { fail("Original and imported tables data comparision failed with error:"+e.getMessage()); } finally { exportT.close(); importT.close(); } } - /** * Create a simple table, run an Export Job on it, Import with filtering on, verify counts, * attempt with invalid values. + * @throws java.io.IOException */ @Test - public void testWithFilter() throws Exception { + public void testWithFilter() throws IOException, Throwable { + String exportTable = "exportSimpleCase_ImportWithFilter"; + String importTable = "importWithFilter"; + for (Exporter exporter : EXPORTERS) { + testWithFilter(exportTable, importTable, exporter); + UTIL.deleteTable(TableName.valueOf(exportTable)); + UTIL.deleteTable(TableName.valueOf(importTable)); + deleteOutput(UTIL.getConfiguration(), OUTPUT_DIR); + } + } + /** + * Create a simple table, run an Export Job on it, Import with filtering on, verify counts, + * attempt with invalid values. 
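+ * The import applies a PrefixFilter on ROW1 and the imported row count is then
+ * compared against the filtered count of the source table.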
+ * @param exporter + * @throws java.io.IOException + */ + void testWithFilter(final String exportTable, final String importTable, + final Exporter exporter) throws IOException, Throwable { // Create simple table to export - String EXPORT_TABLE = "exportSimpleCase_ImportWithFilter"; - HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(EXPORT_TABLE)); + HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(exportTable)); desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5)); - UTIL.getHBaseAdmin().createTable(desc); - Table exportTable = UTIL.getConnection().getTable(desc.getTableName()); + UTIL.getAdmin().createTable(desc); + try (Table exportT = UTIL.getConnection().getTable(desc.getTableName())) { + Put p1 = new Put(ROW1); + p1.addColumn(FAMILYA, QUAL, now, QUAL); + p1.addColumn(FAMILYA, QUAL, now + 1, QUAL); + p1.addColumn(FAMILYA, QUAL, now + 2, QUAL); + p1.addColumn(FAMILYA, QUAL, now + 3, QUAL); + p1.addColumn(FAMILYA, QUAL, now + 4, QUAL); - Put p1 = new Put(ROW1); - p1.addColumn(FAMILYA, QUAL, now, QUAL); - p1.addColumn(FAMILYA, QUAL, now + 1, QUAL); - p1.addColumn(FAMILYA, QUAL, now + 2, QUAL); - p1.addColumn(FAMILYA, QUAL, now + 3, QUAL); - p1.addColumn(FAMILYA, QUAL, now + 4, QUAL); + // Having another row would actually test the filter. + Put p2 = new Put(ROW2); + p2.addColumn(FAMILYA, QUAL, now, QUAL); - // Having another row would actually test the filter. - Put p2 = new Put(ROW2); - p2.addColumn(FAMILYA, QUAL, now, QUAL); + exportT.put(Arrays.asList(p1, p2)); + // Export the simple table + String[] args = new String[] { exportTable, FQ_OUTPUT_DIR, "1000" }; + assertTrue(exporter.toString(), exporter.runExport(UTIL.getConfiguration(), args)); - exportTable.put(Arrays.asList(p1, p2)); + // Import to a new table + desc = new HTableDescriptor(TableName.valueOf(importTable)); + desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5)); + UTIL.getAdmin().createTable(desc); - // Export the simple table - String[] args = new String[] { EXPORT_TABLE, FQ_OUTPUT_DIR, "1000" }; - assertTrue(runExport(args)); + try (Table importT = UTIL.getConnection().getTable(desc.getTableName())) { + args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(), + "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), importTable, + FQ_OUTPUT_DIR, + "1000" }; + assertTrue(exporter.toString(), runImport(UTIL.getConfiguration(), args)); - // Import to a new table - String IMPORT_TABLE = "importWithFilter"; - desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE)); - desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5)); - UTIL.getHBaseAdmin().createTable(desc); + // get the count of the source table for that time range + PrefixFilter filter = new PrefixFilter(ROW1); + int count = getCount(exportT, filter); - Table importTable = UTIL.getConnection().getTable(desc.getTableName()); - args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(), - "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE, - FQ_OUTPUT_DIR, - "1000" }; - assertTrue(runImport(args)); + Assert.assertEquals("Unexpected row count between export(" + exporter.toString() + ") and import tables", count, + getCount(importT, null)); - // get the count of the source table for that time range - PrefixFilter filter = new PrefixFilter(ROW1); - int count = getCount(exportTable, filter); + // and then test that a broken command doesn't bork everything - easier here because we don't + // need 
to re-run the export job - Assert.assertEquals("Unexpected row count between export and import tables", count, - getCount(importTable, null)); - - // and then test that a broken command doesn't bork everything - easier here because we don't - // need to re-run the export job - - args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + Filter.class.getName(), - "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", EXPORT_TABLE, - FQ_OUTPUT_DIR, "1000" }; - assertFalse(runImport(args)); - - // cleanup - exportTable.close(); - importTable.close(); + args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + Filter.class.getName(), + "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", exportTable, + FQ_OUTPUT_DIR, "1000" }; + assertFalse(runImport(UTIL.getConfiguration(), args)); + } + } } /** @@ -516,17 +934,18 @@ private int getCount(Table table, Filter filter) throws IOException { Scan scan = new Scan(); scan.setFilter(filter); - ResultScanner results = table.getScanner(scan); - int count = 0; - for (Result res : results) { - count += res.size(); + try (ResultScanner results = table.getScanner(scan)) { + int count = 0; + for (Result res : results) { + count += res.size(); + } + return count; } - results.close(); - return count; } /** * test main method. Import should print help and call System.exit + * @throws java.lang.Exception */ @Test public void testImportMain() throws Exception { @@ -556,6 +975,7 @@ /** * test main method. Export should print help and call System.exit + * @throws java.lang.Exception */ @Test public void testExportMain() throws Exception { @@ -578,18 +998,63 @@ "[ []] [^[regex pattern] or [Prefix] to filter]]")); assertTrue(data.toString().contains("-D hbase.mapreduce.scan.column.family=")); assertTrue(data.toString().contains("-D hbase.mapreduce.include.deleted.rows=true")); - assertTrue(data.toString().contains("-Dhbase.client.scanner.caching=100")); - assertTrue(data.toString().contains("-Dmapreduce.map.speculative=false")); - assertTrue(data.toString().contains("-Dmapreduce.reduce.speculative=false")); - assertTrue(data.toString().contains("-Dhbase.export.scanner.batch=10")); + assertTrue(data.toString().contains("-D hbase.client.scanner.caching=100")); + assertTrue(data.toString().contains("-D mapreduce.map.speculative=false")); + assertTrue(data.toString().contains("-D mapreduce.reduce.speculative=false")); + assertTrue(data.toString().contains("-D hbase.export.scanner.batch=10")); + assertTrue(data.toString().contains("-D " + Export.EXPORT_VISIBILITY_LABELS + "=")); } finally { System.setErr(oldPrintStream); System.setSecurityManager(SECURITY_MANAGER); } } - + @Test + public void testExportScan() throws Exception { + int version = 100; + long startTime = System.currentTimeMillis(); + long endTime = startTime + 1; + String prefix = "row"; + String label_0 = "label_0"; + String label_1 = "label_1"; + String[] args = { + "table", + "outputDir", + String.valueOf(version), + String.valueOf(startTime), + String.valueOf(endTime), + prefix + }; + Scan scan = Export.getConfiguredScanForJob(UTIL.getConfiguration(), args); + assertEquals(version, scan.getMaxVersions()); + assertEquals(startTime, scan.getTimeRange().getMin()); + assertEquals(endTime, scan.getTimeRange().getMax()); + assertEquals(true, (scan.getFilter() instanceof PrefixFilter)); + assertEquals(0, Bytes.compareTo(((PrefixFilter)scan.getFilter()).getPrefix(), Bytes.toBytesBinary(prefix))); + String[] argsWithLabels = { + "-D " + Export.EXPORT_VISIBILITY_LABELS 
+ "=" + label_0 + "," + label_1, + "table", + "outputDir", + String.valueOf(version), + String.valueOf(startTime), + String.valueOf(endTime), + prefix + }; + Configuration conf = new Configuration(UTIL.getConfiguration()); + // parse the "-D" options + String[] otherArgs = new GenericOptionsParser(conf, argsWithLabels).getRemainingArgs(); + Scan scanWithLabels = Export.getConfiguredScanForJob(conf, otherArgs); + assertEquals(version, scanWithLabels.getMaxVersions()); + assertEquals(startTime, scanWithLabels.getTimeRange().getMin()); + assertEquals(endTime, scanWithLabels.getTimeRange().getMax()); + assertEquals(true, (scanWithLabels.getFilter() instanceof PrefixFilter)); + assertEquals(0, Bytes.compareTo(((PrefixFilter)scanWithLabels.getFilter()).getPrefix(), Bytes.toBytesBinary(prefix))); + assertEquals(2, scanWithLabels.getAuthorizations().getLabels().size()); + assertEquals(label_0, scanWithLabels.getAuthorizations().getLabels().get(0)); + assertEquals(label_1, scanWithLabels.getAuthorizations().getLabels().get(1)); + } /** * Test map method of Importer + * @throws java.lang.Exception */ @SuppressWarnings({ "unchecked", "rawtypes" }) @Test @@ -626,51 +1091,61 @@ /** * Test addFilterAndArguments method of Import This method set couple * parameters into Configuration + * @throws java.io.IOException */ @Test public void testAddFilterAndArguments() throws IOException { Configuration configuration = new Configuration(); - - List args = new ArrayList(); + List args = new ArrayList<>(); args.add("param1"); args.add("param2"); Import.addFilterAndArguments(configuration, FilterBase.class, args); assertEquals("org.apache.hadoop.hbase.filter.FilterBase", - configuration.get(Import.FILTER_CLASS_CONF_KEY)); + configuration.get(Import.FILTER_CLASS_CONF_KEY)); assertEquals("param1,param2", configuration.get(Import.FILTER_ARGS_CONF_KEY)); } - @Test - public void testDurability() throws Exception { + public void testDurability() throws IOException, Throwable { + String exportTable = "exporttestDurability"; + String importTable = "importTestDurability1"; + String importTableV2 = "importTestDurability2"; + for (Exporter exporter : EXPORTERS) { + testDurability(exportTable, importTable, importTableV2, exporter); + UTIL.deleteTable(TableName.valueOf(exportTable)); + UTIL.deleteTable(TableName.valueOf(importTable)); + UTIL.deleteTable(TableName.valueOf(importTableV2)); + deleteOutput(UTIL.getConfiguration(), OUTPUT_DIR); + } + } + void testDurability(final String exportTable, final String importTable, final String importTable2, + final Exporter exporter) throws IOException, Throwable { // Create an export table. 
- String exportTableName = "exporttestDurability"; - try (Table exportTable = UTIL.createTable(TableName.valueOf(exportTableName), FAMILYA, 3);) { + try (Table exportT = UTIL.createTable(TableName.valueOf(exportTable), FAMILYA, 3);) { // Insert some data Put put = new Put(ROW1); put.addColumn(FAMILYA, QUAL, now, QUAL); put.addColumn(FAMILYA, QUAL, now + 1, QUAL); put.addColumn(FAMILYA, QUAL, now + 2, QUAL); - exportTable.put(put); + exportT.put(put); put = new Put(ROW2); put.addColumn(FAMILYA, QUAL, now, QUAL); put.addColumn(FAMILYA, QUAL, now + 1, QUAL); put.addColumn(FAMILYA, QUAL, now + 2, QUAL); - exportTable.put(put); + exportT.put(put); // Run the export - String[] args = new String[] { exportTableName, FQ_OUTPUT_DIR, "1000"}; - assertTrue(runExport(args)); + String[] args = new String[] { exportTable, FQ_OUTPUT_DIR, "1000"}; + assertTrue(exporter.toString(), exporter.runExport(UTIL.getConfiguration(), args)); // Create the table for import - String importTableName = "importTestDurability1"; - Table importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3); + Table importT = UTIL.createTable(TableName.valueOf(importTable), FAMILYA, 3); // Register the wal listener for the import table HRegionInfo region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer() - .getOnlineRegions(importTable.getName()).get(0).getRegionInfo(); + .getOnlineRegions(importT.getName()).get(0).getRegionInfo(); TableWALActionListener walListener = new TableWALActionListener(region); WAL wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region); wal.registerWALActionsListener(walListener); @@ -678,27 +1153,26 @@ // Run the import with SKIP_WAL args = new String[] { "-D" + Import.WAL_DURABILITY + "=" + Durability.SKIP_WAL.name(), - importTableName, FQ_OUTPUT_DIR }; - assertTrue(runImport(args)); + importTable, FQ_OUTPUT_DIR }; + assertTrue(exporter.toString(), runImport(UTIL.getConfiguration(), args)); //Assert that the wal is not visisted - assertTrue(!walListener.isWALVisited()); + assertTrue(exporter.toString(), !walListener.isWALVisited()); //Ensure that the count is 2 (only one version of key value is obtained) - assertTrue(getCount(importTable, null) == 2); + assertTrue(exporter.toString(), getCount(importT, null) == 2); // Run the import with the default durability option - importTableName = "importTestDurability2"; - importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3); + importT = UTIL.createTable(TableName.valueOf(importTable2), FAMILYA, 3); region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer() - .getOnlineRegions(importTable.getName()).get(0).getRegionInfo(); + .getOnlineRegions(importT.getName()).get(0).getRegionInfo(); wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region); walListener = new TableWALActionListener(region); wal.registerWALActionsListener(walListener); - args = new String[] { importTableName, FQ_OUTPUT_DIR }; - assertTrue(runImport(args)); + args = new String[] { importTable2, FQ_OUTPUT_DIR }; + assertTrue(exporter.toString(), runImport(UTIL.getConfiguration(), args)); //Assert that the wal is visisted - assertTrue(walListener.isWALVisited()); + assertTrue(exporter.toString(), walListener.isWALVisited()); //Ensure that the count is 2 (only one version of key value is obtained) - assertTrue(getCount(importTable, null) == 2); + assertTrue(exporter.toString(), getCount(importT, null) == 2); } } @@ -708,7 +1182,7 @@ */ private static class TableWALActionListener extends 
WALActionsListener.Base { - private HRegionInfo regionInfo; + private final HRegionInfo regionInfo; private boolean isVisited = false; public TableWALActionListener(HRegionInfo region) { @@ -727,4 +1201,26 @@ return isVisited; } } + private static void addLabels(final Configuration conf, final List users, final List labels) throws Exception { + PrivilegedExceptionAction action + = new PrivilegedExceptionAction() { + @Override + public VisibilityLabelsProtos.VisibilityLabelsResponse run() throws Exception { +// String[] labels = {SECRET, TOPSECRET, CONFIDENTIAL, PRIVATE}; + try (Connection conn = ConnectionFactory.createConnection(conf)) { + VisibilityClient.addLabels(conn, labels.toArray(new String[labels.size()])); + for (User user : users) { + VisibilityClient.setAuths(conn, labels.toArray(new String[labels.size()]), user.getName()); + } + } catch (Throwable t) { + throw new IOException(t); + } + return null; + } + }; + USER_ADMIN.runAs(action); + } + interface Exporter { + boolean runExport(final Configuration conf, final String[] args) throws ServiceException, IOException, Throwable; + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java index f0e7ac9..042644a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.security.access; +import com.google.common.collect.ListMultimap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; @@ -68,6 +69,7 @@ import com.google.common.collect.Maps; import com.google.protobuf.BlockingRpcChannel; import com.google.protobuf.ServiceException; +import static org.apache.hadoop.hbase.security.access.AccessControlLists.getPermissions; /** * Utility methods for testing security @@ -168,7 +170,7 @@ * To indicate the action was not allowed, either throw an AccessDeniedException * or return an empty list of KeyValues. */ - static interface AccessTestAction extends PrivilegedExceptionAction { } + public static interface AccessTestAction extends PrivilegedExceptionAction { } /** This fails only in case of ADE or empty list for any of the actions. */ public static void verifyAllowed(User user, AccessTestAction... actions) throws Exception { @@ -327,7 +329,6 @@ private static void updateACLs(final HBaseTestingUtility util, Callable c) throws Exception { // Get the current mtimes for all access controllers final Map oldMTimes = getAuthManagerMTimes(util.getHBaseCluster()); - // Run the update action c.call(); @@ -491,6 +492,10 @@ }); } + public static ListMultimap getTablePermissions(Configuration conf, + TableName tableName) throws IOException { + return AccessControlLists.getTablePermissions(conf, tableName); + } /** * Grant permissions on a table to the given user. Will wait until all active * AccessController instances have updated their permissions caches or will
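Stepping back to the Exporter hook declared at the end of the test class above: each test now takes an Exporter so the same body runs once against the classic MapReduce Export tool and once against the coprocessor-backed export this patch introduces, with the exporter's toString() threaded into every assertion message so a failure names the path that broke. Below is a sketch of what the MapReduce-backed implementation could look like, assuming Export.createSubmittableJob keeps its long-standing public signature; the class name and the collapsed throws clause are illustrative, and the endpoint-backed sibling would call the new coprocessor entry point instead of submitting a job.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.mapreduce.Export;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.GenericOptionsParser;

interface Exporter {
  // Mirrors the nested interface added above; the patch's version also lists
  // ServiceException and IOException, which Throwable already covers.
  boolean runExport(Configuration conf, String[] args) throws Throwable;
}

final class MapReduceExporter implements Exporter {
  @Override
  public boolean runExport(Configuration conf, String[] args) throws Throwable {
    // Copy the configuration so each exporter run gets its own job settings,
    // then let GenericOptionsParser strip any -D options before job submission.
    Configuration copy = new Configuration(conf);
    String[] remaining = new GenericOptionsParser(copy, args).getRemainingArgs();
    Job job = Export.createSubmittableJob(copy, remaining);
    job.waitForCompletion(false);
    return job.isSuccessful();
  }

  @Override
  public String toString() {
    return "org.apache.hadoop.hbase.mapreduce.Export";  // shows up in the assertion messages
  }
}

Keeping the export path behind this small interface is what lets the EXPORTERS loop reuse every existing test body unchanged while still telling the two implementations apart when something fails.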