diff --git hbase-protocol/pom.xml hbase-protocol/pom.xml
index b7846ca..765edff 100644
--- hbase-protocol/pom.xml
+++ hbase-protocol/pom.xml
@@ -180,6 +180,7 @@
Comparator.proto
Encryption.proto
ErrorHandling.proto
+ Export.proto
FS.proto
Filter.proto
HBase.proto
diff --git hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ExportProtos.java hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ExportProtos.java
new file mode 100644
index 0000000..0f19647
--- /dev/null
+++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ExportProtos.java
@@ -0,0 +1,2197 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: Export.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class ExportProtos {
+ private ExportProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface ExportRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .hbase.pb.Scan scan = 1;
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ boolean hasScan();
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
+
+ // required string outputPath = 2;
+ /**
+ * required string outputPath = 2;
+ */
+ boolean hasOutputPath();
+ /**
+ * required string outputPath = 2;
+ */
+ java.lang.String getOutputPath();
+ /**
+ * required string outputPath = 2;
+ */
+ com.google.protobuf.ByteString
+ getOutputPathBytes();
+
+ // optional bool compressed = 3 [default = false];
+ /**
+ * optional bool compressed = 3 [default = false];
+ */
+ boolean hasCompressed();
+ /**
+ * optional bool compressed = 3 [default = false];
+ */
+ boolean getCompressed();
+
+ // optional string compressType = 4;
+ /**
+ * optional string compressType = 4;
+ */
+ boolean hasCompressType();
+ /**
+ * optional string compressType = 4;
+ */
+ java.lang.String getCompressType();
+ /**
+ * optional string compressType = 4;
+ */
+ com.google.protobuf.ByteString
+ getCompressTypeBytes();
+
+ // optional string compressCodec = 5;
+ /**
+ * optional string compressCodec = 5;
+ */
+ boolean hasCompressCodec();
+ /**
+ * optional string compressCodec = 5;
+ */
+ java.lang.String getCompressCodec();
+ /**
+ * optional string compressCodec = 5;
+ */
+ com.google.protobuf.ByteString
+ getCompressCodecBytes();
+
+ // optional .hbase.pb.DelegationToken fsToken = 6;
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ boolean hasFsToken();
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken();
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ExportRequest}
+ */
+ public static final class ExportRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements ExportRequestOrBuilder {
+ // Use ExportRequest.newBuilder() to construct.
+ private ExportRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ExportRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ExportRequest defaultInstance;
+ public static ExportRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ExportRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ExportRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = scan_.toBuilder();
+ }
+ scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(scan_);
+ scan_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ outputPath_ = input.readBytes();
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ compressed_ = input.readBool();
+ break;
+ }
+ case 34: {
+ bitField0_ |= 0x00000008;
+ compressType_ = input.readBytes();
+ break;
+ }
+ case 42: {
+ bitField0_ |= 0x00000010;
+ compressCodec_ = input.readBytes();
+ break;
+ }
+ case 50: {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ subBuilder = fsToken_.toBuilder();
+ }
+ fsToken_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(fsToken_);
+ fsToken_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000020;
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.class, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ExportRequest> PARSER =
+ new com.google.protobuf.AbstractParser<ExportRequest>() {
+ public ExportRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ExportRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ExportRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required .hbase.pb.Scan scan = 1;
+ public static final int SCAN_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public boolean hasScan() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
+ return scan_;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
+ return scan_;
+ }
+
+ // required string outputPath = 2;
+ public static final int OUTPUTPATH_FIELD_NUMBER = 2;
+ private java.lang.Object outputPath_;
+ /**
+ * required string outputPath = 2;
+ */
+ public boolean hasOutputPath() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required string outputPath = 2;
+ */
+ public java.lang.String getOutputPath() {
+ java.lang.Object ref = outputPath_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ outputPath_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * required string outputPath = 2;
+ */
+ public com.google.protobuf.ByteString
+ getOutputPathBytes() {
+ java.lang.Object ref = outputPath_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ outputPath_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional bool compressed = 3 [default = false];
+ public static final int COMPRESSED_FIELD_NUMBER = 3;
+ private boolean compressed_;
+ /**
+ * optional bool compressed = 3 [default = false];
+ */
+ public boolean hasCompressed() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * optional bool compressed = 3 [default = false];
+ */
+ public boolean getCompressed() {
+ return compressed_;
+ }
+
+ // optional string compressType = 4;
+ public static final int COMPRESSTYPE_FIELD_NUMBER = 4;
+ private java.lang.Object compressType_;
+ /**
+ * optional string compressType = 4;
+ */
+ public boolean hasCompressType() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * optional string compressType = 4;
+ */
+ public java.lang.String getCompressType() {
+ java.lang.Object ref = compressType_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ compressType_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * optional string compressType = 4;
+ */
+ public com.google.protobuf.ByteString
+ getCompressTypeBytes() {
+ java.lang.Object ref = compressType_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ compressType_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional string compressCodec = 5;
+ public static final int COMPRESSCODEC_FIELD_NUMBER = 5;
+ private java.lang.Object compressCodec_;
+ /**
+ * optional string compressCodec = 5;
+ */
+ public boolean hasCompressCodec() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * optional string compressCodec = 5;
+ */
+ public java.lang.String getCompressCodec() {
+ java.lang.Object ref = compressCodec_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ compressCodec_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * optional string compressCodec = 5;
+ */
+ public com.google.protobuf.ByteString
+ getCompressCodecBytes() {
+ java.lang.Object ref = compressCodec_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ compressCodec_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional .hbase.pb.DelegationToken fsToken = 6;
+ public static final int FSTOKEN_FIELD_NUMBER = 6;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_;
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ public boolean hasFsToken() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() {
+ return fsToken_;
+ }
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() {
+ return fsToken_;
+ }
+
+ private void initFields() {
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ outputPath_ = "";
+ compressed_ = false;
+ compressType_ = "";
+ compressCodec_ = "";
+ fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasScan()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasOutputPath()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getScan().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, scan_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, getOutputPathBytes());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeBool(3, compressed_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeBytes(4, getCompressTypeBytes());
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ output.writeBytes(5, getCompressCodecBytes());
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ output.writeMessage(6, fsToken_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, scan_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, getOutputPathBytes());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(3, compressed_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(4, getCompressTypeBytes());
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(5, getCompressCodecBytes());
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(6, fsToken_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest other = (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest) obj;
+
+ boolean result = true;
+ result = result && (hasScan() == other.hasScan());
+ if (hasScan()) {
+ result = result && getScan()
+ .equals(other.getScan());
+ }
+ result = result && (hasOutputPath() == other.hasOutputPath());
+ if (hasOutputPath()) {
+ result = result && getOutputPath()
+ .equals(other.getOutputPath());
+ }
+ result = result && (hasCompressed() == other.hasCompressed());
+ if (hasCompressed()) {
+ result = result && (getCompressed()
+ == other.getCompressed());
+ }
+ result = result && (hasCompressType() == other.hasCompressType());
+ if (hasCompressType()) {
+ result = result && getCompressType()
+ .equals(other.getCompressType());
+ }
+ result = result && (hasCompressCodec() == other.hasCompressCodec());
+ if (hasCompressCodec()) {
+ result = result && getCompressCodec()
+ .equals(other.getCompressCodec());
+ }
+ result = result && (hasFsToken() == other.hasFsToken());
+ if (hasFsToken()) {
+ result = result && getFsToken()
+ .equals(other.getFsToken());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasScan()) {
+ hash = (37 * hash) + SCAN_FIELD_NUMBER;
+ hash = (53 * hash) + getScan().hashCode();
+ }
+ if (hasOutputPath()) {
+ hash = (37 * hash) + OUTPUTPATH_FIELD_NUMBER;
+ hash = (53 * hash) + getOutputPath().hashCode();
+ }
+ if (hasCompressed()) {
+ hash = (37 * hash) + COMPRESSED_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getCompressed());
+ }
+ if (hasCompressType()) {
+ hash = (37 * hash) + COMPRESSTYPE_FIELD_NUMBER;
+ hash = (53 * hash) + getCompressType().hashCode();
+ }
+ if (hasCompressCodec()) {
+ hash = (37 * hash) + COMPRESSCODEC_FIELD_NUMBER;
+ hash = (53 * hash) + getCompressCodec().hashCode();
+ }
+ if (hasFsToken()) {
+ hash = (37 * hash) + FSTOKEN_FIELD_NUMBER;
+ hash = (53 * hash) + getFsToken().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ExportRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.class, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getScanFieldBuilder();
+ getFsTokenFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (scanBuilder_ == null) {
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ } else {
+ scanBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ outputPath_ = "";
+ bitField0_ = (bitField0_ & ~0x00000002);
+ compressed_ = false;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ compressType_ = "";
+ bitField0_ = (bitField0_ & ~0x00000008);
+ compressCodec_ = "";
+ bitField0_ = (bitField0_ & ~0x00000010);
+ if (fsTokenBuilder_ == null) {
+ fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
+ } else {
+ fsTokenBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000020);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest result = new org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (scanBuilder_ == null) {
+ result.scan_ = scan_;
+ } else {
+ result.scan_ = scanBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.outputPath_ = outputPath_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.compressed_ = compressed_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.compressType_ = compressType_;
+ if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+ to_bitField0_ |= 0x00000010;
+ }
+ result.compressCodec_ = compressCodec_;
+ if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+ to_bitField0_ |= 0x00000020;
+ }
+ if (fsTokenBuilder_ == null) {
+ result.fsToken_ = fsToken_;
+ } else {
+ result.fsToken_ = fsTokenBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance()) return this;
+ if (other.hasScan()) {
+ mergeScan(other.getScan());
+ }
+ if (other.hasOutputPath()) {
+ bitField0_ |= 0x00000002;
+ outputPath_ = other.outputPath_;
+ onChanged();
+ }
+ if (other.hasCompressed()) {
+ setCompressed(other.getCompressed());
+ }
+ if (other.hasCompressType()) {
+ bitField0_ |= 0x00000008;
+ compressType_ = other.compressType_;
+ onChanged();
+ }
+ if (other.hasCompressCodec()) {
+ bitField0_ |= 0x00000010;
+ compressCodec_ = other.compressCodec_;
+ onChanged();
+ }
+ if (other.hasFsToken()) {
+ mergeFsToken(other.getFsToken());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasScan()) {
+
+ return false;
+ }
+ if (!hasOutputPath()) {
+
+ return false;
+ }
+ if (!getScan().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required .hbase.pb.Scan scan = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public boolean hasScan() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
+ if (scanBuilder_ == null) {
+ return scan_;
+ } else {
+ return scanBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
+ if (scanBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ scan_ = value;
+ onChanged();
+ } else {
+ scanBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public Builder setScan(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
+ if (scanBuilder_ == null) {
+ scan_ = builderForValue.build();
+ onChanged();
+ } else {
+ scanBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
+ if (scanBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
+ scan_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
+ } else {
+ scan_ = value;
+ }
+ onChanged();
+ } else {
+ scanBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public Builder clearScan() {
+ if (scanBuilder_ == null) {
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ onChanged();
+ } else {
+ scanBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getScanFieldBuilder().getBuilder();
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
+ if (scanBuilder_ != null) {
+ return scanBuilder_.getMessageOrBuilder();
+ } else {
+ return scan_;
+ }
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
+ getScanFieldBuilder() {
+ if (scanBuilder_ == null) {
+ scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
+ scan_,
+ getParentForChildren(),
+ isClean());
+ scan_ = null;
+ }
+ return scanBuilder_;
+ }
+
+ // required string outputPath = 2;
+ private java.lang.Object outputPath_ = "";
+ /**
+ * required string outputPath = 2;
+ */
+ public boolean hasOutputPath() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required string outputPath = 2;
+ */
+ public java.lang.String getOutputPath() {
+ java.lang.Object ref = outputPath_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ outputPath_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * required string outputPath = 2;
+ */
+ public com.google.protobuf.ByteString
+ getOutputPathBytes() {
+ java.lang.Object ref = outputPath_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ outputPath_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * required string outputPath = 2;
+ */
+ public Builder setOutputPath(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ outputPath_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required string outputPath = 2;
+ */
+ public Builder clearOutputPath() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ outputPath_ = getDefaultInstance().getOutputPath();
+ onChanged();
+ return this;
+ }
+ /**
+ * required string outputPath = 2;
+ */
+ public Builder setOutputPathBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ outputPath_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional bool compressed = 3 [default = false];
+ private boolean compressed_ ;
+ /**
+ * optional bool compressed = 3 [default = false];
+ */
+ public boolean hasCompressed() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * optional bool compressed = 3 [default = false];
+ */
+ public boolean getCompressed() {
+ return compressed_;
+ }
+ /**
+ * optional bool compressed = 3 [default = false];
+ */
+ public Builder setCompressed(boolean value) {
+ bitField0_ |= 0x00000004;
+ compressed_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional bool compressed = 3 [default = false];
+ */
+ public Builder clearCompressed() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ compressed_ = false;
+ onChanged();
+ return this;
+ }
+
+ // optional string compressType = 4;
+ private java.lang.Object compressType_ = "";
+ /**
+ * optional string compressType = 4;
+ */
+ public boolean hasCompressType() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * optional string compressType = 4;
+ */
+ public java.lang.String getCompressType() {
+ java.lang.Object ref = compressType_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ compressType_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * optional string compressType = 4;
+ */
+ public com.google.protobuf.ByteString
+ getCompressTypeBytes() {
+ java.lang.Object ref = compressType_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ compressType_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * optional string compressType = 4;
+ */
+ public Builder setCompressType(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000008;
+ compressType_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional string compressType = 4;
+ */
+ public Builder clearCompressType() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ compressType_ = getDefaultInstance().getCompressType();
+ onChanged();
+ return this;
+ }
+ /**
+ * optional string compressType = 4;
+ */
+ public Builder setCompressTypeBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000008;
+ compressType_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional string compressCodec = 5;
+ private java.lang.Object compressCodec_ = "";
+ /**
+ * optional string compressCodec = 5;
+ */
+ public boolean hasCompressCodec() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * optional string compressCodec = 5;
+ */
+ public java.lang.String getCompressCodec() {
+ java.lang.Object ref = compressCodec_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ compressCodec_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * optional string compressCodec = 5;
+ */
+ public com.google.protobuf.ByteString
+ getCompressCodecBytes() {
+ java.lang.Object ref = compressCodec_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ compressCodec_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * optional string compressCodec = 5;
+ */
+ public Builder setCompressCodec(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000010;
+ compressCodec_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional string compressCodec = 5;
+ */
+ public Builder clearCompressCodec() {
+ bitField0_ = (bitField0_ & ~0x00000010);
+ compressCodec_ = getDefaultInstance().getCompressCodec();
+ onChanged();
+ return this;
+ }
+ /**
+ * optional string compressCodec = 5;
+ */
+ public Builder setCompressCodecBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000010;
+ compressCodec_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional .hbase.pb.DelegationToken fsToken = 6;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> fsTokenBuilder_;
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ public boolean hasFsToken() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() {
+ if (fsTokenBuilder_ == null) {
+ return fsToken_;
+ } else {
+ return fsTokenBuilder_.getMessage();
+ }
+ }
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) {
+ if (fsTokenBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ fsToken_ = value;
+ onChanged();
+ } else {
+ fsTokenBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000020;
+ return this;
+ }
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ public Builder setFsToken(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder builderForValue) {
+ if (fsTokenBuilder_ == null) {
+ fsToken_ = builderForValue.build();
+ onChanged();
+ } else {
+ fsTokenBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000020;
+ return this;
+ }
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ public Builder mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) {
+ if (fsTokenBuilder_ == null) {
+ if (((bitField0_ & 0x00000020) == 0x00000020) &&
+ fsToken_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) {
+ fsToken_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.newBuilder(fsToken_).mergeFrom(value).buildPartial();
+ } else {
+ fsToken_ = value;
+ }
+ onChanged();
+ } else {
+ fsTokenBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000020;
+ return this;
+ }
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ public Builder clearFsToken() {
+ if (fsTokenBuilder_ == null) {
+ fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
+ onChanged();
+ } else {
+ fsTokenBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000020);
+ return this;
+ }
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder getFsTokenBuilder() {
+ bitField0_ |= 0x00000020;
+ onChanged();
+ return getFsTokenFieldBuilder().getBuilder();
+ }
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() {
+ if (fsTokenBuilder_ != null) {
+ return fsTokenBuilder_.getMessageOrBuilder();
+ } else {
+ return fsToken_;
+ }
+ }
+ /**
+ * optional .hbase.pb.DelegationToken fsToken = 6;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>
+ getFsTokenFieldBuilder() {
+ if (fsTokenBuilder_ == null) {
+ fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>(
+ fsToken_,
+ getParentForChildren(),
+ isClean());
+ fsToken_ = null;
+ }
+ return fsTokenBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.ExportRequest)
+ }
+
+ static {
+ defaultInstance = new ExportRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.ExportRequest)
+ }
+
+ public interface ExportResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required uint64 rowCount = 1;
+ /**
+ * required uint64 rowCount = 1;
+ */
+ boolean hasRowCount();
+ /**
+ * required uint64 rowCount = 1;
+ */
+ long getRowCount();
+
+ // required uint64 cellCount = 2;
+ /**
+ * required uint64 cellCount = 2;
+ */
+ boolean hasCellCount();
+ /**
+ * required uint64 cellCount = 2;
+ */
+ long getCellCount();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ExportResponse}
+ */
+ public static final class ExportResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements ExportResponseOrBuilder {
+ // Use ExportResponse.newBuilder() to construct.
+ private ExportResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ExportResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ExportResponse defaultInstance;
+ public static ExportResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ExportResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ExportResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ rowCount_ = input.readUInt64();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ cellCount_ = input.readUInt64();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.class, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ExportResponse> PARSER =
+ new com.google.protobuf.AbstractParser<ExportResponse>() {
+ public ExportResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ExportResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ExportResponse> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required uint64 rowCount = 1;
+ public static final int ROWCOUNT_FIELD_NUMBER = 1;
+ private long rowCount_;
+ /**
+ * required uint64 rowCount = 1;
+ */
+ public boolean hasRowCount() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required uint64 rowCount = 1;
+ */
+ public long getRowCount() {
+ return rowCount_;
+ }
+
+ // required uint64 cellCount = 2;
+ public static final int CELLCOUNT_FIELD_NUMBER = 2;
+ private long cellCount_;
+ /**
+ * required uint64 cellCount = 2;
+ */
+ public boolean hasCellCount() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required uint64 cellCount = 2;
+ */
+ public long getCellCount() {
+ return cellCount_;
+ }
+
+ private void initFields() {
+ rowCount_ = 0L;
+ cellCount_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRowCount()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasCellCount()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeUInt64(1, rowCount_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt64(2, cellCount_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(1, rowCount_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(2, cellCount_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse other = (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse) obj;
+
+ boolean result = true;
+ result = result && (hasRowCount() == other.hasRowCount());
+ if (hasRowCount()) {
+ result = result && (getRowCount()
+ == other.getRowCount());
+ }
+ result = result && (hasCellCount() == other.hasCellCount());
+ if (hasCellCount()) {
+ result = result && (getCellCount()
+ == other.getCellCount());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRowCount()) {
+ hash = (37 * hash) + ROWCOUNT_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getRowCount());
+ }
+ if (hasCellCount()) {
+ hash = (37 * hash) + CELLCOUNT_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getCellCount());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ExportResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.class, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ rowCount_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ cellCount_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse result = new org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.rowCount_ = rowCount_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.cellCount_ = cellCount_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance()) return this;
+ if (other.hasRowCount()) {
+ setRowCount(other.getRowCount());
+ }
+ if (other.hasCellCount()) {
+ setCellCount(other.getCellCount());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRowCount()) {
+
+ return false;
+ }
+ if (!hasCellCount()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required uint64 rowCount = 1;
+ private long rowCount_ ;
+ /**
+ * required uint64 rowCount = 1;
+ */
+ public boolean hasRowCount() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required uint64 rowCount = 1;
+ */
+ public long getRowCount() {
+ return rowCount_;
+ }
+ /**
+ * required uint64 rowCount = 1;
+ */
+ public Builder setRowCount(long value) {
+ bitField0_ |= 0x00000001;
+ rowCount_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required uint64 rowCount = 1;
+ */
+ public Builder clearRowCount() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ rowCount_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // required uint64 cellCount = 2;
+ private long cellCount_ ;
+ /**
+ * required uint64 cellCount = 2;
+ */
+ public boolean hasCellCount() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required uint64 cellCount = 2;
+ */
+ public long getCellCount() {
+ return cellCount_;
+ }
+ /**
+ * required uint64 cellCount = 2;
+ */
+ public Builder setCellCount(long value) {
+ bitField0_ |= 0x00000002;
+ cellCount_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required uint64 cellCount = 2;
+ */
+ public Builder clearCellCount() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ cellCount_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.ExportResponse)
+ }
+
+ static {
+ defaultInstance = new ExportResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.ExportResponse)
+ }
+
+ /**
+ * Protobuf service {@code hbase.pb.ExportService}
+ */
+ public static abstract class ExportService
+ implements com.google.protobuf.Service {
+ protected ExportService() {}
+
+ public interface Interface {
+ /**
+ * rpc export(.hbase.pb.ExportRequest) returns (.hbase.pb.ExportResponse);
+ */
+ public abstract void export(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse> done);
+
+ }
+
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new ExportService() {
+ @java.lang.Override
+ public void export(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse> done) {
+ impl.export(controller, request, done);
+ }
+
+ };
+ }
+
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.export(controller, (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ /**
+ * rpc export(.hbase.pb.ExportRequest) returns (.hbase.pb.ExportResponse);
+ */
+ public abstract void export(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse> done);
+
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.getDescriptor().getServices().get(0);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.export(controller, (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void export(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance()));
+ }
+ }
+
+ public static BlockingInterface newBlockingStub(
+ com.google.protobuf.BlockingRpcChannel channel) {
+ return new BlockingStub(channel);
+ }
+
+ public interface BlockingInterface {
+ public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse export(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request)
+ throws com.google.protobuf.ServiceException;
+ }
+
+ private static final class BlockingStub implements BlockingInterface {
+ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.BlockingRpcChannel channel;
+
+ public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse export(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance());
+ }
+
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.ExportService)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_ExportRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_ExportRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_ExportResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_ExportResponse_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\014Export.proto\022\010hbase.pb\032\014Client.proto\"\265" +
+ "\001\n\rExportRequest\022\034\n\004scan\030\001 \002(\0132\016.hbase.p" +
+ "b.Scan\022\022\n\noutputPath\030\002 \002(\t\022\031\n\ncompressed" +
+ "\030\003 \001(\010:\005false\022\024\n\014compressType\030\004 \001(\t\022\025\n\rc" +
+ "ompressCodec\030\005 \001(\t\022*\n\007fsToken\030\006 \001(\0132\031.hb" +
+ "ase.pb.DelegationToken\"5\n\016ExportResponse" +
+ "\022\020\n\010rowCount\030\001 \002(\004\022\021\n\tcellCount\030\002 \002(\0042L\n" +
+ "\rExportService\022;\n\006export\022\027.hbase.pb.Expo" +
+ "rtRequest\032\030.hbase.pb.ExportResponseBB\n*o" +
+ "rg.apache.hadoop.hbase.protobuf.generate",
+ "dB\014ExportProtosH\001\210\001\001\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_hbase_pb_ExportRequest_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_hbase_pb_ExportRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_ExportRequest_descriptor,
+ new java.lang.String[] { "Scan", "OutputPath", "Compressed", "CompressType", "CompressCodec", "FsToken", });
+ internal_static_hbase_pb_ExportResponse_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_hbase_pb_ExportResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_ExportResponse_descriptor,
+ new java.lang.String[] { "RowCount", "CellCount", });
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git hbase-protocol/src/main/protobuf/Export.proto hbase-protocol/src/main/protobuf/Export.proto
new file mode 100644
index 0000000..5e6c262
--- /dev/null
+++ hbase-protocol/src/main/protobuf/Export.proto
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package hbase.pb;
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "ExportProtos";
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+option java_generic_services = true;
+
+import "Client.proto";
+
+service ExportService {
+ rpc export (ExportRequest) returns (ExportResponse);
+}
+
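+// The request mirrors the knobs of the MapReduce-based export: the scan to run, the
+// directory the sequence files are written to, optional SequenceFile compression
+// settings and, on secure clusters, the HDFS delegation token to write with.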
+message ExportRequest {
+ required Scan scan = 1;
+ required string outputPath = 2;
+ optional bool compressed = 3 [default = false];
+ optional string compressType = 4;
+ optional string compressCodec = 5;
+ optional DelegationToken fsToken = 6;
+}
+message ExportResponse {
+ required uint64 rowCount = 1;
+ required uint64 cellCount = 2;
+}
+
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
new file mode 100644
index 0000000..f5cc4a8
--- /dev/null
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
@@ -0,0 +1,502 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hbase.coprocessor;
+
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+import com.google.protobuf.ServiceException;
+import java.io.Closeable;
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.coprocessor.Batch;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
+import org.apache.hadoop.hbase.ipc.RpcServer;
+import org.apache.hadoop.hbase.ipc.ServerRpcController;
+import org.apache.hadoop.hbase.mapreduce.ExportUtils;
+import org.apache.hadoop.hbase.mapreduce.Import;
+import org.apache.hadoop.hbase.mapreduce.ResultSerialization;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.ResponseConverter;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken;
+import org.apache.hadoop.hbase.protobuf.generated.ExportProtos;
+import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.Region;
+import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.security.UserProvider;
+import org.apache.hadoop.hbase.security.token.FsDelegationToken;
+import org.apache.hadoop.hbase.util.ByteStringer;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * Export an HBase table. Writes content to sequence files up in HDFS. Use
+ * {@link Import} to read it back in again. It is implemented as a coprocessor
+ * endpoint.
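+ *
+ * <p>Example invocation (a sketch: it assumes the endpoint has been loaded on the
+ * table, e.g. through hbase.coprocessor.region.classes, and reuses the positional
+ * arguments of the MapReduce-based export):
+ * <pre>
+ * hbase org.apache.hadoop.hbase.coprocessor.Export &lt;tablename&gt; &lt;outputdir&gt; [&lt;versions&gt; [&lt;starttime&gt; [&lt;endtime&gt;]]]
+ * </pre>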
+ *
+ * @see Export
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class Export extends ExportProtos.ExportService
+ implements Coprocessor, CoprocessorService {
+
+ private static final Log LOG = LogFactory.getLog(Export.class);
+ private static final Class<? extends CompressionCodec> DEFAULT_CODEC = DefaultCodec.class;
+ private static final SequenceFile.CompressionType DEFAULT_TYPE = SequenceFile.CompressionType.RECORD;
+ private RegionCoprocessorEnvironment env = null;
+ private UserProvider userProvider;
+
+ public static void main(String[] args) throws IOException, Throwable {
+ run(HBaseConfiguration.create(), args);
+ }
+
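+ // Client-side driver shared with main(): parses the command line, creates the output
+ // directory, acquires an HDFS delegation token for the caller, and then invokes the
+ // ExportService endpoint on every region intersecting the scan range. Each region
+ // writes its own sequence file; the returned map is keyed by region and carries the
+ // per-region row/cell counts. On any failure the output directory is deleted.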
+ public static Map<byte[], ExportProtos.ExportResponse> run(final Configuration conf,
+ final String[] args) throws ServiceException, IOException, Throwable {
+ String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
+ if (!ExportUtils.checkArguments(otherArgs)) {
+ ExportUtils.usage("Wrong number of arguments: " + otherArgs.length);
+ System.exit(-1);
+ }
+ TableName tableName = TableName.valueOf(otherArgs[0]);
+ Path dir = new Path(otherArgs[1]);
+ FileSystem fs = dir.getFileSystem(conf);
+ UserProvider userProvider = UserProvider.instantiate(conf);
+ checkDir(fs, dir);
+ Scan scan = ExportUtils.getConfiguredScanFromCommandLine(conf, otherArgs);
+ FsDelegationToken fsDelegationToken = new FsDelegationToken(userProvider, "renewer");
+ fsDelegationToken.acquireDelegationToken(fs);
+ try {
+ final ExportProtos.ExportRequest request = getConfiguredRequest(conf, dir,
+ scan, fsDelegationToken.getUserToken());
+ try (Connection con = ConnectionFactory.createConnection(conf);
+ Table table = con.getTable(tableName)) {
+ Map<byte[], ExportProtos.ExportResponse> result = table.coprocessorService(ExportProtos.ExportService.class,
+ scan.getStartRow(),
+ scan.getStopRow(), new Batch.Call<ExportProtos.ExportService, ExportProtos.ExportResponse>() {
+ @Override
+ public ExportProtos.ExportResponse call(ExportProtos.ExportService service) throws IOException {
+ ServerRpcController controller = new ServerRpcController();
+ BlockingRpcCallback<ExportProtos.ExportResponse> rpcCallback = new BlockingRpcCallback<>();
+ service.export(controller, request, rpcCallback);
+ if (controller.failedOnException()) {
+ throw controller.getFailedOn();
+ }
+ return rpcCallback.get();
+ }
+ });
+ long cellCount = 0;
+ long rowCount = 0;
+ for (ExportProtos.ExportResponse rp : result.values()) {
+ cellCount += rp.getCellCount();
+ rowCount += rp.getRowCount();
+ }
+ return result;
+ } catch (Throwable e) {
+ fs.delete(dir, true);
+ throw e;
+ }
+ } finally {
+ fsDelegationToken.releaseDelegationToken();
+ }
+ }
+
+ private static boolean getCompression(final ExportProtos.ExportRequest request) {
+ if (request.hasCompressed()) {
+ return request.getCompressed();
+ } else {
+ return false;
+ }
+ }
+
+ private static SequenceFile.CompressionType getCompressionType(final ExportProtos.ExportRequest request) {
+ if (request.hasCompressType()) {
+ return SequenceFile.CompressionType.valueOf(request.getCompressType());
+ } else {
+ return DEFAULT_TYPE;
+ }
+ }
+
+ private static CompressionCodec getCompressionCodec(final Configuration conf, final ExportProtos.ExportRequest request) {
+ try {
+ Class<? extends CompressionCodec> codecClass;
+ if (request.hasCompressCodec()) {
+ codecClass = conf.getClassByName(request.getCompressCodec()).asSubclass(CompressionCodec.class);
+ } else {
+ codecClass = DEFAULT_CODEC;
+ }
+ return ReflectionUtils.newInstance(codecClass, conf);
+ } catch (ClassNotFoundException e) {
+ throw new IllegalArgumentException("Compression codec "
+ + request.getCompressCodec() + " was not found.", e);
+ }
+ }
+
+ private static SequenceFile.Writer.Option getOutputPath(final Configuration conf,
+ final HRegionInfo info, final ExportProtos.ExportRequest request) throws IOException {
+ Path file = new Path(request.getOutputPath(), "export-" + info.getEncodedName());
+ FileSystem fs = file.getFileSystem(conf);
+ if (fs.exists(file)) {
+ throw new IOException(file + " exists");
+ }
+ return SequenceFile.Writer.file(file);
+ }
+
+ private static List<SequenceFile.Writer.Option> getWriterOptions(final Configuration conf,
+ final HRegionInfo info, final ExportProtos.ExportRequest request) throws IOException {
+ List<SequenceFile.Writer.Option> rval = new LinkedList<>();
+ rval.add(SequenceFile.Writer.keyClass(ImmutableBytesWritable.class));
+ rval.add(SequenceFile.Writer.valueClass(Result.class));
+ rval.add(getOutputPath(conf, info, request));
+ if (getCompression(request)) {
+ rval.add(SequenceFile.Writer.compression(getCompressionType(request), getCompressionCodec(conf, request)));
+ } else {
+ rval.add(SequenceFile.Writer.compression(SequenceFile.CompressionType.NONE));
+ }
+ return rval;
+ }
+
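+ // Server-side scan loop: opens a RegionScanner (honouring any scanner-related
+ // coprocessor hooks), pulls raw cells row by row, wraps each row in a Result and
+ // appends it to this region's sequence file through SecureWriter, counting rows and
+ // cells as it goes. The same-row check below is a sanity guard on nextRaw().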
+ private static ExportProtos.ExportResponse processData(final Region region, final Configuration conf,
+ final UserProvider userProvider, final Scan scan, final Token userToken,
+ final List<SequenceFile.Writer.Option> opts) throws IOException {
+ ScanCoprocessor cp = new ScanCoprocessor(region);
+ RegionScanner scanner = null;
+ try (RegionOp regionOp = new RegionOp(region);
+ SecureWriter out = new SecureWriter(conf, userProvider, userToken, opts)) {
+ scanner = cp.checkScannerOpen(scan);
+ ImmutableBytesWritable key = new ImmutableBytesWritable();
+ long rowCount = 0;
+ long cellCount = 0;
+ List<Result> results = new ArrayList<>();
+ List<Cell> cells = new ArrayList<>();
+ boolean hasMore;
+ do {
+ boolean bypass = cp.preScannerNext(scanner, results, scan.getBatch());
+ if (bypass) {
+ hasMore = false;
+ } else {
+ hasMore = scanner.nextRaw(cells);
+ if (cells.isEmpty()) {
+ continue;
+ }
+ Cell firstCell = cells.get(0);
+ for (Cell cell : cells) {
+ if (Bytes.compareTo(firstCell.getRowArray(), firstCell.getRowOffset(), firstCell.getRowLength(),
+ cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()) != 0) {
+ throw new IOException("Why the RegionScanner#nextRaw returns the data of different rows??"
+ + " first row=" + Bytes.toHex(firstCell.getRowArray(), firstCell.getRowOffset(), firstCell.getRowLength())
+ + ", current row=" + Bytes.toHex(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
+ }
+ }
+ results.add(Result.create(cells));
+ cells.clear();
+ cp.postScannerNext(scanner, results, scan.getBatch(), hasMore);
+ }
+ for (Result r : results) {
+ key.set(r.getRow());
+ out.append(key, r);
+ ++rowCount;
+ cellCount += r.size();
+ }
+ results.clear();
+ } while (hasMore);
+ return ExportProtos.ExportResponse.newBuilder()
+ .setRowCount(rowCount)
+ .setCellCount(cellCount)
+ .build();
+ } finally {
+ cp.checkScannerClose(scanner);
+ }
+ }
+
+ private static void checkDir(final FileSystem fs, final Path dir) throws IOException {
+ if (fs.exists(dir)) {
+ throw new RuntimeException("The " + dir + " exists");
+ }
+ if (!fs.mkdirs(dir)) {
+ throw new IOException("Failed to create the " + dir);
+ }
+ }
+
+ private static ExportProtos.ExportRequest getConfiguredRequest(Configuration conf,
+ Path dir, final Scan scan, final Token<?> userToken) throws IOException {
+ boolean compressed = conf.getBoolean(FileOutputFormat.COMPRESS, false);
+ String compressionType = conf.get(FileOutputFormat.COMPRESS_TYPE,
+ DEFAULT_TYPE.toString());
+ String compressionCodec = conf.get(FileOutputFormat.COMPRESS_CODEC,
+ DEFAULT_CODEC.getName());
+ DelegationToken protoToken = null;
+ if (userToken != null) {
+ protoToken = DelegationToken.newBuilder()
+ .setIdentifier(ByteStringer.wrap(userToken.getIdentifier()))
+ .setPassword(ByteStringer.wrap(userToken.getPassword()))
+ .setKind(userToken.getKind().toString())
+ .setService(userToken.getService().toString()).build();
+ }
+ LOG.info("compressed=" + compressed
+ + ", compression type=" + compressionType
+ + ", compression codec=" + compressionCodec
+ + ", userToken=" + userToken);
+ ExportProtos.ExportRequest.Builder builder = ExportProtos.ExportRequest.newBuilder()
+ .setScan(ProtobufUtil.toScan(scan))
+ .setOutputPath(dir.toString())
+ .setCompressed(compressed)
+ .setCompressCodec(compressionCodec)
+ .setCompressType(compressionType);
+ if (protoToken != null) {
+ builder.setFsToken(protoToken);
+ }
+ return builder.build();
+ }
+
+
+ @Override
+ public void start(CoprocessorEnvironment environment) throws IOException {
+ if (environment instanceof RegionCoprocessorEnvironment) {
+ env = (RegionCoprocessorEnvironment) environment;
+ userProvider = UserProvider.instantiate(env.getConfiguration());
+ } else {
+ throw new CoprocessorException("Must be loaded on a table region!");
+ }
+ }
+
+ @Override
+ public void stop(CoprocessorEnvironment env) throws IOException {
+ }
+
+ @Override
+ public Service getService() {
+ return this;
+ }
+
+ @Override
+ public void export(RpcController controller, ExportProtos.ExportRequest request,
+ RpcCallback<ExportProtos.ExportResponse> done) {
+ Region region = env.getRegion();
+ Configuration conf = HBaseConfiguration.create(env.getConfiguration());
+ conf.setStrings("io.serializations", conf.get("io.serializations"), ResultSerialization.class.getName());
+ try {
+ Scan scan = validateKey(region.getRegionInfo(), request);
+ Token userToken = null;
+ if (userProvider.isHadoopSecurityEnabled() && !request.hasFsToken()) {
+ LOG.warn("Hadoop security is enable, but no found of user token");
+ } else if (userProvider.isHadoopSecurityEnabled()) {
+ userToken = new Token(request.getFsToken().getIdentifier().toByteArray(),
+ request.getFsToken().getPassword().toByteArray(),
+ new Text(request.getFsToken().getKind()),
+ new Text(request.getFsToken().getService()));
+ }
+ ExportProtos.ExportResponse response = processData(region, conf, userProvider,
+ scan, userToken, getWriterOptions(conf, region.getRegionInfo(), request));
+ done.run(response);
+ } catch (IOException e) {
+ ResponseConverter.setControllerException(controller, e);
+ LOG.error(e);
+ }
+ }
+
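+ // Clamps the requested scan to this region's key range so that each region exports
+ // only the rows it actually hosts, even when the client supplied a wider scan.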
+ private Scan validateKey(final HRegionInfo region, final ExportProtos.ExportRequest request) throws IOException {
+ Scan scan = ProtobufUtil.toScan(request.getScan());
+ byte[] regionStartKey = region.getStartKey();
+ byte[] originStartKey = scan.getStartRow();
+ if (originStartKey == null
+ || Bytes.compareTo(originStartKey, regionStartKey) < 0) {
+ scan.setStartRow(regionStartKey);
+ }
+ byte[] regionEndKey = region.getEndKey();
+ byte[] originEndKey = scan.getStopRow();
+ if (originEndKey == null
+ || Bytes.compareTo(originEndKey, regionEndKey) > 0) {
+ scan.setStopRow(regionEndKey);
+ }
+ return scan;
+ }
+
+ private static class RegionOp implements Closeable {
+
+ private final Region region;
+
+ RegionOp(final Region region) throws IOException {
+ this.region = region;
+ region.startRegionOperation();
+ }
+
+ @Override
+ public void close() throws IOException {
+ region.closeRegionOperation();
+ }
+ }
+
+ private static class ScanCoprocessor {
+
+ private final Region region;
+
+ ScanCoprocessor(final Region region) {
+ this.region = region;
+ }
+
+ RegionScanner checkScannerOpen(final Scan scan) throws IOException {
+ RegionScanner scanner;
+ if (region.getCoprocessorHost() == null) {
+ scanner = region.getScanner(scan);
+ } else {
+ scanner = region.getCoprocessorHost().preScannerOpen(scan);
+ if (scanner == null) {
+ scanner = region.getScanner(scan);
+ }
+ scanner = region.getCoprocessorHost().postScannerOpen(scan, scanner);
+ }
+ if (scanner == null) {
+ throw new IOException("Failed to open region scanner");
+ }
+ return scanner;
+ }
+
+ void checkScannerClose(final InternalScanner s) throws IOException {
+ if (s == null) {
+ return;
+ }
+ if (region.getCoprocessorHost() == null) {
+ s.close();
+ return;
+ }
+ if (region.getCoprocessorHost().preScannerClose(s)) {
+ return;
+ }
+ try {
+ s.close();
+ } finally {
+ region.getCoprocessorHost().postScannerClose(s);
+ }
+ }
+
+ boolean preScannerNext(final InternalScanner s,
+ final List<Result> results, final int limit) throws IOException {
+ if (region.getCoprocessorHost() == null) {
+ return false;
+ } else {
+ Boolean bypass = region.getCoprocessorHost().preScannerNext(s, results, limit);
+ return bypass == null ? false : bypass;
+ }
+ }
+
+ boolean postScannerNext(final InternalScanner s,
+ final List<Result> results, final int limit, boolean hasMore)
+ throws IOException {
+ if (region.getCoprocessorHost() == null) {
+ return false;
+ } else {
+ return region.getCoprocessorHost().postScannerNext(s, results, limit, hasMore);
+ }
+ }
+ }
+
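+ // SecureWriter resolves the effective user (the RPC request user, falling back to the
+ // server's current user) and attaches the supplied HDFS delegation token to it;
+ // PrivilegedWriter then performs each append as that user when one is available.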
+ private static class SecureWriter implements Closeable {
+ private final PrivilegedWriter privilegedWriter;
+ SecureWriter(final Configuration conf, final UserProvider userProvider, final Token userToken,
+ final List<SequenceFile.Writer.Option> opts) throws IOException {
+ privilegedWriter = new PrivilegedWriter(getActiveUser(userProvider, userToken),
+ SequenceFile.createWriter(conf, opts.toArray(new SequenceFile.Writer.Option[opts.size()])));
+ }
+ void append(final Object key, final Object value) throws IOException {
+ privilegedWriter.append(key, value);
+ }
+ private static User getActiveUser(final UserProvider userProvider, final Token userToken) throws IOException {
+ User user = RpcServer.getRequestUser();
+ if (user == null) {
+ user = userProvider.getCurrent();
+ }
+ if (user == null && userToken != null) {
+ LOG.warn("No found of user credentials, but a token was got from user request");
+ } else if (user != null && userToken != null) {
+ user.addToken(userToken);
+ }
+ return user;
+ }
+
+ @Override
+ public void close() throws IOException {
+ privilegedWriter.close();
+ }
+ }
+ private static class PrivilegedWriter implements PrivilegedExceptionAction, Closeable {
+ private final User user;
+ private final SequenceFile.Writer out;
+ private Object key;
+ private Object value;
+ PrivilegedWriter(final User user, final SequenceFile.Writer out) {
+ this.user = user;
+ this.out = out;
+ }
+ void append(final Object key, final Object value) throws IOException {
+ if (user == null) {
+ out.append(key, value);
+ } else {
+ this.key = key;
+ this.value = value;
+ try {
+ user.runAs(this);
+ } catch (InterruptedException ex) {
+ throw new IOException(ex);
+ }
+ }
+ }
+ @Override
+ public Boolean run() throws Exception {
+ out.append(key, value);
+ return true;
+ }
+
+ @Override
+ public void close() throws IOException {
+ out.close();
+ }
+ }
+}
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
index 56d229a..769204c 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
@@ -30,14 +30,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-import org.apache.hadoop.hbase.filter.PrefixFilter;
-import org.apache.hadoop.hbase.filter.RegexStringComparator;
-import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
@@ -53,11 +46,8 @@ import org.apache.hadoop.util.ToolRunner;
@InterfaceStability.Stable
public class Export extends Configured implements Tool {
private static final Log LOG = LogFactory.getLog(Export.class);
- final static String NAME = "export";
- final static String RAW_SCAN = "hbase.mapreduce.include.deleted.rows";
- final static String EXPORT_BATCHING = "hbase.export.scanner.batch";
-
- private final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";
+ public static final String NAME = "export";
+ public static final String JOB_NAME_CONF_KEY = "mapreduce.job.name";
/**
* Sets up the actual job.
@@ -75,7 +65,7 @@ public class Export extends Configured implements Tool {
job.setJobName(NAME + "_" + tableName);
job.setJarByClass(Export.class);
// Set optional scan parameters
- Scan s = getConfiguredScanForJob(conf, args);
+ Scan s = ExportUtils.getConfiguredScanFromCommandLine(conf, args);
IdentityTableMapper.initJob(tableName, s, IdentityTableMapper.class, job);
// No reducers. Just write straight to output files.
job.setNumReduceTasks(0);
@@ -86,102 +76,18 @@ public class Export extends Configured implements Tool {
return job;
}
- private static Scan getConfiguredScanForJob(Configuration conf, String[] args) throws IOException {
- Scan s = new Scan();
- // Optional arguments.
- // Set Scan Versions
- int versions = args.length > 2? Integer.parseInt(args[2]): 1;
- s.setMaxVersions(versions);
- // Set Scan Range
- long startTime = args.length > 3? Long.parseLong(args[3]): 0L;
- long endTime = args.length > 4? Long.parseLong(args[4]): Long.MAX_VALUE;
- s.setTimeRange(startTime, endTime);
- // Set cache blocks
- s.setCacheBlocks(false);
- // set Start and Stop row
- if (conf.get(TableInputFormat.SCAN_ROW_START) != null) {
- s.setStartRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_START)));
- }
- if (conf.get(TableInputFormat.SCAN_ROW_STOP) != null) {
- s.setStopRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_STOP)));
- }
- // Set Scan Column Family
- boolean raw = Boolean.parseBoolean(conf.get(RAW_SCAN));
- if (raw) {
- s.setRaw(raw);
- }
-
- if (conf.get(TableInputFormat.SCAN_COLUMN_FAMILY) != null) {
- s.addFamily(Bytes.toBytes(conf.get(TableInputFormat.SCAN_COLUMN_FAMILY)));
- }
- // Set RowFilter or Prefix Filter if applicable.
- Filter exportFilter = getExportFilter(args);
- if (exportFilter!= null) {
- LOG.info("Setting Scan Filter for Export.");
- s.setFilter(exportFilter);
- }
-
- int batching = conf.getInt(EXPORT_BATCHING, -1);
- if (batching != -1){
- try {
- s.setBatch(batching);
- } catch (IncompatibleFilterException e) {
- LOG.error("Batching could not be set", e);
- }
- }
- LOG.info("versions=" + versions + ", starttime=" + startTime +
- ", endtime=" + endTime + ", keepDeletedCells=" + raw);
- return s;
- }
-
- private static Filter getExportFilter(String[] args) {
- Filter exportFilter = null;
- String filterCriteria = (args.length > 5) ? args[5]: null;
- if (filterCriteria == null) return null;
- if (filterCriteria.startsWith("^")) {
- String regexPattern = filterCriteria.substring(1, filterCriteria.length());
- exportFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator(regexPattern));
- } else {
- exportFilter = new PrefixFilter(Bytes.toBytesBinary(filterCriteria));
- }
- return exportFilter;
- }
-
- /*
- * @param errorMsg Error message. Can be null.
- */
- private static void usage(final String errorMsg) {
- if (errorMsg != null && errorMsg.length() > 0) {
- System.err.println("ERROR: " + errorMsg);
- }
- System.err.println("Usage: Export [-D ]* [ " +
- "[ []] [^[regex pattern] or [Prefix] to filter]]\n");
- System.err.println(" Note: -D properties will be applied to the conf used. ");
- System.err.println(" For example: ");
- System.err.println(" -D mapreduce.output.fileoutputformat.compress=true");
- System.err.println(" -D mapreduce.output.fileoutputformat.compress.codec=org.apache.hadoop.io.compress.GzipCodec");
- System.err.println(" -D mapreduce.output.fileoutputformat.compress.type=BLOCK");
- System.err.println(" Additionally, the following SCAN properties can be specified");
- System.err.println(" to control/limit what is exported..");
- System.err.println(" -D " + TableInputFormat.SCAN_COLUMN_FAMILY + "=");
- System.err.println(" -D " + RAW_SCAN + "=true");
- System.err.println(" -D " + TableInputFormat.SCAN_ROW_START + "=");
- System.err.println(" -D " + TableInputFormat.SCAN_ROW_STOP + "=");
- System.err.println(" -D " + JOB_NAME_CONF_KEY
- + "=jobName - use the specified mapreduce job name for the export");
- System.err.println("For performance consider the following properties:\n"
- + " -Dhbase.client.scanner.caching=100\n"
- + " -Dmapreduce.map.speculative=false\n"
- + " -Dmapreduce.reduce.speculative=false");
- System.err.println("For tables with very wide rows consider setting the batch size as below:\n"
- + " -D" + EXPORT_BATCHING + "=10");
- }
-
-
@Override
public int run(String[] args) throws Exception {
- if (args.length < 2) {
- usage("Wrong number of arguments: " + args.length);
+ if (!ExportUtils.checkArguments(args)) {
+ ExportUtils.usage("Wrong number of arguments: " + args.length);
+ System.err.println(" -D " + JOB_NAME_CONF_KEY
+ + "=jobName - use the specified mapreduce job name for the export");
+ System.err.println("For MR performance consider the following properties:");
+ System.err.println(" -D mapreduce.map.speculative=false");
+ System.err.println(" -D mapreduce.reduce.speculative=false");
+ System.err.println("For tables with very wide rows consider setting the batch size as below:\n"
+ + " -D " + ExportUtils.EXPORT_BATCHING + "=10\n"
+ + " -D " + ExportUtils.EXPORT_CACHING + "=100");
return -1;
}
Job job = createSubmittableJob(getConf(), args);
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
new file mode 100644
index 0000000..72d8a05
--- /dev/null
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
@@ -0,0 +1,149 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.mapreduce;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.CompareFilter;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.RegexStringComparator;
+import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.security.visibility.Authorizations;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+
+public final class ExportUtils {
+ private static final Log LOG = LogFactory.getLog(ExportUtils.class);
+ public static final String RAW_SCAN = "hbase.mapreduce.include.deleted.rows";
+ public static final String EXPORT_BATCHING = "hbase.export.scanner.batch";
+ public static final String EXPORT_CACHING = "hbase.export.scanner.caching";
+ public static final String EXPORT_VISIBILITY_LABELS = "hbase.export.visibility.labels";
+ public static boolean checkArguments(final String[] args) {
+ return args.length >= 2;
+ }
+ /**
+ * Common usage for other export tools.
+ * @param errorMsg Error message. Can be null.
+ */
+ public static void usage(final String errorMsg) {
+ if (errorMsg != null && errorMsg.length() > 0) {
+ System.err.println("ERROR: " + errorMsg);
+ }
+ System.err.println("Usage: Export [-D ]* [ " +
+ "[ []] [^[regex pattern] or [Prefix] to filter]]\n");
+ System.err.println(" Note: -D properties will be applied to the conf used. ");
+ System.err.println(" For example: ");
+ System.err.println(" -D " + FileOutputFormat.COMPRESS + "=true");
+ System.err.println(" -D " + FileOutputFormat.COMPRESS_CODEC + "=org.apache.hadoop.io.compress.GzipCodec");
+ System.err.println(" -D " + FileOutputFormat.COMPRESS_TYPE + "=BLOCK");
+ System.err.println(" Additionally, the following SCAN properties can be specified");
+ System.err.println(" to control/limit what is exported..");
+ System.err.println(" -D " + TableInputFormat.SCAN_COLUMN_FAMILY + "=");
+ System.err.println(" -D " + RAW_SCAN + "=true");
+ System.err.println(" -D " + TableInputFormat.SCAN_ROW_START + "=");
+ System.err.println(" -D " + TableInputFormat.SCAN_ROW_STOP + "=");
+ System.err.println(" -D " + HConstants.HBASE_CLIENT_SCANNER_CACHING + "=100");
+ System.err.println(" -D " + EXPORT_VISIBILITY_LABELS + "=");
+ }
+ public static Filter getExportFilter(String[] args) {
+ Filter exportFilter;
+ String filterCriteria = (args.length > 5) ? args[5]: null;
+ if (filterCriteria == null) return null;
+ if (filterCriteria.startsWith("^")) {
+ String regexPattern = filterCriteria.substring(1, filterCriteria.length());
+ exportFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regexPattern));
+ } else {
+ exportFilter = new PrefixFilter(Bytes.toBytesBinary(filterCriteria));
+ }
+ return exportFilter;
+ }
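+ /**
+ * Builds the Scan from the shared positional arguments: args[2]=versions,
+ * args[3]=start time, args[4]=end time, args[5]=row regex (^pattern) or row prefix;
+ * the table name and output directory in args[0]/args[1] are consumed by the callers.
+ * Further tuning (start/stop row, column family, raw scan, batching, caching and
+ * visibility labels) is read from the Configuration keys defined above.
+ */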
+ public static Scan getConfiguredScanFromCommandLine(Configuration conf, String[] args) throws IOException {
+ Scan s = new Scan();
+ // Optional arguments.
+ // Set Scan Versions
+ int versions = args.length > 2? Integer.parseInt(args[2]): 1;
+ s.setMaxVersions(versions);
+ // Set Scan Range
+ long startTime = args.length > 3? Long.parseLong(args[3]): 0L;
+ long endTime = args.length > 4? Long.parseLong(args[4]): Long.MAX_VALUE;
+ s.setTimeRange(startTime, endTime);
+ // Set cache blocks
+ s.setCacheBlocks(false);
+ // set Start and Stop row
+ if (conf.get(TableInputFormat.SCAN_ROW_START) != null) {
+ s.setStartRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_START)));
+ }
+ if (conf.get(TableInputFormat.SCAN_ROW_STOP) != null) {
+ s.setStopRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_STOP)));
+ }
+ // Set Scan Column Family
+ boolean raw = Boolean.parseBoolean(conf.get(RAW_SCAN));
+ if (raw) {
+ s.setRaw(raw);
+ }
+ if (conf.get(TableInputFormat.SCAN_COLUMN_FAMILY) != null) {
+ s.addFamily(Bytes.toBytes(conf.get(TableInputFormat.SCAN_COLUMN_FAMILY)));
+ }
+ // Set RowFilter or Prefix Filter if applicable.
+ Filter exportFilter = getExportFilter(args);
+ if (exportFilter!= null) {
+ LOG.info("Setting Scan Filter for Export.");
+ s.setFilter(exportFilter);
+ }
+ List<String> labels = null;
+ if (conf.get(EXPORT_VISIBILITY_LABELS) != null) {
+ labels = Arrays.asList(conf.getStrings(EXPORT_VISIBILITY_LABELS));
+ if (!labels.isEmpty()) {
+ s.setAuthorizations(new Authorizations(labels));
+ }
+ }
+
+ int batching = conf.getInt(EXPORT_BATCHING, -1);
+ if (batching != -1){
+ try {
+ s.setBatch(batching);
+ } catch (IncompatibleFilterException e) {
+ LOG.error("Batching could not be set", e);
+ }
+ }
+
+ int caching = conf.getInt(EXPORT_CACHING, 100);
+ if (caching != -1){
+ try {
+ s.setCaching(caching);
+ } catch (IncompatibleFilterException e) {
+ LOG.error("Caching could not be set", e);
+ }
+ }
+ LOG.info("versions=" + versions + ", starttime=" + startTime
+ + ", endtime=" + endTime + ", keepDeletedCells=" + raw
+ + ", visibility labels=" + labels);
+ return s;
+ }
+ private ExportUtils() {
+ }
+}
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index 50146fd..fc5c932 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -34,7 +34,6 @@ import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import java.util.NavigableMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -59,6 +58,7 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.filter.PrefixFilter;
@@ -73,24 +73,34 @@ import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LauncherSecurityManager;
import org.apache.hadoop.mapreduce.Mapper.Context;
+import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.ToolRunner;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
+import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
/**
* Tests the table import and table export MR job functionality
*/
+@RunWith(Parameterized.class)
@Category({VerySlowMapReduceTests.class, MediumTests.class})
public class TestImportExport {
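+ // Runs every test case against both export implementations: the classic MapReduce
+ // tool and the new coprocessor endpoint, selected through the Parameterized runner.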
+ public interface Exporter {
+ boolean run(Configuration config, String[] args) throws Throwable;
+ void main(String[] args) throws Throwable;
+ }
private static final Log LOG = LogFactory.getLog(TestImportExport.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final byte[] ROW1 = Bytes.toBytesBinary("\\x32row1");
@@ -105,19 +115,53 @@ public class TestImportExport {
private static String FQ_OUTPUT_DIR;
private static final String EXPORT_BATCH_SIZE = "100";
- private static long now = System.currentTimeMillis();
-
+ private static final long NOW = System.currentTimeMillis();
+ private final Exporter exporter;
+ private final TableName EXPORT_TABLE = TableName.valueOf("export_table");
+ private final TableName IMPORT_TABLE = TableName.valueOf("import_table");
+ @Parameters
+ public static Object[] params() {
+ Object mrExport = new Exporter() {
+ @Override
+ public boolean run(Configuration config, String[] args) throws Throwable {
+ int status = ToolRunner.run(config, new Export(), args);
+ return status == 0;
+ }
+ @Override
+ public void main(String[] args) throws Throwable {
+ Export.main(args);
+ }
+ };
+ Object epExport = new Exporter() {
+ @Override
+ public boolean run(Configuration config, String[] args) throws Throwable {
+ org.apache.hadoop.hbase.coprocessor.Export.run(config, args);
+ return true;
+ }
+
+ @Override
+ public void main(String[] args) throws Throwable {
+ org.apache.hadoop.hbase.coprocessor.Export.main(args);
+ }
+ };
+ return new Object[] {mrExport, epExport};
+ }
+ public TestImportExport(Exporter exporter) {
+ this.exporter = exporter;
+ }
@BeforeClass
- public static void beforeClass() throws Exception {
+ public static void beforeClass() throws Throwable {
// Up the handlers; this test needs more than usual.
UTIL.getConfiguration().setInt(HConstants.REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT, 10);
+ UTIL.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
+ org.apache.hadoop.hbase.coprocessor.Export.class.getName());
UTIL.startMiniCluster();
FQ_OUTPUT_DIR =
new Path(OUTPUT_DIR).makeQualified(FileSystem.get(UTIL.getConfiguration())).toString();
}
@AfterClass
- public static void afterClass() throws Exception {
+ public static void afterClass() throws Throwable {
UTIL.shutdownMiniCluster();
}
@@ -129,11 +173,16 @@ public class TestImportExport {
LOG.info("Running " + name.getMethodName());
}
- @Before
@After
- public void cleanup() throws Exception {
+ public void cleanup() throws Throwable {
FileSystem fs = FileSystem.get(UTIL.getConfiguration());
fs.delete(new Path(OUTPUT_DIR), true);
+ if (UTIL.getAdmin().tableExists(EXPORT_TABLE)) {
+ UTIL.deleteTable(EXPORT_TABLE);
+ }
+ if (UTIL.getAdmin().tableExists(IMPORT_TABLE)) {
+ UTIL.deleteTable(IMPORT_TABLE);
+ }
}
/**
@@ -144,10 +193,9 @@ public class TestImportExport {
* @throws InterruptedException
* @throws ClassNotFoundException
*/
- boolean runExport(String[] args) throws Exception {
+ boolean runExport(String[] args) throws Throwable {
// need to make a copy of the configuration because to make sure different temp dirs are used.
- int status = ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Export(), args);
- return status == 0;
+ return exporter.run(new Configuration(UTIL.getConfiguration()), args);
}
/**
@@ -158,7 +206,7 @@ public class TestImportExport {
* @throws InterruptedException
* @throws ClassNotFoundException
*/
- boolean runImport(String[] args) throws Exception {
+ boolean runImport(String[] args) throws Throwable {
// need to make a copy of the configuration because to make sure different temp dirs are used.
int status = ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Import(), args);
return status == 0;
@@ -166,26 +214,25 @@ public class TestImportExport {
/**
* Test simple replication case with column mapping
- * @throws Exception
+ * @throws Throwable
*/
@Test
- public void testSimpleCase() throws Exception {
- String EXPORT_TABLE = "exportSimpleCase";
- try (Table t = UTIL.createTable(TableName.valueOf(EXPORT_TABLE), FAMILYA, 3);) {
+ public void testSimpleCase() throws Throwable {
+ try (Table t = UTIL.createTable(EXPORT_TABLE, FAMILYA, 3);) {
Put p = new Put(ROW1);
- p.addColumn(FAMILYA, QUAL, now, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 2, QUAL);
t.put(p);
p = new Put(ROW2);
- p.addColumn(FAMILYA, QUAL, now, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 2, QUAL);
t.put(p);
p = new Put(ROW3);
- p.addColumn(FAMILYA, QUAL, now, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 2, QUAL);
t.put(p);
}
@@ -193,17 +240,15 @@ public class TestImportExport {
// Only export row1 & row2.
"-D" + TableInputFormat.SCAN_ROW_START + "=\\x32row1",
"-D" + TableInputFormat.SCAN_ROW_STOP + "=\\x32row3",
- EXPORT_TABLE,
+ EXPORT_TABLE.getNameAsString(),
FQ_OUTPUT_DIR,
"1000", // max number of key versions per key to export
};
assertTrue(runExport(args));
-
- String IMPORT_TABLE = "importTableSimpleCase";
- try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), FAMILYB, 3);) {
+ try (Table t = UTIL.createTable(IMPORT_TABLE, FAMILYB, 3);) {
args = new String[] {
"-D" + Import.CF_RENAME_PROP + "="+FAMILYA_STRING+":"+FAMILYB_STRING,
- IMPORT_TABLE,
+ IMPORT_TABLE.getNameAsString(),
FQ_OUTPUT_DIR
};
assertTrue(runImport(args));
@@ -225,21 +270,21 @@ public class TestImportExport {
/**
* Test export hbase:meta table
*
- * @throws Exception
+ * @throws Throwable
*/
@Test
- public void testMetaExport() throws Exception {
- String EXPORT_TABLE = TableName.META_TABLE_NAME.getNameAsString();
- String[] args = new String[] { EXPORT_TABLE, FQ_OUTPUT_DIR, "1", "0", "0" };
+ public void testMetaExport() throws Throwable {
+ String[] args = new String[] { TableName.META_TABLE_NAME.getNameAsString(),
+ FQ_OUTPUT_DIR, "1", "0", "0" };
assertTrue(runExport(args));
}
/**
* Test import data from 0.94 exported file
- * @throws Exception
+ * @throws Throwable
*/
@Test
- public void testImport94Table() throws Exception {
+ public void testImport94Table() throws Throwable {
final String name = "exportedTableIn94Format";
URL url = TestImportExport.class.getResource(name);
File f = new File(url.toURI());
@@ -252,11 +297,10 @@ public class TestImportExport {
Path importPath = new Path(f.toURI());
FileSystem fs = FileSystem.get(UTIL.getConfiguration());
fs.copyFromLocalFile(importPath, new Path(FQ_OUTPUT_DIR + Path.SEPARATOR + name));
- String IMPORT_TABLE = name;
- try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), Bytes.toBytes("f1"), 3);) {
+ try (Table t = UTIL.createTable(IMPORT_TABLE, Bytes.toBytes("f1"), 3);) {
String[] args = new String[] {
"-Dhbase.import.version=0.94" ,
- IMPORT_TABLE, FQ_OUTPUT_DIR
+ IMPORT_TABLE.getNameAsString(), FQ_OUTPUT_DIR
};
assertTrue(runImport(args));
/* exportedTableIn94Format contains 5 rows
@@ -275,9 +319,8 @@ public class TestImportExport {
* Test export scanner batching
*/
@Test
- public void testExportScannerBatching() throws Exception {
- String BATCH_TABLE = "exportWithBatch";
- HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(BATCH_TABLE));
+ public void testExportScannerBatching() throws Throwable {
+ HTableDescriptor desc = new HTableDescriptor(EXPORT_TABLE);
desc.addFamily(new HColumnDescriptor(FAMILYA)
.setMaxVersions(1)
);
@@ -285,16 +328,16 @@ public class TestImportExport {
try (Table t = UTIL.getConnection().getTable(desc.getTableName());) {
Put p = new Put(ROW1);
- p.addColumn(FAMILYA, QUAL, now, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 3, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 4, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 2, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 3, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 4, QUAL);
t.put(p);
String[] args = new String[] {
- "-D" + Export.EXPORT_BATCHING + "=" + EXPORT_BATCH_SIZE, // added scanner batching arg.
- BATCH_TABLE,
+ "-D" + ExportUtils.EXPORT_BATCHING + "=" + EXPORT_BATCH_SIZE, // added scanner batching arg.
+ EXPORT_TABLE.getNameAsString(),
FQ_OUTPUT_DIR
};
assertTrue(runExport(args));
@@ -305,9 +348,8 @@ public class TestImportExport {
}
@Test
- public void testWithDeletes() throws Exception {
- String EXPORT_TABLE = "exportWithDeletes";
- HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(EXPORT_TABLE));
+ public void testWithDeletes() throws Throwable {
+ HTableDescriptor desc = new HTableDescriptor(EXPORT_TABLE);
desc.addFamily(new HColumnDescriptor(FAMILYA)
.setMaxVersions(5)
.setKeepDeletedCells(KeepDeletedCells.TRUE)
@@ -316,30 +358,29 @@ public class TestImportExport {
try (Table t = UTIL.getConnection().getTable(desc.getTableName());) {
Put p = new Put(ROW1);
- p.addColumn(FAMILYA, QUAL, now, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 3, QUAL);
- p.addColumn(FAMILYA, QUAL, now + 4, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 2, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 3, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW + 4, QUAL);
t.put(p);
- Delete d = new Delete(ROW1, now+3);
+ Delete d = new Delete(ROW1, NOW+3);
t.delete(d);
d = new Delete(ROW1);
- d.addColumns(FAMILYA, QUAL, now+2);
+ d.addColumns(FAMILYA, QUAL, NOW+2);
t.delete(d);
}
String[] args = new String[] {
- "-D" + Export.RAW_SCAN + "=true",
- EXPORT_TABLE,
+ "-D" + ExportUtils.RAW_SCAN + "=true",
+ EXPORT_TABLE.getNameAsString(),
FQ_OUTPUT_DIR,
"1000", // max number of key versions per key to export
};
assertTrue(runExport(args));
- String IMPORT_TABLE = "importWithDeletes";
- desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
+ desc = new HTableDescriptor(IMPORT_TABLE);
desc.addFamily(new HColumnDescriptor(FAMILYA)
.setMaxVersions(5)
.setKeepDeletedCells(KeepDeletedCells.TRUE)
@@ -347,7 +388,7 @@ public class TestImportExport {
UTIL.getHBaseAdmin().createTable(desc);
try (Table t = UTIL.getConnection().getTable(desc.getTableName());) {
args = new String[] {
- IMPORT_TABLE,
+ IMPORT_TABLE.getNameAsString(),
FQ_OUTPUT_DIR
};
assertTrue(runImport(args));
@@ -359,20 +400,18 @@ public class TestImportExport {
Result r = scanner.next();
Cell[] res = r.rawCells();
assertTrue(CellUtil.isDeleteFamily(res[0]));
- assertEquals(now+4, res[1].getTimestamp());
- assertEquals(now+3, res[2].getTimestamp());
+ assertEquals(NOW+4, res[1].getTimestamp());
+ assertEquals(NOW+3, res[2].getTimestamp());
assertTrue(CellUtil.isDelete(res[3]));
- assertEquals(now+2, res[4].getTimestamp());
- assertEquals(now+1, res[5].getTimestamp());
- assertEquals(now, res[6].getTimestamp());
+ assertEquals(NOW+2, res[4].getTimestamp());
+ assertEquals(NOW+1, res[5].getTimestamp());
+ assertEquals(NOW, res[6].getTimestamp());
}
}
@Test
- public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Exception {
- TableName EXPORT_TABLE =
- TableName.valueOf("exportWithMultipleDeleteFamilyMarkersOfSameRowSameFamily");
+ public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Throwable {
HTableDescriptor desc = new HTableDescriptor(EXPORT_TABLE);
desc.addFamily(new HColumnDescriptor(FAMILYA)
.setMaxVersions(5)
@@ -384,41 +423,39 @@ public class TestImportExport {
//Add first version of QUAL
Put p = new Put(ROW1);
- p.addColumn(FAMILYA, QUAL, now, QUAL);
+ p.addColumn(FAMILYA, QUAL, NOW, QUAL);
exportT.put(p);
//Add Delete family marker
- Delete d = new Delete(ROW1, now+3);
+ Delete d = new Delete(ROW1, NOW+3);
exportT.delete(d);
//Add second version of QUAL
p = new Put(ROW1);
- p.addColumn(FAMILYA, QUAL, now + 5, "s".getBytes());
+ p.addColumn(FAMILYA, QUAL, NOW + 5, "s".getBytes());
exportT.put(p);
//Add second Delete family marker
- d = new Delete(ROW1, now+7);
+ d = new Delete(ROW1, NOW+7);
exportT.delete(d);
-
String[] args = new String[] {
- "-D" + Export.RAW_SCAN + "=true", EXPORT_TABLE.getNameAsString(),
+ "-D" + ExportUtils.RAW_SCAN + "=true", EXPORT_TABLE.getNameAsString(),
FQ_OUTPUT_DIR,
"1000", // max number of key versions per key to export
};
assertTrue(runExport(args));
- String IMPORT_TABLE = "importWithMultipleDeleteFamilyMarkersOfSameRowSameFamily";
- desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
+ desc = new HTableDescriptor(IMPORT_TABLE);
desc.addFamily(new HColumnDescriptor(FAMILYA)
.setMaxVersions(5)
.setKeepDeletedCells(KeepDeletedCells.TRUE)
);
UTIL.getHBaseAdmin().createTable(desc);
- Table importT = UTIL.getConnection().getTable(TableName.valueOf(IMPORT_TABLE));
+ Table importT = UTIL.getConnection().getTable(IMPORT_TABLE);
args = new String[] {
- IMPORT_TABLE,
+ IMPORT_TABLE.getNameAsString(),
FQ_OUTPUT_DIR
};
assertTrue(runImport(args));
@@ -434,7 +471,7 @@ public class TestImportExport {
Result exportedTResult = exportedTScanner.next();
try {
Result.compareResults(exportedTResult, importedTResult);
- } catch (Exception e) {
+ } catch (Throwable e) {
fail("Original and imported tables data comparision failed with error:"+e.getMessage());
} finally {
exportT.close();
@@ -447,40 +484,38 @@ public class TestImportExport {
* attempt with invalid values.
*/
@Test
- public void testWithFilter() throws Exception {
+ public void testWithFilter() throws Throwable {
// Create simple table to export
- String EXPORT_TABLE = "exportSimpleCase_ImportWithFilter";
- HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(EXPORT_TABLE));
+ HTableDescriptor desc = new HTableDescriptor(EXPORT_TABLE);
desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
UTIL.getHBaseAdmin().createTable(desc);
Table exportTable = UTIL.getConnection().getTable(desc.getTableName());
Put p1 = new Put(ROW1);
- p1.addColumn(FAMILYA, QUAL, now, QUAL);
- p1.addColumn(FAMILYA, QUAL, now + 1, QUAL);
- p1.addColumn(FAMILYA, QUAL, now + 2, QUAL);
- p1.addColumn(FAMILYA, QUAL, now + 3, QUAL);
- p1.addColumn(FAMILYA, QUAL, now + 4, QUAL);
+ p1.addColumn(FAMILYA, QUAL, NOW, QUAL);
+ p1.addColumn(FAMILYA, QUAL, NOW + 1, QUAL);
+ p1.addColumn(FAMILYA, QUAL, NOW + 2, QUAL);
+ p1.addColumn(FAMILYA, QUAL, NOW + 3, QUAL);
+ p1.addColumn(FAMILYA, QUAL, NOW + 4, QUAL);
// Having another row would actually test the filter.
Put p2 = new Put(ROW2);
- p2.addColumn(FAMILYA, QUAL, now, QUAL);
+ p2.addColumn(FAMILYA, QUAL, NOW, QUAL);
exportTable.put(Arrays.asList(p1, p2));
// Export the simple table
- String[] args = new String[] { EXPORT_TABLE, FQ_OUTPUT_DIR, "1000" };
+ String[] args = new String[] { EXPORT_TABLE.getNameAsString(), FQ_OUTPUT_DIR, "1000" };
assertTrue(runExport(args));
// Import to a new table
- String IMPORT_TABLE = "importWithFilter";
- desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
+ desc = new HTableDescriptor(IMPORT_TABLE);
desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
UTIL.getHBaseAdmin().createTable(desc);
Table importTable = UTIL.getConnection().getTable(desc.getTableName());
args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(),
- "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE,
+ "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE.getNameAsString(),
FQ_OUTPUT_DIR,
"1000" };
assertTrue(runImport(args));
@@ -496,7 +531,7 @@ public class TestImportExport {
// need to re-run the export job
args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + Filter.class.getName(),
- "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", EXPORT_TABLE,
+ "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", EXPORT_TABLE.getNameAsString(),
FQ_OUTPUT_DIR, "1000" };
assertFalse(runImport(args));
@@ -529,7 +564,7 @@ public class TestImportExport {
* test main method. Import should print help and call System.exit
*/
@Test
- public void testImportMain() throws Exception {
+ public void testImportMain() throws Throwable {
PrintStream oldPrintStream = System.err;
SecurityManager SECURITY_MANAGER = System.getSecurityManager();
LauncherSecurityManager newSecurityManager= new LauncherSecurityManager();
@@ -554,11 +589,56 @@ public class TestImportExport {
}
}
+ @Test
+ public void testExportScan() throws Exception {
+ int version = 100;
+ long startTime = System.currentTimeMillis();
+ long endTime = startTime + 1;
+ String prefix = "row";
+ String label_0 = "label_0";
+ String label_1 = "label_1";
+ String[] args = {
+ "table",
+ "outputDir",
+ String.valueOf(version),
+ String.valueOf(startTime),
+ String.valueOf(endTime),
+ prefix
+ };
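+ // Positional arguments: table name, output directory, max versions, start time, end time and row prefix.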
+ Scan scan = ExportUtils.getConfiguredScanFromCommandLine(UTIL.getConfiguration(), args);
+ assertEquals(version, scan.getMaxVersions());
+ assertEquals(startTime, scan.getTimeRange().getMin());
+ assertEquals(endTime, scan.getTimeRange().getMax());
+ assertEquals(true, (scan.getFilter() instanceof PrefixFilter));
+ assertEquals(0, Bytes.compareTo(((PrefixFilter) scan.getFilter()).getPrefix(), Bytes.toBytesBinary(prefix)));
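+ // Same positional arguments plus visibility labels passed via -D; GenericOptionsParser strips the -D option below before the scan is built.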
+ String[] argsWithLabels = {
+ "-D " + ExportUtils.EXPORT_VISIBILITY_LABELS + "=" + label_0 + "," + label_1,
+ "table",
+ "outputDir",
+ String.valueOf(version),
+ String.valueOf(startTime),
+ String.valueOf(endTime),
+ prefix
+ };
+ Configuration conf = new Configuration(UTIL.getConfiguration());
+ // parse the "-D" options
+ String[] otherArgs = new GenericOptionsParser(conf, argsWithLabels).getRemainingArgs();
+ Scan scanWithLabels = ExportUtils.getConfiguredScanFromCommandLine(conf, otherArgs);
+ assertEquals(version, scanWithLabels.getMaxVersions());
+ assertEquals(startTime, scanWithLabels.getTimeRange().getMin());
+ assertEquals(endTime, scanWithLabels.getTimeRange().getMax());
+ assertEquals(true, (scanWithLabels.getFilter() instanceof PrefixFilter));
+ assertEquals(0, Bytes.compareTo(((PrefixFilter) scanWithLabels.getFilter()).getPrefix(), Bytes.toBytesBinary(prefix)));
+ assertEquals(2, scanWithLabels.getAuthorizations().getLabels().size());
+ assertEquals(label_0, scanWithLabels.getAuthorizations().getLabels().get(0));
+ assertEquals(label_1, scanWithLabels.getAuthorizations().getLabels().get(1));
+ }
+
/**
* test main method. Export should print help and call System.exit
*/
@Test
- public void testExportMain() throws Exception {
+ public void testExportMain() throws Throwable {
PrintStream oldPrintStream = System.err;
SecurityManager SECURITY_MANAGER = System.getSecurityManager();
LauncherSecurityManager newSecurityManager= new LauncherSecurityManager();
@@ -568,7 +648,7 @@ public class TestImportExport {
System.setErr(new PrintStream(data));
try {
System.setErr(new PrintStream(data));
- Export.main(args);
+ exporter.main(args);
fail("should be SecurityException");
} catch (SecurityException e) {
assertEquals(-1, newSecurityManager.getExitCode());
@@ -576,12 +656,9 @@ public class TestImportExport {
assertTrue(data.toString().contains(
"Usage: Export [-D ]* [ " +
"[ []] [^[regex pattern] or [Prefix] to filter]]"));
- assertTrue(data.toString().contains("-D hbase.mapreduce.scan.column.family="));
- assertTrue(data.toString().contains("-D hbase.mapreduce.include.deleted.rows=true"));
- assertTrue(data.toString().contains("-Dhbase.client.scanner.caching=100"));
- assertTrue(data.toString().contains("-Dmapreduce.map.speculative=false"));
- assertTrue(data.toString().contains("-Dmapreduce.reduce.speculative=false"));
- assertTrue(data.toString().contains("-Dhbase.export.scanner.batch=10"));
+ assertTrue(data.toString().contains("-D " + TableInputFormat.SCAN_COLUMN_FAMILY + "="));
+ assertTrue(data.toString().contains("-D " + ExportUtils.RAW_SCAN + "=true"));
+ assertTrue(data.toString().contains("-D " + HConstants.HBASE_CLIENT_SCANNER_CACHING + "=100"));
} finally {
System.setErr(oldPrintStream);
System.setSecurityManager(SECURITY_MANAGER);
@@ -593,7 +670,7 @@ public class TestImportExport {
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
- public void testKeyValueImporter() throws Exception {
+ public void testKeyValueImporter() throws Throwable {
KeyValueImporter importer = new KeyValueImporter();
Configuration configuration = new Configuration();
Context ctx = mock(Context.class);
@@ -642,31 +719,29 @@ public class TestImportExport {
}
@Test
- public void testDurability() throws Exception {
+ public void testDurability() throws Throwable {
// Create an export table.
- String exportTableName = "exporttestDurability";
- try (Table exportTable = UTIL.createTable(TableName.valueOf(exportTableName), FAMILYA, 3);) {
+ try (Table exportTable = UTIL.createTable(EXPORT_TABLE, FAMILYA, 3);) {
// Insert some data
Put put = new Put(ROW1);
- put.addColumn(FAMILYA, QUAL, now, QUAL);
- put.addColumn(FAMILYA, QUAL, now + 1, QUAL);
- put.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ put.addColumn(FAMILYA, QUAL, NOW, QUAL);
+ put.addColumn(FAMILYA, QUAL, NOW + 1, QUAL);
+ put.addColumn(FAMILYA, QUAL, NOW + 2, QUAL);
exportTable.put(put);
put = new Put(ROW2);
- put.addColumn(FAMILYA, QUAL, now, QUAL);
- put.addColumn(FAMILYA, QUAL, now + 1, QUAL);
- put.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ put.addColumn(FAMILYA, QUAL, NOW, QUAL);
+ put.addColumn(FAMILYA, QUAL, NOW + 1, QUAL);
+ put.addColumn(FAMILYA, QUAL, NOW + 2, QUAL);
exportTable.put(put);
// Run the export
- String[] args = new String[] { exportTableName, FQ_OUTPUT_DIR, "1000"};
+ String[] args = new String[] { EXPORT_TABLE.getNameAsString(), FQ_OUTPUT_DIR, "1000"};
assertTrue(runExport(args));
// Create the table for import
- String importTableName = "importTestDurability1";
- Table importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
+ Table importTable = UTIL.createTable(IMPORT_TABLE, FAMILYA, 3);
// Register the wal listener for the import table
HRegionInfo region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
@@ -678,7 +753,7 @@ public class TestImportExport {
// Run the import with SKIP_WAL
args =
new String[] { "-D" + Import.WAL_DURABILITY + "=" + Durability.SKIP_WAL.name(),
- importTableName, FQ_OUTPUT_DIR };
+ IMPORT_TABLE.getNameAsString(), FQ_OUTPUT_DIR };
assertTrue(runImport(args));
//Assert that the wal is not visited
assertTrue(!walListener.isWALVisited());
@@ -686,19 +761,20 @@ public class TestImportExport {
assertTrue(getCount(importTable, null) == 2);
// Run the import with the default durability option
- importTableName = "importTestDurability2";
- importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
+ TableName tableWithDefaultDurability = TableName.valueOf("importTestDurability2");
+ importTable = UTIL.createTable(tableWithDefaultDurability, FAMILYA, 3);
region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
.getOnlineRegions(importTable.getName()).get(0).getRegionInfo();
wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region);
walListener = new TableWALActionListener(region);
wal.registerWALActionsListener(walListener);
- args = new String[] { importTableName, FQ_OUTPUT_DIR };
+ args = new String[] { tableWithDefaultDurability.getNameAsString(), FQ_OUTPUT_DIR };
assertTrue(runImport(args));
//Assert that the wal is visited
assertTrue(walListener.isWALVisited());
//Ensure that the count is 2 (only one version of key value is obtained)
assertTrue(getCount(importTable, null) == 2);
+ UTIL.deleteTable(tableWithDefaultDurability);
}
}
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestSecureExport.java hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestSecureExport.java
new file mode 100644
index 0000000..45b6441
--- /dev/null
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestSecureExport.java
@@ -0,0 +1,465 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.security.access;
+
+import com.google.protobuf.ServiceException;
+import java.io.File;
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.coprocessor.Export;
+import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.hbase.mapreduce.ExportUtils;
+import org.apache.hadoop.hbase.mapreduce.HadoopSecurityEnabledUserProviderForTesting;
+import org.apache.hadoop.hbase.mapreduce.Import;
+import org.apache.hadoop.hbase.protobuf.generated.ExportProtos;
+import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos;
+import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.security.UserProvider;
+import org.apache.hadoop.hbase.security.access.SecureTestUtil.AccessTestAction;
+import org.apache.hadoop.hbase.security.visibility.Authorizations;
+import org.apache.hadoop.hbase.security.visibility.CellVisibility;
+import org.apache.hadoop.hbase.security.visibility.VisibilityClient;
+import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
+import org.apache.hadoop.hbase.security.visibility.VisibilityTestUtil;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.junit.After;
+import org.junit.AfterClass;
+import static org.junit.Assert.assertEquals;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+@Category({SmallTests.class})
+public class TestSecureExport {
+ private static final Log LOG = LogFactory.getLog(TestSecureExport.class);
+ private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+ private static MiniKdc KDC;
+ private static final File KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath());
+ private static String USERNAME;
+ private static String SERVER_PRINCIPAL;
+ private static String HTTP_PRINCIPAL;
+ private static final String FAMILYA_STRING = "fma";
+ private static final String FAMILYB_STRING = "fmb";
+ private static final byte[] FAMILYA = Bytes.toBytes(FAMILYA_STRING);
+ private static final byte[] FAMILYB = Bytes.toBytes(FAMILYB_STRING);
+ private static final byte[] ROW1 = Bytes.toBytes("row1");
+ private static final byte[] ROW2 = Bytes.toBytes("row2");
+ private static final byte[] ROW3 = Bytes.toBytes("row3");
+ private static final byte[] QUAL = Bytes.toBytes("qual");
+ private static final String LOCALHOST = "localhost";
+ private static final long NOW = System.currentTimeMillis();
+ // user granted with all global permission
+ private static final String USER_ADMIN = "admin";
+ // user is table owner. will have all permissions on table
+ private static final String USER_OWNER = "owner";
+ // user with rx permissions.
+ private static final String USER_RX = "rxuser";
+ // user with exe-only permissions.
+ private static final String USER_XO = "xouser";
+ // user with read-only permissions.
+ private static final String USER_RO = "rouser";
+ // user with no permissions
+ private static final String USER_NONE = "noneuser";
+ private static final String PRIVATE = "private";
+ private static final String CONFIDENTIAL = "confidential";
+ private static final String SECRET = "secret";
+ private static final String TOPSECRET = "topsecret";
+ @Rule
+ public final TestName name = new TestName();
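+ // Starts a MiniKdc and creates Kerberos principals for the region server, HTTP, and each test user.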
+ private static void setUpKdcServer() throws Exception {
+ Properties conf = MiniKdc.createConf();
+ conf.put(MiniKdc.DEBUG, true);
+ File kdcFile = new File(UTIL.getDataTestDir("kdc").toUri().getPath());
+ KDC = new MiniKdc(conf, kdcFile);
+ KDC.start();
+ USERNAME = UserGroupInformation.getLoginUser().getShortUserName();
+ SERVER_PRINCIPAL = USERNAME + "/" + LOCALHOST;
+ HTTP_PRINCIPAL = "HTTP/" + LOCALHOST;
+ KDC.createPrincipal(KEYTAB_FILE,
+ SERVER_PRINCIPAL,
+ HTTP_PRINCIPAL,
+ USER_ADMIN + "/" + LOCALHOST,
+ USER_OWNER + "/" + LOCALHOST,
+ USER_RX + "/" + LOCALHOST,
+ USER_RO + "/" + LOCALHOST,
+ USER_XO + "/" + LOCALHOST,
+ USER_NONE + "/" + LOCALHOST);
+ }
+ private static User getUserByLogin(final String user) throws IOException {
+ return User.create(UserGroupInformation.loginUserFromKeytabAndReturnUGI(getPrinciple(user), KEYTAB_FILE.getAbsolutePath()));
+ }
+ private static String getPrinciple(final String user) {
+ return user + "/" + LOCALHOST + "@" + KDC.getRealm();
+ }
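+ // Pushes the Kerberos principals, keytab and SSL settings into the HDFS, YARN and HBase configurations,
+ // and registers the Export endpoint as a region coprocessor.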
+ private static void setUpClusterKdc() throws Exception {
+ HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath());
+ HBaseKerberosUtils.setPrincipalForTesting(SERVER_PRINCIPAL + "@" + KDC.getRealm());
+ HBaseKerberosUtils.setSecuredConfiguration(UTIL.getConfiguration());
+ // if we drop support for hadoop-2.4.0 and hadoop-2.4.1,
+ // the following key should be changed.
+ // 1) DFS_NAMENODE_USER_NAME_KEY -> DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY
+ // 2) DFS_DATANODE_USER_NAME_KEY -> DFS_DATANODE_KERBEROS_PRINCIPAL_KEY
+ UTIL.getConfiguration().set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, SERVER_PRINCIPAL + "@" + KDC.getRealm());
+ UTIL.getConfiguration().set(DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY, SERVER_PRINCIPAL + "@" + KDC.getRealm());
+ UTIL.getConfiguration().set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath());
+ UTIL.getConfiguration().set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath());
+ // set yarn principal
+ UTIL.getConfiguration().set(YarnConfiguration.RM_PRINCIPAL, SERVER_PRINCIPAL + "@" + KDC.getRealm());
+ UTIL.getConfiguration().set(YarnConfiguration.NM_PRINCIPAL, SERVER_PRINCIPAL + "@" + KDC.getRealm());
+ UTIL.getConfiguration().set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, HTTP_PRINCIPAL + "@" + KDC.getRealm());
+ UTIL.getConfiguration().setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
+ UTIL.getConfiguration().set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
+ UTIL.getConfiguration().set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, LOCALHOST + ":0");
+ UTIL.getConfiguration().set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, LOCALHOST + ":0");
+
+ File keystoresDir = new File(UTIL.getDataTestDir("keystore").toUri().getPath());
+ keystoresDir.mkdirs();
+ String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureExport.class);
+ KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, UTIL.getConfiguration(), false);
+
+ UTIL.getConfiguration().setBoolean("ignore.secure.ports.for.testing", true);
+ UserGroupInformation.setConfiguration(UTIL.getConfiguration());
+ UTIL.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, UTIL.getConfiguration().get(
+ CoprocessorHost.REGION_COPROCESSOR_CONF_KEY) + "," + Export.class.getName());
+ }
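+ // Adds the visibility labels and assigns them to each of the given users, running as the admin user.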
+ private static void addLabels(final Configuration conf, final List<String> users, final List<String> labels) throws Exception {
+ PrivilegedExceptionAction<VisibilityLabelsProtos.VisibilityLabelsResponse> action
+ = new PrivilegedExceptionAction<VisibilityLabelsProtos.VisibilityLabelsResponse>() {
+ @Override
+ public VisibilityLabelsProtos.VisibilityLabelsResponse run() throws Exception {
+ try (Connection conn = ConnectionFactory.createConnection(conf)) {
+ VisibilityClient.addLabels(conn, labels.toArray(new String[labels.size()]));
+ for (String user : users) {
+ VisibilityClient.setAuths(conn, labels.toArray(new String[labels.size()]), user);
+ }
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ getUserByLogin(USER_ADMIN).runAs(action);
+ }
+
+ @Before
+ public void announce() {
+ LOG.info("Running " + name.getMethodName());
+ }
+
+ @After
+ public void cleanup() throws IOException {
+ }
+ private static void clearOutput(Path path) throws IOException {
+ FileSystem fs = path.getFileSystem(UTIL.getConfiguration());
+ if (fs.exists(path)) {
+ assertEquals(true, fs.delete(path, true));
+ }
+ }
+ /**
+ * Sets up security first so that the correct default realm is obtained.
+ * @throws Exception
+ */
+ @BeforeClass
+ public static void beforeClass() throws Exception {
+ UserProvider.setUserProviderForTesting(UTIL.getConfiguration(), HadoopSecurityEnabledUserProviderForTesting.class);
+ setUpKdcServer();
+ SecureTestUtil.enableSecurity(UTIL.getConfiguration());
+ UTIL.getConfiguration().setBoolean(AccessControlConstants.EXEC_PERMISSION_CHECKS_KEY, true);
+ VisibilityTestUtil.enableVisiblityLabels(UTIL.getConfiguration());
+ SecureTestUtil.verifyConfiguration(UTIL.getConfiguration());
+ setUpClusterKdc();
+ UTIL.startMiniCluster();
+ UTIL.waitUntilAllRegionsAssigned(AccessControlLists.ACL_TABLE_NAME);
+ UTIL.waitUntilAllRegionsAssigned(VisibilityConstants.LABELS_TABLE_NAME);
+ UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME, 50000);
+ UTIL.waitTableEnabled(VisibilityConstants.LABELS_TABLE_NAME, 50000);
+ SecureTestUtil.grantGlobal(UTIL, USER_ADMIN,
+ Permission.Action.ADMIN,
+ Permission.Action.CREATE,
+ Permission.Action.EXEC,
+ Permission.Action.READ,
+ Permission.Action.WRITE);
+ addLabels(UTIL.getConfiguration(), Arrays.asList(USER_OWNER),
+ Arrays.asList(PRIVATE, CONFIDENTIAL, SECRET, TOPSECRET));
+ }
+
+ @AfterClass
+ public static void afterClass() throws Exception {
+ if (KDC != null) {
+ KDC.stop();
+ }
+ UTIL.shutdownMiniCluster();
+ }
+
+ /**
+ * Test the ExportEndpoint's access levels. The {@link Export} test is ignored
+ * since the access exceptions cannot be collected from the mappers.
+ *
+ * @throws java.io.IOException
+ */
+ @Test
+ public void testAccessCase() throws IOException, Throwable {
+ final String exportTable = name.getMethodName();
+ HTableDescriptor exportHtd = new HTableDescriptor(TableName.valueOf(exportTable));
+ exportHtd.addFamily(new HColumnDescriptor(FAMILYA));
+ exportHtd.setOwnerString(USER_OWNER);
+ SecureTestUtil.createTable(UTIL, exportHtd, new byte[][]{Bytes.toBytes("s")});
+ SecureTestUtil.grantOnTable(UTIL, USER_RO,
+ TableName.valueOf(exportTable), null, null,
+ Permission.Action.READ);
+ SecureTestUtil.grantOnTable(UTIL, USER_RX,
+ TableName.valueOf(exportTable), null, null,
+ Permission.Action.READ,
+ Permission.Action.EXEC);
+ SecureTestUtil.grantOnTable(UTIL, USER_XO,
+ TableName.valueOf(exportTable), null, null,
+ Permission.Action.EXEC);
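+ // Three explicit grants plus the owner's implicit entry should yield four table permission entries.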
+ assertEquals(4, AccessControlLists.getTablePermissions(UTIL.getConfiguration(),
+ TableName.valueOf(exportTable)).size());
+ AccessTestAction putAction = new AccessTestAction() {
+ @Override
+ public Object run() throws Exception {
+ Put p = new Put(ROW1);
+ p.addColumn(FAMILYA, Bytes.toBytes("qual_0"), NOW, QUAL);
+ p.addColumn(FAMILYA, Bytes.toBytes("qual_1"), NOW, QUAL);
+ try (Connection conn = ConnectionFactory.createConnection(UTIL.getConfiguration());
+ Table t = conn.getTable(TableName.valueOf(exportTable))) {
+ t.put(p);
+ }
+ return null;
+ }
+ };
+ // no hdfs access.
+ SecureTestUtil.verifyAllowed(putAction,
+ getUserByLogin(USER_ADMIN),
+ getUserByLogin(USER_OWNER));
+ SecureTestUtil.verifyDenied(putAction,
+ getUserByLogin(USER_RO),
+ getUserByLogin(USER_XO),
+ getUserByLogin(USER_RX),
+ getUserByLogin(USER_NONE));
+
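+ // Use an output directory open to everyone so HDFS permissions do not interfere with the HBase ACL checks under test.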
+ final FileSystem fs = UTIL.getDFSCluster().getFileSystem();
+ final Path openDir = fs.makeQualified(new Path("testAccessCase"));
+ fs.mkdirs(openDir);
+ fs.setPermission(openDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
+ final Path output = fs.makeQualified(new Path(openDir, "output"));
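+ // Runs the export through the Export coprocessor endpoint; exactly one row with two cells should be exported.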
+ AccessTestAction exportAction = new AccessTestAction() {
+ @Override
+ public Object run() throws Exception {
+ try {
+ String[] args = new String[]{exportTable, output.toString()};
+ Map<byte[], ExportProtos.ExportResponse> result
+ = Export.run(new Configuration(UTIL.getConfiguration()), args);
+ long rowCount = 0;
+ long cellCount = 0;
+ for (ExportProtos.ExportResponse r : result.values()) {
+ rowCount += r.getRowCount();
+ cellCount += r.getCellCount();
+ }
+ assertEquals(1, rowCount);
+ assertEquals(2, cellCount);
+ return null;
+ } catch (ServiceException | IOException ex) {
+ throw ex;
+ } catch (Throwable ex) {
+ LOG.error(ex);
+ throw new Exception(ex);
+ } finally {
+ clearOutput(output);
+ }
+ }
+ };
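+ // The endpoint export needs both READ and EXEC on the table: read-only, exec-only and no-permission
+ // users must be denied, while admin, owner and the read+exec user succeed.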
+ SecureTestUtil.verifyDenied(exportAction,
+ getUserByLogin(USER_RO),
+ getUserByLogin(USER_XO),
+ getUserByLogin(USER_NONE));
+ SecureTestUtil.verifyAllowed(exportAction,
+ getUserByLogin(USER_ADMIN),
+ getUserByLogin(USER_OWNER),
+ getUserByLogin(USER_RX));
+ AccessTestAction deleteAction = new AccessTestAction() {
+ @Override
+ public Object run() throws Exception {
+ UTIL.deleteTable(TableName.valueOf(exportTable));
+ return null;
+ }
+ };
+ SecureTestUtil.verifyAllowed(deleteAction, getUserByLogin(USER_OWNER));
+ fs.delete(openDir, true);
+ }
+ @Test
+ public void testVisibilityLabels() throws IOException, Throwable {
+ final String exportTable = name.getMethodName() + "_export";
+ final String importTable = name.getMethodName() + "_import";
+ final HTableDescriptor exportHtd = new HTableDescriptor(TableName.valueOf(exportTable));
+ exportHtd.addFamily(new HColumnDescriptor(FAMILYA));
+ exportHtd.setOwnerString(USER_OWNER);
+ SecureTestUtil.createTable(UTIL, exportHtd, new byte[][]{Bytes.toBytes("s")});
+ AccessTestAction putAction = new AccessTestAction() {
+ @Override
+ public Object run() throws Exception {
+ Put p1 = new Put(ROW1);
+ p1.addColumn(FAMILYA, QUAL, NOW, QUAL);
+ p1.setCellVisibility(new CellVisibility(SECRET));
+ Put p2 = new Put(ROW2);
+ p2.addColumn(FAMILYA, QUAL, NOW, QUAL);
+ p2.setCellVisibility(new CellVisibility(PRIVATE + " & " + CONFIDENTIAL));
+ Put p3 = new Put(ROW3);
+ p3.addColumn(FAMILYA, QUAL, NOW, QUAL);
+ p3.setCellVisibility(new CellVisibility("!" + CONFIDENTIAL + " & " + TOPSECRET));
+ try (Connection conn = ConnectionFactory.createConnection(UTIL.getConfiguration());
+ Table t = conn.getTable(TableName.valueOf(exportTable))) {
+ t.put(p1);
+ t.put(p2);
+ t.put(p3);
+ }
+ return null;
+ }
+ };
+ SecureTestUtil.verifyAllowed(putAction, getUserByLogin(USER_OWNER));
+ List<Pair<List<String>, Integer>> labelsAndRowCounts = new LinkedList<>();
+ labelsAndRowCounts.add(new Pair<>(Arrays.asList(SECRET), 1));
+ labelsAndRowCounts.add(new Pair<>(Arrays.asList(PRIVATE, CONFIDENTIAL), 1));
+ labelsAndRowCounts.add(new Pair<>(Arrays.asList(TOPSECRET), 1));
+ labelsAndRowCounts.add(new Pair<>(Arrays.asList(TOPSECRET, CONFIDENTIAL), 0));
+ labelsAndRowCounts.add(new Pair<>(Arrays.asList(TOPSECRET, CONFIDENTIAL, PRIVATE, SECRET), 2));
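+ // Each pair holds the labels handed to the export and the number of rows expected to survive the export/import round trip.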
+ for (final Pair<List<String>, Integer> labelsAndRowCount : labelsAndRowCounts) {
+ final List<String> labels = labelsAndRowCount.getFirst();
+ final int rowCount = labelsAndRowCount.getSecond();
+ // Create an open-permission directory.
+ final Path openDir = new Path("testAccessCase");
+ final FileSystem fs = openDir.getFileSystem(UTIL.getConfiguration());
+ fs.mkdirs(openDir);
+ fs.setPermission(openDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
+ final Path output = fs.makeQualified(new Path(openDir, "output"));
+ AccessTestAction exportAction = new AccessTestAction() {
+ @Override
+ public Object run() throws Exception {
+ StringBuilder buf = new StringBuilder();
+ for (String label : labels) {
+ buf.append(label).append(",");
+ }
+ buf.deleteCharAt(buf.length() - 1);
+ try {
+ String[] args = new String[]{
+ "-D " + ExportUtils.EXPORT_VISIBILITY_LABELS + "=" + buf.toString(),
+ exportTable,
+ output.toString(),};
+ Export.run(new Configuration(UTIL.getConfiguration()), args);
+ return null;
+ } catch (ServiceException | IOException ex) {
+ throw ex;
+ } catch (Throwable ex) {
+ throw new Exception(ex);
+ }
+ }
+ };
+ SecureTestUtil.verifyAllowed(exportAction, getUserByLogin(USER_OWNER));
+ final HTableDescriptor importHtd = new HTableDescriptor(TableName.valueOf(importTable));
+ importHtd.addFamily(new HColumnDescriptor(FAMILYB));
+ importHtd.setOwnerString(USER_OWNER);
+ SecureTestUtil.createTable(UTIL, importHtd, new byte[][]{Bytes.toBytes("s")});
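+ // Import the dump while renaming the exported family FAMILYA to FAMILYB via Import.CF_RENAME_PROP.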
+ AccessTestAction importAction = new AccessTestAction() {
+ @Override
+ public Object run() throws Exception {
+ String[] args = new String[]{
+ "-D" + Import.CF_RENAME_PROP + "=" + FAMILYA_STRING + ":" + FAMILYB_STRING,
+ importTable,
+ output.toString()
+ };
+ assertEquals(0, ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Import(), args));
+ return null;
+ }
+ };
+ SecureTestUtil.verifyAllowed(importAction, getUserByLogin(USER_OWNER));
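+ // Scan the imported table with the same authorizations and verify the expected row count.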
+ AccessTestAction scanAction = new AccessTestAction() {
+ @Override
+ public Object run() throws Exception {
+ Scan scan = new Scan();
+ scan.setAuthorizations(new Authorizations(labels));
+ try (Connection conn = ConnectionFactory.createConnection(UTIL.getConfiguration());
+ Table table = conn.getTable(importHtd.getTableName());
+ ResultScanner scanner = table.getScanner(scan)) {
+ int count = 0;
+ for (Result r : scanner) {
+ ++count;
+ }
+ assertEquals(rowCount, count);
+ }
+ return null;
+ }
+ };
+ SecureTestUtil.verifyAllowed(scanAction, getUserByLogin(USER_OWNER));
+ AccessTestAction deleteAction = new AccessTestAction() {
+ @Override
+ public Object run() throws Exception {
+ UTIL.deleteTable(importHtd.getTableName());
+ return null;
+ }
+ };
+ SecureTestUtil.verifyAllowed(deleteAction, getUserByLogin(USER_OWNER));
+ clearOutput(output);
+ }
+ AccessTestAction deleteAction = new AccessTestAction() {
+ @Override
+ public Object run() throws Exception {
+ UTIL.deleteTable(exportHtd.getTableName());
+ return null;
+ }
+ };
+ SecureTestUtil.verifyAllowed(deleteAction, getUserByLogin(USER_OWNER));
+ }
+}