diff --git metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java index 9f2a88c..b15b0de 100644 --- metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java +++ metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java @@ -36396,6 +36396,4830 @@ public Builder removeRange(int index) { // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator) } + public interface PrimaryKeyOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string pk_name = 1; + /** + * required string pk_name = 1; + */ + boolean hasPkName(); + /** + * required string pk_name = 1; + */ + java.lang.String getPkName(); + /** + * required string pk_name = 1; + */ + com.google.protobuf.ByteString + getPkNameBytes(); + + // repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + java.util.List + getColsList(); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn getCols(int index); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + int getColsCount(); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + java.util.List + getColsOrBuilderList(); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder getColsOrBuilder( + int index); + + // optional bool enable_constraint = 3; + /** + * optional bool 
enable_constraint = 3; + */ + boolean hasEnableConstraint(); + /** + * optional bool enable_constraint = 3; + */ + boolean getEnableConstraint(); + + // optional bool validate_constraint = 4; + /** + * optional bool validate_constraint = 4; + */ + boolean hasValidateConstraint(); + /** + * optional bool validate_constraint = 4; + */ + boolean getValidateConstraint(); + + // optional bool rely_constraint = 5; + /** + * optional bool rely_constraint = 5; + */ + boolean hasRelyConstraint(); + /** + * optional bool rely_constraint = 5; + */ + boolean getRelyConstraint(); + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrimaryKey} + */ + public static final class PrimaryKey extends + com.google.protobuf.GeneratedMessage + implements PrimaryKeyOrBuilder { + // Use PrimaryKey.newBuilder() to construct. + private PrimaryKey(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private PrimaryKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final PrimaryKey defaultInstance; + public static PrimaryKey getDefaultInstance() { + return defaultInstance; + } + + public PrimaryKey getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PrimaryKey( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; 
+ default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + pkName_ = input.readBytes(); + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + cols_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000002; + } + cols_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.PARSER, extensionRegistry)); + break; + } + case 24: { + bitField0_ |= 0x00000002; + enableConstraint_ = input.readBool(); + break; + } + case 32: { + bitField0_ |= 0x00000004; + validateConstraint_ = input.readBool(); + break; + } + case 40: { + bitField0_ |= 0x00000008; + relyConstraint_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + cols_ = java.util.Collections.unmodifiableList(cols_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.Builder.class); + } + + public static 
com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public PrimaryKey parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PrimaryKey(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public interface PrimaryKeyColumnOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string column_name = 1; + /** + * required string column_name = 1; + */ + boolean hasColumnName(); + /** + * required string column_name = 1; + */ + java.lang.String getColumnName(); + /** + * required string column_name = 1; + */ + com.google.protobuf.ByteString + getColumnNameBytes(); + + // required sint32 key_seq = 2; + /** + * required sint32 key_seq = 2; + */ + boolean hasKeySeq(); + /** + * required sint32 key_seq = 2; + */ + int getKeySeq(); + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn} + */ + public static final class PrimaryKeyColumn extends + com.google.protobuf.GeneratedMessage + implements PrimaryKeyColumnOrBuilder { + // Use PrimaryKeyColumn.newBuilder() to construct. 
+ private PrimaryKeyColumn(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private PrimaryKeyColumn(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final PrimaryKeyColumn defaultInstance; + public static PrimaryKeyColumn getDefaultInstance() { + return defaultInstance; + } + + public PrimaryKeyColumn getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private PrimaryKeyColumn( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + columnName_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + keySeq_ = input.readSInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public PrimaryKeyColumn parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new PrimaryKeyColumn(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required string column_name = 1; + public static final int COLUMN_NAME_FIELD_NUMBER = 1; + private java.lang.Object columnName_; + /** + * required string column_name = 1; + */ + public boolean hasColumnName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string column_name = 1; + */ + public java.lang.String getColumnName() { + java.lang.Object ref = columnName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + columnName_ = s; + } + return s; + } + } + /** + * required string column_name = 1; + */ + public com.google.protobuf.ByteString + getColumnNameBytes() { + java.lang.Object ref = 
columnName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + columnName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required sint32 key_seq = 2; + public static final int KEY_SEQ_FIELD_NUMBER = 2; + private int keySeq_; + /** + * required sint32 key_seq = 2; + */ + public boolean hasKeySeq() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required sint32 key_seq = 2; + */ + public int getKeySeq() { + return keySeq_; + } + + private void initFields() { + columnName_ = ""; + keySeq_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasColumnName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasKeySeq()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getColumnNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeSInt32(2, keySeq_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getColumnNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeSInt32Size(2, keySeq_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long 
serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return 
PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder.class); + } + + // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + columnName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + keySeq_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_descriptor; + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn getDefaultInstanceForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.getDefaultInstance(); + } + + public 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn build() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn buildPartial() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.columnName_ = columnName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.keySeq_ = keySeq_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn) { + return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn other) { + if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.getDefaultInstance()) return this; + if (other.hasColumnName()) { + bitField0_ |= 0x00000001; + columnName_ = other.columnName_; + onChanged(); + } + if (other.hasKeySeq()) { + setKeySeq(other.getKeySeq()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasColumnName()) { + + return false; + } + if (!hasKeySeq()) { + + return 
false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string column_name = 1; + private java.lang.Object columnName_ = ""; + /** + * required string column_name = 1; + */ + public boolean hasColumnName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string column_name = 1; + */ + public java.lang.String getColumnName() { + java.lang.Object ref = columnName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + columnName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string column_name = 1; + */ + public com.google.protobuf.ByteString + getColumnNameBytes() { + java.lang.Object ref = columnName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + columnName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string column_name = 1; + */ + public Builder setColumnName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + columnName_ = value; + onChanged(); + return this; + } + /** + * required string column_name = 1; + */ + public Builder 
clearColumnName() { + bitField0_ = (bitField0_ & ~0x00000001); + columnName_ = getDefaultInstance().getColumnName(); + onChanged(); + return this; + } + /** + * required string column_name = 1; + */ + public Builder setColumnNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + columnName_ = value; + onChanged(); + return this; + } + + // required sint32 key_seq = 2; + private int keySeq_ ; + /** + * required sint32 key_seq = 2; + */ + public boolean hasKeySeq() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required sint32 key_seq = 2; + */ + public int getKeySeq() { + return keySeq_; + } + /** + * required sint32 key_seq = 2; + */ + public Builder setKeySeq(int value) { + bitField0_ |= 0x00000002; + keySeq_ = value; + onChanged(); + return this; + } + /** + * required sint32 key_seq = 2; + */ + public Builder clearKeySeq() { + bitField0_ = (bitField0_ & ~0x00000002); + keySeq_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn) + } + + static { + defaultInstance = new PrimaryKeyColumn(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn) + } + + private int bitField0_; + // required string pk_name = 1; + public static final int PK_NAME_FIELD_NUMBER = 1; + private java.lang.Object pkName_; + /** + * required string pk_name = 1; + */ + public boolean hasPkName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string pk_name = 1; + */ + public java.lang.String getPkName() { + java.lang.Object ref = pkName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if 
(bs.isValidUtf8()) { + pkName_ = s; + } + return s; + } + } + /** + * required string pk_name = 1; + */ + public com.google.protobuf.ByteString + getPkNameBytes() { + java.lang.Object ref = pkName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + pkName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + public static final int COLS_FIELD_NUMBER = 2; + private java.util.List cols_; + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public java.util.List getColsList() { + return cols_; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public java.util.List + getColsOrBuilderList() { + return cols_; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public int getColsCount() { + return cols_.size(); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn getCols(int index) { + return cols_.get(index); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder getColsOrBuilder( + int index) { + return cols_.get(index); + } + + // optional bool enable_constraint = 3; + public static final int ENABLE_CONSTRAINT_FIELD_NUMBER = 3; + private boolean enableConstraint_; + /** + * optional bool enable_constraint = 3; + */ + public boolean hasEnableConstraint() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bool enable_constraint = 3; + */ + public boolean 
getEnableConstraint() { + return enableConstraint_; + } + + // optional bool validate_constraint = 4; + public static final int VALIDATE_CONSTRAINT_FIELD_NUMBER = 4; + private boolean validateConstraint_; + /** + * optional bool validate_constraint = 4; + */ + public boolean hasValidateConstraint() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional bool validate_constraint = 4; + */ + public boolean getValidateConstraint() { + return validateConstraint_; + } + + // optional bool rely_constraint = 5; + public static final int RELY_CONSTRAINT_FIELD_NUMBER = 5; + private boolean relyConstraint_; + /** + * optional bool rely_constraint = 5; + */ + public boolean hasRelyConstraint() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional bool rely_constraint = 5; + */ + public boolean getRelyConstraint() { + return relyConstraint_; + } + + private void initFields() { + pkName_ = ""; + cols_ = java.util.Collections.emptyList(); + enableConstraint_ = false; + validateConstraint_ = false; + relyConstraint_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasPkName()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getColsCount(); i++) { + if (!getCols(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getPkNameBytes()); + } + for (int i = 0; i < cols_.size(); i++) { + output.writeMessage(2, cols_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(3, enableConstraint_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBool(4, 
validateConstraint_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBool(5, relyConstraint_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getPkNameBytes()); + } + for (int i = 0; i < cols_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, cols_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, enableConstraint_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, validateConstraint_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, relyConstraint_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom(byte[] data) 
+ throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); 
} + public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PrimaryKey} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKeyOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.Builder.class); + } + + // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColsFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + pkName_ = ""; + bitField0_ = 
(bitField0_ & ~0x00000001); + if (colsBuilder_ == null) { + cols_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + colsBuilder_.clear(); + } + enableConstraint_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + validateConstraint_ = false; + bitField0_ = (bitField0_ & ~0x00000008); + relyConstraint_ = false; + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor; + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey getDefaultInstanceForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.getDefaultInstance(); + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey build() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey buildPartial() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.pkName_ = pkName_; + if (colsBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + cols_ = java.util.Collections.unmodifiableList(cols_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.cols_ = cols_; + } else { + result.cols_ = colsBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 
+ to_bitField0_ |= 0x00000002; + } + result.enableConstraint_ = enableConstraint_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000004; + } + result.validateConstraint_ = validateConstraint_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000008; + } + result.relyConstraint_ = relyConstraint_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey) { + return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey other) { + if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.getDefaultInstance()) return this; + if (other.hasPkName()) { + bitField0_ |= 0x00000001; + pkName_ = other.pkName_; + onChanged(); + } + if (colsBuilder_ == null) { + if (!other.cols_.isEmpty()) { + if (cols_.isEmpty()) { + cols_ = other.cols_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureColsIsMutable(); + cols_.addAll(other.cols_); + } + onChanged(); + } + } else { + if (!other.cols_.isEmpty()) { + if (colsBuilder_.isEmpty()) { + colsBuilder_.dispose(); + colsBuilder_ = null; + cols_ = other.cols_; + bitField0_ = (bitField0_ & ~0x00000002); + colsBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getColsFieldBuilder() : null; + } else { + colsBuilder_.addAllMessages(other.cols_); + } + } + } + if (other.hasEnableConstraint()) { + setEnableConstraint(other.getEnableConstraint()); + } + if (other.hasValidateConstraint()) { + setValidateConstraint(other.getValidateConstraint()); + } + if (other.hasRelyConstraint()) { + setRelyConstraint(other.getRelyConstraint()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasPkName()) { + + return false; + } + for (int i = 0; i < getColsCount(); i++) { + if (!getCols(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string pk_name = 1; + private java.lang.Object pkName_ = ""; + /** + * required string pk_name = 1; + */ + public boolean hasPkName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string pk_name = 1; + */ + public java.lang.String getPkName() { + java.lang.Object ref = pkName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + pkName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string pk_name = 1; + */ + public com.google.protobuf.ByteString + getPkNameBytes() { + java.lang.Object ref = pkName_; + if 
(ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + pkName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string pk_name = 1; + */ + public Builder setPkName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + pkName_ = value; + onChanged(); + return this; + } + /** + * required string pk_name = 1; + */ + public Builder clearPkName() { + bitField0_ = (bitField0_ & ~0x00000001); + pkName_ = getDefaultInstance().getPkName(); + onChanged(); + return this; + } + /** + * required string pk_name = 1; + */ + public Builder setPkNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + pkName_ = value; + onChanged(); + return this; + } + + // repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + private java.util.List cols_ = + java.util.Collections.emptyList(); + private void ensureColsIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + cols_ = new java.util.ArrayList(cols_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder> colsBuilder_; + + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public java.util.List getColsList() { + if (colsBuilder_ == null) { + return java.util.Collections.unmodifiableList(cols_); + } else { + return colsBuilder_.getMessageList(); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn 
cols = 2; + */ + public int getColsCount() { + if (colsBuilder_ == null) { + return cols_.size(); + } else { + return colsBuilder_.getCount(); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn getCols(int index) { + if (colsBuilder_ == null) { + return cols_.get(index); + } else { + return colsBuilder_.getMessage(index); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public Builder setCols( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn value) { + if (colsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColsIsMutable(); + cols_.set(index, value); + onChanged(); + } else { + colsBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public Builder setCols( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder builderForValue) { + if (colsBuilder_ == null) { + ensureColsIsMutable(); + cols_.set(index, builderForValue.build()); + onChanged(); + } else { + colsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public Builder addCols(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn value) { + if (colsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColsIsMutable(); + cols_.add(value); + onChanged(); + } else { + colsBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public Builder addCols( + int 
index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn value) { + if (colsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColsIsMutable(); + cols_.add(index, value); + onChanged(); + } else { + colsBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public Builder addCols( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder builderForValue) { + if (colsBuilder_ == null) { + ensureColsIsMutable(); + cols_.add(builderForValue.build()); + onChanged(); + } else { + colsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public Builder addCols( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder builderForValue) { + if (colsBuilder_ == null) { + ensureColsIsMutable(); + cols_.add(index, builderForValue.build()); + onChanged(); + } else { + colsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public Builder addAllCols( + java.lang.Iterable values) { + if (colsBuilder_ == null) { + ensureColsIsMutable(); + super.addAll(values, cols_); + onChanged(); + } else { + colsBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public Builder clearCols() { + if (colsBuilder_ == null) { + cols_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + colsBuilder_.clear(); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + 
*/ + public Builder removeCols(int index) { + if (colsBuilder_ == null) { + ensureColsIsMutable(); + cols_.remove(index); + onChanged(); + } else { + colsBuilder_.remove(index); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder getColsBuilder( + int index) { + return getColsFieldBuilder().getBuilder(index); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder getColsOrBuilder( + int index) { + if (colsBuilder_ == null) { + return cols_.get(index); } else { + return colsBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public java.util.List + getColsOrBuilderList() { + if (colsBuilder_ != null) { + return colsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(cols_); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder addColsBuilder() { + return getColsFieldBuilder().addBuilder( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.getDefaultInstance()); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder addColsBuilder( + int index) { + return getColsFieldBuilder().addBuilder( + index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.getDefaultInstance()); + } + /** + * repeated 
.org.apache.hadoop.hive.metastore.hbase.PrimaryKey.PrimaryKeyColumn cols = 2; + */ + public java.util.List + getColsBuilderList() { + return getColsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder> + getColsFieldBuilder() { + if (colsBuilder_ == null) { + colsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumnOrBuilder>( + cols_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + cols_ = null; + } + return colsBuilder_; + } + + // optional bool enable_constraint = 3; + private boolean enableConstraint_ ; + /** + * optional bool enable_constraint = 3; + */ + public boolean hasEnableConstraint() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional bool enable_constraint = 3; + */ + public boolean getEnableConstraint() { + return enableConstraint_; + } + /** + * optional bool enable_constraint = 3; + */ + public Builder setEnableConstraint(boolean value) { + bitField0_ |= 0x00000004; + enableConstraint_ = value; + onChanged(); + return this; + } + /** + * optional bool enable_constraint = 3; + */ + public Builder clearEnableConstraint() { + bitField0_ = (bitField0_ & ~0x00000004); + enableConstraint_ = false; + onChanged(); + return this; + } + + // optional bool validate_constraint = 4; + private boolean validateConstraint_ ; + /** + * optional bool validate_constraint = 4; + */ + public boolean hasValidateConstraint() 
{ + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional bool validate_constraint = 4; + */ + public boolean getValidateConstraint() { + return validateConstraint_; + } + /** + * optional bool validate_constraint = 4; + */ + public Builder setValidateConstraint(boolean value) { + bitField0_ |= 0x00000008; + validateConstraint_ = value; + onChanged(); + return this; + } + /** + * optional bool validate_constraint = 4; + */ + public Builder clearValidateConstraint() { + bitField0_ = (bitField0_ & ~0x00000008); + validateConstraint_ = false; + onChanged(); + return this; + } + + // optional bool rely_constraint = 5; + private boolean relyConstraint_ ; + /** + * optional bool rely_constraint = 5; + */ + public boolean hasRelyConstraint() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional bool rely_constraint = 5; + */ + public boolean getRelyConstraint() { + return relyConstraint_; + } + /** + * optional bool rely_constraint = 5; + */ + public Builder setRelyConstraint(boolean value) { + bitField0_ |= 0x00000010; + relyConstraint_ = value; + onChanged(); + return this; + } + /** + * optional bool rely_constraint = 5; + */ + public Builder clearRelyConstraint() { + bitField0_ = (bitField0_ & ~0x00000010); + relyConstraint_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.PrimaryKey) + } + + static { + defaultInstance = new PrimaryKey(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.PrimaryKey) + } + + public interface ForeignKeysOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + java.util.List + getFksList(); + /** + * repeated 
.org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey getFks(int index); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + int getFksCount(); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + java.util.List + getFksOrBuilderList(); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder getFksOrBuilder( + int index); + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ForeignKeys} + */ + public static final class ForeignKeys extends + com.google.protobuf.GeneratedMessage + implements ForeignKeysOrBuilder { + // Use ForeignKeys.newBuilder() to construct. + private ForeignKeys(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ForeignKeys(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ForeignKeys defaultInstance; + public static ForeignKeys getDefaultInstance() { + return defaultInstance; + } + + public ForeignKeys getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ForeignKeys( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while 
(!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + fks_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + fks_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + fks_ = java.util.Collections.unmodifiableList(fks_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ForeignKeys parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { 
+ return new ForeignKeys(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public interface ForeignKeyOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string fk_name = 1; + /** + * required string fk_name = 1; + */ + boolean hasFkName(); + /** + * required string fk_name = 1; + */ + java.lang.String getFkName(); + /** + * required string fk_name = 1; + */ + com.google.protobuf.ByteString + getFkNameBytes(); + + // required string referenced_db_name = 2; + /** + * required string referenced_db_name = 2; + */ + boolean hasReferencedDbName(); + /** + * required string referenced_db_name = 2; + */ + java.lang.String getReferencedDbName(); + /** + * required string referenced_db_name = 2; + */ + com.google.protobuf.ByteString + getReferencedDbNameBytes(); + + // required string referenced_table_name = 3; + /** + * required string referenced_table_name = 3; + */ + boolean hasReferencedTableName(); + /** + * required string referenced_table_name = 3; + */ + java.lang.String getReferencedTableName(); + /** + * required string referenced_table_name = 3; + */ + com.google.protobuf.ByteString + getReferencedTableNameBytes(); + + // optional string referenced_pk_name = 4; + /** + * optional string referenced_pk_name = 4; + */ + boolean hasReferencedPkName(); + /** + * optional string referenced_pk_name = 4; + */ + java.lang.String getReferencedPkName(); + /** + * optional string referenced_pk_name = 4; + */ + com.google.protobuf.ByteString + getReferencedPkNameBytes(); + + // optional int32 update_rule = 5; + /** + * optional int32 update_rule = 5; + */ + boolean hasUpdateRule(); + /** + * optional int32 update_rule = 5; + */ + int getUpdateRule(); + + // optional int32 delete_rule = 6; + /** + * optional int32 delete_rule = 6; + */ + boolean hasDeleteRule(); + /** + * optional int32 delete_rule = 6; + */ + int getDeleteRule(); + + // repeated 
.org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + java.util.List + getColsList(); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn getCols(int index); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + int getColsCount(); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + java.util.List + getColsOrBuilderList(); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder getColsOrBuilder( + int index); + + // optional bool enable_constraint = 8; + /** + * optional bool enable_constraint = 8; + */ + boolean hasEnableConstraint(); + /** + * optional bool enable_constraint = 8; + */ + boolean getEnableConstraint(); + + // optional bool validate_constraint = 9; + /** + * optional bool validate_constraint = 9; + */ + boolean hasValidateConstraint(); + /** + * optional bool validate_constraint = 9; + */ + boolean getValidateConstraint(); + + // optional bool rely_constraint = 10; + /** + * optional bool rely_constraint = 10; + */ + boolean hasRelyConstraint(); + /** + * optional bool rely_constraint = 10; + */ + boolean getRelyConstraint(); + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey} + */ + public static final class ForeignKey extends + com.google.protobuf.GeneratedMessage + implements ForeignKeyOrBuilder { + // Use ForeignKey.newBuilder() to construct. 
+ private ForeignKey(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ForeignKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ForeignKey defaultInstance; + public static ForeignKey getDefaultInstance() { + return defaultInstance; + } + + public ForeignKey getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ForeignKey( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + fkName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + referencedDbName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + referencedTableName_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + referencedPkName_ = input.readBytes(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + updateRule_ = input.readInt32(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + deleteRule_ = input.readInt32(); + break; + } + case 58: { + if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + cols_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000040; + } + 
cols_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.PARSER, extensionRegistry)); + break; + } + case 64: { + bitField0_ |= 0x00000040; + enableConstraint_ = input.readBool(); + break; + } + case 72: { + bitField0_ |= 0x00000080; + validateConstraint_ = input.readBool(); + break; + } + case 80: { + bitField0_ |= 0x00000100; + relyConstraint_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + cols_ = java.util.Collections.unmodifiableList(cols_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ForeignKey parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new 
ForeignKey(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + public interface ForeignKeyColumnOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string column_name = 1; + /** + * required string column_name = 1; + */ + boolean hasColumnName(); + /** + * required string column_name = 1; + */ + java.lang.String getColumnName(); + /** + * required string column_name = 1; + */ + com.google.protobuf.ByteString + getColumnNameBytes(); + + // required string referenced_column_name = 2; + /** + * required string referenced_column_name = 2; + */ + boolean hasReferencedColumnName(); + /** + * required string referenced_column_name = 2; + */ + java.lang.String getReferencedColumnName(); + /** + * required string referenced_column_name = 2; + */ + com.google.protobuf.ByteString + getReferencedColumnNameBytes(); + + // required sint32 key_seq = 3; + /** + * required sint32 key_seq = 3; + */ + boolean hasKeySeq(); + /** + * required sint32 key_seq = 3; + */ + int getKeySeq(); + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn} + */ + public static final class ForeignKeyColumn extends + com.google.protobuf.GeneratedMessage + implements ForeignKeyColumnOrBuilder { + // Use ForeignKeyColumn.newBuilder() to construct. 
+ private ForeignKeyColumn(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ForeignKeyColumn(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ForeignKeyColumn defaultInstance; + public static ForeignKeyColumn getDefaultInstance() { + return defaultInstance; + } + + public ForeignKeyColumn getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ForeignKeyColumn( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + columnName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + referencedColumnName_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + keySeq_ = input.readSInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final 
com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ForeignKeyColumn parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ForeignKeyColumn(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required string column_name = 1; + public static final int COLUMN_NAME_FIELD_NUMBER = 1; + private java.lang.Object columnName_; + /** + * required string column_name = 1; + */ + public boolean hasColumnName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string column_name = 1; + */ + public java.lang.String getColumnName() { + java.lang.Object ref = columnName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + columnName_ = s; + } + return s; + } + } + /** + * required 
string column_name = 1; + */ + public com.google.protobuf.ByteString + getColumnNameBytes() { + java.lang.Object ref = columnName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + columnName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string referenced_column_name = 2; + public static final int REFERENCED_COLUMN_NAME_FIELD_NUMBER = 2; + private java.lang.Object referencedColumnName_; + /** + * required string referenced_column_name = 2; + */ + public boolean hasReferencedColumnName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string referenced_column_name = 2; + */ + public java.lang.String getReferencedColumnName() { + java.lang.Object ref = referencedColumnName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + referencedColumnName_ = s; + } + return s; + } + } + /** + * required string referenced_column_name = 2; + */ + public com.google.protobuf.ByteString + getReferencedColumnNameBytes() { + java.lang.Object ref = referencedColumnName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + referencedColumnName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required sint32 key_seq = 3; + public static final int KEY_SEQ_FIELD_NUMBER = 3; + private int keySeq_; + /** + * required sint32 key_seq = 3; + */ + public boolean hasKeySeq() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required sint32 key_seq = 3; + */ + public int getKeySeq() { + return keySeq_; + } + + private void initFields() { + columnName_ = ""; + 
referencedColumnName_ = ""; + keySeq_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasColumnName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasReferencedColumnName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasKeySeq()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getColumnNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getReferencedColumnNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeSInt32(3, keySeq_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getColumnNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getReferencedColumnNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeSInt32Size(3, keySeq_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn 
parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseDelimitedFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + 
return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder.class); + } + + // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + columnName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + referencedColumnName_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + keySeq_ = 0; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_descriptor; + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn getDefaultInstanceForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.getDefaultInstance(); + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn build() { + 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn buildPartial() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.columnName_ = columnName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.referencedColumnName_ = referencedColumnName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.keySeq_ = keySeq_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn) { + return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn other) { + if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.getDefaultInstance()) return this; + if (other.hasColumnName()) { + bitField0_ |= 0x00000001; + columnName_ = other.columnName_; + onChanged(); + } + if (other.hasReferencedColumnName()) { + bitField0_ |= 0x00000002; + referencedColumnName_ = 
other.referencedColumnName_; + onChanged(); + } + if (other.hasKeySeq()) { + setKeySeq(other.getKeySeq()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasColumnName()) { + + return false; + } + if (!hasReferencedColumnName()) { + + return false; + } + if (!hasKeySeq()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string column_name = 1; + private java.lang.Object columnName_ = ""; + /** + * required string column_name = 1; + */ + public boolean hasColumnName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string column_name = 1; + */ + public java.lang.String getColumnName() { + java.lang.Object ref = columnName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + columnName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string column_name = 1; + */ + public com.google.protobuf.ByteString + getColumnNameBytes() { + java.lang.Object ref = columnName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + columnName_ = b; + return b; 
+ } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string column_name = 1; + */ + public Builder setColumnName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + columnName_ = value; + onChanged(); + return this; + } + /** + * required string column_name = 1; + */ + public Builder clearColumnName() { + bitField0_ = (bitField0_ & ~0x00000001); + columnName_ = getDefaultInstance().getColumnName(); + onChanged(); + return this; + } + /** + * required string column_name = 1; + */ + public Builder setColumnNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + columnName_ = value; + onChanged(); + return this; + } + + // required string referenced_column_name = 2; + private java.lang.Object referencedColumnName_ = ""; + /** + * required string referenced_column_name = 2; + */ + public boolean hasReferencedColumnName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string referenced_column_name = 2; + */ + public java.lang.String getReferencedColumnName() { + java.lang.Object ref = referencedColumnName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + referencedColumnName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string referenced_column_name = 2; + */ + public com.google.protobuf.ByteString + getReferencedColumnNameBytes() { + java.lang.Object ref = referencedColumnName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + referencedColumnName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string referenced_column_name = 2; + */ + public Builder setReferencedColumnName( + 
java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + referencedColumnName_ = value; + onChanged(); + return this; + } + /** + * required string referenced_column_name = 2; + */ + public Builder clearReferencedColumnName() { + bitField0_ = (bitField0_ & ~0x00000002); + referencedColumnName_ = getDefaultInstance().getReferencedColumnName(); + onChanged(); + return this; + } + /** + * required string referenced_column_name = 2; + */ + public Builder setReferencedColumnNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + referencedColumnName_ = value; + onChanged(); + return this; + } + + // required sint32 key_seq = 3; + private int keySeq_ ; + /** + * required sint32 key_seq = 3; + */ + public boolean hasKeySeq() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required sint32 key_seq = 3; + */ + public int getKeySeq() { + return keySeq_; + } + /** + * required sint32 key_seq = 3; + */ + public Builder setKeySeq(int value) { + bitField0_ |= 0x00000004; + keySeq_ = value; + onChanged(); + return this; + } + /** + * required sint32 key_seq = 3; + */ + public Builder clearKeySeq() { + bitField0_ = (bitField0_ & ~0x00000004); + keySeq_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn) + } + + static { + defaultInstance = new ForeignKeyColumn(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn) + } + + private int bitField0_; + // required string fk_name = 1; + public static final int FK_NAME_FIELD_NUMBER = 1; + private java.lang.Object fkName_; + /** + * required string fk_name = 1; + */ + public boolean hasFkName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + 
} + /** + * required string fk_name = 1; + */ + public java.lang.String getFkName() { + java.lang.Object ref = fkName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + fkName_ = s; + } + return s; + } + } + /** + * required string fk_name = 1; + */ + public com.google.protobuf.ByteString + getFkNameBytes() { + java.lang.Object ref = fkName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fkName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string referenced_db_name = 2; + public static final int REFERENCED_DB_NAME_FIELD_NUMBER = 2; + private java.lang.Object referencedDbName_; + /** + * required string referenced_db_name = 2; + */ + public boolean hasReferencedDbName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string referenced_db_name = 2; + */ + public java.lang.String getReferencedDbName() { + java.lang.Object ref = referencedDbName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + referencedDbName_ = s; + } + return s; + } + } + /** + * required string referenced_db_name = 2; + */ + public com.google.protobuf.ByteString + getReferencedDbNameBytes() { + java.lang.Object ref = referencedDbName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + referencedDbName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string referenced_table_name = 3; + public 
static final int REFERENCED_TABLE_NAME_FIELD_NUMBER = 3; + private java.lang.Object referencedTableName_; + /** + * required string referenced_table_name = 3; + */ + public boolean hasReferencedTableName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required string referenced_table_name = 3; + */ + public java.lang.String getReferencedTableName() { + java.lang.Object ref = referencedTableName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + referencedTableName_ = s; + } + return s; + } + } + /** + * required string referenced_table_name = 3; + */ + public com.google.protobuf.ByteString + getReferencedTableNameBytes() { + java.lang.Object ref = referencedTableName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + referencedTableName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string referenced_pk_name = 4; + public static final int REFERENCED_PK_NAME_FIELD_NUMBER = 4; + private java.lang.Object referencedPkName_; + /** + * optional string referenced_pk_name = 4; + */ + public boolean hasReferencedPkName() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional string referenced_pk_name = 4; + */ + public java.lang.String getReferencedPkName() { + java.lang.Object ref = referencedPkName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + referencedPkName_ = s; + } + return s; + } + } + /** + * optional string referenced_pk_name = 4; + */ + public com.google.protobuf.ByteString + 
getReferencedPkNameBytes() { + java.lang.Object ref = referencedPkName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + referencedPkName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional int32 update_rule = 5; + public static final int UPDATE_RULE_FIELD_NUMBER = 5; + private int updateRule_; + /** + * optional int32 update_rule = 5; + */ + public boolean hasUpdateRule() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional int32 update_rule = 5; + */ + public int getUpdateRule() { + return updateRule_; + } + + // optional int32 delete_rule = 6; + public static final int DELETE_RULE_FIELD_NUMBER = 6; + private int deleteRule_; + /** + * optional int32 delete_rule = 6; + */ + public boolean hasDeleteRule() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional int32 delete_rule = 6; + */ + public int getDeleteRule() { + return deleteRule_; + } + + // repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + public static final int COLS_FIELD_NUMBER = 7; + private java.util.List cols_; + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public java.util.List getColsList() { + return cols_; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public java.util.List + getColsOrBuilderList() { + return cols_; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public int getColsCount() { + return cols_.size(); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn 
getCols(int index) { + return cols_.get(index); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder getColsOrBuilder( + int index) { + return cols_.get(index); + } + + // optional bool enable_constraint = 8; + public static final int ENABLE_CONSTRAINT_FIELD_NUMBER = 8; + private boolean enableConstraint_; + /** + * optional bool enable_constraint = 8; + */ + public boolean hasEnableConstraint() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + /** + * optional bool enable_constraint = 8; + */ + public boolean getEnableConstraint() { + return enableConstraint_; + } + + // optional bool validate_constraint = 9; + public static final int VALIDATE_CONSTRAINT_FIELD_NUMBER = 9; + private boolean validateConstraint_; + /** + * optional bool validate_constraint = 9; + */ + public boolean hasValidateConstraint() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + /** + * optional bool validate_constraint = 9; + */ + public boolean getValidateConstraint() { + return validateConstraint_; + } + + // optional bool rely_constraint = 10; + public static final int RELY_CONSTRAINT_FIELD_NUMBER = 10; + private boolean relyConstraint_; + /** + * optional bool rely_constraint = 10; + */ + public boolean hasRelyConstraint() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + /** + * optional bool rely_constraint = 10; + */ + public boolean getRelyConstraint() { + return relyConstraint_; + } + + private void initFields() { + fkName_ = ""; + referencedDbName_ = ""; + referencedTableName_ = ""; + referencedPkName_ = ""; + updateRule_ = 0; + deleteRule_ = 0; + cols_ = java.util.Collections.emptyList(); + enableConstraint_ = false; + validateConstraint_ = false; + relyConstraint_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte 
isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFkName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasReferencedDbName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasReferencedTableName()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getColsCount(); i++) { + if (!getCols(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getFkNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getReferencedDbNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getReferencedTableNameBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, getReferencedPkNameBytes()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeInt32(5, updateRule_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeInt32(6, deleteRule_); + } + for (int i = 0; i < cols_.size(); i++) { + output.writeMessage(7, cols_.get(i)); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output.writeBool(8, enableConstraint_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + output.writeBool(9, validateConstraint_); + } + if (((bitField0_ & 0x00000100) == 0x00000100)) { + output.writeBool(10, relyConstraint_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getFkNameBytes()); + } + if (((bitField0_ & 
0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getReferencedDbNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getReferencedTableNameBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, getReferencedPkNameBytes()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(5, updateRule_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(6, deleteRule_); + } + for (int i = 0; i < cols_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, cols_.get(i)); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(8, enableConstraint_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(9, validateConstraint_); + } + if (((bitField0_ & 0x00000100) == 0x00000100)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(10, relyConstraint_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom( + com.google.protobuf.ByteString data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder.class); + } + + // Construct using 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColsFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + fkName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + referencedDbName_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + referencedTableName_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + referencedPkName_ = ""; + bitField0_ = (bitField0_ & ~0x00000008); + updateRule_ = 0; + bitField0_ = (bitField0_ & ~0x00000010); + deleteRule_ = 0; + bitField0_ = (bitField0_ & ~0x00000020); + if (colsBuilder_ == null) { + cols_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000040); + } else { + colsBuilder_.clear(); + } + enableConstraint_ = false; + bitField0_ = (bitField0_ & ~0x00000080); + validateConstraint_ = false; + bitField0_ = (bitField0_ & ~0x00000100); + relyConstraint_ = false; + bitField0_ = (bitField0_ & ~0x00000200); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor; + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey getDefaultInstanceForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.getDefaultInstance(); + } + + public 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey build() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey buildPartial() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.fkName_ = fkName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.referencedDbName_ = referencedDbName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.referencedTableName_ = referencedTableName_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.referencedPkName_ = referencedPkName_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.updateRule_ = updateRule_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.deleteRule_ = deleteRule_; + if (colsBuilder_ == null) { + if (((bitField0_ & 0x00000040) == 0x00000040)) { + cols_ = java.util.Collections.unmodifiableList(cols_); + bitField0_ = (bitField0_ & ~0x00000040); + } + result.cols_ = cols_; + } else { + result.cols_ = colsBuilder_.build(); + } + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000040; + } + result.enableConstraint_ = enableConstraint_; + if (((from_bitField0_ & 0x00000100) == 0x00000100)) { + to_bitField0_ |= 0x00000080; + } + result.validateConstraint_ = 
validateConstraint_; + if (((from_bitField0_ & 0x00000200) == 0x00000200)) { + to_bitField0_ |= 0x00000100; + } + result.relyConstraint_ = relyConstraint_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey) { + return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey other) { + if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.getDefaultInstance()) return this; + if (other.hasFkName()) { + bitField0_ |= 0x00000001; + fkName_ = other.fkName_; + onChanged(); + } + if (other.hasReferencedDbName()) { + bitField0_ |= 0x00000002; + referencedDbName_ = other.referencedDbName_; + onChanged(); + } + if (other.hasReferencedTableName()) { + bitField0_ |= 0x00000004; + referencedTableName_ = other.referencedTableName_; + onChanged(); + } + if (other.hasReferencedPkName()) { + bitField0_ |= 0x00000008; + referencedPkName_ = other.referencedPkName_; + onChanged(); + } + if (other.hasUpdateRule()) { + setUpdateRule(other.getUpdateRule()); + } + if (other.hasDeleteRule()) { + setDeleteRule(other.getDeleteRule()); + } + if (colsBuilder_ == null) { + if (!other.cols_.isEmpty()) { + if (cols_.isEmpty()) { + cols_ = other.cols_; + bitField0_ = (bitField0_ & ~0x00000040); + } else { + ensureColsIsMutable(); + cols_.addAll(other.cols_); + } + onChanged(); + } + } else { + if (!other.cols_.isEmpty()) { + if (colsBuilder_.isEmpty()) { + colsBuilder_.dispose(); + colsBuilder_ = null; + cols_ = other.cols_; + bitField0_ = (bitField0_ & ~0x00000040); + colsBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getColsFieldBuilder() : null; + } else { + colsBuilder_.addAllMessages(other.cols_); + } + } + } + if (other.hasEnableConstraint()) { + setEnableConstraint(other.getEnableConstraint()); + } + if (other.hasValidateConstraint()) { + setValidateConstraint(other.getValidateConstraint()); + } + if (other.hasRelyConstraint()) { + setRelyConstraint(other.getRelyConstraint()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFkName()) { + + return false; + } + if (!hasReferencedDbName()) { + + return false; + } + if (!hasReferencedTableName()) { + + return false; + } + for (int i = 0; i < getColsCount(); i++) { + if (!getCols(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string fk_name = 1; + private java.lang.Object fkName_ = ""; + /** + * required string fk_name = 1; + */ + public boolean hasFkName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string fk_name = 1; + */ + public java.lang.String getFkName() { + java.lang.Object ref = fkName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + fkName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * 
required string fk_name = 1; + */ + public com.google.protobuf.ByteString + getFkNameBytes() { + java.lang.Object ref = fkName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fkName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string fk_name = 1; + */ + public Builder setFkName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + fkName_ = value; + onChanged(); + return this; + } + /** + * required string fk_name = 1; + */ + public Builder clearFkName() { + bitField0_ = (bitField0_ & ~0x00000001); + fkName_ = getDefaultInstance().getFkName(); + onChanged(); + return this; + } + /** + * required string fk_name = 1; + */ + public Builder setFkNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + fkName_ = value; + onChanged(); + return this; + } + + // required string referenced_db_name = 2; + private java.lang.Object referencedDbName_ = ""; + /** + * required string referenced_db_name = 2; + */ + public boolean hasReferencedDbName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string referenced_db_name = 2; + */ + public java.lang.String getReferencedDbName() { + java.lang.Object ref = referencedDbName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + referencedDbName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string referenced_db_name = 2; + */ + public com.google.protobuf.ByteString + getReferencedDbNameBytes() { + java.lang.Object ref = referencedDbName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + 
referencedDbName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string referenced_db_name = 2; + */ + public Builder setReferencedDbName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + referencedDbName_ = value; + onChanged(); + return this; + } + /** + * required string referenced_db_name = 2; + */ + public Builder clearReferencedDbName() { + bitField0_ = (bitField0_ & ~0x00000002); + referencedDbName_ = getDefaultInstance().getReferencedDbName(); + onChanged(); + return this; + } + /** + * required string referenced_db_name = 2; + */ + public Builder setReferencedDbNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + referencedDbName_ = value; + onChanged(); + return this; + } + + // required string referenced_table_name = 3; + private java.lang.Object referencedTableName_ = ""; + /** + * required string referenced_table_name = 3; + */ + public boolean hasReferencedTableName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required string referenced_table_name = 3; + */ + public java.lang.String getReferencedTableName() { + java.lang.Object ref = referencedTableName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + referencedTableName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string referenced_table_name = 3; + */ + public com.google.protobuf.ByteString + getReferencedTableNameBytes() { + java.lang.Object ref = referencedTableName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + referencedTableName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required 
string referenced_table_name = 3; + */ + public Builder setReferencedTableName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + referencedTableName_ = value; + onChanged(); + return this; + } + /** + * required string referenced_table_name = 3; + */ + public Builder clearReferencedTableName() { + bitField0_ = (bitField0_ & ~0x00000004); + referencedTableName_ = getDefaultInstance().getReferencedTableName(); + onChanged(); + return this; + } + /** + * required string referenced_table_name = 3; + */ + public Builder setReferencedTableNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + referencedTableName_ = value; + onChanged(); + return this; + } + + // optional string referenced_pk_name = 4; + private java.lang.Object referencedPkName_ = ""; + /** + * optional string referenced_pk_name = 4; + */ + public boolean hasReferencedPkName() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional string referenced_pk_name = 4; + */ + public java.lang.String getReferencedPkName() { + java.lang.Object ref = referencedPkName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + referencedPkName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string referenced_pk_name = 4; + */ + public com.google.protobuf.ByteString + getReferencedPkNameBytes() { + java.lang.Object ref = referencedPkName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + referencedPkName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string referenced_pk_name = 4; + */ + public Builder setReferencedPkName( + java.lang.String value) { + if (value == null) { + 
throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + referencedPkName_ = value; + onChanged(); + return this; + } + /** + * optional string referenced_pk_name = 4; + */ + public Builder clearReferencedPkName() { + bitField0_ = (bitField0_ & ~0x00000008); + referencedPkName_ = getDefaultInstance().getReferencedPkName(); + onChanged(); + return this; + } + /** + * optional string referenced_pk_name = 4; + */ + public Builder setReferencedPkNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + referencedPkName_ = value; + onChanged(); + return this; + } + + // optional int32 update_rule = 5; + private int updateRule_ ; + /** + * optional int32 update_rule = 5; + */ + public boolean hasUpdateRule() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional int32 update_rule = 5; + */ + public int getUpdateRule() { + return updateRule_; + } + /** + * optional int32 update_rule = 5; + */ + public Builder setUpdateRule(int value) { + bitField0_ |= 0x00000010; + updateRule_ = value; + onChanged(); + return this; + } + /** + * optional int32 update_rule = 5; + */ + public Builder clearUpdateRule() { + bitField0_ = (bitField0_ & ~0x00000010); + updateRule_ = 0; + onChanged(); + return this; + } + + // optional int32 delete_rule = 6; + private int deleteRule_ ; + /** + * optional int32 delete_rule = 6; + */ + public boolean hasDeleteRule() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional int32 delete_rule = 6; + */ + public int getDeleteRule() { + return deleteRule_; + } + /** + * optional int32 delete_rule = 6; + */ + public Builder setDeleteRule(int value) { + bitField0_ |= 0x00000020; + deleteRule_ = value; + onChanged(); + return this; + } + /** + * optional int32 delete_rule = 6; + */ + public Builder clearDeleteRule() { + bitField0_ = (bitField0_ & ~0x00000020); + deleteRule_ = 0; + onChanged(); + return this; + } + + 
// repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + private java.util.List cols_ = + java.util.Collections.emptyList(); + private void ensureColsIsMutable() { + if (!((bitField0_ & 0x00000040) == 0x00000040)) { + cols_ = new java.util.ArrayList(cols_); + bitField0_ |= 0x00000040; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder> colsBuilder_; + + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public java.util.List getColsList() { + if (colsBuilder_ == null) { + return java.util.Collections.unmodifiableList(cols_); + } else { + return colsBuilder_.getMessageList(); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public int getColsCount() { + if (colsBuilder_ == null) { + return cols_.size(); + } else { + return colsBuilder_.getCount(); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn getCols(int index) { + if (colsBuilder_ == null) { + return cols_.get(index); + } else { + return colsBuilder_.getMessage(index); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public Builder setCols( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn value) { + if (colsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + 
ensureColsIsMutable(); + cols_.set(index, value); + onChanged(); + } else { + colsBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public Builder setCols( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder builderForValue) { + if (colsBuilder_ == null) { + ensureColsIsMutable(); + cols_.set(index, builderForValue.build()); + onChanged(); + } else { + colsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public Builder addCols(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn value) { + if (colsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColsIsMutable(); + cols_.add(value); + onChanged(); + } else { + colsBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public Builder addCols( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn value) { + if (colsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColsIsMutable(); + cols_.add(index, value); + onChanged(); + } else { + colsBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public Builder addCols( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder builderForValue) { + if (colsBuilder_ == null) { + ensureColsIsMutable(); + cols_.add(builderForValue.build()); + onChanged(); + } else { + 
colsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public Builder addCols( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder builderForValue) { + if (colsBuilder_ == null) { + ensureColsIsMutable(); + cols_.add(index, builderForValue.build()); + onChanged(); + } else { + colsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public Builder addAllCols( + java.lang.Iterable values) { + if (colsBuilder_ == null) { + ensureColsIsMutable(); + super.addAll(values, cols_); + onChanged(); + } else { + colsBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public Builder clearCols() { + if (colsBuilder_ == null) { + cols_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000040); + onChanged(); + } else { + colsBuilder_.clear(); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public Builder removeCols(int index) { + if (colsBuilder_ == null) { + ensureColsIsMutable(); + cols_.remove(index); + onChanged(); + } else { + colsBuilder_.remove(index); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder getColsBuilder( + int index) { + return getColsFieldBuilder().getBuilder(index); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder getColsOrBuilder( + int index) { + if (colsBuilder_ == null) { + return cols_.get(index); } else { + return colsBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public java.util.List + getColsOrBuilderList() { + if (colsBuilder_ != null) { + return colsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(cols_); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder addColsBuilder() { + return getColsFieldBuilder().addBuilder( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.getDefaultInstance()); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder addColsBuilder( + int index) { + return getColsFieldBuilder().addBuilder( + index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.getDefaultInstance()); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey.ForeignKeyColumn cols = 7; + */ + public java.util.List + getColsBuilderList() { + return getColsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder, 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder> + getColsFieldBuilder() { + if (colsBuilder_ == null) { + colsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumnOrBuilder>( + cols_, + ((bitField0_ & 0x00000040) == 0x00000040), + getParentForChildren(), + isClean()); + cols_ = null; + } + return colsBuilder_; + } + + // optional bool enable_constraint = 8; + private boolean enableConstraint_ ; + /** + * optional bool enable_constraint = 8; + */ + public boolean hasEnableConstraint() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + /** + * optional bool enable_constraint = 8; + */ + public boolean getEnableConstraint() { + return enableConstraint_; + } + /** + * optional bool enable_constraint = 8; + */ + public Builder setEnableConstraint(boolean value) { + bitField0_ |= 0x00000080; + enableConstraint_ = value; + onChanged(); + return this; + } + /** + * optional bool enable_constraint = 8; + */ + public Builder clearEnableConstraint() { + bitField0_ = (bitField0_ & ~0x00000080); + enableConstraint_ = false; + onChanged(); + return this; + } + + // optional bool validate_constraint = 9; + private boolean validateConstraint_ ; + /** + * optional bool validate_constraint = 9; + */ + public boolean hasValidateConstraint() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + /** + * optional bool validate_constraint = 9; + */ + public boolean getValidateConstraint() { + return validateConstraint_; + } + /** + * optional bool validate_constraint = 9; + */ + public Builder setValidateConstraint(boolean value) { + bitField0_ |= 0x00000100; + validateConstraint_ = value; + onChanged(); + 
return this; + } + /** + * optional bool validate_constraint = 9; + */ + public Builder clearValidateConstraint() { + bitField0_ = (bitField0_ & ~0x00000100); + validateConstraint_ = false; + onChanged(); + return this; + } + + // optional bool rely_constraint = 10; + private boolean relyConstraint_ ; + /** + * optional bool rely_constraint = 10; + */ + public boolean hasRelyConstraint() { + return ((bitField0_ & 0x00000200) == 0x00000200); + } + /** + * optional bool rely_constraint = 10; + */ + public boolean getRelyConstraint() { + return relyConstraint_; + } + /** + * optional bool rely_constraint = 10; + */ + public Builder setRelyConstraint(boolean value) { + bitField0_ |= 0x00000200; + relyConstraint_ = value; + onChanged(); + return this; + } + /** + * optional bool rely_constraint = 10; + */ + public Builder clearRelyConstraint() { + bitField0_ = (bitField0_ & ~0x00000200); + relyConstraint_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey) + } + + static { + defaultInstance = new ForeignKey(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey) + } + + // repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + public static final int FKS_FIELD_NUMBER = 1; + private java.util.List fks_; + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public java.util.List getFksList() { + return fks_; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public java.util.List + getFksOrBuilderList() { + return fks_; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public int getFksCount() { + return fks_.size(); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; 
+ */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey getFks(int index) { + return fks_.get(index); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder getFksOrBuilder( + int index) { + return fks_.get(index); + } + + private void initFields() { + fks_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getFksCount(); i++) { + if (!getFks(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < fks_.size(); i++) { + output.writeMessage(1, fks_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < fks_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, fks_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + 
public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.ForeignKeys} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeysOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.Builder.class); + } + + // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private 
Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFksFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (fksBuilder_ == null) { + fks_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + fksBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor; + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys getDefaultInstanceForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.getDefaultInstance(); + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys build() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys buildPartial() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys(this); + int from_bitField0_ = bitField0_; + if (fksBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + fks_ = java.util.Collections.unmodifiableList(fks_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.fks_ = fks_; + } else { + result.fks_ = fksBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder 
mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys) { + return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys other) { + if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.getDefaultInstance()) return this; + if (fksBuilder_ == null) { + if (!other.fks_.isEmpty()) { + if (fks_.isEmpty()) { + fks_ = other.fks_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureFksIsMutable(); + fks_.addAll(other.fks_); + } + onChanged(); + } + } else { + if (!other.fks_.isEmpty()) { + if (fksBuilder_.isEmpty()) { + fksBuilder_.dispose(); + fksBuilder_ = null; + fks_ = other.fks_; + bitField0_ = (bitField0_ & ~0x00000001); + fksBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getFksFieldBuilder() : null; + } else { + fksBuilder_.addAllMessages(other.fks_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getFksCount(); i++) { + if (!getFks(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + private java.util.List fks_ = + java.util.Collections.emptyList(); + private void ensureFksIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + fks_ = new java.util.ArrayList(fks_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder> fksBuilder_; + + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public java.util.List getFksList() { + if (fksBuilder_ == null) { + return java.util.Collections.unmodifiableList(fks_); + } else { + return fksBuilder_.getMessageList(); + } + } + /** + * repeated 
.org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public int getFksCount() { + if (fksBuilder_ == null) { + return fks_.size(); + } else { + return fksBuilder_.getCount(); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey getFks(int index) { + if (fksBuilder_ == null) { + return fks_.get(index); + } else { + return fksBuilder_.getMessage(index); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public Builder setFks( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey value) { + if (fksBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFksIsMutable(); + fks_.set(index, value); + onChanged(); + } else { + fksBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public Builder setFks( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder builderForValue) { + if (fksBuilder_ == null) { + ensureFksIsMutable(); + fks_.set(index, builderForValue.build()); + onChanged(); + } else { + fksBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public Builder addFks(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey value) { + if (fksBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFksIsMutable(); + fks_.add(value); + onChanged(); + } else { + fksBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public Builder addFks( + int index, 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey value) { + if (fksBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFksIsMutable(); + fks_.add(index, value); + onChanged(); + } else { + fksBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public Builder addFks( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder builderForValue) { + if (fksBuilder_ == null) { + ensureFksIsMutable(); + fks_.add(builderForValue.build()); + onChanged(); + } else { + fksBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public Builder addFks( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder builderForValue) { + if (fksBuilder_ == null) { + ensureFksIsMutable(); + fks_.add(index, builderForValue.build()); + onChanged(); + } else { + fksBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public Builder addAllFks( + java.lang.Iterable values) { + if (fksBuilder_ == null) { + ensureFksIsMutable(); + super.addAll(values, fks_); + onChanged(); + } else { + fksBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public Builder clearFks() { + if (fksBuilder_ == null) { + fks_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + fksBuilder_.clear(); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public Builder removeFks(int index) { + if (fksBuilder_ == null) { + 
ensureFksIsMutable(); + fks_.remove(index); + onChanged(); + } else { + fksBuilder_.remove(index); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder getFksBuilder( + int index) { + return getFksFieldBuilder().getBuilder(index); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder getFksOrBuilder( + int index) { + if (fksBuilder_ == null) { + return fks_.get(index); } else { + return fksBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public java.util.List + getFksOrBuilderList() { + if (fksBuilder_ != null) { + return fksBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(fks_); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder addFksBuilder() { + return getFksFieldBuilder().addBuilder( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.getDefaultInstance()); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder addFksBuilder( + int index) { + return getFksFieldBuilder().addBuilder( + index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.getDefaultInstance()); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.ForeignKeys.ForeignKey fks = 1; + */ + public java.util.List + getFksBuilderList() { + return getFksFieldBuilder().getBuilderList(); + } + 
private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder> + getFksFieldBuilder() { + if (fksBuilder_ == null) { + fksBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ForeignKeys.ForeignKeyOrBuilder>( + fks_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + fks_ = null; + } + return fksBuilder_; + } + + // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys) + } + + static { + defaultInstance = new ForeignKeys(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.ForeignKeys) + } + private static com.google.protobuf.Descriptors.Descriptor internal_static_org_apache_hadoop_hive_metastore_hbase_AggrStats_descriptor; private static @@ -36596,6 +41420,31 @@ public Builder removeRange(int index) { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Operator_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_descriptor; + private 
static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -36770,8 +41619,28 @@ public Builder removeRange(int index) { "Operator\022Z\n\004type\030\001 \002(\0162L.org.apache.hado" + "op.hive.metastore.hbase.PartitionKeyComp" + "arator.Operator.Type\022\013\n\003key\030\002 \002(\t\022\013\n\003val" + - "\030\003 \002(\t\"\037\n\004Type\022\010\n\004LIKE\020\000\022\r\n\tNOTEQUALS\020\001*" + - "#\n\rPrincipalType\022\010\n\004USER\020\000\022\010\n\004ROLE\020\001" + "\030\003 \002(\t\"\037\n\004Type\022\010\n\004LIKE\020\000\022\r\n\tNOTEQUALS\020\001\"" + + "\373\001\n\nPrimaryKey\022\017\n\007pk_name\030\001 \002(\t\022Q\n\004cols\030" + + "\002 \003(\0132C.org.apache.hadoop.hive.metastore" + + ".hbase.PrimaryKey.PrimaryKeyColumn\022\031\n\021en" + + "able_constraint\030\003 
\001(\010\022\033\n\023validate_constr", + "aint\030\004 \001(\010\022\027\n\017rely_constraint\030\005 \001(\010\0328\n\020P" + + "rimaryKeyColumn\022\023\n\013column_name\030\001 \002(\t\022\017\n\007" + + "key_seq\030\002 \002(\021\"\205\004\n\013ForeignKeys\022K\n\003fks\030\001 \003" + + "(\0132>.org.apache.hadoop.hive.metastore.hb" + + "ase.ForeignKeys.ForeignKey\032\250\003\n\nForeignKe" + + "y\022\017\n\007fk_name\030\001 \002(\t\022\032\n\022referenced_db_name" + + "\030\002 \002(\t\022\035\n\025referenced_table_name\030\003 \002(\t\022\032\n" + + "\022referenced_pk_name\030\004 \001(\t\022\023\n\013update_rule" + + "\030\005 \001(\005\022\023\n\013delete_rule\030\006 \001(\005\022]\n\004cols\030\007 \003(" + + "\0132O.org.apache.hadoop.hive.metastore.hba", + "se.ForeignKeys.ForeignKey.ForeignKeyColu" + + "mn\022\031\n\021enable_constraint\030\010 \001(\010\022\033\n\023validat" + + "e_constraint\030\t \001(\010\022\027\n\017rely_constraint\030\n " + + "\001(\010\032X\n\020ForeignKeyColumn\022\023\n\013column_name\030\001" + + " \002(\t\022\036\n\026referenced_column_name\030\002 \002(\t\022\017\n\007" + + "key_seq\030\003 \002(\021*#\n\rPrincipalType\022\010\n\004USER\020\000" + + "\022\010\n\004ROLE\020\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -37018,6 +41887,36 @@ public Builder removeRange(int index) { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_PartitionKeyComparator_Operator_descriptor, new java.lang.String[] { "Type", "Key", "Val", }); + internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + 
internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor, + new java.lang.String[] { "PkName", "Cols", "EnableConstraint", "ValidateConstraint", "RelyConstraint", }); + internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_descriptor = + internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_descriptor.getNestedTypes().get(0); + internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_apache_hadoop_hive_metastore_hbase_PrimaryKey_PrimaryKeyColumn_descriptor, + new java.lang.String[] { "ColumnName", "KeySeq", }); + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor, + new java.lang.String[] { "Fks", }); + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor = + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_descriptor.getNestedTypes().get(0); + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor, + new java.lang.String[] { "FkName", "ReferencedDbName", "ReferencedTableName", "ReferencedPkName", "UpdateRule", "DeleteRule", "Cols", "EnableConstraint", "ValidateConstraint", "RelyConstraint", }); + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_descriptor = + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_descriptor.getNestedTypes().get(0); + 
internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_apache_hadoop_hive_metastore_hbase_ForeignKeys_ForeignKey_ForeignKeyColumn_descriptor, + new java.lang.String[] { "ColumnName", "ReferencedColumnName", "KeySeq", }); return null; } }; diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java index d503cff..e687a69 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java @@ -23,6 +23,8 @@ import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; +import org.apache.hadoop.hive.metastore.api.SQLForeignKey; +import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; @@ -143,6 +145,8 @@ private final static byte[] REF_COUNT_COL = "ref".getBytes(HBaseUtils.ENCODING); private final static byte[] DELEGATION_TOKEN_COL = "dt".getBytes(HBaseUtils.ENCODING); private final static byte[] MASTER_KEY_COL = "mk".getBytes(HBaseUtils.ENCODING); + private final static byte[] PRIMARY_KEY_COL = "pk".getBytes(HBaseUtils.ENCODING); + private final static byte[] FOREIGN_KEY_COL = "fk".getBytes(HBaseUtils.ENCODING); private final static byte[] GLOBAL_PRIVS_KEY = "gp".getBytes(HBaseUtils.ENCODING); private final static byte[] SEQUENCES_KEY = "seq".getBytes(HBaseUtils.ENCODING); private final static int TABLES_TO_CACHE = 10; @@ -1716,6 +1720,22 @@ private String printOneTable(Result result) throws IOException, TException { ColumnStatisticsObj cso = HBaseUtils.deserializeStatsForOneColumn(pcs, statsCol.getValue()); 
builder.append(dumpThriftObject(cso)); } + // Add the primary key + List pk = getPrimaryKey(sdParts.containingTable.getDbName(), + sdParts.containingTable.getTableName()); + if (pk != null && pk.size() > 0) { + builder.append(" primary key: "); + for (SQLPrimaryKey pkcol : pk) builder.append(dumpThriftObject(pkcol)); + } + + // Add any foreign keys + List fks = getForeignKeys(sdParts.containingTable.getDbName(), + sdParts.containingTable.getTableName()); + if (fks != null && fks.size() > 0) { + builder.append(" foreign keys: "); + for (SQLForeignKey fkcol : fks) builder.append(dumpThriftObject(fkcol)); + + } return builder.toString(); } @@ -2530,6 +2550,86 @@ long getNextSequence(byte[] sequence) throws IOException { } /********************************************************************************************** + * Constraints (pk/fk) related methods + *********************************************************************************************/ + + /** + * Fetch a primary key + * @param dbName database the table is in + * @param tableName table name + * @return List of primary key objects, which together make up one key + * @throws IOException if there's a read error + */ + List getPrimaryKey(String dbName, String tableName) throws IOException { + byte[] key = HBaseUtils.buildKey(dbName, tableName); + byte[] serialized = read(TABLE_TABLE, key, CATALOG_CF, PRIMARY_KEY_COL); + if (serialized == null) return null; + return HBaseUtils.deserializePrimaryKey(dbName, tableName, serialized); + } + + /** + * Fetch the foreign keys for a table + * @param dbName database the table is in + * @param tableName table name + * @return All of the foreign key columns thrown together in one list. Have fun sorting them out. 
+ * @throws IOException if there's a read error + */ + List getForeignKeys(String dbName, String tableName) throws IOException { + byte[] key = HBaseUtils.buildKey(dbName, tableName); + byte[] serialized = read(TABLE_TABLE, key, CATALOG_CF, FOREIGN_KEY_COL); + if (serialized == null) return null; + return HBaseUtils.deserializeForeignKeys(dbName, tableName, serialized); + } + + /** + * Create a primary key on a table. + * @param pk Primary key for this table + * @throws IOException if unable to write the data to the store. + */ + void putPrimaryKey(List pk) throws IOException { + byte[][] serialized = HBaseUtils.serializePrimaryKey(pk); + store(TABLE_TABLE, serialized[0], CATALOG_CF, PRIMARY_KEY_COL, serialized[1]); + } + + /** + * Create one or more foreign keys on a table. Note that this will not add a foreign key, it + * will overwrite whatever is there. So if you wish to add a key to a table that may already + * have foreign keys you need to first use {@link #getForeignKeys(String, String)} to fetch the + * existing keys, add to the list, and then call this. + * @param fks Foreign key(s) for this table + * @throws IOException if unable to write the data to the store. + */ + void putForeignKeys(List fks) throws IOException { + byte[][] serialized = HBaseUtils.serializeForeignKeys(fks); + store(TABLE_TABLE, serialized[0], CATALOG_CF, FOREIGN_KEY_COL, serialized[1]); + } + + /** + * Drop the primary key from a table. + * @param dbName database the table is in + * @param tableName table name + * @throws IOException if unable to delete from the store + */ + void deletePrimaryKey(String dbName, String tableName) throws IOException { + byte[] key = HBaseUtils.buildKey(dbName, tableName); + delete(TABLE_TABLE, key, CATALOG_CF, PRIMARY_KEY_COL); + } + + /** + * Drop all foreign keys from a table. Note that this will drop all keys blindly. You should + * only call this if you're sure you want to drop them all. 
If you just want to drop one you + * should instead all {@link #getForeignKeys(String, String)}, modify the list it returns, and + * then call {@link #putForeignKeys(List)}. + * @param dbName database the table is in + * @param tableName table name + * @throws IOException if unable to delete from the store + */ + void deleteForeignKeys(String dbName, String tableName) throws IOException { + byte[] key = HBaseUtils.buildKey(dbName, tableName); + delete(TABLE_TABLE, key, CATALOG_CF, FOREIGN_KEY_COL); + } + + /********************************************************************************************** * Cache methods *********************************************************************************************/ diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java index f9fad4c..07cc0da 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java @@ -22,6 +22,7 @@ import com.google.common.cache.CacheLoader; import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.common.ObjectPair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; @@ -2690,42 +2691,143 @@ public void putFileMetadata(List fileIds, List metadata, } @Override - public List getPrimaryKeys(String db_name, String tbl_name) - throws MetaException { - // TODO: WTF? 
- return null; + public List getPrimaryKeys(String db_name, String tbl_name) throws MetaException { + boolean commit = false; + openTransaction(); + try { + List pk = getHBase().getPrimaryKey(db_name, tbl_name); + commit = true; + return pk; + } catch (IOException e) { + LOG.error("Unable to get primary key", e); + throw new MetaException("Error reading db " + e.getMessage()); + } finally { + commitOrRoleBack(commit); + } } @Override - public List getForeignKeys(String parent_db_name, - String parent_tbl_name, String foreign_db_name, String foreign_tbl_name) - throws MetaException { - // TODO: WTF? - return null; + public List getForeignKeys(String parent_db_name, String parent_tbl_name, + String foreign_db_name, String foreign_tbl_name) + throws MetaException { + boolean commit = false; + openTransaction(); + try { + List fks = getHBase().getForeignKeys(parent_db_name, parent_tbl_name); + if (fks == null || fks.size() == 0) return null; + List result = new ArrayList<>(fks.size()); + for (SQLForeignKey fkcol : fks) { + if (fkcol.getFktable_db().equals(parent_db_name) && + fkcol.getFktable_name().equals(parent_tbl_name)) { + result.add(fkcol); + } + } + commit = true; + return result; + } catch (IOException e) { + LOG.error("Unable to get foreign key", e); + throw new MetaException("Error reading db " + e.getMessage()); + } finally { + commitOrRoleBack(commit); + } } @Override - public void createTableWithConstraints(Table tbl, - List primaryKeys, List foreignKeys) - throws InvalidObjectException, MetaException { - // TODO: WTF? 
+ public void createTableWithConstraints(Table tbl, List primaryKeys, + List foreignKeys) + throws InvalidObjectException, MetaException { + boolean commit = false; + openTransaction(); + try { + createTable(tbl); + if (primaryKeys != null) addPrimaryKeys(primaryKeys); + if (foreignKeys != null) addForeignKeys(foreignKeys); + commit = true; + } finally { + commitOrRoleBack(commit); + } } @Override - public void dropConstraint(String dbName, String tableName, - String constraintName) throws NoSuchObjectException { - // TODO: WTF? + public void dropConstraint(String dbName, String tableName, String constraintName) + throws NoSuchObjectException { + // This is something of a pain, since we have to search both primary key and foreign key to see + // which they want to drop. + boolean commit = false; + openTransaction(); + try { + List pk = getHBase().getPrimaryKey(dbName, tableName); + if (pk != null && pk.size() > 0 && pk.get(0).getPk_name().equals(constraintName)) { + getHBase().deletePrimaryKey(dbName, tableName); + commit = true; + return; + } + + List fks = getHBase().getForeignKeys(dbName, tableName); + if (fks != null && fks.size() > 0) { + List newKeyList = new ArrayList<>(fks.size()); + // Make a new list of keys that excludes all columns from the constraint we're dropping. + for (SQLForeignKey fkcol : fks) { + if (!fkcol.getFk_name().equals(constraintName)) newKeyList.add(fkcol); + } + // If we've dropped only one foreign key out of many keys, then update so that we still + // have the existing keys. Otherwise drop the foreign keys altogether. 
+ if (newKeyList.size() > 0) getHBase().putForeignKeys(newKeyList); + else getHBase().deleteForeignKeys(dbName, tableName); + commit = true; + return; + } + + commit = true; + throw new NoSuchObjectException("Unable to find constraint named " + constraintName + + " on table " + tableNameForErrorMsg(dbName, tableName)); + } catch (IOException e) { + LOG.error("Error fetching primary key for table " + tableNameForErrorMsg(dbName, tableName), e); + throw new NoSuchObjectException("Error fetching primary key for table " + + tableNameForErrorMsg(dbName, tableName) + " : " + e.getMessage()); + } finally { + commitOrRoleBack(commit); + } } @Override - public void addPrimaryKeys(List pks) - throws InvalidObjectException, MetaException { - // TODO: WTF? + public void addPrimaryKeys(List pks) throws InvalidObjectException, MetaException { + boolean commit = false; + openTransaction(); + try { + List currentPk = + getHBase().getPrimaryKey(pks.get(0).getTable_db(), pks.get(0).getTable_name()); + if (currentPk != null) { + throw new MetaException(" Primary key already exists for: " + + tableNameForErrorMsg(pks.get(0).getTable_db(), pks.get(0).getTable_name())); + } + getHBase().putPrimaryKey(pks); + commit = true; + } catch (IOException e) { + LOG.error("Error writing primary key", e); + throw new MetaException("Error writing primary key: " + e.getMessage()); + } finally { + commitOrRoleBack(commit); + } } @Override - public void addForeignKeys(List fks) - throws InvalidObjectException, MetaException { - // TODO: WTF? 
+ public void addForeignKeys(List fks) throws InvalidObjectException, MetaException { + boolean commit = false; + openTransaction(); + try { + // Fetch the existing keys (if any) and add in these new ones + List existing = + getHBase().getForeignKeys(fks.get(0).getFktable_db(), fks.get(0).getFktable_name()); + if (existing == null) existing = new ArrayList<>(fks.size()); + existing.addAll(fks); + getHBase().putForeignKeys(existing); + commit = true; + } catch (IOException e) { + LOG.error("Error writing foreign keys", e); + throw new MetaException("Error writing foreign keys: " + e.getMessage()); + } finally { + commitOrRoleBack(commit); + } } } diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java index 54daa4a..4546d43 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java @@ -37,6 +37,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.AggrStats; import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData; @@ -61,6 +62,8 @@ import org.apache.hadoop.hive.metastore.api.ResourceType; import org.apache.hadoop.hive.metastore.api.ResourceUri; import org.apache.hadoop.hive.metastore.api.Role; +import org.apache.hadoop.hive.metastore.api.SQLForeignKey; +import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.SkewedInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; @@ -360,6 +363,7 @@ static Role deserializeRole(byte[] key, byte[] value) return result; } + /** * Deserialize a database. 
This method should be used when the db anme is already known as it * doesn't have to re-deserialize it. @@ -1495,6 +1499,123 @@ static String deserializeMasterKey(byte[] value) throws InvalidProtocolBufferExc } /** + * Serialize the primary key for a table. + * @param pk Primary key columns. It is expected that all of these match to one pk, since + * anything else is meaningless. + * @return two byte arrays, first containts the hbase key, the second the serialized value. + */ + static byte[][] serializePrimaryKey(List pk) { + // First, figure out the dbName and tableName. We expect this to match for all list entries. + byte[][] result = new byte[2][]; + String dbName = pk.get(0).getTable_db(); + String tableName = pk.get(0).getTable_name(); + result[0] = buildKey(HiveStringUtils.normalizeIdentifier(dbName), + HiveStringUtils.normalizeIdentifier(tableName)); + + HbaseMetastoreProto.PrimaryKey.Builder builder = HbaseMetastoreProto.PrimaryKey.newBuilder(); + // Encode the primary key, if present + builder.setPkName(pk.get(0).getPk_name()); + builder.setEnableConstraint(pk.get(0).isEnable_cstr()); + builder.setValidateConstraint(pk.get(0).isValidate_cstr()); + builder.setRelyConstraint(pk.get(0).isRely_cstr()); + + for (SQLPrimaryKey pkcol : pk) { + HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder pkColBuilder = + HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.newBuilder(); + pkColBuilder.setColumnName(pkcol.getColumn_name()); + pkColBuilder.setKeySeq(pkcol.getKey_seq()); + builder.addCols(pkColBuilder); + } + + result[1] = builder.build().toByteArray(); + return result; + } + + /** + * Serialize the foreign key(s) for a table. + * @param fks Foreign key columns. These may belong to multiple foreign keys. + * @return two byte arrays, first containts the key, the second the serialized value. + */ + static byte[][] serializeForeignKeys(List fks) { + // First, figure out the dbName and tableName. We expect this to match for all list entries. 
+ byte[][] result = new byte[2][]; + String dbName = fks.get(0).getFktable_db(); + String tableName = fks.get(0).getFktable_name(); + result[0] = buildKey(HiveStringUtils.normalizeIdentifier(dbName), + HiveStringUtils.normalizeIdentifier(tableName)); + + HbaseMetastoreProto.ForeignKeys.Builder builder = HbaseMetastoreProto.ForeignKeys.newBuilder(); + + // Encode any foreign keys we find. This can be complex because there may be more than + // one foreign key in here, so we need to detect that. + Map fkBuilders = new HashMap<>(); + + for (SQLForeignKey fkcol : fks) { + HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder fkBuilder = + fkBuilders.get(fkcol.getFk_name()); + if (fkBuilder == null) { + // We haven't seen this key before, so add it + fkBuilder = HbaseMetastoreProto.ForeignKeys.ForeignKey.newBuilder(); + fkBuilder.setFkName(fkcol.getFk_name()); + fkBuilder.setReferencedDbName(fkcol.getPktable_db()); + assert dbName.equals(fkcol.getFktable_db()) : "You switched databases on me!"; + fkBuilder.setReferencedTableName(fkcol.getPktable_name()); + assert tableName.equals(fkcol.getFktable_name()) : "You switched tables on me!"; + fkBuilder.setReferencedPkName(fkcol.getPk_name()); + fkBuilder.setUpdateRule(fkcol.getUpdate_rule()); + fkBuilder.setDeleteRule(fkcol.getDelete_rule()); + fkBuilder.setEnableConstraint(fkcol.isEnable_cstr()); + fkBuilder.setValidateConstraint(fkcol.isValidate_cstr()); + fkBuilder.setRelyConstraint(fkcol.isRely_cstr()); + fkBuilders.put(fkcol.getFk_name(), fkBuilder); + } + HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder fkColBuilder = + HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.newBuilder(); + fkColBuilder.setColumnName(fkcol.getFkcolumn_name()); + fkColBuilder.setReferencedColumnName(fkcol.getPkcolumn_name()); + fkColBuilder.setKeySeq(fkcol.getKey_seq()); + fkBuilder.addCols(fkColBuilder); + } + for (HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder fkBuilder : fkBuilders.values()) { + 
builder.addFks(fkBuilder); + } + result[1] = builder.build().toByteArray(); + return result; + } + + static List deserializePrimaryKey(String dbName, String tableName, byte[] value) + throws InvalidProtocolBufferException { + HbaseMetastoreProto.PrimaryKey proto = HbaseMetastoreProto.PrimaryKey.parseFrom(value); + List result = new ArrayList<>(); + for (HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn protoPkCol : proto.getColsList()) { + result.add(new SQLPrimaryKey(dbName, tableName, protoPkCol.getColumnName(), + protoPkCol.getKeySeq(), proto.getPkName(), proto.getEnableConstraint(), + proto.getValidateConstraint(), proto.getRelyConstraint())); + } + + return result; + } + + static List deserializeForeignKeys(String dbName, String tableName, byte[] value) + throws InvalidProtocolBufferException { + List result = new ArrayList<>(); + HbaseMetastoreProto.ForeignKeys protoConstraints = + HbaseMetastoreProto.ForeignKeys.parseFrom(value); + + for (HbaseMetastoreProto.ForeignKeys.ForeignKey protoFk : protoConstraints.getFksList()) { + for (HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn protoFkCol : + protoFk.getColsList()) { + result.add(new SQLForeignKey(protoFk.getReferencedDbName(), protoFk.getReferencedTableName(), + protoFkCol.getReferencedColumnName(), dbName, tableName, protoFkCol.getColumnName(), + protoFkCol.getKeySeq(), protoFk.getUpdateRule(), protoFk.getDeleteRule(), + protoFk.getFkName(), protoFk.getReferencedPkName(), protoFk.getEnableConstraint(), + protoFk.getValidateConstraint(), protoFk.getRelyConstraint())); + } + } + return result; + } + + /** * @param keyStart byte array representing the start prefix * @return byte array corresponding to the next possible prefix */ diff --git metastore/src/protobuf/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto metastore/src/protobuf/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto index 6fbe36c..3f9e4c5 100644 --- 
metastore/src/protobuf/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto +++ metastore/src/protobuf/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto @@ -295,3 +295,39 @@ message PartitionKeyComparator { repeated Operator op = 3; repeated Range range = 4; } + +message PrimaryKey { + message PrimaryKeyColumn { + required string column_name = 1; + required sint32 key_seq = 2; + } + + required string pk_name = 1; + repeated PrimaryKeyColumn cols = 2; + optional bool enable_constraint = 3; + optional bool validate_constraint = 4; + optional bool rely_constraint = 5; +} + +message ForeignKeys { + message ForeignKey { + message ForeignKeyColumn { + required string column_name = 1; + required string referenced_column_name = 2; + required sint32 key_seq = 3; + } + + required string fk_name = 1; + required string referenced_db_name = 2; + required string referenced_table_name = 3; + optional string referenced_pk_name = 4; + optional int32 update_rule = 5; + optional int32 delete_rule = 6; + repeated ForeignKeyColumn cols = 7; + optional bool enable_constraint = 8; + optional bool validate_constraint = 9; + optional bool rely_constraint = 10; + } + + repeated ForeignKey fks = 1; +} diff --git metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java index 4894ed3..fb0a8e7 100644 --- metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java +++ metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java @@ -22,10 +22,12 @@ import java.security.MessageDigest; import java.util.ArrayList; import java.util.Arrays; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.SortedMap; +import java.util.SortedSet; import java.util.TreeMap; import org.apache.hadoop.hbase.Cell; @@ -46,6 +48,7 @@ import org.apache.hadoop.hive.metastore.api.FunctionType; import 
org.apache.hadoop.hive.metastore.api.Index; import org.apache.hadoop.hive.metastore.api.LongColumnStatsData; +import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.Partition; @@ -53,6 +56,8 @@ import org.apache.hadoop.hive.metastore.api.ResourceType; import org.apache.hadoop.hive.metastore.api.ResourceUri; import org.apache.hadoop.hive.metastore.api.Role; +import org.apache.hadoop.hive.metastore.api.SQLForeignKey; +import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.SkewedInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; @@ -62,6 +67,7 @@ import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; @@ -1391,6 +1397,366 @@ public void decimalPartitionStatistics() throws Exception { Assert.assertEquals(decimalData.getNumDVs(), decimalDataFromDB.getNumDVs()); } + @Test + public void createTableWithPrimaryKey() throws Exception { + String tableName = "pktable"; + String pkName = "test_pk"; + String pkColNames[] = { "col0" }; + Table table = createMultiColumnTable(tableName, "int"); + + List pk = Arrays.asList( + new SQLPrimaryKey(DB, tableName, pkColNames[0], 0, pkName, true, false, true)); + + store.createTableWithConstraints(table, pk, null); + + pk = store.getPrimaryKeys(DB, tableName); + + Assert.assertNotNull(pk); + Assert.assertEquals(1, pk.size()); + Assert.assertEquals(DB, pk.get(0).getTable_db()); + Assert.assertEquals(tableName, pk.get(0).getTable_name()); + Assert.assertEquals(pkColNames[0], pk.get(0).getColumn_name()); + Assert.assertEquals(0, pk.get(0).getKey_seq()); + Assert.assertEquals(pkName, pk.get(0).getPk_name()); + 
Assert.assertTrue(pk.get(0).isEnable_cstr()); + Assert.assertFalse(pk.get(0).isValidate_cstr()); + Assert.assertTrue(pk.get(0).isRely_cstr()); + + // Drop the primary key + store.dropConstraint(DB, tableName, pkName); + + pk = store.getPrimaryKeys(DB, tableName); + Assert.assertNull(pk); + } + + @Test + public void createTableWithForeignKey() throws Exception { + String tableName = "fktable"; + String pkTable = "pktable"; + String pkName = "test_pk"; + String fkName = "test_fk"; + String fkColNames[] = { "col0" }; + String pkColNames[] = { "pcol0" }; + Table table = createMultiColumnTable(tableName, "int"); + + List fk = Arrays.asList( + new SQLForeignKey(DB, pkTable, pkColNames[0], DB, tableName, fkColNames[0], 0, 1, 2, + fkName, pkName, true, false, false)); + + store.createTableWithConstraints(table, null, fk); + + fk = store.getForeignKeys(DB, tableName, DB, pkTable); + + Assert.assertNotNull(fk); + Assert.assertEquals(1, fk.size()); + Assert.assertEquals(DB, fk.get(0).getPktable_db()); + Assert.assertEquals(pkTable, fk.get(0).getPktable_name()); + Assert.assertEquals(pkColNames[0], fk.get(0).getPkcolumn_name()); + Assert.assertEquals(DB, fk.get(0).getFktable_db()); + Assert.assertEquals(tableName, fk.get(0).getFktable_name()); + Assert.assertEquals(fkColNames[0], fk.get(0).getFkcolumn_name()); + Assert.assertEquals(0, fk.get(0).getKey_seq()); + Assert.assertEquals(1, fk.get(0).getUpdate_rule()); + Assert.assertEquals(2, fk.get(0).getDelete_rule()); + Assert.assertEquals(fkName, fk.get(0).getFk_name()); + Assert.assertEquals(pkName, fk.get(0).getPk_name()); + Assert.assertTrue(fk.get(0).isEnable_cstr()); + Assert.assertFalse(fk.get(0).isValidate_cstr()); + Assert.assertFalse(fk.get(0).isRely_cstr()); + } + + // Test that we can add a primary key with multiple columns + @Test + public void addMultiColPrimaryKey() throws Exception { + String tableName = "mcpktable"; + String pkName = "test_pk"; + String pkColNames[] = { "col0", "col1", "col2" }; + Table table = 
createMultiColumnTable(tableName, "int", "varchar(32)", "decimal(10,2)"); + + List pk = Arrays.asList( + new SQLPrimaryKey(DB, tableName, pkColNames[1], 0, pkName, false, true, true), + new SQLPrimaryKey(DB, tableName, pkColNames[2], 1, pkName, false, true, true) + ); + + store.createTable(table); + store.addPrimaryKeys(pk); + + Assert.assertNotNull(pk); + Assert.assertEquals(2, pk.size()); + SQLPrimaryKey[] sorted = pk.toArray(new SQLPrimaryKey[2]); + Arrays.sort(sorted, new Comparator() { + @Override + public int compare(SQLPrimaryKey o1, SQLPrimaryKey o2) { + return o1.getColumn_name().compareTo(o2.getColumn_name()); + } + }); + for (int i = 0; i < 2; i++) { + Assert.assertEquals(DB, sorted[i].getTable_db()); + Assert.assertEquals(tableName, sorted[i].getTable_name()); + Assert.assertEquals(pkColNames[i+1], sorted[i].getColumn_name()); + Assert.assertEquals(i, sorted[i].getKey_seq()); + Assert.assertEquals(pkName, sorted[i].getPk_name()); + Assert.assertFalse(sorted[i].isEnable_cstr()); + Assert.assertTrue(sorted[i].isValidate_cstr()); + Assert.assertTrue(sorted[i].isRely_cstr()); + } + + } + + // Test that we can create a foreign key with multiple columns + @Test + public void addMultiColForeignKey() throws Exception { + String tableName = "mcfktable"; + String pkTable = "pktable"; + String pkName = "test_pk"; + String fkName = "test_fk"; + String fkColNames[] = { "col0", "col1", "col2" }; + String pkColNames[] = { "pcol0", "pcol1" }; + Table table = createMultiColumnTable(tableName, "int", "double", "timestamp"); + + List fk = Arrays.asList( + new SQLForeignKey(DB, pkTable, pkColNames[0], DB, tableName, fkColNames[1], 0, 1, 2, + fkName, pkName, true, false, false), + new SQLForeignKey(DB, pkTable, pkColNames[1], DB, tableName, fkColNames[2], 1, 1, 2, + fkName, pkName, true, false, false) + ); + + store.createTable(table); + store.addForeignKeys(fk); + + fk = store.getForeignKeys(DB, tableName, DB, pkTable); + + Assert.assertNotNull(fk); + 
Assert.assertEquals(2, fk.size()); + SQLForeignKey[] sorted = fk.toArray(new SQLForeignKey[2]); + Arrays.sort(sorted, new Comparator() { + @Override + public int compare(SQLForeignKey o1, SQLForeignKey o2) { + if (o1.getFk_name().equals(o2.getFk_name())) { + return o1.getFkcolumn_name().compareTo(o2.getFkcolumn_name()); + } else { + return o1.getFk_name().compareTo(o2.getFk_name()); + } + } + }); + + for (int i = 0; i < 2; i++) { + Assert.assertEquals(DB, sorted[i].getPktable_db()); + Assert.assertEquals(pkTable, sorted[i].getPktable_name()); + Assert.assertEquals(pkColNames[i], sorted[i].getPkcolumn_name()); + Assert.assertEquals(DB, sorted[i].getFktable_db()); + Assert.assertEquals(tableName, sorted[i].getFktable_name()); + Assert.assertEquals(fkColNames[i+1], sorted[i].getFkcolumn_name()); + Assert.assertEquals(i, sorted[i].getKey_seq()); + Assert.assertEquals(1, sorted[i].getUpdate_rule()); + Assert.assertEquals(2, sorted[i].getDelete_rule()); + Assert.assertEquals(fkName, sorted[i].getFk_name()); + Assert.assertEquals(pkName, sorted[i].getPk_name()); + Assert.assertTrue(sorted[i].isEnable_cstr()); + Assert.assertFalse(sorted[i].isValidate_cstr()); + Assert.assertFalse(sorted[i].isRely_cstr()); + } + + } + + // Test that we can add 2 foreign keys at once + @Test + public void addMultiForeignKeys() throws Exception { + String tableName = "mcfktable"; + String pkTable = "pktable"; + String pkTable2 = "pktable2"; + String pkName = "test_pk"; + String pkName2 = "test_pk2"; + String fkName = "test_fk"; + String fkName2 = "test_fk2"; + String fkColNames[] = { "col0", "col1", "col2" }; + String pkColNames[] = { "pcol0", "pcol1" }; + String pkColNames2[] = { "p2col0" }; + Table table = createMultiColumnTable(tableName, "int", "double", "timestamp"); + + List fk = Arrays.asList( + new SQLForeignKey(DB, pkTable, pkColNames[0], DB, tableName, fkColNames[1], 0, 1, 2, + fkName, pkName, true, false, true), + new SQLForeignKey(DB, pkTable, pkColNames[1], DB, tableName, 
fkColNames[2], 1, 1, 2, + fkName, pkName, true, false, true), + new SQLForeignKey(DB, pkTable2, pkColNames2[0], DB, tableName, fkColNames[0], 0, 1, 2, + fkName2, pkName2, true, false, true) + ); + + store.createTable(table); + store.addForeignKeys(fk); + + fk = store.getForeignKeys(DB, tableName, DB, pkTable); + + Assert.assertNotNull(fk); + Assert.assertEquals(3, fk.size()); + SQLForeignKey[] sorted = fk.toArray(new SQLForeignKey[2]); + Arrays.sort(sorted, new Comparator() { + @Override + public int compare(SQLForeignKey o1, SQLForeignKey o2) { + if (o1.getFk_name().equals(o2.getFk_name())) { + return o1.getFkcolumn_name().compareTo(o2.getFkcolumn_name()); + } else { + return o1.getFk_name().compareTo(o2.getFk_name()); + } + } + }); + + for (int i = 0; i < 2; i++) { + Assert.assertEquals(DB, sorted[i].getPktable_db()); + Assert.assertEquals(pkTable, sorted[i].getPktable_name()); + Assert.assertEquals(pkColNames[i], sorted[i].getPkcolumn_name()); + Assert.assertEquals(DB, sorted[i].getFktable_db()); + Assert.assertEquals(tableName, sorted[i].getFktable_name()); + Assert.assertEquals(fkColNames[i+1], sorted[i].getFkcolumn_name()); + Assert.assertEquals(i, sorted[i].getKey_seq()); + Assert.assertEquals(1, sorted[i].getUpdate_rule()); + Assert.assertEquals(2, sorted[i].getDelete_rule()); + Assert.assertEquals(fkName, sorted[i].getFk_name()); + Assert.assertEquals(pkName, sorted[i].getPk_name()); + Assert.assertTrue(sorted[i].isEnable_cstr()); + Assert.assertFalse(sorted[i].isValidate_cstr()); + Assert.assertTrue(sorted[i].isRely_cstr()); + } + Assert.assertEquals(DB, sorted[2].getPktable_db()); + Assert.assertEquals(pkTable2, sorted[2].getPktable_name()); + Assert.assertEquals(pkColNames2[0], sorted[2].getPkcolumn_name()); + Assert.assertEquals(DB, sorted[2].getFktable_db()); + Assert.assertEquals(tableName, sorted[2].getFktable_name()); + Assert.assertEquals(fkColNames[0], sorted[2].getFkcolumn_name()); + Assert.assertEquals(0, sorted[2].getKey_seq()); + 
Assert.assertEquals(1, sorted[2].getUpdate_rule()); + Assert.assertEquals(2, sorted[2].getDelete_rule()); + Assert.assertEquals(fkName2, sorted[2].getFk_name()); + Assert.assertEquals(pkName2, sorted[2].getPk_name()); + Assert.assertTrue(sorted[2].isEnable_cstr()); + Assert.assertFalse(sorted[2].isValidate_cstr()); + Assert.assertTrue(sorted[2].isRely_cstr()); + + } + + // Test that we can add a foreign key when one already exists + @Test + public void addSecondForeignKeys() throws Exception { + String tableName = "mcfktable"; + String pkTable = "pktable"; + String pkTable2 = "pktable2"; + String pkName = "test_pk"; + String pkName2 = "test_pk2"; + String fkName = "test_fk"; + String fkName2 = "test_fk2"; + String fkColNames[] = { "col0", "col1", "col2" }; + String pkColNames[] = { "pcol0", "pcol1" }; + String pkColNames2[] = { "p2col0" }; + Table table = createMultiColumnTable(tableName, "int", "double", "timestamp"); + + List fk = Arrays.asList( + new SQLForeignKey(DB, pkTable, pkColNames[0], DB, tableName, fkColNames[1], 0, 1, 2, + fkName, pkName, true, false, true), + new SQLForeignKey(DB, pkTable, pkColNames[1], DB, tableName, fkColNames[2], 1, 1, 2, + fkName, pkName, true, false, true) + ); + + store.createTable(table); + store.addForeignKeys(fk); + + fk = Arrays.asList( + new SQLForeignKey(DB, pkTable2, pkColNames2[0], DB, tableName, fkColNames[0], 0, 1, 2, + fkName2, pkName2, true, false, true) + ); + store.addForeignKeys(fk); + + fk = store.getForeignKeys(DB, tableName, DB, pkTable); + + Assert.assertNotNull(fk); + Assert.assertEquals(3, fk.size()); + SQLForeignKey[] sorted = fk.toArray(new SQLForeignKey[2]); + Arrays.sort(sorted, new Comparator() { + @Override + public int compare(SQLForeignKey o1, SQLForeignKey o2) { + if (o1.getFk_name().equals(o2.getFk_name())) { + return o1.getFkcolumn_name().compareTo(o2.getFkcolumn_name()); + } else { + return o1.getFk_name().compareTo(o2.getFk_name()); + } + } + }); + + for (int i = 0; i < 2; i++) { + 
Assert.assertEquals(DB, sorted[i].getPktable_db()); + Assert.assertEquals(pkTable, sorted[i].getPktable_name()); + Assert.assertEquals(pkColNames[i], sorted[i].getPkcolumn_name()); + Assert.assertEquals(DB, sorted[i].getFktable_db()); + Assert.assertEquals(tableName, sorted[i].getFktable_name()); + Assert.assertEquals(fkColNames[i+1], sorted[i].getFkcolumn_name()); + Assert.assertEquals(i, sorted[i].getKey_seq()); + Assert.assertEquals(1, sorted[i].getUpdate_rule()); + Assert.assertEquals(2, sorted[i].getDelete_rule()); + Assert.assertEquals(fkName, sorted[i].getFk_name()); + Assert.assertEquals(pkName, sorted[i].getPk_name()); + Assert.assertTrue(sorted[i].isEnable_cstr()); + Assert.assertFalse(sorted[i].isValidate_cstr()); + Assert.assertTrue(sorted[i].isRely_cstr()); + } + Assert.assertEquals(DB, sorted[2].getPktable_db()); + Assert.assertEquals(pkTable2, sorted[2].getPktable_name()); + Assert.assertEquals(pkColNames2[0], sorted[2].getPkcolumn_name()); + Assert.assertEquals(DB, sorted[2].getFktable_db()); + Assert.assertEquals(tableName, sorted[2].getFktable_name()); + Assert.assertEquals(fkColNames[0], sorted[2].getFkcolumn_name()); + Assert.assertEquals(0, sorted[2].getKey_seq()); + Assert.assertEquals(1, sorted[2].getUpdate_rule()); + Assert.assertEquals(2, sorted[2].getDelete_rule()); + Assert.assertEquals(fkName2, sorted[2].getFk_name()); + Assert.assertEquals(pkName2, sorted[2].getPk_name()); + Assert.assertTrue(sorted[2].isEnable_cstr()); + Assert.assertFalse(sorted[2].isValidate_cstr()); + Assert.assertTrue(sorted[2].isRely_cstr()); + + store.dropConstraint(DB, tableName, fkName); + + fk = store.getForeignKeys(DB, tableName, DB, pkTable); + Assert.assertNotNull(fk); + Assert.assertEquals(1, fk.size()); + Assert.assertEquals(DB, fk.get(0).getPktable_db()); + Assert.assertEquals(pkTable2, fk.get(0).getPktable_name()); + Assert.assertEquals(pkColNames2[0], fk.get(0).getPkcolumn_name()); + Assert.assertEquals(DB, fk.get(0).getFktable_db()); + 
Assert.assertEquals(tableName, fk.get(0).getFktable_name()); + Assert.assertEquals(fkColNames[0], fk.get(0).getFkcolumn_name()); + Assert.assertEquals(0, fk.get(0).getKey_seq()); + Assert.assertEquals(1, fk.get(0).getUpdate_rule()); + Assert.assertEquals(2, fk.get(0).getDelete_rule()); + Assert.assertEquals(fkName2, fk.get(0).getFk_name()); + Assert.assertEquals(pkName2, fk.get(0).getPk_name()); + Assert.assertTrue(fk.get(0).isEnable_cstr()); + Assert.assertFalse(fk.get(0).isValidate_cstr()); + Assert.assertTrue(fk.get(0).isRely_cstr()); + + store.dropConstraint(DB, tableName, fkName2); + + fk = store.getForeignKeys(DB, tableName, DB, pkTable); + Assert.assertNull(fk); + } + + // Try adding a primary key when one already exists + @Test(expected= MetaException.class) + public void doublePrimaryKey() throws Exception { + String tableName = "pktable"; + String pkName = "test_pk"; + String pkColNames[] = { "col0" }; + Table table = createMultiColumnTable(tableName, "int"); + + List pk = Arrays.asList( + new SQLPrimaryKey(DB, tableName, pkColNames[0], 0, pkName, true, false, true)); + + store.createTableWithConstraints(table, pk, null); + + store.addPrimaryKeys(pk); + } + + + private Table createMockTableAndPartition(String partType, String partVal) throws Exception { List cols = new ArrayList(); cols.add(new FieldSchema("col1", partType, "")); @@ -1425,6 +1791,22 @@ private Table createMockTable(String type) throws Exception { store.createTable(table); return table; } + + private Table createMultiColumnTable(String tblName, String... 
types) throws Exception { + List cols = new ArrayList(); + for (int i = 0; i < types.length; i++) cols.add(new FieldSchema("col" + i, types[i], "")); + SerDeInfo serde = new SerDeInfo("serde", "seriallib", null); + Map params = new HashMap(); + params.put("key", "value"); + StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 17, + serde, Arrays.asList("bucketcol"), Arrays.asList(new Order("sortcol", 1)), params); + int currentTime = (int)(System.currentTimeMillis() / 1000); + Table table = new Table(tblName, DB, "me", currentTime, currentTime, 0, sd, cols, + emptyParameters, null, null, null); + store.createTable(table); + return table; + } + /** * Returns a dummy table level ColumnStatisticsDesc with default values */